# ---- django/db/backends/mysql/base.py ----
""" MySQL database backend for Django. Requires mysqlclient: https://pypi.org/project/mysqlclient/ """ from django.core.exceptions import ImproperlyConfigured from django.db import IntegrityError from django.db.backends import utils as backend_utils from django.db.backends.base.base import BaseDatabaseWrapper from django.utils.asyncio import async_unsafe from django.utils.functional import cached_property from django.utils.regex_helper import _lazy_re_compile try: import MySQLdb as Database except ImportError as err: raise ImproperlyConfigured( 'Error loading MySQLdb module.\n' 'Did you install mysqlclient?' ) from err from MySQLdb.constants import CLIENT, FIELD_TYPE from MySQLdb.converters import conversions # Some of these import MySQLdb, so import them after checking if it's installed. from .client import DatabaseClient from .creation import DatabaseCreation from .features import DatabaseFeatures from .introspection import DatabaseIntrospection from .operations import DatabaseOperations from .schema import DatabaseSchemaEditor from .validation import DatabaseValidation version = Database.version_info if version < (1, 4, 0): raise ImproperlyConfigured('mysqlclient 1.4.0 or newer is required; you have %s.' % Database.__version__) # MySQLdb returns TIME columns as timedelta -- they are more like timedelta in # terms of actual behavior as they are signed and include days -- and Django # expects time. django_conversions = { **conversions, **{FIELD_TYPE.TIME: backend_utils.typecast_time}, } # This should match the numerical portion of the version numbers (we can treat # versions like 5.0.24 and 5.0.24a as the same). server_version_re = _lazy_re_compile(r'(\d{1,2})\.(\d{1,2})\.(\d{1,2})') class CursorWrapper: """ A thin wrapper around MySQLdb's normal cursor class that catches particular exception instances and reraises them with the correct types. Implemented as a wrapper, rather than a subclass, so that it isn't stuck to the particular underlying representation returned by Connection.cursor(). """ codes_for_integrityerror = ( 1048, # Column cannot be null 1690, # BIGINT UNSIGNED value is out of range 3819, # CHECK constraint is violated 4025, # CHECK constraint failed ) def __init__(self, cursor): self.cursor = cursor def execute(self, query, args=None): try: # args is None means no string interpolation return self.cursor.execute(query, args) except Database.OperationalError as e: # Map some error codes to IntegrityError, since they seem to be # misclassified and Django would prefer the more logical place. if e.args[0] in self.codes_for_integrityerror: raise IntegrityError(*tuple(e.args)) raise def executemany(self, query, args): try: return self.cursor.executemany(query, args) except Database.OperationalError as e: # Map some error codes to IntegrityError, since they seem to be # misclassified and Django would prefer the more logical place. if e.args[0] in self.codes_for_integrityerror: raise IntegrityError(*tuple(e.args)) raise def __getattr__(self, attr): return getattr(self.cursor, attr) def __iter__(self): return iter(self.cursor) class DatabaseWrapper(BaseDatabaseWrapper): vendor = 'mysql' # This dictionary maps Field objects to their associated MySQL column # types, as strings. Column-type strings can contain format strings; they'll # be interpolated against the values of Field.__dict__ before being output. # If a column type is set to None, it won't be included in the output. 
data_types = { 'AutoField': 'integer AUTO_INCREMENT', 'BigAutoField': 'bigint AUTO_INCREMENT', 'BinaryField': 'longblob', 'BooleanField': 'bool', 'CharField': 'varchar(%(max_length)s)', 'DateField': 'date', 'DateTimeField': 'datetime(6)', 'DecimalField': 'numeric(%(max_digits)s, %(decimal_places)s)', 'DurationField': 'bigint', 'FileField': 'varchar(%(max_length)s)', 'FilePathField': 'varchar(%(max_length)s)', 'FloatField': 'double precision', 'IntegerField': 'integer', 'BigIntegerField': 'bigint', 'IPAddressField': 'char(15)', 'GenericIPAddressField': 'char(39)', 'JSONField': 'json', 'OneToOneField': 'integer', 'PositiveBigIntegerField': 'bigint UNSIGNED', 'PositiveIntegerField': 'integer UNSIGNED', 'PositiveSmallIntegerField': 'smallint UNSIGNED', 'SlugField': 'varchar(%(max_length)s)', 'SmallAutoField': 'smallint AUTO_INCREMENT', 'SmallIntegerField': 'smallint', 'TextField': 'longtext', 'TimeField': 'time(6)', 'UUIDField': 'char(32)', } # For these data types: # - MySQL < 8.0.13 and MariaDB < 10.2.1 don't accept default values and # implicitly treat them as nullable # - all versions of MySQL and MariaDB don't support full width database # indexes _limited_data_types = ( 'tinyblob', 'blob', 'mediumblob', 'longblob', 'tinytext', 'text', 'mediumtext', 'longtext', 'json', ) operators = { 'exact': '= %s', 'iexact': 'LIKE %s', 'contains': 'LIKE BINARY %s', 'icontains': 'LIKE %s', 'gt': '> %s', 'gte': '>= %s', 'lt': '< %s', 'lte': '<= %s', 'startswith': 'LIKE BINARY %s', 'endswith': 'LIKE BINARY %s', 'istartswith': 'LIKE %s', 'iendswith': 'LIKE %s', } # The patterns below are used to generate SQL pattern lookup clauses when # the right-hand side of the lookup isn't a raw string (it might be an expression # or the result of a bilateral transformation). # In those cases, special characters for LIKE operators (e.g. \, *, _) should be # escaped on database side. # # Note: we use str.format() here for readability as '%' is used as a wildcard for # the LIKE operator. pattern_esc = r"REPLACE(REPLACE(REPLACE({}, '\\', '\\\\'), '%%', '\%%'), '_', '\_')" pattern_ops = { 'contains': "LIKE BINARY CONCAT('%%', {}, '%%')", 'icontains': "LIKE CONCAT('%%', {}, '%%')", 'startswith': "LIKE BINARY CONCAT({}, '%%')", 'istartswith': "LIKE CONCAT({}, '%%')", 'endswith': "LIKE BINARY CONCAT('%%', {})", 'iendswith': "LIKE CONCAT('%%', {})", } isolation_levels = { 'read uncommitted', 'read committed', 'repeatable read', 'serializable', } Database = Database SchemaEditorClass = DatabaseSchemaEditor # Classes instantiated in __init__(). client_class = DatabaseClient creation_class = DatabaseCreation features_class = DatabaseFeatures introspection_class = DatabaseIntrospection ops_class = DatabaseOperations validation_class = DatabaseValidation def get_connection_params(self): kwargs = { 'conv': django_conversions, 'charset': 'utf8', } settings_dict = self.settings_dict if settings_dict['USER']: kwargs['user'] = settings_dict['USER'] if settings_dict['NAME']: kwargs['database'] = settings_dict['NAME'] if settings_dict['PASSWORD']: kwargs['password'] = settings_dict['PASSWORD'] if settings_dict['HOST'].startswith('/'): kwargs['unix_socket'] = settings_dict['HOST'] elif settings_dict['HOST']: kwargs['host'] = settings_dict['HOST'] if settings_dict['PORT']: kwargs['port'] = int(settings_dict['PORT']) # We need the number of potentially affected rows after an # "UPDATE", not the number of changed rows. kwargs['client_flag'] = CLIENT.FOUND_ROWS # Validate the transaction isolation level, if specified. 
options = settings_dict['OPTIONS'].copy() isolation_level = options.pop('isolation_level', 'read committed') if isolation_level: isolation_level = isolation_level.lower() if isolation_level not in self.isolation_levels: raise ImproperlyConfigured( "Invalid transaction isolation level '%s' specified.\n" "Use one of %s, or None." % ( isolation_level, ', '.join("'%s'" % s for s in sorted(self.isolation_levels)) )) self.isolation_level = isolation_level kwargs.update(options) return kwargs @async_unsafe def get_new_connection(self, conn_params): connection = Database.connect(**conn_params) # bytes encoder in mysqlclient doesn't work and was added only to # prevent KeyErrors in Django < 2.0. We can remove this workaround when # mysqlclient 2.1 becomes the minimal mysqlclient supported by Django. # See https://github.com/PyMySQL/mysqlclient/issues/489 if connection.encoders.get(bytes) is bytes: connection.encoders.pop(bytes) return connection def init_connection_state(self): assignments = [] if self.features.is_sql_auto_is_null_enabled: # SQL_AUTO_IS_NULL controls whether an AUTO_INCREMENT column on # a recently inserted row will return when the field is tested # for NULL. Disabling this brings this aspect of MySQL in line # with SQL standards. assignments.append('SET SQL_AUTO_IS_NULL = 0') if self.isolation_level: assignments.append('SET SESSION TRANSACTION ISOLATION LEVEL %s' % self.isolation_level.upper()) if assignments: with self.cursor() as cursor: cursor.execute('; '.join(assignments)) @async_unsafe def create_cursor(self, name=None): cursor = self.connection.cursor() return CursorWrapper(cursor) def _rollback(self): try: BaseDatabaseWrapper._rollback(self) except Database.NotSupportedError: pass def _set_autocommit(self, autocommit): with self.wrap_database_errors: self.connection.autocommit(autocommit) def disable_constraint_checking(self): """ Disable foreign key checks, primarily for use in adding rows with forward references. Always return True to indicate constraint checks need to be re-enabled. """ with self.cursor() as cursor: cursor.execute('SET foreign_key_checks=0') return True def enable_constraint_checking(self): """ Re-enable foreign key checks after they have been disabled. """ # Override needs_rollback in case constraint_checks_disabled is # nested inside transaction.atomic. self.needs_rollback, needs_rollback = False, self.needs_rollback try: with self.cursor() as cursor: cursor.execute('SET foreign_key_checks=1') finally: self.needs_rollback = needs_rollback def check_constraints(self, table_names=None): """ Check each table name in `table_names` for rows with invalid foreign key references. This method is intended to be used in conjunction with `disable_constraint_checking()` and `enable_constraint_checking()`, to determine if rows with invalid references were entered while constraint checks were off. 
""" with self.cursor() as cursor: if table_names is None: table_names = self.introspection.table_names(cursor) for table_name in table_names: primary_key_column_name = self.introspection.get_primary_key_column(cursor, table_name) if not primary_key_column_name: continue key_columns = self.introspection.get_key_columns(cursor, table_name) for column_name, referenced_table_name, referenced_column_name in key_columns: cursor.execute( """ SELECT REFERRING.`%s`, REFERRING.`%s` FROM `%s` as REFERRING LEFT JOIN `%s` as REFERRED ON (REFERRING.`%s` = REFERRED.`%s`) WHERE REFERRING.`%s` IS NOT NULL AND REFERRED.`%s` IS NULL """ % ( primary_key_column_name, column_name, table_name, referenced_table_name, column_name, referenced_column_name, column_name, referenced_column_name, ) ) for bad_row in cursor.fetchall(): raise IntegrityError( "The row in table '%s' with primary key '%s' has an invalid " "foreign key: %s.%s contains a value '%s' that does not " "have a corresponding value in %s.%s." % ( table_name, bad_row[0], table_name, column_name, bad_row[1], referenced_table_name, referenced_column_name, ) ) def is_usable(self): try: self.connection.ping() except Database.Error: return False else: return True @cached_property def display_name(self): return 'MariaDB' if self.mysql_is_mariadb else 'MySQL' @cached_property def data_type_check_constraints(self): if self.features.supports_column_check_constraints: check_constraints = { 'PositiveBigIntegerField': '`%(column)s` >= 0', 'PositiveIntegerField': '`%(column)s` >= 0', 'PositiveSmallIntegerField': '`%(column)s` >= 0', } if self.mysql_is_mariadb and self.mysql_version < (10, 4, 3): # MariaDB < 10.4.3 doesn't automatically use the JSON_VALID as # a check constraint. check_constraints['JSONField'] = 'JSON_VALID(`%(column)s`)' return check_constraints return {} @cached_property def mysql_server_data(self): with self.temporary_connection() as cursor: # Select some server variables and test if the time zone # definitions are installed. CONVERT_TZ returns NULL if 'UTC' # timezone isn't loaded into the mysql.time_zone table. cursor.execute(""" SELECT VERSION(), @@sql_mode, @@default_storage_engine, @@sql_auto_is_null, @@lower_case_table_names, CONVERT_TZ('2001-01-01 01:00:00', 'UTC', 'UTC') IS NOT NULL """) row = cursor.fetchone() return { 'version': row[0], 'sql_mode': row[1], 'default_storage_engine': row[2], 'sql_auto_is_null': bool(row[3]), 'lower_case_table_names': bool(row[4]), 'has_zoneinfo_database': bool(row[5]), } @cached_property def mysql_server_info(self): return self.mysql_server_data['version'] @cached_property def mysql_version(self): match = server_version_re.match(self.mysql_server_info) if not match: raise Exception('Unable to determine MySQL version from version string %r' % self.mysql_server_info) return tuple(int(x) for x in match.groups()) @cached_property def mysql_is_mariadb(self): return 'mariadb' in self.mysql_server_info.lower() @cached_property def sql_mode(self): sql_mode = self.mysql_server_data['sql_mode'] return set(sql_mode.split(',') if sql_mode else ())
# ---- django/db/backends/mysql/operations.py ----
import uuid

from django.conf import settings
from django.db.backends.base.operations import BaseDatabaseOperations
from django.utils import timezone
from django.utils.encoding import force_str


class DatabaseOperations(BaseDatabaseOperations):
    compiler_module = "django.db.backends.mysql.compiler"

    # MySQL stores positive fields as UNSIGNED ints.
    integer_field_ranges = {
        **BaseDatabaseOperations.integer_field_ranges,
        'PositiveSmallIntegerField': (0, 65535),
        'PositiveIntegerField': (0, 4294967295),
        'PositiveBigIntegerField': (0, 18446744073709551615),
    }
    cast_data_types = {
        'AutoField': 'signed integer',
        'BigAutoField': 'signed integer',
        'SmallAutoField': 'signed integer',
        'CharField': 'char(%(max_length)s)',
        'DecimalField': 'decimal(%(max_digits)s, %(decimal_places)s)',
        'TextField': 'char',
        'IntegerField': 'signed integer',
        'BigIntegerField': 'signed integer',
        'SmallIntegerField': 'signed integer',
        'PositiveBigIntegerField': 'unsigned integer',
        'PositiveIntegerField': 'unsigned integer',
        'PositiveSmallIntegerField': 'unsigned integer',
        'DurationField': 'signed integer',
    }
    cast_char_field_without_max_length = 'char'
    explain_prefix = 'EXPLAIN'

    def date_extract_sql(self, lookup_type, field_name):
        # https://dev.mysql.com/doc/mysql/en/date-and-time-functions.html
        if lookup_type == 'week_day':
            # DAYOFWEEK() returns an integer, 1-7, Sunday=1.
            return "DAYOFWEEK(%s)" % field_name
        elif lookup_type == 'iso_week_day':
            # WEEKDAY() returns an integer, 0-6, Monday=0.
            return "WEEKDAY(%s) + 1" % field_name
        elif lookup_type == 'week':
            # Override the value of default_week_format for consistency with
            # other database backends.
            # Mode 3: Monday, 1-53, with 4 or more days this year.
            return "WEEK(%s, 3)" % field_name
        elif lookup_type == 'iso_year':
            # Get the year part from the YEARWEEK function, which returns a
            # number as year * 100 + week.
            return "TRUNCATE(YEARWEEK(%s, 3), -2) / 100" % field_name
        else:
            # EXTRACT returns 1-53 based on ISO-8601 for the week number.
            return "EXTRACT(%s FROM %s)" % (lookup_type.upper(), field_name)

    def date_trunc_sql(self, lookup_type, field_name, tzname=None):
        field_name = self._convert_field_to_tz(field_name, tzname)
        fields = {
            'year': '%%Y-01-01',
            'month': '%%Y-%%m-01',
        }  # Use double percents to escape.
        if lookup_type in fields:
            format_str = fields[lookup_type]
            return "CAST(DATE_FORMAT(%s, '%s') AS DATE)" % (field_name, format_str)
        elif lookup_type == 'quarter':
            return "MAKEDATE(YEAR(%s), 1) + INTERVAL QUARTER(%s) QUARTER - INTERVAL 1 QUARTER" % (
                field_name, field_name
            )
        elif lookup_type == 'week':
            return "DATE_SUB(%s, INTERVAL WEEKDAY(%s) DAY)" % (
                field_name, field_name
            )
        else:
            return "DATE(%s)" % (field_name)

    def _prepare_tzname_delta(self, tzname):
        if '+' in tzname:
            return tzname[tzname.find('+'):]
        elif '-' in tzname:
            return tzname[tzname.find('-'):]
        return tzname

    def _convert_field_to_tz(self, field_name, tzname):
        if tzname and settings.USE_TZ and self.connection.timezone_name != tzname:
            field_name = "CONVERT_TZ(%s, '%s', '%s')" % (
                field_name,
                self.connection.timezone_name,
                self._prepare_tzname_delta(tzname),
            )
        return field_name

    def datetime_cast_date_sql(self, field_name, tzname):
        field_name = self._convert_field_to_tz(field_name, tzname)
        return "DATE(%s)" % field_name

    def datetime_cast_time_sql(self, field_name, tzname):
        field_name = self._convert_field_to_tz(field_name, tzname)
        return "TIME(%s)" % field_name

    def datetime_extract_sql(self, lookup_type, field_name, tzname):
        field_name = self._convert_field_to_tz(field_name, tzname)
        return self.date_extract_sql(lookup_type, field_name)

    def datetime_trunc_sql(self, lookup_type, field_name, tzname):
        field_name = self._convert_field_to_tz(field_name, tzname)
        fields = ['year', 'month', 'day', 'hour', 'minute', 'second']
        format = ('%%Y-', '%%m', '-%%d', ' %%H:', '%%i', ':%%s')  # Use double percents to escape.
        format_def = ('0000-', '01', '-01', ' 00:', '00', ':00')
        if lookup_type == 'quarter':
            return (
                "CAST(DATE_FORMAT(MAKEDATE(YEAR({field_name}), 1) + "
                "INTERVAL QUARTER({field_name}) QUARTER - " +
                "INTERVAL 1 QUARTER, '%%Y-%%m-01 00:00:00') AS DATETIME)"
            ).format(field_name=field_name)
        if lookup_type == 'week':
            return (
                "CAST(DATE_FORMAT(DATE_SUB({field_name}, "
                "INTERVAL WEEKDAY({field_name}) DAY), "
                "'%%Y-%%m-%%d 00:00:00') AS DATETIME)"
            ).format(field_name=field_name)
        try:
            i = fields.index(lookup_type) + 1
        except ValueError:
            sql = field_name
        else:
            format_str = ''.join(format[:i] + format_def[i:])
            sql = "CAST(DATE_FORMAT(%s, '%s') AS DATETIME)" % (field_name, format_str)
        return sql

    def time_trunc_sql(self, lookup_type, field_name, tzname=None):
        field_name = self._convert_field_to_tz(field_name, tzname)
        fields = {
            'hour': '%%H:00:00',
            'minute': '%%H:%%i:00',
            'second': '%%H:%%i:%%s',
        }  # Use double percents to escape.
        if lookup_type in fields:
            format_str = fields[lookup_type]
            return "CAST(DATE_FORMAT(%s, '%s') AS TIME)" % (field_name, format_str)
        else:
            return "TIME(%s)" % (field_name)

    def fetch_returned_insert_rows(self, cursor):
        """
        Given a cursor object that has just performed an INSERT...RETURNING
        statement into a table, return the tuple of returned data.
        """
        return cursor.fetchall()

    def format_for_duration_arithmetic(self, sql):
        return 'INTERVAL %s MICROSECOND' % sql

    def force_no_ordering(self):
        """
        "ORDER BY NULL" prevents MySQL from implicitly ordering by grouped
        columns. If no ordering would otherwise be applied, we don't want any
        implicit sorting going on.
        """
        return [(None, ("NULL", [], False))]

    def adapt_decimalfield_value(self, value, max_digits=None, decimal_places=None):
        return value

    def last_executed_query(self, cursor, sql, params):
        # With MySQLdb, cursor objects have an (undocumented) "_executed"
        # attribute where the exact query sent to the database is saved.
        # See MySQLdb/cursors.py in the source distribution.
        # MySQLdb returns string, PyMySQL bytes.
        return force_str(getattr(cursor, '_executed', None), errors='replace')

    def no_limit_value(self):
        # 2**64 - 1, as recommended by the MySQL documentation
        return 18446744073709551615

    def quote_name(self, name):
        if name.startswith("`") and name.endswith("`"):
            return name  # Quoting once is enough.
        return "`%s`" % name

    def return_insert_columns(self, fields):
        # MySQL and MariaDB < 10.5.0 don't support an INSERT...RETURNING
        # statement.
        if not fields:
            return '', ()
        columns = [
            '%s.%s' % (
                self.quote_name(field.model._meta.db_table),
                self.quote_name(field.column),
            ) for field in fields
        ]
        return 'RETURNING %s' % ', '.join(columns), ()

    def sql_flush(self, style, tables, *, reset_sequences=False, allow_cascade=False):
        if not tables:
            return []

        sql = ['SET FOREIGN_KEY_CHECKS = 0;']
        if reset_sequences:
            # It's faster to TRUNCATE tables that require a sequence reset
            # since ALTER TABLE AUTO_INCREMENT is slower than TRUNCATE.
            sql.extend(
                '%s %s;' % (
                    style.SQL_KEYWORD('TRUNCATE'),
                    style.SQL_FIELD(self.quote_name(table_name)),
                ) for table_name in tables
            )
        else:
            # Otherwise issue a simple DELETE since it's faster than TRUNCATE
            # and preserves sequences.
            sql.extend(
                '%s %s %s;' % (
                    style.SQL_KEYWORD('DELETE'),
                    style.SQL_KEYWORD('FROM'),
                    style.SQL_FIELD(self.quote_name(table_name)),
                ) for table_name in tables
            )
        sql.append('SET FOREIGN_KEY_CHECKS = 1;')
        return sql

    def sequence_reset_by_name_sql(self, style, sequences):
        return [
            '%s %s %s %s = 1;' % (
                style.SQL_KEYWORD('ALTER'),
                style.SQL_KEYWORD('TABLE'),
                style.SQL_FIELD(self.quote_name(sequence_info['table'])),
                style.SQL_FIELD('AUTO_INCREMENT'),
            ) for sequence_info in sequences
        ]

    def validate_autopk_value(self, value):
        # Zero in AUTO_INCREMENT field does not work without the
        # NO_AUTO_VALUE_ON_ZERO SQL mode.
        if value == 0 and not self.connection.features.allows_auto_pk_0:
            raise ValueError('The database backend does not accept 0 as a '
                             'value for AutoField.')
        return value

    def adapt_datetimefield_value(self, value):
        if value is None:
            return None

        # Expression values are adapted by the database.
        if hasattr(value, 'resolve_expression'):
            return value

        # MySQL doesn't support tz-aware datetimes
        if timezone.is_aware(value):
            if settings.USE_TZ:
                value = timezone.make_naive(value, self.connection.timezone)
            else:
                raise ValueError("MySQL backend does not support timezone-aware datetimes when USE_TZ is False.")
        return str(value)

    def adapt_timefield_value(self, value):
        if value is None:
            return None

        # Expression values are adapted by the database.
        if hasattr(value, 'resolve_expression'):
            return value

        # MySQL doesn't support tz-aware times
        if timezone.is_aware(value):
            raise ValueError("MySQL backend does not support timezone-aware times.")

        return value.isoformat(timespec='microseconds')

    def max_name_length(self):
        return 64

    def pk_default_value(self):
        return 'NULL'

    def bulk_insert_sql(self, fields, placeholder_rows):
        placeholder_rows_sql = (", ".join(row) for row in placeholder_rows)
        values_sql = ", ".join("(%s)" % sql for sql in placeholder_rows_sql)
        return "VALUES " + values_sql

    def combine_expression(self, connector, sub_expressions):
        if connector == '^':
            return 'POW(%s)' % ','.join(sub_expressions)
        # Convert the result to a signed integer since MySQL's binary operators
        # return an unsigned integer.
        elif connector in ('&', '|', '<<', '#'):
            connector = '^' if connector == '#' else connector
            return 'CONVERT(%s, SIGNED)' % connector.join(sub_expressions)
        elif connector == '>>':
            lhs, rhs = sub_expressions
            return 'FLOOR(%(lhs)s / POW(2, %(rhs)s))' % {'lhs': lhs, 'rhs': rhs}
        return super().combine_expression(connector, sub_expressions)

    def get_db_converters(self, expression):
        converters = super().get_db_converters(expression)
        internal_type = expression.output_field.get_internal_type()
        if internal_type == 'BooleanField':
            converters.append(self.convert_booleanfield_value)
        elif internal_type == 'DateTimeField':
            if settings.USE_TZ:
                converters.append(self.convert_datetimefield_value)
        elif internal_type == 'UUIDField':
            converters.append(self.convert_uuidfield_value)
        return converters

    def convert_booleanfield_value(self, value, expression, connection):
        if value in (0, 1):
            value = bool(value)
        return value

    def convert_datetimefield_value(self, value, expression, connection):
        if value is not None:
            value = timezone.make_aware(value, self.connection.timezone)
        return value

    def convert_uuidfield_value(self, value, expression, connection):
        if value is not None:
            value = uuid.UUID(value)
        return value

    def binary_placeholder_sql(self, value):
        return '_binary %s' if value is not None and not hasattr(value, 'as_sql') else '%s'

    def subtract_temporals(self, internal_type, lhs, rhs):
        lhs_sql, lhs_params = lhs
        rhs_sql, rhs_params = rhs
        if internal_type == 'TimeField':
            if self.connection.mysql_is_mariadb:
                # MariaDB includes the microsecond component in TIME_TO_SEC as
                # a decimal. MySQL returns an integer without microseconds.
                return 'CAST((TIME_TO_SEC(%(lhs)s) - TIME_TO_SEC(%(rhs)s)) * 1000000 AS SIGNED)' % {
                    'lhs': lhs_sql, 'rhs': rhs_sql
                }, (*lhs_params, *rhs_params)
            return (
                "((TIME_TO_SEC(%(lhs)s) * 1000000 + MICROSECOND(%(lhs)s)) -"
                " (TIME_TO_SEC(%(rhs)s) * 1000000 + MICROSECOND(%(rhs)s)))"
            ) % {'lhs': lhs_sql, 'rhs': rhs_sql}, tuple(lhs_params) * 2 + tuple(rhs_params) * 2
        params = (*rhs_params, *lhs_params)
        return "TIMESTAMPDIFF(MICROSECOND, %s, %s)" % (rhs_sql, lhs_sql), params

    def explain_query_prefix(self, format=None, **options):
        # Alias MySQL's TRADITIONAL to TEXT for consistency with other backends.
        if format and format.upper() == 'TEXT':
            format = 'TRADITIONAL'
        elif not format and 'TREE' in self.connection.features.supported_explain_formats:
            # Use TREE by default (if supported) as it's more informative.
            format = 'TREE'
        analyze = options.pop('analyze', False)
        prefix = super().explain_query_prefix(format, **options)
        if analyze and self.connection.features.supports_explain_analyze:
            # MariaDB uses ANALYZE instead of EXPLAIN ANALYZE.
            prefix = 'ANALYZE' if self.connection.mysql_is_mariadb else prefix + ' ANALYZE'
        if format and not (analyze and not self.connection.mysql_is_mariadb):
            # Only MariaDB supports the analyze option with formats.
            prefix += ' FORMAT=%s' % format
        return prefix

    def regex_lookup(self, lookup_type):
        # REGEXP BINARY doesn't work correctly in MySQL 8+ and REGEXP_LIKE
        # doesn't exist in MySQL 5.x or in MariaDB.
        if self.connection.mysql_version < (8, 0, 0) or self.connection.mysql_is_mariadb:
            if lookup_type == 'regex':
                return '%s REGEXP BINARY %s'
            return '%s REGEXP %s'

        match_option = 'c' if lookup_type == 'regex' else 'i'
        return "REGEXP_LIKE(%%s, %%s, '%s')" % match_option

    def insert_statement(self, ignore_conflicts=False):
        return 'INSERT IGNORE INTO' if ignore_conflicts else super().insert_statement(ignore_conflicts)

    def lookup_cast(self, lookup_type, internal_type=None):
        lookup = '%s'
        if internal_type == 'JSONField':
            if self.connection.mysql_is_mariadb or lookup_type in (
                'iexact', 'contains', 'icontains', 'startswith', 'istartswith',
                'endswith', 'iendswith', 'regex', 'iregex',
            ):
                lookup = 'JSON_UNQUOTE(%s)'
        return lookup
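
# Illustrative sketch (not part of the file above): most of the SQL-building
# methods on DatabaseOperations are pure string manipulation, so they can be
# exercised without a live database. Passing connection=None is a shortcut
# that only works for methods that never touch self.connection.
from django.db.backends.mysql.operations import DatabaseOperations

ops = DatabaseOperations(connection=None)
assert ops.date_extract_sql('week', '`created`') == 'WEEK(`created`, 3)'
assert ops.date_extract_sql('iso_week_day', '`created`') == 'WEEKDAY(`created`) + 1'
assert ops.quote_name('mytable') == '`mytable`'
assert ops.format_for_duration_arithmetic('%s') == 'INTERVAL %s MICROSECOND'
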
# ---- django/db/backends/mysql/schema.py ----
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from django.db.models import NOT_PROVIDED


class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):

    sql_rename_table = "RENAME TABLE %(old_table)s TO %(new_table)s"

    sql_alter_column_null = "MODIFY %(column)s %(type)s NULL"
    sql_alter_column_not_null = "MODIFY %(column)s %(type)s NOT NULL"
    sql_alter_column_type = "MODIFY %(column)s %(type)s"
    sql_alter_column_collate = "MODIFY %(column)s %(type)s%(collation)s"
    sql_alter_column_no_default_null = 'ALTER COLUMN %(column)s SET DEFAULT NULL'

    # No 'CASCADE' which works as a no-op in MySQL but is undocumented
    sql_delete_column = "ALTER TABLE %(table)s DROP COLUMN %(column)s"

    sql_delete_unique = "ALTER TABLE %(table)s DROP INDEX %(name)s"
    sql_create_column_inline_fk = (
        ', ADD CONSTRAINT %(name)s FOREIGN KEY (%(column)s) '
        'REFERENCES %(to_table)s(%(to_column)s)'
    )
    sql_delete_fk = "ALTER TABLE %(table)s DROP FOREIGN KEY %(name)s"

    sql_delete_index = "DROP INDEX %(name)s ON %(table)s"

    sql_create_pk = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s PRIMARY KEY (%(columns)s)"
    sql_delete_pk = "ALTER TABLE %(table)s DROP PRIMARY KEY"

    sql_create_index = 'CREATE INDEX %(name)s ON %(table)s (%(columns)s)%(extra)s'

    @property
    def sql_delete_check(self):
        if self.connection.mysql_is_mariadb:
            # The name of the column check constraint is the same as the field
            # name on MariaDB. Adding IF EXISTS clause prevents migrations
            # crash. Constraint is removed during a "MODIFY" column statement.
            return 'ALTER TABLE %(table)s DROP CONSTRAINT IF EXISTS %(name)s'
        return 'ALTER TABLE %(table)s DROP CHECK %(name)s'

    @property
    def sql_rename_column(self):
        # MariaDB >= 10.5.2 and MySQL >= 8.0.4 support an
        # "ALTER TABLE ... RENAME COLUMN" statement.
        if self.connection.mysql_is_mariadb:
            if self.connection.mysql_version >= (10, 5, 2):
                return super().sql_rename_column
        elif self.connection.mysql_version >= (8, 0, 4):
            return super().sql_rename_column
        return 'ALTER TABLE %(table)s CHANGE %(old_column)s %(new_column)s %(type)s'

    def quote_value(self, value):
        self.connection.ensure_connection()
        if isinstance(value, str):
            value = value.replace('%', '%%')
        # MySQLdb escapes to string, PyMySQL to bytes.
        quoted = self.connection.connection.escape(value, self.connection.connection.encoders)
        if isinstance(value, str) and isinstance(quoted, bytes):
            quoted = quoted.decode()
        return quoted

    def _is_limited_data_type(self, field):
        db_type = field.db_type(self.connection)
        return db_type is not None and db_type.lower() in self.connection._limited_data_types

    def skip_default(self, field):
        if not self._supports_limited_data_type_defaults:
            return self._is_limited_data_type(field)
        return False

    def skip_default_on_alter(self, field):
        if self._is_limited_data_type(field) and not self.connection.mysql_is_mariadb:
            # MySQL doesn't support defaults for BLOB and TEXT in the
            # ALTER COLUMN statement.
            return True
        return False

    @property
    def _supports_limited_data_type_defaults(self):
        # MariaDB >= 10.2.1 and MySQL >= 8.0.13 supports defaults for BLOB
        # and TEXT.
        if self.connection.mysql_is_mariadb:
            return self.connection.mysql_version >= (10, 2, 1)
        return self.connection.mysql_version >= (8, 0, 13)

    def _column_default_sql(self, field):
        if (
            not self.connection.mysql_is_mariadb and
            self._supports_limited_data_type_defaults and
            self._is_limited_data_type(field)
        ):
            # MySQL supports defaults for BLOB and TEXT columns only if the
            # default value is written as an expression i.e. in parentheses.
            return '(%s)'
        return super()._column_default_sql(field)

    def add_field(self, model, field):
        super().add_field(model, field)

        # Simulate the effect of a one-off default.
        # field.default may be unhashable, so a set isn't used for "in" check.
        if self.skip_default(field) and field.default not in (None, NOT_PROVIDED):
            effective_default = self.effective_default(field)
            self.execute('UPDATE %(table)s SET %(column)s = %%s' % {
                'table': self.quote_name(model._meta.db_table),
                'column': self.quote_name(field.column),
            }, [effective_default])

    def _field_should_be_indexed(self, model, field):
        create_index = super()._field_should_be_indexed(model, field)
        storage = self.connection.introspection.get_storage_engine(
            self.connection.cursor(), model._meta.db_table
        )
        # No need to create an index for ForeignKey fields except if
        # db_constraint=False because the index from that constraint won't be
        # created.
        if (storage == "InnoDB" and
                create_index and
                field.get_internal_type() == 'ForeignKey' and
                field.db_constraint):
            return False
        return not self._is_limited_data_type(field) and create_index

    def _delete_composed_index(self, model, fields, *args):
        """
        MySQL can remove an implicit FK index on a field when that field is
        covered by another index like a unique_together. "covered" here means
        that the more complex index starts like the simpler one.
        http://bugs.mysql.com/bug.php?id=37910 / Django ticket #24757
        We check here before removing the [unique|index]_together if we have to
        recreate a FK index.
        """
        first_field = model._meta.get_field(fields[0])
        if first_field.get_internal_type() == 'ForeignKey':
            constraint_names = self._constraint_names(model, [first_field.column], index=True)
            if not constraint_names:
                self.execute(
                    self._create_index_sql(model, fields=[first_field], suffix='')
                )
        return super()._delete_composed_index(model, fields, *args)

    def _set_field_new_type_null_status(self, field, new_type):
        """
        Keep the null property of the old field. If it has changed, it will be
        handled separately.
        """
        if field.null:
            new_type += " NULL"
        else:
            new_type += " NOT NULL"
        return new_type

    def _alter_column_type_sql(self, model, old_field, new_field, new_type):
        new_type = self._set_field_new_type_null_status(old_field, new_type)
        return super()._alter_column_type_sql(model, old_field, new_field, new_type)

    def _rename_field_sql(self, table, old_field, new_field, new_type):
        new_type = self._set_field_new_type_null_status(old_field, new_type)
        return super()._rename_field_sql(table, old_field, new_field, new_type)
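
# Illustrative sketch (not part of the file above): _set_field_new_type_null_status
# re-appends the column's NULL/NOT NULL status because MySQL's MODIFY clause
# restates the full column definition. A standalone re-implementation of that
# one rule, with a plain bool standing in for field.null:
def with_null_status(new_type, null):
    return new_type + (' NULL' if null else ' NOT NULL')

assert with_null_status('varchar(20)', True) == 'varchar(20) NULL'
assert with_null_status('bigint', False) == 'bigint NOT NULL'
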
# ---- django/db/backends/mysql/client.py ----
from django.db.backends.base.client import BaseDatabaseClient


class DatabaseClient(BaseDatabaseClient):
    executable_name = 'mysql'

    @classmethod
    def settings_to_cmd_args_env(cls, settings_dict, parameters):
        args = [cls.executable_name]
        env = None
        database = settings_dict['OPTIONS'].get(
            'database',
            settings_dict['OPTIONS'].get('db', settings_dict['NAME']),
        )
        user = settings_dict['OPTIONS'].get('user', settings_dict['USER'])
        password = settings_dict['OPTIONS'].get(
            'password',
            settings_dict['OPTIONS'].get('passwd', settings_dict['PASSWORD'])
        )
        host = settings_dict['OPTIONS'].get('host', settings_dict['HOST'])
        port = settings_dict['OPTIONS'].get('port', settings_dict['PORT'])
        server_ca = settings_dict['OPTIONS'].get('ssl', {}).get('ca')
        client_cert = settings_dict['OPTIONS'].get('ssl', {}).get('cert')
        client_key = settings_dict['OPTIONS'].get('ssl', {}).get('key')
        defaults_file = settings_dict['OPTIONS'].get('read_default_file')
        charset = settings_dict['OPTIONS'].get('charset')
        # Seems to be no good way to set sql_mode with CLI.

        if defaults_file:
            args += ["--defaults-file=%s" % defaults_file]
        if user:
            args += ["--user=%s" % user]
        if password:
            # The MYSQL_PWD environment variable usage is discouraged per
            # MySQL's documentation due to the possibility of exposure through
            # `ps` on old Unix flavors but --password suffers from the same
            # flaw on even more systems. Usage of an environment variable also
            # prevents password exposure if the subprocess.run(check=True) call
            # raises a CalledProcessError since the string representation of
            # the latter includes all of the provided `args`.
            env = {'MYSQL_PWD': password}
        if host:
            if '/' in host:
                args += ["--socket=%s" % host]
            else:
                args += ["--host=%s" % host]
        if port:
            args += ["--port=%s" % port]
        if server_ca:
            args += ["--ssl-ca=%s" % server_ca]
        if client_cert:
            args += ["--ssl-cert=%s" % client_cert]
        if client_key:
            args += ["--ssl-key=%s" % client_key]
        if charset:
            args += ['--default-character-set=%s' % charset]
        if database:
            args += [database]
        args.extend(parameters)
        return args, env
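
# Illustrative sketch (not part of the file above): settings_to_cmd_args_env is
# a classmethod, so the DATABASES-to-CLI mapping can be checked directly. The
# settings values here are made up for the demo.
from django.db.backends.mysql.client import DatabaseClient

settings_dict = {
    'NAME': 'mydb', 'USER': 'app', 'PASSWORD': 'secret',
    'HOST': 'db.example.com', 'PORT': '3306', 'OPTIONS': {},
}
args, env = DatabaseClient.settings_to_cmd_args_env(settings_dict, [])
assert args == ['mysql', '--user=app', '--host=db.example.com', '--port=3306', 'mydb']
assert env == {'MYSQL_PWD': 'secret'}  # the password travels via the environment
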
# ---- django/db/backends/postgresql/client.py ----
import signal

from django.db.backends.base.client import BaseDatabaseClient


class DatabaseClient(BaseDatabaseClient):
    executable_name = 'psql'

    @classmethod
    def settings_to_cmd_args_env(cls, settings_dict, parameters):
        args = [cls.executable_name]
        options = settings_dict.get('OPTIONS', {})

        host = settings_dict.get('HOST')
        port = settings_dict.get('PORT')
        dbname = settings_dict.get('NAME')
        user = settings_dict.get('USER')
        passwd = settings_dict.get('PASSWORD')
        passfile = options.get('passfile')
        service = options.get('service')
        sslmode = options.get('sslmode')
        sslrootcert = options.get('sslrootcert')
        sslcert = options.get('sslcert')
        sslkey = options.get('sslkey')

        if not dbname and not service:
            # Connect to the default 'postgres' db.
            dbname = 'postgres'
        if user:
            args += ['-U', user]
        if host:
            args += ['-h', host]
        if port:
            args += ['-p', str(port)]
        if dbname:
            args += [dbname]
        args.extend(parameters)

        env = {}
        if passwd:
            env['PGPASSWORD'] = str(passwd)
        if service:
            env['PGSERVICE'] = str(service)
        if sslmode:
            env['PGSSLMODE'] = str(sslmode)
        if sslrootcert:
            env['PGSSLROOTCERT'] = str(sslrootcert)
        if sslcert:
            env['PGSSLCERT'] = str(sslcert)
        if sslkey:
            env['PGSSLKEY'] = str(sslkey)
        if passfile:
            env['PGPASSFILE'] = str(passfile)
        return args, (env or None)

    def runshell(self, parameters):
        sigint_handler = signal.getsignal(signal.SIGINT)
        try:
            # Allow SIGINT to pass to psql to abort queries.
            signal.signal(signal.SIGINT, signal.SIG_IGN)
            super().runshell(parameters)
        finally:
            # Restore the original SIGINT handler.
            signal.signal(signal.SIGINT, sigint_handler)
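
# Illustrative sketch (not part of the file above): the PostgreSQL client maps
# everything sensitive or optional to libpq environment variables rather than
# command-line flags. Settings values are made up for the demo.
from django.db.backends.postgresql.client import DatabaseClient

settings_dict = {
    'NAME': 'mydb', 'USER': 'app', 'PASSWORD': 'secret',
    'HOST': 'localhost', 'PORT': '5432', 'OPTIONS': {},
}
args, env = DatabaseClient.settings_to_cmd_args_env(settings_dict, [])
assert args == ['psql', '-U', 'app', '-h', 'localhost', '-p', '5432', 'mydb']
assert env == {'PGPASSWORD': 'secret'}
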
# ---- django/db/backends/postgresql/creation.py ----
import sys

from psycopg2 import errorcodes

from django.core.exceptions import ImproperlyConfigured
from django.db.backends.base.creation import BaseDatabaseCreation
from django.db.backends.utils import strip_quotes


class DatabaseCreation(BaseDatabaseCreation):

    def _quote_name(self, name):
        return self.connection.ops.quote_name(name)

    def _get_database_create_suffix(self, encoding=None, template=None):
        suffix = ""
        if encoding:
            suffix += " ENCODING '{}'".format(encoding)
        if template:
            suffix += " TEMPLATE {}".format(self._quote_name(template))
        return suffix and "WITH" + suffix

    def sql_table_creation_suffix(self):
        test_settings = self.connection.settings_dict['TEST']
        if test_settings.get('COLLATION') is not None:
            raise ImproperlyConfigured(
                'PostgreSQL does not support collation setting at database '
                'creation time.'
            )
        return self._get_database_create_suffix(
            encoding=test_settings['CHARSET'],
            template=test_settings.get('TEMPLATE'),
        )

    def _database_exists(self, cursor, database_name):
        cursor.execute('SELECT 1 FROM pg_catalog.pg_database WHERE datname = %s', [strip_quotes(database_name)])
        return cursor.fetchone() is not None

    def _execute_create_test_db(self, cursor, parameters, keepdb=False):
        try:
            if keepdb and self._database_exists(cursor, parameters['dbname']):
                # If the database should be kept and it already exists, don't
                # try to create a new one.
                return
            super()._execute_create_test_db(cursor, parameters, keepdb)
        except Exception as e:
            if getattr(e.__cause__, 'pgcode', '') != errorcodes.DUPLICATE_DATABASE:
                # All errors except "database already exists" cancel tests.
                self.log('Got an error creating the test database: %s' % e)
                sys.exit(2)
            elif not keepdb:
                # If the database should be kept, ignore "database already
                # exists".
                raise

    def _clone_test_db(self, suffix, verbosity, keepdb=False):
        # CREATE DATABASE ... WITH TEMPLATE ... requires closing connections
        # to the template database.
        self.connection.close()

        source_database_name = self.connection.settings_dict['NAME']
        target_database_name = self.get_test_db_clone_settings(suffix)['NAME']
        test_db_params = {
            'dbname': self._quote_name(target_database_name),
            'suffix': self._get_database_create_suffix(template=source_database_name),
        }
        with self._nodb_cursor() as cursor:
            try:
                self._execute_create_test_db(cursor, test_db_params, keepdb)
            except Exception:
                try:
                    if verbosity >= 1:
                        self.log('Destroying old test database for alias %s...' % (
                            self._get_database_display_str(verbosity, target_database_name),
                        ))
                    cursor.execute('DROP DATABASE %(dbname)s' % test_db_params)
                    self._execute_create_test_db(cursor, test_db_params, keepdb)
                except Exception as e:
                    self.log('Got an error cloning the test database: %s' % e)
                    sys.exit(2)
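
# Illustrative sketch (not part of the file above): _get_database_create_suffix
# builds the optional WITH clause of CREATE DATABASE; the `suffix and "WITH" +
# suffix` idiom yields '' when neither option is set. A standalone mirror of
# the logic, with identifier quoting elided:
def create_suffix(encoding=None, template=None):
    suffix = ''
    if encoding:
        suffix += " ENCODING '{}'".format(encoding)
    if template:
        suffix += ' TEMPLATE "{}"'.format(template)
    return suffix and 'WITH' + suffix

assert create_suffix(encoding='UTF8') == "WITH ENCODING 'UTF8'"
assert create_suffix(template='template0') == 'WITH TEMPLATE "template0"'
assert create_suffix() == ''
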
# ---- django/db/backends/sqlite3/features.py ----
import operator
import platform

from django.db import transaction
from django.db.backends.base.features import BaseDatabaseFeatures
from django.db.utils import OperationalError
from django.utils.functional import cached_property

from .base import Database


class DatabaseFeatures(BaseDatabaseFeatures):
    # SQLite can read from a cursor since SQLite 3.6.5, subject to the caveat
    # that statements within a connection aren't isolated from each other. See
    # https://sqlite.org/isolation.html.
    can_use_chunked_reads = True
    test_db_allows_multiple_connections = False
    supports_unspecified_pk = True
    supports_timezones = False
    max_query_params = 999
    supports_mixed_date_datetime_comparisons = False
    supports_transactions = True
    atomic_transactions = False
    can_rollback_ddl = True
    can_create_inline_fk = False
    supports_paramstyle_pyformat = False
    can_clone_databases = True
    supports_temporal_subtraction = True
    ignores_table_name_case = True
    supports_cast_with_precision = False
    time_cast_precision = 3
    can_release_savepoints = True
    # Is "ALTER TABLE ... RENAME COLUMN" supported?
    can_alter_table_rename_column = Database.sqlite_version_info >= (3, 25, 0)
    supports_parentheses_in_compound = False
    # Deferred constraint checks can be emulated on SQLite < 3.20 but not in a
    # reasonably performant way.
    supports_pragma_foreign_key_check = Database.sqlite_version_info >= (3, 20, 0)
    can_defer_constraint_checks = supports_pragma_foreign_key_check
    supports_functions_in_partial_indexes = Database.sqlite_version_info >= (3, 15, 0)
    supports_over_clause = Database.sqlite_version_info >= (3, 25, 0)
    supports_frame_range_fixed_distance = Database.sqlite_version_info >= (3, 28, 0)
    supports_aggregate_filter_clause = Database.sqlite_version_info >= (3, 30, 1)
    supports_order_by_nulls_modifier = Database.sqlite_version_info >= (3, 30, 0)
    order_by_nulls_first = True
    supports_json_field_contains = False
    test_collations = {
        'ci': 'nocase',
        'cs': 'binary',
        'non_default': 'nocase',
    }

    @cached_property
    def django_test_skips(self):
        skips = {
            'SQLite stores values rounded to 15 significant digits.': {
                'model_fields.test_decimalfield.DecimalFieldTests.test_fetch_from_db_without_float_rounding',
            },
            'SQLite naively remakes the table on field alteration.': {
                'schema.tests.SchemaTests.test_unique_no_unnecessary_fk_drops',
                'schema.tests.SchemaTests.test_unique_and_reverse_m2m',
                'schema.tests.SchemaTests.test_alter_field_default_doesnt_perform_queries',
                'schema.tests.SchemaTests.test_rename_column_renames_deferred_sql_references',
            },
            "SQLite doesn't have a constraint.": {
                'model_fields.test_integerfield.PositiveIntegerFieldTests.test_negative_values',
            },
            "SQLite doesn't support negative precision for ROUND().": {
                'db_functions.math.test_round.RoundTests.test_null_with_negative_precision',
                'db_functions.math.test_round.RoundTests.test_decimal_with_negative_precision',
                'db_functions.math.test_round.RoundTests.test_float_with_negative_precision',
                'db_functions.math.test_round.RoundTests.test_integer_with_negative_precision',
            },
        }
        if Database.sqlite_version_info < (3, 27):
            skips.update({
                'Nondeterministic failure on SQLite < 3.27.': {
                    'expressions_window.tests.WindowFunctionTests.test_subquery_row_range_rank',
                },
            })
        if self.connection.is_in_memory_db():
            skips.update({
                "the sqlite backend's close() method is a no-op when using an "
                "in-memory database": {
                    'servers.test_liveserverthread.LiveServerThreadTest.test_closes_connections',
                    'servers.tests.LiveServerTestCloseConnectionTest.test_closes_connections',
                },
            })
        return skips

    @cached_property
    def supports_atomic_references_rename(self):
        # SQLite 3.28.0 bundled with MacOS 10.15 does not support renaming
        # references atomically.
        if platform.mac_ver()[0].startswith('10.15.') and Database.sqlite_version_info == (3, 28, 0):
            return False
        return Database.sqlite_version_info >= (3, 26, 0)

    @cached_property
    def introspected_field_types(self):
        return {
            **super().introspected_field_types,
            'BigAutoField': 'AutoField',
            'DurationField': 'BigIntegerField',
            'GenericIPAddressField': 'CharField',
            'SmallAutoField': 'AutoField',
        }

    @cached_property
    def supports_json_field(self):
        with self.connection.cursor() as cursor:
            try:
                with transaction.atomic(self.connection.alias):
                    cursor.execute('SELECT JSON(\'{"a": "b"}\')')
            except OperationalError:
                return False
        return True

    can_introspect_json_field = property(operator.attrgetter('supports_json_field'))
    has_json_object_function = property(operator.attrgetter('supports_json_field'))

    @cached_property
    def can_return_columns_from_insert(self):
        return Database.sqlite_version_info >= (3, 35)

    can_return_rows_from_bulk_insert = property(operator.attrgetter('can_return_columns_from_insert'))
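
# Illustrative sketch (not part of the file above): the class attributes above
# are computed once at import time from the SQLite version the interpreter was
# built against, e.g.:
import sqlite3

print(sqlite3.sqlite_version_info)                 # e.g. (3, 31, 1)
print(sqlite3.sqlite_version_info >= (3, 25, 0))   # window functions, RENAME COLUMN
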
# ---- django/db/backends/sqlite3/base.py ----
""" SQLite backend for the sqlite3 module in the standard library. """ import datetime import decimal import functools import hashlib import math import operator import random import re import statistics import warnings from itertools import chain from sqlite3 import dbapi2 as Database import pytz from django.core.exceptions import ImproperlyConfigured from django.db import IntegrityError from django.db.backends import utils as backend_utils from django.db.backends.base.base import BaseDatabaseWrapper from django.utils import timezone from django.utils.asyncio import async_unsafe from django.utils.dateparse import parse_datetime, parse_time from django.utils.duration import duration_microseconds from django.utils.regex_helper import _lazy_re_compile from .client import DatabaseClient from .creation import DatabaseCreation from .features import DatabaseFeatures from .introspection import DatabaseIntrospection from .operations import DatabaseOperations from .schema import DatabaseSchemaEditor def decoder(conv_func): """ Convert bytestrings from Python's sqlite3 interface to a regular string. """ return lambda s: conv_func(s.decode()) def none_guard(func): """ Decorator that returns None if any of the arguments to the decorated function are None. Many SQL functions return NULL if any of their arguments are NULL. This decorator simplifies the implementation of this for the custom functions registered below. """ @functools.wraps(func) def wrapper(*args, **kwargs): return None if None in args else func(*args, **kwargs) return wrapper def list_aggregate(function): """ Return an aggregate class that accumulates values in a list and applies the provided function to the data. """ return type('ListAggregate', (list,), {'finalize': function, 'step': list.append}) def check_sqlite_version(): if Database.sqlite_version_info < (3, 9, 0): raise ImproperlyConfigured( 'SQLite 3.9.0 or later is required (found %s).' % Database.sqlite_version ) check_sqlite_version() Database.register_converter("bool", b'1'.__eq__) Database.register_converter("time", decoder(parse_time)) Database.register_converter("datetime", decoder(parse_datetime)) Database.register_converter("timestamp", decoder(parse_datetime)) Database.register_adapter(decimal.Decimal, str) class DatabaseWrapper(BaseDatabaseWrapper): vendor = 'sqlite' display_name = 'SQLite' # SQLite doesn't actually support most of these types, but it "does the right # thing" given more verbose field definitions, so leave them as is so that # schema inspection is more useful. 
data_types = { 'AutoField': 'integer', 'BigAutoField': 'integer', 'BinaryField': 'BLOB', 'BooleanField': 'bool', 'CharField': 'varchar(%(max_length)s)', 'DateField': 'date', 'DateTimeField': 'datetime', 'DecimalField': 'decimal', 'DurationField': 'bigint', 'FileField': 'varchar(%(max_length)s)', 'FilePathField': 'varchar(%(max_length)s)', 'FloatField': 'real', 'IntegerField': 'integer', 'BigIntegerField': 'bigint', 'IPAddressField': 'char(15)', 'GenericIPAddressField': 'char(39)', 'JSONField': 'text', 'OneToOneField': 'integer', 'PositiveBigIntegerField': 'bigint unsigned', 'PositiveIntegerField': 'integer unsigned', 'PositiveSmallIntegerField': 'smallint unsigned', 'SlugField': 'varchar(%(max_length)s)', 'SmallAutoField': 'integer', 'SmallIntegerField': 'smallint', 'TextField': 'text', 'TimeField': 'time', 'UUIDField': 'char(32)', } data_type_check_constraints = { 'PositiveBigIntegerField': '"%(column)s" >= 0', 'JSONField': '(JSON_VALID("%(column)s") OR "%(column)s" IS NULL)', 'PositiveIntegerField': '"%(column)s" >= 0', 'PositiveSmallIntegerField': '"%(column)s" >= 0', } data_types_suffix = { 'AutoField': 'AUTOINCREMENT', 'BigAutoField': 'AUTOINCREMENT', 'SmallAutoField': 'AUTOINCREMENT', } # SQLite requires LIKE statements to include an ESCAPE clause if the value # being escaped has a percent or underscore in it. # See https://www.sqlite.org/lang_expr.html for an explanation. operators = { 'exact': '= %s', 'iexact': "LIKE %s ESCAPE '\\'", 'contains': "LIKE %s ESCAPE '\\'", 'icontains': "LIKE %s ESCAPE '\\'", 'regex': 'REGEXP %s', 'iregex': "REGEXP '(?i)' || %s", 'gt': '> %s', 'gte': '>= %s', 'lt': '< %s', 'lte': '<= %s', 'startswith': "LIKE %s ESCAPE '\\'", 'endswith': "LIKE %s ESCAPE '\\'", 'istartswith': "LIKE %s ESCAPE '\\'", 'iendswith': "LIKE %s ESCAPE '\\'", } # The patterns below are used to generate SQL pattern lookup clauses when # the right-hand side of the lookup isn't a raw string (it might be an expression # or the result of a bilateral transformation). # In those cases, special characters for LIKE operators (e.g. \, *, _) should be # escaped on database side. # # Note: we use str.format() here for readability as '%' is used as a wildcard for # the LIKE operator. pattern_esc = r"REPLACE(REPLACE(REPLACE({}, '\', '\\'), '%%', '\%%'), '_', '\_')" pattern_ops = { 'contains': r"LIKE '%%' || {} || '%%' ESCAPE '\'", 'icontains': r"LIKE '%%' || UPPER({}) || '%%' ESCAPE '\'", 'startswith': r"LIKE {} || '%%' ESCAPE '\'", 'istartswith': r"LIKE UPPER({}) || '%%' ESCAPE '\'", 'endswith': r"LIKE '%%' || {} ESCAPE '\'", 'iendswith': r"LIKE '%%' || UPPER({}) ESCAPE '\'", } Database = Database SchemaEditorClass = DatabaseSchemaEditor # Classes instantiated in __init__(). client_class = DatabaseClient creation_class = DatabaseCreation features_class = DatabaseFeatures introspection_class = DatabaseIntrospection ops_class = DatabaseOperations def get_connection_params(self): settings_dict = self.settings_dict if not settings_dict['NAME']: raise ImproperlyConfigured( "settings.DATABASES is improperly configured. " "Please supply the NAME value.") kwargs = { 'database': settings_dict['NAME'], 'detect_types': Database.PARSE_DECLTYPES | Database.PARSE_COLNAMES, **settings_dict['OPTIONS'], } # Always allow the underlying SQLite connection to be shareable # between multiple threads. The safe-guarding will be handled at a # higher level by the `BaseDatabaseWrapper.allow_thread_sharing` # property. 
This is necessary as the shareability is disabled by # default in pysqlite and it cannot be changed once a connection is # opened. if 'check_same_thread' in kwargs and kwargs['check_same_thread']: warnings.warn( 'The `check_same_thread` option was provided and set to ' 'True. It will be overridden with False. Use the ' '`DatabaseWrapper.allow_thread_sharing` property instead ' 'for controlling thread shareability.', RuntimeWarning ) kwargs.update({'check_same_thread': False, 'uri': True}) return kwargs @async_unsafe def get_new_connection(self, conn_params): conn = Database.connect(**conn_params) create_deterministic_function = functools.partial( conn.create_function, deterministic=True, ) create_deterministic_function('django_date_extract', 2, _sqlite_datetime_extract) create_deterministic_function('django_date_trunc', 4, _sqlite_date_trunc) create_deterministic_function('django_datetime_cast_date', 3, _sqlite_datetime_cast_date) create_deterministic_function('django_datetime_cast_time', 3, _sqlite_datetime_cast_time) create_deterministic_function('django_datetime_extract', 4, _sqlite_datetime_extract) create_deterministic_function('django_datetime_trunc', 4, _sqlite_datetime_trunc) create_deterministic_function('django_time_extract', 2, _sqlite_time_extract) create_deterministic_function('django_time_trunc', 4, _sqlite_time_trunc) create_deterministic_function('django_time_diff', 2, _sqlite_time_diff) create_deterministic_function('django_timestamp_diff', 2, _sqlite_timestamp_diff) create_deterministic_function('django_format_dtdelta', 3, _sqlite_format_dtdelta) create_deterministic_function('regexp', 2, _sqlite_regexp) create_deterministic_function('ACOS', 1, none_guard(math.acos)) create_deterministic_function('ASIN', 1, none_guard(math.asin)) create_deterministic_function('ATAN', 1, none_guard(math.atan)) create_deterministic_function('ATAN2', 2, none_guard(math.atan2)) create_deterministic_function('BITXOR', 2, none_guard(operator.xor)) create_deterministic_function('CEILING', 1, none_guard(math.ceil)) create_deterministic_function('COS', 1, none_guard(math.cos)) create_deterministic_function('COT', 1, none_guard(lambda x: 1 / math.tan(x))) create_deterministic_function('DEGREES', 1, none_guard(math.degrees)) create_deterministic_function('EXP', 1, none_guard(math.exp)) create_deterministic_function('FLOOR', 1, none_guard(math.floor)) create_deterministic_function('LN', 1, none_guard(math.log)) create_deterministic_function('LOG', 2, none_guard(lambda x, y: math.log(y, x))) create_deterministic_function('LPAD', 3, _sqlite_lpad) create_deterministic_function('MD5', 1, none_guard(lambda x: hashlib.md5(x.encode()).hexdigest())) create_deterministic_function('MOD', 2, none_guard(math.fmod)) create_deterministic_function('PI', 0, lambda: math.pi) create_deterministic_function('POWER', 2, none_guard(operator.pow)) create_deterministic_function('RADIANS', 1, none_guard(math.radians)) create_deterministic_function('REPEAT', 2, none_guard(operator.mul)) create_deterministic_function('REVERSE', 1, none_guard(lambda x: x[::-1])) create_deterministic_function('RPAD', 3, _sqlite_rpad) create_deterministic_function('SHA1', 1, none_guard(lambda x: hashlib.sha1(x.encode()).hexdigest())) create_deterministic_function('SHA224', 1, none_guard(lambda x: hashlib.sha224(x.encode()).hexdigest())) create_deterministic_function('SHA256', 1, none_guard(lambda x: hashlib.sha256(x.encode()).hexdigest())) create_deterministic_function('SHA384', 1, none_guard(lambda x: hashlib.sha384(x.encode()).hexdigest())) 
create_deterministic_function('SHA512', 1, none_guard(lambda x: hashlib.sha512(x.encode()).hexdigest())) create_deterministic_function('SIGN', 1, none_guard(lambda x: (x > 0) - (x < 0))) create_deterministic_function('SIN', 1, none_guard(math.sin)) create_deterministic_function('SQRT', 1, none_guard(math.sqrt)) create_deterministic_function('TAN', 1, none_guard(math.tan)) # Don't use the built-in RANDOM() function because it returns a value # in the range [-1 * 2^63, 2^63 - 1] instead of [0, 1). conn.create_function('RAND', 0, random.random) conn.create_aggregate('STDDEV_POP', 1, list_aggregate(statistics.pstdev)) conn.create_aggregate('STDDEV_SAMP', 1, list_aggregate(statistics.stdev)) conn.create_aggregate('VAR_POP', 1, list_aggregate(statistics.pvariance)) conn.create_aggregate('VAR_SAMP', 1, list_aggregate(statistics.variance)) conn.execute('PRAGMA foreign_keys = ON') return conn def init_connection_state(self): pass def create_cursor(self, name=None): return self.connection.cursor(factory=SQLiteCursorWrapper) @async_unsafe def close(self): self.validate_thread_sharing() # If database is in memory, closing the connection destroys the # database. To prevent accidental data loss, ignore close requests on # an in-memory db. if not self.is_in_memory_db(): BaseDatabaseWrapper.close(self) def _savepoint_allowed(self): # When 'isolation_level' is not None, sqlite3 commits before each # savepoint; it's a bug. When it is None, savepoints don't make sense # because autocommit is enabled. The only exception is inside 'atomic' # blocks. To work around that bug, on SQLite, 'atomic' starts a # transaction explicitly rather than simply disable autocommit. return self.in_atomic_block def _set_autocommit(self, autocommit): if autocommit: level = None else: # sqlite3's internal default is ''. It's different from None. # See Modules/_sqlite/connection.c. level = '' # 'isolation_level' is a misleading API. # SQLite always runs at the SERIALIZABLE isolation level. with self.wrap_database_errors: self.connection.isolation_level = level def disable_constraint_checking(self): with self.cursor() as cursor: cursor.execute('PRAGMA foreign_keys = OFF') # Foreign key constraints cannot be turned off while in a multi- # statement transaction. Fetch the current state of the pragma # to determine if constraints are effectively disabled. enabled = cursor.execute('PRAGMA foreign_keys').fetchone()[0] return not bool(enabled) def enable_constraint_checking(self): with self.cursor() as cursor: cursor.execute('PRAGMA foreign_keys = ON') def check_constraints(self, table_names=None): """ Check each table name in `table_names` for rows with invalid foreign key references. This method is intended to be used in conjunction with `disable_constraint_checking()` and `enable_constraint_checking()`, to determine if rows with invalid references were entered while constraint checks were off. 
""" if self.features.supports_pragma_foreign_key_check: with self.cursor() as cursor: if table_names is None: violations = cursor.execute('PRAGMA foreign_key_check').fetchall() else: violations = chain.from_iterable( cursor.execute( 'PRAGMA foreign_key_check(%s)' % self.ops.quote_name(table_name) ).fetchall() for table_name in table_names ) # See https://www.sqlite.org/pragma.html#pragma_foreign_key_check for table_name, rowid, referenced_table_name, foreign_key_index in violations: foreign_key = cursor.execute( 'PRAGMA foreign_key_list(%s)' % self.ops.quote_name(table_name) ).fetchall()[foreign_key_index] column_name, referenced_column_name = foreign_key[3:5] primary_key_column_name = self.introspection.get_primary_key_column(cursor, table_name) primary_key_value, bad_value = cursor.execute( 'SELECT %s, %s FROM %s WHERE rowid = %%s' % ( self.ops.quote_name(primary_key_column_name), self.ops.quote_name(column_name), self.ops.quote_name(table_name), ), (rowid,), ).fetchone() raise IntegrityError( "The row in table '%s' with primary key '%s' has an " "invalid foreign key: %s.%s contains a value '%s' that " "does not have a corresponding value in %s.%s." % ( table_name, primary_key_value, table_name, column_name, bad_value, referenced_table_name, referenced_column_name ) ) else: with self.cursor() as cursor: if table_names is None: table_names = self.introspection.table_names(cursor) for table_name in table_names: primary_key_column_name = self.introspection.get_primary_key_column(cursor, table_name) if not primary_key_column_name: continue key_columns = self.introspection.get_key_columns(cursor, table_name) for column_name, referenced_table_name, referenced_column_name in key_columns: cursor.execute( """ SELECT REFERRING.`%s`, REFERRING.`%s` FROM `%s` as REFERRING LEFT JOIN `%s` as REFERRED ON (REFERRING.`%s` = REFERRED.`%s`) WHERE REFERRING.`%s` IS NOT NULL AND REFERRED.`%s` IS NULL """ % ( primary_key_column_name, column_name, table_name, referenced_table_name, column_name, referenced_column_name, column_name, referenced_column_name, ) ) for bad_row in cursor.fetchall(): raise IntegrityError( "The row in table '%s' with primary key '%s' has an " "invalid foreign key: %s.%s contains a value '%s' that " "does not have a corresponding value in %s.%s." % ( table_name, bad_row[0], table_name, column_name, bad_row[1], referenced_table_name, referenced_column_name, ) ) def is_usable(self): return True def _start_transaction_under_autocommit(self): """ Start a transaction explicitly in autocommit mode. Staying in autocommit mode works around a bug of sqlite3 that breaks savepoints when autocommit is disabled. """ self.cursor().execute("BEGIN") def is_in_memory_db(self): return self.creation.is_in_memory_db(self.settings_dict['NAME']) FORMAT_QMARK_REGEX = _lazy_re_compile(r'(?<!%)%s') class SQLiteCursorWrapper(Database.Cursor): """ Django uses "format" style placeholders, but pysqlite2 uses "qmark" style. This fixes it -- but note that if you want to use a literal "%s" in a query, you'll need to use "%%s". 
""" def execute(self, query, params=None): if params is None: return Database.Cursor.execute(self, query) query = self.convert_query(query) return Database.Cursor.execute(self, query, params) def executemany(self, query, param_list): query = self.convert_query(query) return Database.Cursor.executemany(self, query, param_list) def convert_query(self, query): return FORMAT_QMARK_REGEX.sub('?', query).replace('%%', '%') def _sqlite_datetime_parse(dt, tzname=None, conn_tzname=None): if dt is None: return None try: dt = backend_utils.typecast_timestamp(dt) except (TypeError, ValueError): return None if conn_tzname: dt = dt.replace(tzinfo=pytz.timezone(conn_tzname)) if tzname is not None and tzname != conn_tzname: sign_index = tzname.find('+') + tzname.find('-') + 1 if sign_index > -1: sign = tzname[sign_index] tzname, offset = tzname.split(sign) if offset: hours, minutes = offset.split(':') offset_delta = datetime.timedelta(hours=int(hours), minutes=int(minutes)) dt += offset_delta if sign == '+' else -offset_delta dt = timezone.localtime(dt, pytz.timezone(tzname)) return dt def _sqlite_date_trunc(lookup_type, dt, tzname, conn_tzname): dt = _sqlite_datetime_parse(dt, tzname, conn_tzname) if dt is None: return None if lookup_type == 'year': return "%i-01-01" % dt.year elif lookup_type == 'quarter': month_in_quarter = dt.month - (dt.month - 1) % 3 return '%i-%02i-01' % (dt.year, month_in_quarter) elif lookup_type == 'month': return "%i-%02i-01" % (dt.year, dt.month) elif lookup_type == 'week': dt = dt - datetime.timedelta(days=dt.weekday()) return "%i-%02i-%02i" % (dt.year, dt.month, dt.day) elif lookup_type == 'day': return "%i-%02i-%02i" % (dt.year, dt.month, dt.day) def _sqlite_time_trunc(lookup_type, dt, tzname, conn_tzname): if dt is None: return None dt_parsed = _sqlite_datetime_parse(dt, tzname, conn_tzname) if dt_parsed is None: try: dt = backend_utils.typecast_time(dt) except (ValueError, TypeError): return None else: dt = dt_parsed if lookup_type == 'hour': return "%02i:00:00" % dt.hour elif lookup_type == 'minute': return "%02i:%02i:00" % (dt.hour, dt.minute) elif lookup_type == 'second': return "%02i:%02i:%02i" % (dt.hour, dt.minute, dt.second) def _sqlite_datetime_cast_date(dt, tzname, conn_tzname): dt = _sqlite_datetime_parse(dt, tzname, conn_tzname) if dt is None: return None return dt.date().isoformat() def _sqlite_datetime_cast_time(dt, tzname, conn_tzname): dt = _sqlite_datetime_parse(dt, tzname, conn_tzname) if dt is None: return None return dt.time().isoformat() def _sqlite_datetime_extract(lookup_type, dt, tzname=None, conn_tzname=None): dt = _sqlite_datetime_parse(dt, tzname, conn_tzname) if dt is None: return None if lookup_type == 'week_day': return (dt.isoweekday() % 7) + 1 elif lookup_type == 'iso_week_day': return dt.isoweekday() elif lookup_type == 'week': return dt.isocalendar()[1] elif lookup_type == 'quarter': return math.ceil(dt.month / 3) elif lookup_type == 'iso_year': return dt.isocalendar()[0] else: return getattr(dt, lookup_type) def _sqlite_datetime_trunc(lookup_type, dt, tzname, conn_tzname): dt = _sqlite_datetime_parse(dt, tzname, conn_tzname) if dt is None: return None if lookup_type == 'year': return "%i-01-01 00:00:00" % dt.year elif lookup_type == 'quarter': month_in_quarter = dt.month - (dt.month - 1) % 3 return '%i-%02i-01 00:00:00' % (dt.year, month_in_quarter) elif lookup_type == 'month': return "%i-%02i-01 00:00:00" % (dt.year, dt.month) elif lookup_type == 'week': dt = dt - datetime.timedelta(days=dt.weekday()) return "%i-%02i-%02i 00:00:00" % 
(dt.year, dt.month, dt.day) elif lookup_type == 'day': return "%i-%02i-%02i 00:00:00" % (dt.year, dt.month, dt.day) elif lookup_type == 'hour': return "%i-%02i-%02i %02i:00:00" % (dt.year, dt.month, dt.day, dt.hour) elif lookup_type == 'minute': return "%i-%02i-%02i %02i:%02i:00" % (dt.year, dt.month, dt.day, dt.hour, dt.minute) elif lookup_type == 'second': return "%i-%02i-%02i %02i:%02i:%02i" % (dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second) def _sqlite_time_extract(lookup_type, dt): if dt is None: return None try: dt = backend_utils.typecast_time(dt) except (ValueError, TypeError): return None return getattr(dt, lookup_type) def _sqlite_prepare_dtdelta_param(conn, param): if conn in ['+', '-']: if isinstance(param, int): return datetime.timedelta(0, 0, param) else: return backend_utils.typecast_timestamp(param) return param @none_guard def _sqlite_format_dtdelta(conn, lhs, rhs): """ LHS and RHS can be either: - An integer number of microseconds - A string representing a datetime - A scalar value, e.g. float """ conn = conn.strip() try: real_lhs = _sqlite_prepare_dtdelta_param(conn, lhs) real_rhs = _sqlite_prepare_dtdelta_param(conn, rhs) except (ValueError, TypeError): return None if conn == '+': # typecast_timestamp returns a date or a datetime without timezone. # It will be formatted as "%Y-%m-%d" or "%Y-%m-%d %H:%M:%S[.%f]" out = str(real_lhs + real_rhs) elif conn == '-': out = str(real_lhs - real_rhs) elif conn == '*': out = real_lhs * real_rhs else: out = real_lhs / real_rhs return out @none_guard def _sqlite_time_diff(lhs, rhs): left = backend_utils.typecast_time(lhs) right = backend_utils.typecast_time(rhs) return ( (left.hour * 60 * 60 * 1000000) + (left.minute * 60 * 1000000) + (left.second * 1000000) + (left.microsecond) - (right.hour * 60 * 60 * 1000000) - (right.minute * 60 * 1000000) - (right.second * 1000000) - (right.microsecond) ) @none_guard def _sqlite_timestamp_diff(lhs, rhs): left = backend_utils.typecast_timestamp(lhs) right = backend_utils.typecast_timestamp(rhs) return duration_microseconds(left - right) @none_guard def _sqlite_regexp(re_pattern, re_string): return bool(re.search(re_pattern, str(re_string))) @none_guard def _sqlite_lpad(text, length, fill_text): if len(text) >= length: return text[:length] return (fill_text * length)[:length - len(text)] + text @none_guard def _sqlite_rpad(text, length, fill_text): return (text + fill_text * length)[:length]
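
# Illustrative only (the query string is hypothetical, not part of Django):
# SQLiteCursorWrapper.convert_query() maps "format" style placeholders to the
# "qmark" style expected by the sqlite3 module, unescaping literal "%%".
#
#     >>> FORMAT_QMARK_REGEX.sub('?', 'SELECT %s WHERE x LIKE "100%%"').replace('%%', '%')
#     'SELECT ? WHERE x LIKE "100%"'
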
import datetime
import decimal
import uuid
from functools import lru_cache
from itertools import chain

from django.conf import settings
from django.core.exceptions import FieldError
from django.db import DatabaseError, NotSupportedError, models
from django.db.backends.base.operations import BaseDatabaseOperations
from django.db.models.expressions import Col
from django.utils import timezone
from django.utils.dateparse import parse_date, parse_datetime, parse_time
from django.utils.functional import cached_property


class DatabaseOperations(BaseDatabaseOperations):
    cast_char_field_without_max_length = 'text'
    cast_data_types = {
        'DateField': 'TEXT',
        'DateTimeField': 'TEXT',
    }
    explain_prefix = 'EXPLAIN QUERY PLAN'
    # List of datatypes that cannot be extracted with JSON_EXTRACT() on
    # SQLite. Use JSON_TYPE() instead.
    jsonfield_datatype_values = frozenset(['null', 'false', 'true'])

    def bulk_batch_size(self, fields, objs):
        """
        SQLite has a compile-time default (SQLITE_LIMIT_VARIABLE_NUMBER) of
        999 variables per query. If there's only a single field to insert,
        the limit is 500 (SQLITE_MAX_COMPOUND_SELECT).
        """
        if len(fields) == 1:
            return 500
        elif len(fields) > 1:
            return self.connection.features.max_query_params // len(fields)
        else:
            return len(objs)

    def check_expression_support(self, expression):
        bad_fields = (models.DateField, models.DateTimeField, models.TimeField)
        bad_aggregates = (models.Sum, models.Avg, models.Variance, models.StdDev)
        if isinstance(expression, bad_aggregates):
            for expr in expression.get_source_expressions():
                try:
                    output_field = expr.output_field
                except (AttributeError, FieldError):
                    # Not every subexpression has an output_field, which is
                    # fine to ignore.
                    pass
                else:
                    if isinstance(output_field, bad_fields):
                        raise NotSupportedError(
                            'You cannot use Sum, Avg, StdDev, and Variance '
                            'aggregations on date/time fields in sqlite3 '
                            'since date/time is saved as text.'
                        )
        if (
            isinstance(expression, models.Aggregate) and
            expression.distinct and
            len(expression.source_expressions) > 1
        ):
            raise NotSupportedError(
                "SQLite doesn't support DISTINCT on aggregate functions "
                "accepting multiple arguments."
            )

    def date_extract_sql(self, lookup_type, field_name):
        """
        Support EXTRACT with a user-defined function django_date_extract()
        that's registered in connect(). Use single quotes because this is a
        string and could otherwise cause a collision with a field name.
        """
        return "django_date_extract('%s', %s)" % (lookup_type.lower(), field_name)

    def fetch_returned_insert_rows(self, cursor):
        """
        Given a cursor object that has just performed an INSERT...RETURNING
        statement into a table, return the list of returned data.
""" return cursor.fetchall() def format_for_duration_arithmetic(self, sql): """Do nothing since formatting is handled in the custom function.""" return sql def date_trunc_sql(self, lookup_type, field_name, tzname=None): return "django_date_trunc('%s', %s, %s, %s)" % ( lookup_type.lower(), field_name, *self._convert_tznames_to_sql(tzname), ) def time_trunc_sql(self, lookup_type, field_name, tzname=None): return "django_time_trunc('%s', %s, %s, %s)" % ( lookup_type.lower(), field_name, *self._convert_tznames_to_sql(tzname), ) def _convert_tznames_to_sql(self, tzname): if tzname and settings.USE_TZ: return "'%s'" % tzname, "'%s'" % self.connection.timezone_name return 'NULL', 'NULL' def datetime_cast_date_sql(self, field_name, tzname): return 'django_datetime_cast_date(%s, %s, %s)' % ( field_name, *self._convert_tznames_to_sql(tzname), ) def datetime_cast_time_sql(self, field_name, tzname): return 'django_datetime_cast_time(%s, %s, %s)' % ( field_name, *self._convert_tznames_to_sql(tzname), ) def datetime_extract_sql(self, lookup_type, field_name, tzname): return "django_datetime_extract('%s', %s, %s, %s)" % ( lookup_type.lower(), field_name, *self._convert_tznames_to_sql(tzname), ) def datetime_trunc_sql(self, lookup_type, field_name, tzname): return "django_datetime_trunc('%s', %s, %s, %s)" % ( lookup_type.lower(), field_name, *self._convert_tznames_to_sql(tzname), ) def time_extract_sql(self, lookup_type, field_name): return "django_time_extract('%s', %s)" % (lookup_type.lower(), field_name) def pk_default_value(self): return "NULL" def _quote_params_for_last_executed_query(self, params): """ Only for last_executed_query! Don't use this to execute SQL queries! """ # This function is limited both by SQLITE_LIMIT_VARIABLE_NUMBER (the # number of parameters, default = 999) and SQLITE_MAX_COLUMN (the # number of return values, default = 2000). Since Python's sqlite3 # module doesn't expose the get_limit() C API, assume the default # limits are in effect and split the work in batches if needed. BATCH_SIZE = 999 if len(params) > BATCH_SIZE: results = () for index in range(0, len(params), BATCH_SIZE): chunk = params[index:index + BATCH_SIZE] results += self._quote_params_for_last_executed_query(chunk) return results sql = 'SELECT ' + ', '.join(['QUOTE(?)'] * len(params)) # Bypass Django's wrappers and use the underlying sqlite3 connection # to avoid logging this query - it would trigger infinite recursion. cursor = self.connection.connection.cursor() # Native sqlite3 cursors cannot be used as context managers. try: return cursor.execute(sql, params).fetchone() finally: cursor.close() def last_executed_query(self, cursor, sql, params): # Python substitutes parameters in Modules/_sqlite/cursor.c with: # pysqlite_statement_bind_parameters(self->statement, parameters, allow_8bit_chars); # Unfortunately there is no way to reach self->statement from Python, # so we quote and substitute parameters manually. if params: if isinstance(params, (list, tuple)): params = self._quote_params_for_last_executed_query(params) else: values = tuple(params.values()) values = self._quote_params_for_last_executed_query(values) params = dict(zip(params, values)) return sql % params # For consistency with SQLiteCursorWrapper.execute(), just return sql # when there are no parameters. See #13648 and #17158. else: return sql def quote_name(self, name): if name.startswith('"') and name.endswith('"'): return name # Quoting once is enough. 
return '"%s"' % name def no_limit_value(self): return -1 def __references_graph(self, table_name): query = """ WITH tables AS ( SELECT %s name UNION SELECT sqlite_master.name FROM sqlite_master JOIN tables ON (sql REGEXP %s || tables.name || %s) ) SELECT name FROM tables; """ params = ( table_name, r'(?i)\s+references\s+("|\')?', r'("|\')?\s*\(', ) with self.connection.cursor() as cursor: results = cursor.execute(query, params) return [row[0] for row in results.fetchall()] @cached_property def _references_graph(self): # 512 is large enough to fit the ~330 tables (as of this writing) in # Django's test suite. return lru_cache(maxsize=512)(self.__references_graph) def sql_flush(self, style, tables, *, reset_sequences=False, allow_cascade=False): if tables and allow_cascade: # Simulate TRUNCATE CASCADE by recursively collecting the tables # referencing the tables to be flushed. tables = set(chain.from_iterable(self._references_graph(table) for table in tables)) sql = ['%s %s %s;' % ( style.SQL_KEYWORD('DELETE'), style.SQL_KEYWORD('FROM'), style.SQL_FIELD(self.quote_name(table)) ) for table in tables] if reset_sequences: sequences = [{'table': table} for table in tables] sql.extend(self.sequence_reset_by_name_sql(style, sequences)) return sql def sequence_reset_by_name_sql(self, style, sequences): if not sequences: return [] return [ '%s %s %s %s = 0 %s %s %s (%s);' % ( style.SQL_KEYWORD('UPDATE'), style.SQL_TABLE(self.quote_name('sqlite_sequence')), style.SQL_KEYWORD('SET'), style.SQL_FIELD(self.quote_name('seq')), style.SQL_KEYWORD('WHERE'), style.SQL_FIELD(self.quote_name('name')), style.SQL_KEYWORD('IN'), ', '.join([ "'%s'" % sequence_info['table'] for sequence_info in sequences ]), ), ] def adapt_datetimefield_value(self, value): if value is None: return None # Expression values are adapted by the database. if hasattr(value, 'resolve_expression'): return value # SQLite doesn't support tz-aware datetimes if timezone.is_aware(value): if settings.USE_TZ: value = timezone.make_naive(value, self.connection.timezone) else: raise ValueError("SQLite backend does not support timezone-aware datetimes when USE_TZ is False.") return str(value) def adapt_timefield_value(self, value): if value is None: return None # Expression values are adapted by the database. 
if hasattr(value, 'resolve_expression'): return value # SQLite doesn't support tz-aware datetimes if timezone.is_aware(value): raise ValueError("SQLite backend does not support timezone-aware times.") return str(value) def get_db_converters(self, expression): converters = super().get_db_converters(expression) internal_type = expression.output_field.get_internal_type() if internal_type == 'DateTimeField': converters.append(self.convert_datetimefield_value) elif internal_type == 'DateField': converters.append(self.convert_datefield_value) elif internal_type == 'TimeField': converters.append(self.convert_timefield_value) elif internal_type == 'DecimalField': converters.append(self.get_decimalfield_converter(expression)) elif internal_type == 'UUIDField': converters.append(self.convert_uuidfield_value) elif internal_type == 'BooleanField': converters.append(self.convert_booleanfield_value) return converters def convert_datetimefield_value(self, value, expression, connection): if value is not None: if not isinstance(value, datetime.datetime): value = parse_datetime(value) if settings.USE_TZ and not timezone.is_aware(value): value = timezone.make_aware(value, self.connection.timezone) return value def convert_datefield_value(self, value, expression, connection): if value is not None: if not isinstance(value, datetime.date): value = parse_date(value) return value def convert_timefield_value(self, value, expression, connection): if value is not None: if not isinstance(value, datetime.time): value = parse_time(value) return value def get_decimalfield_converter(self, expression): # SQLite stores only 15 significant digits. Digits coming from # float inaccuracy must be removed. create_decimal = decimal.Context(prec=15).create_decimal_from_float if isinstance(expression, Col): quantize_value = decimal.Decimal(1).scaleb(-expression.output_field.decimal_places) def converter(value, expression, connection): if value is not None: return create_decimal(value).quantize(quantize_value, context=expression.output_field.context) else: def converter(value, expression, connection): if value is not None: return create_decimal(value) return converter def convert_uuidfield_value(self, value, expression, connection): if value is not None: value = uuid.UUID(value) return value def convert_booleanfield_value(self, value, expression, connection): return bool(value) if value in (1, 0) else value def bulk_insert_sql(self, fields, placeholder_rows): return " UNION ALL ".join( "SELECT %s" % ", ".join(row) for row in placeholder_rows ) def combine_expression(self, connector, sub_expressions): # SQLite doesn't have a ^ operator, so use the user-defined POWER # function that's registered in connect(). if connector == '^': return 'POWER(%s)' % ','.join(sub_expressions) elif connector == '#': return 'BITXOR(%s)' % ','.join(sub_expressions) return super().combine_expression(connector, sub_expressions) def combine_duration_expression(self, connector, sub_expressions): if connector not in ['+', '-', '*', '/']: raise DatabaseError('Invalid connector for timedelta: %s.' 
% connector) fn_params = ["'%s'" % connector] + sub_expressions if len(fn_params) > 3: raise ValueError('Too many params for timedelta operations.') return "django_format_dtdelta(%s)" % ', '.join(fn_params) def integer_field_range(self, internal_type): # SQLite doesn't enforce any integer constraints return (None, None) def subtract_temporals(self, internal_type, lhs, rhs): lhs_sql, lhs_params = lhs rhs_sql, rhs_params = rhs params = (*lhs_params, *rhs_params) if internal_type == 'TimeField': return 'django_time_diff(%s, %s)' % (lhs_sql, rhs_sql), params return 'django_timestamp_diff(%s, %s)' % (lhs_sql, rhs_sql), params def insert_statement(self, ignore_conflicts=False): return 'INSERT OR IGNORE INTO' if ignore_conflicts else super().insert_statement(ignore_conflicts) def return_insert_columns(self, fields): # SQLite < 3.35 doesn't support an INSERT...RETURNING statement. if not fields: return '', () columns = [ '%s.%s' % ( self.quote_name(field.model._meta.db_table), self.quote_name(field.column), ) for field in fields ] return 'RETURNING %s' % ', '.join(columns), ()
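
# Illustrative only (hypothetical SQL fragments): SQLite lacks the ^ and #
# operators, so combine_expression() rewrites them to the user-defined POWER()
# and BITXOR() functions registered in connect(). No connection is needed for
# this method, so None is passed below.
#
#     >>> ops = DatabaseOperations(connection=None)
#     >>> ops.combine_expression('^', ['"price"', '2'])
#     'POWER("price",2)'
#     >>> ops.combine_expression('#', ['"flags"', '3'])
#     'BITXOR("flags",3)'
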
import copy from decimal import Decimal from django.apps.registry import Apps from django.db import NotSupportedError from django.db.backends.base.schema import BaseDatabaseSchemaEditor from django.db.backends.ddl_references import Statement from django.db.backends.utils import strip_quotes from django.db.models import UniqueConstraint from django.db.transaction import atomic class DatabaseSchemaEditor(BaseDatabaseSchemaEditor): sql_delete_table = "DROP TABLE %(table)s" sql_create_fk = None sql_create_inline_fk = "REFERENCES %(to_table)s (%(to_column)s) DEFERRABLE INITIALLY DEFERRED" sql_create_unique = "CREATE UNIQUE INDEX %(name)s ON %(table)s (%(columns)s)" sql_delete_unique = "DROP INDEX %(name)s" def __enter__(self): # Some SQLite schema alterations need foreign key constraints to be # disabled. Enforce it here for the duration of the schema edition. if not self.connection.disable_constraint_checking(): raise NotSupportedError( 'SQLite schema editor cannot be used while foreign key ' 'constraint checks are enabled. Make sure to disable them ' 'before entering a transaction.atomic() context because ' 'SQLite does not support disabling them in the middle of ' 'a multi-statement transaction.' ) return super().__enter__() def __exit__(self, exc_type, exc_value, traceback): self.connection.check_constraints() super().__exit__(exc_type, exc_value, traceback) self.connection.enable_constraint_checking() def quote_value(self, value): # The backend "mostly works" without this function and there are use # cases for compiling Python without the sqlite3 libraries (e.g. # security hardening). try: import sqlite3 value = sqlite3.adapt(value) except ImportError: pass except sqlite3.ProgrammingError: pass # Manual emulation of SQLite parameter quoting if isinstance(value, bool): return str(int(value)) elif isinstance(value, (Decimal, float, int)): return str(value) elif isinstance(value, str): return "'%s'" % value.replace("\'", "\'\'") elif value is None: return "NULL" elif isinstance(value, (bytes, bytearray, memoryview)): # Bytes are only allowed for BLOB fields, encoded as string # literals containing hexadecimal data and preceded by a single "X" # character. return "X'%s'" % value.hex() else: raise ValueError("Cannot quote parameter value %r of type %s" % (value, type(value))) def _is_referenced_by_fk_constraint(self, table_name, column_name=None, ignore_self=False): """ Return whether or not the provided table name is referenced by another one. If `column_name` is specified, only references pointing to that column are considered. If `ignore_self` is True, self-referential constraints are ignored. 
""" with self.connection.cursor() as cursor: for other_table in self.connection.introspection.get_table_list(cursor): if ignore_self and other_table.name == table_name: continue constraints = self.connection.introspection._get_foreign_key_constraints(cursor, other_table.name) for constraint in constraints.values(): constraint_table, constraint_column = constraint['foreign_key'] if (constraint_table == table_name and (column_name is None or constraint_column == column_name)): return True return False def alter_db_table(self, model, old_db_table, new_db_table, disable_constraints=True): if (not self.connection.features.supports_atomic_references_rename and disable_constraints and self._is_referenced_by_fk_constraint(old_db_table)): if self.connection.in_atomic_block: raise NotSupportedError(( 'Renaming the %r table while in a transaction is not ' 'supported on SQLite < 3.26 because it would break referential ' 'integrity. Try adding `atomic = False` to the Migration class.' ) % old_db_table) self.connection.enable_constraint_checking() super().alter_db_table(model, old_db_table, new_db_table) self.connection.disable_constraint_checking() else: super().alter_db_table(model, old_db_table, new_db_table) def alter_field(self, model, old_field, new_field, strict=False): if not self._field_should_be_altered(old_field, new_field): return old_field_name = old_field.name table_name = model._meta.db_table _, old_column_name = old_field.get_attname_column() if (new_field.name != old_field_name and not self.connection.features.supports_atomic_references_rename and self._is_referenced_by_fk_constraint(table_name, old_column_name, ignore_self=True)): if self.connection.in_atomic_block: raise NotSupportedError(( 'Renaming the %r.%r column while in a transaction is not ' 'supported on SQLite < 3.26 because it would break referential ' 'integrity. Try adding `atomic = False` to the Migration class.' ) % (model._meta.db_table, old_field_name)) with atomic(self.connection.alias): super().alter_field(model, old_field, new_field, strict=strict) # Follow SQLite's documented procedure for performing changes # that don't affect the on-disk content. # https://sqlite.org/lang_altertable.html#otheralter with self.connection.cursor() as cursor: schema_version = cursor.execute('PRAGMA schema_version').fetchone()[0] cursor.execute('PRAGMA writable_schema = 1') references_template = ' REFERENCES "%s" ("%%s") ' % table_name new_column_name = new_field.get_attname_column()[1] search = references_template % old_column_name replacement = references_template % new_column_name cursor.execute('UPDATE sqlite_master SET sql = replace(sql, %s, %s)', (search, replacement)) cursor.execute('PRAGMA schema_version = %d' % (schema_version + 1)) cursor.execute('PRAGMA writable_schema = 0') # The integrity check will raise an exception and rollback # the transaction if the sqlite_master updates corrupt the # database. cursor.execute('PRAGMA integrity_check') # Perform a VACUUM to refresh the database representation from # the sqlite_master table. with self.connection.cursor() as cursor: cursor.execute('VACUUM') else: super().alter_field(model, old_field, new_field, strict=strict) def _remake_table(self, model, create_field=None, delete_field=None, alter_field=None): """ Shortcut to transform a model from old_model into new_model This follows the correct procedure to perform non-rename or column addition operations based on SQLite's documentation https://www.sqlite.org/lang_altertable.html#caution The essential steps are: 1. 
Create a table with the updated definition called "new__app_model" 2. Copy the data from the existing "app_model" table to the new table 3. Drop the "app_model" table 4. Rename the "new__app_model" table to "app_model" 5. Restore any index of the previous "app_model" table. """ # Self-referential fields must be recreated rather than copied from # the old model to ensure their remote_field.field_name doesn't refer # to an altered field. def is_self_referential(f): return f.is_relation and f.remote_field.model is model # Work out the new fields dict / mapping body = { f.name: f.clone() if is_self_referential(f) else f for f in model._meta.local_concrete_fields } # Since mapping might mix column names and default values, # its values must be already quoted. mapping = {f.column: self.quote_name(f.column) for f in model._meta.local_concrete_fields} # This maps field names (not columns) for things like unique_together rename_mapping = {} # If any of the new or altered fields is introducing a new PK, # remove the old one restore_pk_field = None if getattr(create_field, 'primary_key', False) or ( alter_field and getattr(alter_field[1], 'primary_key', False)): for name, field in list(body.items()): if field.primary_key: field.primary_key = False restore_pk_field = field if field.auto_created: del body[name] del mapping[field.column] # Add in any created fields if create_field: body[create_field.name] = create_field # Choose a default and insert it into the copy map if not create_field.many_to_many and create_field.concrete: mapping[create_field.column] = self.quote_value( self.effective_default(create_field) ) # Add in any altered fields if alter_field: old_field, new_field = alter_field body.pop(old_field.name, None) mapping.pop(old_field.column, None) body[new_field.name] = new_field if old_field.null and not new_field.null: case_sql = "coalesce(%(col)s, %(default)s)" % { 'col': self.quote_name(old_field.column), 'default': self.quote_value(self.effective_default(new_field)) } mapping[new_field.column] = case_sql else: mapping[new_field.column] = self.quote_name(old_field.column) rename_mapping[old_field.name] = new_field.name # Remove any deleted fields if delete_field: del body[delete_field.name] del mapping[delete_field.column] # Remove any implicit M2M tables if delete_field.many_to_many and delete_field.remote_field.through._meta.auto_created: return self.delete_model(delete_field.remote_field.through) # Work inside a new app registry apps = Apps() # Work out the new value of unique_together, taking renames into # account unique_together = [ [rename_mapping.get(n, n) for n in unique] for unique in model._meta.unique_together ] # Work out the new value for index_together, taking renames into # account index_together = [ [rename_mapping.get(n, n) for n in index] for index in model._meta.index_together ] indexes = model._meta.indexes if delete_field: indexes = [ index for index in indexes if delete_field.name not in index.fields ] constraints = list(model._meta.constraints) # Provide isolated instances of the fields to the new model body so # that the existing model's internals aren't interfered with when # the dummy model is constructed. body_copy = copy.deepcopy(body) # Construct a new model with the new fields to allow self referential # primary key to resolve to. This model won't ever be materialized as a # table and solely exists for foreign key reference resolution purposes. # This wouldn't be required if the schema editor was operating on model # states instead of rendered models. 
meta_contents = {
            'app_label': model._meta.app_label,
            'db_table': model._meta.db_table,
            'unique_together': unique_together,
            'index_together': index_together,
            'indexes': indexes,
            'constraints': constraints,
            'apps': apps,
        }
        meta = type("Meta", (), meta_contents)
        body_copy['Meta'] = meta
        body_copy['__module__'] = model.__module__
        type(model._meta.object_name, model.__bases__, body_copy)

        # Construct a model with a renamed table name.
        body_copy = copy.deepcopy(body)
        meta_contents = {
            'app_label': model._meta.app_label,
            'db_table': 'new__%s' % strip_quotes(model._meta.db_table),
            'unique_together': unique_together,
            'index_together': index_together,
            'indexes': indexes,
            'constraints': constraints,
            'apps': apps,
        }
        meta = type("Meta", (), meta_contents)
        body_copy['Meta'] = meta
        body_copy['__module__'] = model.__module__
        new_model = type('New%s' % model._meta.object_name, model.__bases__, body_copy)

        # Create a new table with the updated schema.
        self.create_model(new_model)

        # Copy data from the old table into the new table
        self.execute("INSERT INTO %s (%s) SELECT %s FROM %s" % (
            self.quote_name(new_model._meta.db_table),
            ', '.join(self.quote_name(x) for x in mapping),
            ', '.join(mapping.values()),
            self.quote_name(model._meta.db_table),
        ))

        # Delete the old table to make way for the new
        self.delete_model(model, handle_autom2m=False)

        # Rename the new table to take the place of the old one
        self.alter_db_table(
            new_model, new_model._meta.db_table, model._meta.db_table,
            disable_constraints=False,
        )

        # Run deferred SQL on the correct table
        for sql in self.deferred_sql:
            self.execute(sql)
        self.deferred_sql = []
        # Fix any PK-removed field
        if restore_pk_field:
            restore_pk_field.primary_key = True

    def delete_model(self, model, handle_autom2m=True):
        if handle_autom2m:
            super().delete_model(model)
        else:
            # Delete the table (and only that)
            self.execute(self.sql_delete_table % {
                "table": self.quote_name(model._meta.db_table),
            })
            # Remove all deferred statements referencing the deleted table.
            for sql in list(self.deferred_sql):
                if isinstance(sql, Statement) and sql.references_table(model._meta.db_table):
                    self.deferred_sql.remove(sql)

    def add_field(self, model, field):
        """
        Create a field on a model. Usually involves adding a column, but may
        involve adding a table instead (for M2M fields).
        """
        # Special-case implicit M2M tables
        if field.many_to_many and field.remote_field.through._meta.auto_created:
            return self.create_model(field.remote_field.through)
        self._remake_table(model, create_field=field)

    def remove_field(self, model, field):
        """
        Remove a field from a model. Usually involves deleting a column, but
        for M2Ms may involve deleting a table.
        """
        # M2M fields are a special case
        if field.many_to_many:
            # For implicit M2M tables, delete the auto-created table
            if field.remote_field.through._meta.auto_created:
                self.delete_model(field.remote_field.through)
            # For explicit "through" M2M fields, do nothing
        # For everything else, remake.
        else:
            # It might not actually have a column behind it
            if field.db_parameters(connection=self.connection)['type'] is None:
                return
            self._remake_table(model, delete_field=field)

    def _alter_field(self, model, old_field, new_field, old_type, new_type,
                     old_db_params, new_db_params, strict=False):
        """Perform a "physical" (non-ManyToMany) field update."""
        # Use "ALTER TABLE ... RENAME COLUMN" if only the column name
        # changed and there aren't any constraints.
if (self.connection.features.can_alter_table_rename_column and old_field.column != new_field.column and self.column_sql(model, old_field) == self.column_sql(model, new_field) and not (old_field.remote_field and old_field.db_constraint or new_field.remote_field and new_field.db_constraint)): return self.execute(self._rename_field_sql(model._meta.db_table, old_field, new_field, new_type)) # Alter by remaking table self._remake_table(model, alter_field=(old_field, new_field)) # Rebuild tables with FKs pointing to this field. if new_field.unique and old_type != new_type: related_models = set() opts = new_field.model._meta for remote_field in opts.related_objects: # Ignore self-relationship since the table was already rebuilt. if remote_field.related_model == model: continue if not remote_field.many_to_many: if remote_field.field_name == new_field.name: related_models.add(remote_field.related_model) elif new_field.primary_key and remote_field.through._meta.auto_created: related_models.add(remote_field.through) if new_field.primary_key: for many_to_many in opts.many_to_many: # Ignore self-relationship since the table was already rebuilt. if many_to_many.related_model == model: continue if many_to_many.remote_field.through._meta.auto_created: related_models.add(many_to_many.remote_field.through) for related_model in related_models: self._remake_table(related_model) def _alter_many_to_many(self, model, old_field, new_field, strict): """Alter M2Ms to repoint their to= endpoints.""" if old_field.remote_field.through._meta.db_table == new_field.remote_field.through._meta.db_table: # The field name didn't change, but some options did; we have to propagate this altering. self._remake_table( old_field.remote_field.through, alter_field=( # We need the field that points to the target model, so we can tell alter_field to change it - # this is m2m_reverse_field_name() (as opposed to m2m_field_name, which points to our model) old_field.remote_field.through._meta.get_field(old_field.m2m_reverse_field_name()), new_field.remote_field.through._meta.get_field(new_field.m2m_reverse_field_name()), ), ) return # Make a new through table self.create_model(new_field.remote_field.through) # Copy the data across self.execute("INSERT INTO %s (%s) SELECT %s FROM %s" % ( self.quote_name(new_field.remote_field.through._meta.db_table), ', '.join([ "id", new_field.m2m_column_name(), new_field.m2m_reverse_name(), ]), ', '.join([ "id", old_field.m2m_column_name(), old_field.m2m_reverse_name(), ]), self.quote_name(old_field.remote_field.through._meta.db_table), )) # Delete the old through table self.delete_model(old_field.remote_field.through) def add_constraint(self, model, constraint): if isinstance(constraint, UniqueConstraint) and ( constraint.condition or constraint.contains_expressions or constraint.include or constraint.deferrable ): super().add_constraint(model, constraint) else: self._remake_table(model) def remove_constraint(self, model, constraint): if isinstance(constraint, UniqueConstraint) and ( constraint.condition or constraint.contains_expressions or constraint.include or constraint.deferrable ): super().remove_constraint(model, constraint) else: self._remake_table(model) def _collate_sql(self, collation): return 'COLLATE ' + collation
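
# Illustrative only (hypothetical values, with `editor` assumed to be a
# DatabaseSchemaEditor instance): quote_value() emulates SQLite literal
# quoting because DDL statements can't use query parameters.
#
#     >>> editor.quote_value("it's")
#     "'it''s'"
#     >>> editor.quote_value(True)
#     '1'
#     >>> editor.quote_value(b'\x00\xff')
#     "X'00ff'"
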
""" HTTP server that implements the Python WSGI protocol (PEP 333, rev 1.21). Based on wsgiref.simple_server which is part of the standard library since 2.5. This is a simple server for use in testing or debugging Django apps. It hasn't been reviewed for security issues. DON'T USE IT FOR PRODUCTION USE! """ import logging import socket import socketserver import sys from wsgiref import simple_server from django.core.exceptions import ImproperlyConfigured from django.core.handlers.wsgi import LimitedStream from django.core.wsgi import get_wsgi_application from django.db import connections from django.utils.module_loading import import_string __all__ = ('WSGIServer', 'WSGIRequestHandler') logger = logging.getLogger('django.server') def get_internal_wsgi_application(): """ Load and return the WSGI application as configured by the user in ``settings.WSGI_APPLICATION``. With the default ``startproject`` layout, this will be the ``application`` object in ``projectname/wsgi.py``. This function, and the ``WSGI_APPLICATION`` setting itself, are only useful for Django's internal server (runserver); external WSGI servers should just be configured to point to the correct application object directly. If settings.WSGI_APPLICATION is not set (is ``None``), return whatever ``django.core.wsgi.get_wsgi_application`` returns. """ from django.conf import settings app_path = getattr(settings, 'WSGI_APPLICATION') if app_path is None: return get_wsgi_application() try: return import_string(app_path) except ImportError as err: raise ImproperlyConfigured( "WSGI application '%s' could not be loaded; " "Error importing module." % app_path ) from err def is_broken_pipe_error(): exc_type, _, _ = sys.exc_info() return issubclass(exc_type, ( BrokenPipeError, ConnectionAbortedError, ConnectionResetError, )) class WSGIServer(simple_server.WSGIServer): """BaseHTTPServer that implements the Python WSGI protocol""" request_queue_size = 10 def __init__(self, *args, ipv6=False, allow_reuse_address=True, **kwargs): if ipv6: self.address_family = socket.AF_INET6 self.allow_reuse_address = allow_reuse_address super().__init__(*args, **kwargs) def handle_error(self, request, client_address): if is_broken_pipe_error(): logger.info("- Broken pipe from %s\n", client_address) else: super().handle_error(request, client_address) class ThreadedWSGIServer(socketserver.ThreadingMixIn, WSGIServer): """A threaded version of the WSGIServer""" daemon_threads = True def __init__(self, *args, connections_override=None, **kwargs): super().__init__(*args, **kwargs) self.connections_override = connections_override # socketserver.ThreadingMixIn.process_request() passes this method as # the target to a new Thread object. def process_request_thread(self, request, client_address): if self.connections_override: # Override this thread's database connections with the ones # provided by the parent thread. for alias, conn in self.connections_override.items(): connections[alias] = conn super().process_request_thread(request, client_address) def _close_connections(self): # Used for mocking in tests. connections.close_all() def close_request(self, request): self._close_connections() super().close_request(request) class ServerHandler(simple_server.ServerHandler): http_version = '1.1' def __init__(self, stdin, stdout, stderr, environ, **kwargs): """ Use a LimitedStream so that unread request data will be ignored at the end of the request. WSGIRequest uses a LimitedStream but it shouldn't discard the data since the upstream servers usually do this. 
This fix applies only for testserver/runserver. """ try: content_length = int(environ.get('CONTENT_LENGTH')) except (ValueError, TypeError): content_length = 0 super().__init__(LimitedStream(stdin, content_length), stdout, stderr, environ, **kwargs) def cleanup_headers(self): super().cleanup_headers() # HTTP/1.1 requires support for persistent connections. Send 'close' if # the content length is unknown to prevent clients from reusing the # connection. if 'Content-Length' not in self.headers: self.headers['Connection'] = 'close' # Persistent connections require threading server. elif not isinstance(self.request_handler.server, socketserver.ThreadingMixIn): self.headers['Connection'] = 'close' # Mark the connection for closing if it's set as such above or if the # application sent the header. if self.headers.get('Connection') == 'close': self.request_handler.close_connection = True def close(self): self.get_stdin()._read_limited() super().close() class WSGIRequestHandler(simple_server.WSGIRequestHandler): protocol_version = 'HTTP/1.1' def address_string(self): # Short-circuit parent method to not call socket.getfqdn return self.client_address[0] def log_message(self, format, *args): extra = { 'request': self.request, 'server_time': self.log_date_time_string(), } if args[1][0] == '4': # 0x16 = Handshake, 0x03 = SSL 3.0 or TLS 1.x if args[0].startswith('\x16\x03'): extra['status_code'] = 500 logger.error( "You're accessing the development server over HTTPS, but " "it only supports HTTP.\n", extra=extra, ) return if args[1].isdigit() and len(args[1]) == 3: status_code = int(args[1]) extra['status_code'] = status_code if status_code >= 500: level = logger.error elif status_code >= 400: level = logger.warning else: level = logger.info else: level = logger.info level(format, *args, extra=extra) def get_environ(self): # Strip all headers with underscores in the name before constructing # the WSGI environ. This prevents header-spoofing based on ambiguity # between underscores and dashes both normalized to underscores in WSGI # env vars. Nginx and Apache 2.4+ both do this as well. for k in self.headers: if '_' in k: del self.headers[k] return super().get_environ() def handle(self): self.close_connection = True self.handle_one_request() while not self.close_connection: self.handle_one_request() try: self.connection.shutdown(socket.SHUT_WR) except (AttributeError, OSError): pass def handle_one_request(self): """Copy of WSGIRequestHandler.handle() but with different ServerHandler""" self.raw_requestline = self.rfile.readline(65537) if len(self.raw_requestline) > 65536: self.requestline = '' self.request_version = '' self.command = '' self.send_error(414) return if not self.parse_request(): # An error code has been sent, just exit return handler = ServerHandler( self.rfile, self.wfile, self.get_stderr(), self.get_environ() ) handler.request_handler = self # backpointer for logging & connection closing handler.run(self.server.get_app()) def run(addr, port, wsgi_handler, ipv6=False, threading=False, server_cls=WSGIServer): server_address = (addr, port) if threading: httpd_cls = type('WSGIServer', (socketserver.ThreadingMixIn, server_cls), {}) else: httpd_cls = server_cls httpd = httpd_cls(server_address, WSGIRequestHandler, ipv6=ipv6) if threading: # ThreadingMixIn.daemon_threads indicates how threads will behave on an # abrupt shutdown; like quitting the server by the user or restarting # by the auto-reloader. True means the server will not wait for thread # termination before it quits. 
This will make the auto-reloader faster
        # and will prevent the need to kill the server manually if a thread
        # isn't terminating correctly.
        httpd.daemon_threads = True
    httpd.set_app(wsgi_handler)
    httpd.serve_forever()
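
# Hedged usage sketch (host and port are hypothetical, and a configured
# DJANGO_SETTINGS_MODULE is assumed): this is roughly what `manage.py
# runserver` does after parsing its arguments.
#
#     from django.core.servers.basehttp import get_internal_wsgi_application, run
#     run('127.0.0.1', 8000, get_internal_wsgi_application(), threading=True)
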
import os
import pathlib

from django.core.exceptions import SuspiciousFileOperation


def validate_file_name(name, allow_relative_path=False):
    # Remove potentially dangerous names
    if os.path.basename(name) in {'', '.', '..'}:
        raise SuspiciousFileOperation("Could not derive file name from '%s'" % name)

    if allow_relative_path:
        # Use PurePosixPath() because this branch is checked only in
        # FileField.generate_filename() where all file paths are expected to
        # be Unix style (with forward slashes).
        path = pathlib.PurePosixPath(name)
        if path.is_absolute() or '..' in path.parts:
            raise SuspiciousFileOperation(
                "Detected path traversal attempt in '%s'" % name
            )
    elif name != os.path.basename(name):
        raise SuspiciousFileOperation("File name '%s' includes path elements" % name)

    return name


class FileProxyMixin:
    """
    A mixin class used to forward file methods to an underlying file object.

    The internal file object has to be called "file"::

        class FileProxy(FileProxyMixin):
            def __init__(self, file):
                self.file = file
    """

    encoding = property(lambda self: self.file.encoding)
    fileno = property(lambda self: self.file.fileno)
    flush = property(lambda self: self.file.flush)
    isatty = property(lambda self: self.file.isatty)
    newlines = property(lambda self: self.file.newlines)
    read = property(lambda self: self.file.read)
    readinto = property(lambda self: self.file.readinto)
    readline = property(lambda self: self.file.readline)
    readlines = property(lambda self: self.file.readlines)
    seek = property(lambda self: self.file.seek)
    tell = property(lambda self: self.file.tell)
    truncate = property(lambda self: self.file.truncate)
    write = property(lambda self: self.file.write)
    writelines = property(lambda self: self.file.writelines)

    @property
    def closed(self):
        return not self.file or self.file.closed

    def readable(self):
        if self.closed:
            return False
        if hasattr(self.file, 'readable'):
            return self.file.readable()
        return True

    def writable(self):
        if self.closed:
            return False
        if hasattr(self.file, 'writable'):
            return self.file.writable()
        return 'w' in getattr(self.file, 'mode', '')

    def seekable(self):
        if self.closed:
            return False
        if hasattr(self.file, 'seekable'):
            return self.file.seekable()
        return True

    def __iter__(self):
        return iter(self.file)
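
# Illustrative only (hypothetical file names): expected validate_file_name()
# behavior for the cases handled above.
#
#     >>> validate_file_name('report.txt')
#     'report.txt'
#     >>> validate_file_name('docs/report.txt', allow_relative_path=True)
#     'docs/report.txt'
#     >>> validate_file_name('docs/report.txt')   # raises SuspiciousFileOperation
#     >>> validate_file_name('..')                # raises SuspiciousFileOperation
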
""" Classes representing uploaded files. """ import os from io import BytesIO from django.conf import settings from django.core.files import temp as tempfile from django.core.files.base import File from django.core.files.utils import validate_file_name __all__ = ('UploadedFile', 'TemporaryUploadedFile', 'InMemoryUploadedFile', 'SimpleUploadedFile') class UploadedFile(File): """ An abstract uploaded file (``TemporaryUploadedFile`` and ``InMemoryUploadedFile`` are the built-in concrete subclasses). An ``UploadedFile`` object behaves somewhat like a file object and represents some file data that the user submitted with a form. """ def __init__(self, file=None, name=None, content_type=None, size=None, charset=None, content_type_extra=None): super().__init__(file, name) self.size = size self.content_type = content_type self.charset = charset self.content_type_extra = content_type_extra def __repr__(self): return "<%s: %s (%s)>" % (self.__class__.__name__, self.name, self.content_type) def _get_name(self): return self._name def _set_name(self, name): # Sanitize the file name so that it can't be dangerous. if name is not None: # Just use the basename of the file -- anything else is dangerous. name = os.path.basename(name) # File names longer than 255 characters can cause problems on older OSes. if len(name) > 255: name, ext = os.path.splitext(name) ext = ext[:255] name = name[:255 - len(ext)] + ext name = validate_file_name(name) self._name = name name = property(_get_name, _set_name) class TemporaryUploadedFile(UploadedFile): """ A file uploaded to a temporary location (i.e. stream-to-disk). """ def __init__(self, name, content_type, size, charset, content_type_extra=None): _, ext = os.path.splitext(name) file = tempfile.NamedTemporaryFile(suffix='.upload' + ext, dir=settings.FILE_UPLOAD_TEMP_DIR) super().__init__(file, name, content_type, size, charset, content_type_extra) def temporary_file_path(self): """Return the full path of this file.""" return self.file.name def close(self): try: return self.file.close() except FileNotFoundError: # The file was moved or deleted before the tempfile could unlink # it. Still sets self.file.close_called and calls # self.file.file.close() before the exception. pass class InMemoryUploadedFile(UploadedFile): """ A file uploaded into memory (i.e. stream-to-memory). """ def __init__(self, file, field_name, name, content_type, size, charset, content_type_extra=None): super().__init__(file, name, content_type, size, charset, content_type_extra) self.field_name = field_name def open(self, mode=None): self.file.seek(0) return self def chunks(self, chunk_size=None): self.file.seek(0) yield self.read() def multiple_chunks(self, chunk_size=None): # Since it's in memory, we'll never have multiple chunks. return False class SimpleUploadedFile(InMemoryUploadedFile): """ A simple representation of a file, which just has content, size, and a name. """ def __init__(self, name, content, content_type='text/plain'): content = content or b'' super().__init__(BytesIO(content), None, name, content_type, len(content), None, None) @classmethod def from_dict(cls, file_dict): """ Create a SimpleUploadedFile object from a dictionary with keys: - filename - content-type - content """ return cls(file_dict['filename'], file_dict['content'], file_dict.get('content-type', 'text/plain'))
import os import pathlib from datetime import datetime from urllib.parse import urljoin from django.conf import settings from django.core.exceptions import SuspiciousFileOperation from django.core.files import File, locks from django.core.files.move import file_move_safe from django.core.files.utils import validate_file_name from django.core.signals import setting_changed from django.utils import timezone from django.utils._os import safe_join from django.utils.crypto import get_random_string from django.utils.deconstruct import deconstructible from django.utils.encoding import filepath_to_uri from django.utils.functional import LazyObject, cached_property from django.utils.module_loading import import_string from django.utils.text import get_valid_filename __all__ = ( 'Storage', 'FileSystemStorage', 'DefaultStorage', 'default_storage', 'get_storage_class', ) class Storage: """ A base storage class, providing some default behaviors that all other storage systems can inherit or override, as necessary. """ # The following methods represent a public interface to private methods. # These shouldn't be overridden by subclasses unless absolutely necessary. def open(self, name, mode='rb'): """Retrieve the specified file from storage.""" return self._open(name, mode) def save(self, name, content, max_length=None): """ Save new content to the file specified by name. The content should be a proper File object or any Python file-like object, ready to be read from the beginning. """ # Get the proper name for the file, as it will actually be saved. if name is None: name = content.name if not hasattr(content, 'chunks'): content = File(content, name) name = self.get_available_name(name, max_length=max_length) return self._save(name, content) # These methods are part of the public API, with default implementations. def get_valid_name(self, name): """ Return a filename, based on the provided filename, that's suitable for use in the target storage system. """ return get_valid_filename(name) def get_alternative_name(self, file_root, file_ext): """ Return an alternative filename, by adding an underscore and a random 7 character alphanumeric string (before the file extension, if one exists) to the filename. """ return '%s_%s%s' % (file_root, get_random_string(7), file_ext) def get_available_name(self, name, max_length=None): """ Return a filename that's free on the target storage system and available for new content to be written to. """ dir_name, file_name = os.path.split(name) if '..' in pathlib.PurePath(dir_name).parts: raise SuspiciousFileOperation("Detected path traversal attempt in '%s'" % dir_name) validate_file_name(file_name) file_root, file_ext = os.path.splitext(file_name) # If the filename already exists, generate an alternative filename # until it doesn't exist. # Truncate original name if required, so the new filename does not # exceed the max_length. while self.exists(name) or (max_length and len(name) > max_length): # file_ext includes the dot. name = os.path.join(dir_name, self.get_alternative_name(file_root, file_ext)) if max_length is None: continue # Truncate file_root if max_length exceeded. truncation = len(name) - max_length if truncation > 0: file_root = file_root[:-truncation] # Entire file_root was truncated in attempt to find an available filename. if not file_root: raise SuspiciousFileOperation( 'Storage can not find an available filename for "%s". ' 'Please make sure that the corresponding file field ' 'allows sufficient "max_length".' 
% name ) name = os.path.join(dir_name, self.get_alternative_name(file_root, file_ext)) return name def generate_filename(self, filename): """ Validate the filename by calling get_valid_name() and return a filename to be passed to the save() method. """ # `filename` may include a path as returned by FileField.upload_to. dirname, filename = os.path.split(filename) if '..' in pathlib.PurePath(dirname).parts: raise SuspiciousFileOperation("Detected path traversal attempt in '%s'" % dirname) return os.path.normpath(os.path.join(dirname, self.get_valid_name(filename))) def path(self, name): """ Return a local filesystem path where the file can be retrieved using Python's built-in open() function. Storage systems that can't be accessed using open() should *not* implement this method. """ raise NotImplementedError("This backend doesn't support absolute paths.") # The following methods form the public API for storage systems, but with # no default implementations. Subclasses must implement *all* of these. def delete(self, name): """ Delete the specified file from the storage system. """ raise NotImplementedError('subclasses of Storage must provide a delete() method') def exists(self, name): """ Return True if a file referenced by the given name already exists in the storage system, or False if the name is available for a new file. """ raise NotImplementedError('subclasses of Storage must provide an exists() method') def listdir(self, path): """ List the contents of the specified path. Return a 2-tuple of lists: the first item being directories, the second item being files. """ raise NotImplementedError('subclasses of Storage must provide a listdir() method') def size(self, name): """ Return the total size, in bytes, of the file specified by name. """ raise NotImplementedError('subclasses of Storage must provide a size() method') def url(self, name): """ Return an absolute URL where the file's contents can be accessed directly by a web browser. """ raise NotImplementedError('subclasses of Storage must provide a url() method') def get_accessed_time(self, name): """ Return the last accessed time (as a datetime) of the file specified by name. The datetime will be timezone-aware if USE_TZ=True. """ raise NotImplementedError('subclasses of Storage must provide a get_accessed_time() method') def get_created_time(self, name): """ Return the creation time (as a datetime) of the file specified by name. The datetime will be timezone-aware if USE_TZ=True. """ raise NotImplementedError('subclasses of Storage must provide a get_created_time() method') def get_modified_time(self, name): """ Return the last modified time (as a datetime) of the file specified by name. The datetime will be timezone-aware if USE_TZ=True. """ raise NotImplementedError('subclasses of Storage must provide a get_modified_time() method') @deconstructible class FileSystemStorage(Storage): """ Standard filesystem storage """ # The combination of O_CREAT and O_EXCL makes os.open() raise OSError if # the file already exists before it's opened. 
OS_OPEN_FLAGS = os.O_WRONLY | os.O_CREAT | os.O_EXCL | getattr(os, 'O_BINARY', 0) def __init__(self, location=None, base_url=None, file_permissions_mode=None, directory_permissions_mode=None): self._location = location self._base_url = base_url self._file_permissions_mode = file_permissions_mode self._directory_permissions_mode = directory_permissions_mode setting_changed.connect(self._clear_cached_properties) def _clear_cached_properties(self, setting, **kwargs): """Reset setting based property values.""" if setting == 'MEDIA_ROOT': self.__dict__.pop('base_location', None) self.__dict__.pop('location', None) elif setting == 'MEDIA_URL': self.__dict__.pop('base_url', None) elif setting == 'FILE_UPLOAD_PERMISSIONS': self.__dict__.pop('file_permissions_mode', None) elif setting == 'FILE_UPLOAD_DIRECTORY_PERMISSIONS': self.__dict__.pop('directory_permissions_mode', None) def _value_or_setting(self, value, setting): return setting if value is None else value @cached_property def base_location(self): return self._value_or_setting(self._location, settings.MEDIA_ROOT) @cached_property def location(self): return os.path.abspath(self.base_location) @cached_property def base_url(self): if self._base_url is not None and not self._base_url.endswith('/'): self._base_url += '/' return self._value_or_setting(self._base_url, settings.MEDIA_URL) @cached_property def file_permissions_mode(self): return self._value_or_setting(self._file_permissions_mode, settings.FILE_UPLOAD_PERMISSIONS) @cached_property def directory_permissions_mode(self): return self._value_or_setting(self._directory_permissions_mode, settings.FILE_UPLOAD_DIRECTORY_PERMISSIONS) def _open(self, name, mode='rb'): return File(open(self.path(name), mode)) def _save(self, name, content): full_path = self.path(name) # Create any intermediate directories that do not exist. directory = os.path.dirname(full_path) try: if self.directory_permissions_mode is not None: # Set the umask because os.makedirs() doesn't apply the "mode" # argument to intermediate-level directories. old_umask = os.umask(0o777 & ~self.directory_permissions_mode) try: os.makedirs(directory, self.directory_permissions_mode, exist_ok=True) finally: os.umask(old_umask) else: os.makedirs(directory, exist_ok=True) except FileExistsError: raise FileExistsError('%s exists and is not a directory.' % directory) # There's a potential race condition between get_available_name and # saving the file; it's possible that two threads might return the # same name, at which point all sorts of fun happens. So we need to # try to create the file, but if it already exists we have to go back # to get_available_name() and try again. while True: try: # This file has a file path that we can move. if hasattr(content, 'temporary_file_path'): file_move_safe(content.temporary_file_path(), full_path) # This is a normal uploadedfile that we can stream. else: # The current umask value is masked out by os.open! fd = os.open(full_path, self.OS_OPEN_FLAGS, 0o666) _file = None try: locks.lock(fd, locks.LOCK_EX) for chunk in content.chunks(): if _file is None: mode = 'wb' if isinstance(chunk, bytes) else 'wt' _file = os.fdopen(fd, mode) _file.write(chunk) finally: locks.unlock(fd) if _file is not None: _file.close() else: os.close(fd) except FileExistsError: # A new name is needed if the file exists. name = self.get_available_name(name) full_path = self.path(name) else: # OK, the file save worked. Break out of the loop. 
break if self.file_permissions_mode is not None: os.chmod(full_path, self.file_permissions_mode) # Store filenames with forward slashes, even on Windows. return str(name).replace('\\', '/') def delete(self, name): if not name: raise ValueError('The name must be given to delete().') name = self.path(name) # If the file or directory exists, delete it from the filesystem. try: if os.path.isdir(name): os.rmdir(name) else: os.remove(name) except FileNotFoundError: # FileNotFoundError is raised if the file or directory was removed # concurrently. pass def exists(self, name): return os.path.lexists(self.path(name)) def listdir(self, path): path = self.path(path) directories, files = [], [] with os.scandir(path) as entries: for entry in entries: if entry.is_dir(): directories.append(entry.name) else: files.append(entry.name) return directories, files def path(self, name): return safe_join(self.location, name) def size(self, name): return os.path.getsize(self.path(name)) def url(self, name): if self.base_url is None: raise ValueError("This file is not accessible via a URL.") url = filepath_to_uri(name) if url is not None: url = url.lstrip('/') return urljoin(self.base_url, url) def _datetime_from_timestamp(self, ts): """ If timezone support is enabled, make an aware datetime object in UTC; otherwise make a naive one in the local timezone. """ tz = timezone.utc if settings.USE_TZ else None return datetime.fromtimestamp(ts, tz=tz) def get_accessed_time(self, name): return self._datetime_from_timestamp(os.path.getatime(self.path(name))) def get_created_time(self, name): return self._datetime_from_timestamp(os.path.getctime(self.path(name))) def get_modified_time(self, name): return self._datetime_from_timestamp(os.path.getmtime(self.path(name))) def get_storage_class(import_path=None): return import_string(import_path or settings.DEFAULT_FILE_STORAGE) class DefaultStorage(LazyObject): def _setup(self): self._wrapped = get_storage_class()() default_storage = DefaultStorage()
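# A minimal usage sketch (not part of Django): exercising FileSystemStorage
# end to end with an explicit location and base_url so no project settings
# module is required. The temporary directory, file name, and payload are
# arbitrary examples, and settings.configure() assumes settings have not been
# configured elsewhere.
if __name__ == '__main__':
    import tempfile

    from django.core.files.base import ContentFile

    settings.configure()  # fall back to Django's default file settings
    storage = FileSystemStorage(location=tempfile.mkdtemp(), base_url='/media/')
    name = storage.save('example.txt', ContentFile(b'hello'))
    assert storage.exists(name)
    print(storage.url(name), storage.size(name))  # /media/example.txt 5
    storage.delete(name)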
import pathlib from django.conf import settings from django.core.cache import DEFAULT_CACHE_ALIAS, caches from django.core.cache.backends.filebased import FileBasedCache from . import Error, Tags, Warning, register E001 = Error( "You must define a '%s' cache in your CACHES setting." % DEFAULT_CACHE_ALIAS, id='caches.E001', ) @register(Tags.caches) def check_default_cache_is_configured(app_configs, **kwargs): if DEFAULT_CACHE_ALIAS not in settings.CACHES: return [E001] return [] @register(Tags.caches, deploy=True) def check_cache_location_not_exposed(app_configs, **kwargs): errors = [] for name in ('MEDIA_ROOT', 'STATIC_ROOT', 'STATICFILES_DIRS'): setting = getattr(settings, name, None) if not setting: continue if name == 'STATICFILES_DIRS': paths = set() for staticfiles_dir in setting: if isinstance(staticfiles_dir, (list, tuple)): _, staticfiles_dir = staticfiles_dir paths.add(pathlib.Path(staticfiles_dir).resolve()) else: paths = {pathlib.Path(setting).resolve()} for alias in settings.CACHES: cache = caches[alias] if not isinstance(cache, FileBasedCache): continue cache_path = pathlib.Path(cache._dir).resolve() if any(path == cache_path for path in paths): relation = 'matches' elif any(path in cache_path.parents for path in paths): relation = 'is inside' elif any(cache_path in path.parents for path in paths): relation = 'contains' else: continue errors.append(Warning( f"Your '{alias}' cache configuration might expose your cache " f"or lead to corruption of your data because its LOCATION " f"{relation} {name}.", id='caches.W002', )) return errors @register(Tags.caches) def check_file_based_cache_is_absolute(app_configs, **kwargs): errors = [] for alias, config in settings.CACHES.items(): cache = caches[alias] if not isinstance(cache, FileBasedCache): continue if not pathlib.Path(config['LOCATION']).is_absolute(): errors.append(Warning( f"Your '{alias}' cache LOCATION path is relative. Use an " f"absolute path instead.", id='caches.W003', )) return errors
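# Illustrative sketch (not part of Django): a file-based cache whose LOCATION
# is a relative path is exactly what check_file_based_cache_is_absolute()
# reports as caches.W003. The alias and path are arbitrary examples; note that
# instantiating FileBasedCache creates the directory as a side effect.
if __name__ == '__main__':
    settings.configure(CACHES={
        'default': {
            'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache',
            'LOCATION': 'var/django_cache',  # relative on purpose
        },
    })
    print(check_file_based_cache_is_absolute(None))  # emits caches.W003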
import cgi import mimetypes import os import posixpath import shutil import stat import tempfile from importlib import import_module from urllib.request import urlretrieve import django from django.conf import settings from django.core.management.base import BaseCommand, CommandError from django.core.management.utils import handle_extensions from django.template import Context, Engine from django.utils import archive from django.utils.version import get_docs_version class TemplateCommand(BaseCommand): """ Copy either a Django application layout template or a Django project layout template into the specified directory. :param style: A color style object (see django.core.management.color). :param app_or_project: The string 'app' or 'project'. :param name: The name of the application or project. :param directory: The directory to which the template should be copied. :param options: The additional variables passed to project or app templates """ requires_system_checks = [] # The supported URL schemes url_schemes = ['http', 'https', 'ftp'] # Rewrite the following suffixes when determining the target filename. rewrite_template_suffixes = ( # Allow shipping invalid .py files without byte-compilation. ('.py-tpl', '.py'), ) def add_arguments(self, parser): parser.add_argument('name', help='Name of the application or project.') parser.add_argument('directory', nargs='?', help='Optional destination directory') parser.add_argument('--template', help='The path or URL to load the template from.') parser.add_argument( '--extension', '-e', dest='extensions', action='append', default=['py'], help='The file extension(s) to render (default: "py"). ' 'Separate multiple extensions with commas, or use ' '-e multiple times.' ) parser.add_argument( '--name', '-n', dest='files', action='append', default=[], help='The file name(s) to render. Separate multiple file names ' 'with commas, or use -n multiple times.' ) def handle(self, app_or_project, name, target=None, **options): self.app_or_project = app_or_project self.a_or_an = 'an' if app_or_project == 'app' else 'a' self.paths_to_remove = [] self.verbosity = options['verbosity'] self.validate_name(name) # if some directory is given, make sure it's nicely expanded if target is None: top_dir = os.path.join(os.getcwd(), name) try: os.makedirs(top_dir) except FileExistsError: raise CommandError("'%s' already exists" % top_dir) except OSError as e: raise CommandError(e) else: top_dir = os.path.abspath(os.path.expanduser(target)) if app_or_project == 'app': self.validate_name(os.path.basename(top_dir), 'directory') if not os.path.exists(top_dir): raise CommandError("Destination directory '%s' does not " "exist, please create it first." 
% top_dir) extensions = tuple(handle_extensions(options['extensions'])) extra_files = [] for file in options['files']: extra_files.extend(map(lambda x: x.strip(), file.split(','))) if self.verbosity >= 2: self.stdout.write( 'Rendering %s template files with extensions: %s' % (app_or_project, ', '.join(extensions)) ) self.stdout.write( 'Rendering %s template files with filenames: %s' % (app_or_project, ', '.join(extra_files)) ) base_name = '%s_name' % app_or_project base_subdir = '%s_template' % app_or_project base_directory = '%s_directory' % app_or_project camel_case_name = 'camel_case_%s_name' % app_or_project camel_case_value = ''.join(x for x in name.title() if x != '_') context = Context({ **options, base_name: name, base_directory: top_dir, camel_case_name: camel_case_value, 'docs_version': get_docs_version(), 'django_version': django.__version__, }, autoescape=False) # Setup a stub settings environment for template rendering if not settings.configured: settings.configure() django.setup() template_dir = self.handle_template(options['template'], base_subdir) prefix_length = len(template_dir) + 1 for root, dirs, files in os.walk(template_dir): path_rest = root[prefix_length:] relative_dir = path_rest.replace(base_name, name) if relative_dir: target_dir = os.path.join(top_dir, relative_dir) os.makedirs(target_dir, exist_ok=True) for dirname in dirs[:]: if dirname.startswith('.') or dirname == '__pycache__': dirs.remove(dirname) for filename in files: if filename.endswith(('.pyo', '.pyc', '.py.class')): # Ignore some files as they cause various breakages. continue old_path = os.path.join(root, filename) new_path = os.path.join( top_dir, relative_dir, filename.replace(base_name, name) ) for old_suffix, new_suffix in self.rewrite_template_suffixes: if new_path.endswith(old_suffix): new_path = new_path[:-len(old_suffix)] + new_suffix break # Only rewrite once if os.path.exists(new_path): raise CommandError( "%s already exists. Overlaying %s %s into an existing " "directory won't replace conflicting files." % ( new_path, self.a_or_an, app_or_project, ) ) # Only render the Python files, as we don't want to # accidentally render Django templates files if new_path.endswith(extensions) or filename in extra_files: with open(old_path, encoding='utf-8') as template_file: content = template_file.read() template = Engine().from_string(content) content = template.render(context) with open(new_path, 'w', encoding='utf-8') as new_file: new_file.write(content) else: shutil.copyfile(old_path, new_path) if self.verbosity >= 2: self.stdout.write('Creating %s' % new_path) try: shutil.copymode(old_path, new_path) self.make_writeable(new_path) except OSError: self.stderr.write( "Notice: Couldn't set permission bits on %s. You're " "probably using an uncommon filesystem setup. No " "problem." % new_path, self.style.NOTICE) if self.paths_to_remove: if self.verbosity >= 2: self.stdout.write('Cleaning up temporary files.') for path_to_remove in self.paths_to_remove: if os.path.isfile(path_to_remove): os.remove(path_to_remove) else: shutil.rmtree(path_to_remove) def handle_template(self, template, subdir): """ Determine where the app or project templates are. Use django.__path__[0] as the default because the Django install directory isn't known. 
""" if template is None: return os.path.join(django.__path__[0], 'conf', subdir) else: if template.startswith('file://'): template = template[7:] expanded_template = os.path.expanduser(template) expanded_template = os.path.normpath(expanded_template) if os.path.isdir(expanded_template): return expanded_template if self.is_url(template): # downloads the file and returns the path absolute_path = self.download(template) else: absolute_path = os.path.abspath(expanded_template) if os.path.exists(absolute_path): return self.extract(absolute_path) raise CommandError("couldn't handle %s template %s." % (self.app_or_project, template)) def validate_name(self, name, name_or_dir='name'): if name is None: raise CommandError('you must provide {an} {app} name'.format( an=self.a_or_an, app=self.app_or_project, )) # Check it's a valid directory name. if not name.isidentifier(): raise CommandError( "'{name}' is not a valid {app} {type}. Please make sure the " "{type} is a valid identifier.".format( name=name, app=self.app_or_project, type=name_or_dir, ) ) # Check it cannot be imported. try: import_module(name) except ImportError: pass else: raise CommandError( "'{name}' conflicts with the name of an existing Python " "module and cannot be used as {an} {app} {type}. Please try " "another {type}.".format( name=name, an=self.a_or_an, app=self.app_or_project, type=name_or_dir, ) ) def download(self, url): """ Download the given URL and return the file name. """ def cleanup_url(url): tmp = url.rstrip('/') filename = tmp.split('/')[-1] if url.endswith('/'): display_url = tmp + '/' else: display_url = url return filename, display_url prefix = 'django_%s_template_' % self.app_or_project tempdir = tempfile.mkdtemp(prefix=prefix, suffix='_download') self.paths_to_remove.append(tempdir) filename, display_url = cleanup_url(url) if self.verbosity >= 2: self.stdout.write('Downloading %s' % display_url) try: the_path, info = urlretrieve(url, os.path.join(tempdir, filename)) except OSError as e: raise CommandError("couldn't download URL %s to %s: %s" % (url, filename, e)) used_name = the_path.split('/')[-1] # Trying to get better name from response headers content_disposition = info.get('content-disposition') if content_disposition: _, params = cgi.parse_header(content_disposition) guessed_filename = params.get('filename') or used_name else: guessed_filename = used_name # Falling back to content type guessing ext = self.splitext(guessed_filename)[1] content_type = info.get('content-type') if not ext and content_type: ext = mimetypes.guess_extension(content_type) if ext: guessed_filename += ext # Move the temporary file to a filename that has better # chances of being recognized by the archive utils if used_name != guessed_filename: guessed_path = os.path.join(tempdir, guessed_filename) shutil.move(the_path, guessed_path) return guessed_path # Giving up return the_path def splitext(self, the_path): """ Like os.path.splitext, but takes off .tar, too """ base, ext = posixpath.splitext(the_path) if base.lower().endswith('.tar'): ext = base[-4:] + ext base = base[:-4] return base, ext def extract(self, filename): """ Extract the given file to a temporary directory and return the path of the directory with the extracted content. 
""" prefix = 'django_%s_template_' % self.app_or_project tempdir = tempfile.mkdtemp(prefix=prefix, suffix='_extract') self.paths_to_remove.append(tempdir) if self.verbosity >= 2: self.stdout.write('Extracting %s' % filename) try: archive.extract(filename, tempdir) return tempdir except (archive.ArchiveException, OSError) as e: raise CommandError("couldn't extract file %s to %s: %s" % (filename, tempdir, e)) def is_url(self, template): """Return True if the name looks like a URL.""" if ':' not in template: return False scheme = template.split(':', 1)[0].lower() return scheme in self.url_schemes def make_writeable(self, filename): """ Make sure that the file is writeable. Useful if our source is read-only. """ if not os.access(filename, os.W_OK): st = os.stat(filename) new_permissions = stat.S_IMODE(st.st_mode) | stat.S_IWUSR os.chmod(filename, new_permissions)
""" Sets up the terminal color scheme. """ import functools import os import sys from django.utils import termcolors try: import colorama colorama.init() except (ImportError, OSError): HAS_COLORAMA = False else: HAS_COLORAMA = True def supports_color(): """ Return True if the running system's terminal supports color, and False otherwise. """ def vt_codes_enabled_in_windows_registry(): """ Check the Windows Registry to see if VT code handling has been enabled by default, see https://superuser.com/a/1300251/447564. """ try: # winreg is only available on Windows. import winreg except ImportError: return False else: reg_key = winreg.OpenKey(winreg.HKEY_CURRENT_USER, 'Console') try: reg_key_value, _ = winreg.QueryValueEx(reg_key, 'VirtualTerminalLevel') except FileNotFoundError: return False else: return reg_key_value == 1 # isatty is not always implemented, #6223. is_a_tty = hasattr(sys.stdout, 'isatty') and sys.stdout.isatty() return is_a_tty and ( sys.platform != 'win32' or HAS_COLORAMA or 'ANSICON' in os.environ or # Windows Terminal supports VT codes. 'WT_SESSION' in os.environ or # Microsoft Visual Studio Code's built-in terminal supports colors. os.environ.get('TERM_PROGRAM') == 'vscode' or vt_codes_enabled_in_windows_registry() ) class Style: pass def make_style(config_string=''): """ Create a Style object from the given config_string. If config_string is empty django.utils.termcolors.DEFAULT_PALETTE is used. """ style = Style() color_settings = termcolors.parse_color_setting(config_string) # The nocolor palette has all available roles. # Use that palette as the basis for populating # the palette as defined in the environment. for role in termcolors.PALETTES[termcolors.NOCOLOR_PALETTE]: if color_settings: format = color_settings.get(role, {}) style_func = termcolors.make_style(**format) else: def style_func(x): return x setattr(style, role, style_func) # For backwards compatibility, # set style for ERROR_OUTPUT == ERROR style.ERROR_OUTPUT = style.ERROR return style @functools.lru_cache(maxsize=None) def no_style(): """ Return a Style object with no color scheme. """ return make_style('nocolor') def color_style(force_color=False): """ Return a Style object from the Django color scheme. """ if not force_color and not supports_color(): return no_style() return make_style(os.environ.get('DJANGO_COLORS', ''))
""" Caching framework. This package defines set of cache backends that all conform to a simple API. In a nutshell, a cache is a set of values -- which can be any object that may be pickled -- identified by string keys. For the complete API, see the abstract BaseCache class in django.core.cache.backends.base. Client code should use the `cache` variable defined here to access the default cache backend and look up non-default cache backends in the `caches` dict-like object. See docs/topics/cache.txt for information on the public API. """ from django.core import signals from django.core.cache.backends.base import ( BaseCache, CacheKeyWarning, InvalidCacheBackendError, InvalidCacheKey, ) from django.utils.connection import BaseConnectionHandler, ConnectionProxy from django.utils.module_loading import import_string __all__ = [ 'cache', 'caches', 'DEFAULT_CACHE_ALIAS', 'InvalidCacheBackendError', 'CacheKeyWarning', 'BaseCache', 'InvalidCacheKey', ] DEFAULT_CACHE_ALIAS = 'default' class CacheHandler(BaseConnectionHandler): settings_name = 'CACHES' exception_class = InvalidCacheBackendError def create_connection(self, alias): params = self.settings[alias].copy() backend = params.pop('BACKEND') location = params.pop('LOCATION', '') try: backend_cls = import_string(backend) except ImportError as e: raise InvalidCacheBackendError( "Could not find backend '%s': %s" % (backend, e) ) from e return backend_cls(location, params) def all(self, initialized_only=False): return [ self[alias] for alias in self # If initialized_only is True, return only initialized caches. if not initialized_only or hasattr(self._connections, alias) ] caches = CacheHandler() cache = ConnectionProxy(caches, DEFAULT_CACHE_ALIAS) def close_caches(**kwargs): # Some caches need to do a cleanup at the end of a request cycle. If not # implemented in a particular backend cache.close() is a no-op. for cache in caches.all(initialized_only=True): cache.close() signals.request_finished.connect(close_caches)
import logging
import sys
import tempfile
import traceback

from asgiref.sync import ThreadSensitiveContext, sync_to_async

from django.conf import settings
from django.core import signals
from django.core.exceptions import RequestAborted, RequestDataTooBig
from django.core.handlers import base
from django.http import (
    FileResponse, HttpRequest, HttpResponse, HttpResponseBadRequest,
    HttpResponseServerError, QueryDict, parse_cookie,
)
from django.urls import set_script_prefix
from django.utils.functional import cached_property

logger = logging.getLogger('django.request')


class ASGIRequest(HttpRequest):
    """
    Custom request subclass that decodes from an ASGI-standard request dict
    and wraps request body handling.
    """
    # Number of seconds until a Request gives up on trying to read a request
    # body and aborts.
    body_receive_timeout = 60

    def __init__(self, scope, body_file):
        self.scope = scope
        self._post_parse_error = False
        self._read_started = False
        self.resolver_match = None
        self.script_name = self.scope.get('root_path', '')
        if self.script_name and scope['path'].startswith(self.script_name):
            # TODO: Better is-prefix checking, slash handling?
            self.path_info = scope['path'][len(self.script_name):]
        else:
            self.path_info = scope['path']
        # The Django path is different from the ASGI scope path args; it
        # should combine with the script name.
        if self.script_name:
            self.path = '%s/%s' % (
                self.script_name.rstrip('/'),
                self.path_info.replace('/', '', 1),
            )
        else:
            self.path = scope['path']
        # HTTP basics.
        self.method = self.scope['method'].upper()
        # Ensure query string is encoded correctly.
        query_string = self.scope.get('query_string', '')
        if isinstance(query_string, bytes):
            query_string = query_string.decode()
        self.META = {
            'REQUEST_METHOD': self.method,
            'QUERY_STRING': query_string,
            'SCRIPT_NAME': self.script_name,
            'PATH_INFO': self.path_info,
            # WSGI-expecting code will need these for a while
            'wsgi.multithread': True,
            'wsgi.multiprocess': True,
        }
        if self.scope.get('client'):
            self.META['REMOTE_ADDR'] = self.scope['client'][0]
            self.META['REMOTE_HOST'] = self.META['REMOTE_ADDR']
            self.META['REMOTE_PORT'] = self.scope['client'][1]
        if self.scope.get('server'):
            self.META['SERVER_NAME'] = self.scope['server'][0]
            self.META['SERVER_PORT'] = str(self.scope['server'][1])
        else:
            self.META['SERVER_NAME'] = 'unknown'
            self.META['SERVER_PORT'] = '0'
        # Headers go into META.
        for name, value in self.scope.get('headers', []):
            name = name.decode('latin1')
            if name == 'content-length':
                corrected_name = 'CONTENT_LENGTH'
            elif name == 'content-type':
                corrected_name = 'CONTENT_TYPE'
            else:
                corrected_name = 'HTTP_%s' % name.upper().replace('-', '_')
            # HTTP/2 says only ASCII chars are allowed in headers, but decode
            # latin1 just in case.
            value = value.decode('latin1')
            if corrected_name in self.META:
                value = self.META[corrected_name] + ',' + value
            self.META[corrected_name] = value
        # Pull out request encoding, if provided.
        self._set_content_type_params(self.META)
        # Directly assign the body file to be our stream.
        self._stream = body_file
        # Other bits.
        self.resolver_match = None

    @cached_property
    def GET(self):
        return QueryDict(self.META['QUERY_STRING'])

    def _get_scheme(self):
        return self.scope.get('scheme') or super()._get_scheme()

    def _get_post(self):
        if not hasattr(self, '_post'):
            self._load_post_and_files()
        return self._post

    def _set_post(self, post):
        self._post = post

    def _get_files(self):
        if not hasattr(self, '_files'):
            self._load_post_and_files()
        return self._files

    POST = property(_get_post, _set_post)
    FILES = property(_get_files)

    @cached_property
    def COOKIES(self):
        return parse_cookie(self.META.get('HTTP_COOKIE', ''))


class ASGIHandler(base.BaseHandler):
    """Handler for ASGI requests."""
    request_class = ASGIRequest
    # Size to chunk response bodies into for multiple response messages.
    chunk_size = 2 ** 16

    def __init__(self):
        super().__init__()
        self.load_middleware(is_async=True)

    async def __call__(self, scope, receive, send):
        """
        Async entrypoint - parses the request and hands off to get_response.
        """
        # Serve only HTTP connections.
        # FIXME: Allow overriding this.
        if scope['type'] != 'http':
            raise ValueError(
                'Django can only handle ASGI/HTTP connections, not %s.'
                % scope['type']
            )

        async with ThreadSensitiveContext():
            await self.handle(scope, receive, send)

    async def handle(self, scope, receive, send):
        """
        Handles the ASGI request. Called via the __call__ method.
        """
        # Receive the HTTP request body as a stream object.
        try:
            body_file = await self.read_body(receive)
        except RequestAborted:
            return
        # Request is complete and can be served.
        set_script_prefix(self.get_script_prefix(scope))
        await sync_to_async(signals.request_started.send, thread_sensitive=True)(sender=self.__class__, scope=scope)
        # Get the request and check for basic issues.
        request, error_response = self.create_request(scope, body_file)
        if request is None:
            await self.send_response(error_response, send)
            return
        # Get the response, using the async mode of BaseHandler.
        response = await self.get_response_async(request)
        response._handler_class = self.__class__
        # Increase chunk size on file responses (ASGI servers handle
        # low-level chunking).
        if isinstance(response, FileResponse):
            response.block_size = self.chunk_size
        # Send the response.
        await self.send_response(response, send)

    async def read_body(self, receive):
        """Reads an HTTP body from an ASGI connection."""
        # Use the tempfile that automatically rolls over to a disk file as it
        # fills up.
        body_file = tempfile.SpooledTemporaryFile(max_size=settings.FILE_UPLOAD_MAX_MEMORY_SIZE, mode='w+b')
        while True:
            message = await receive()
            if message['type'] == 'http.disconnect':
                # Early client disconnect.
                raise RequestAborted()
            # Add a body chunk from the message, if provided.
            if 'body' in message:
                body_file.write(message['body'])
            # Quit out if that's the end.
            if not message.get('more_body', False):
                break
        body_file.seek(0)
        return body_file

    def create_request(self, scope, body_file):
        """
        Create the Request object and return either (request, None) or
        (None, response) if there is an error response.
        """
        try:
            return self.request_class(scope, body_file), None
        except UnicodeDecodeError:
            logger.warning(
                'Bad Request (UnicodeDecodeError)',
                exc_info=sys.exc_info(),
                extra={'status_code': 400},
            )
            return None, HttpResponseBadRequest()
        except RequestDataTooBig:
            return None, HttpResponse('413 Payload too large', status=413)

    def handle_uncaught_exception(self, request, resolver, exc_info):
        """Last-chance handler for exceptions."""
        # There's no WSGI server to catch the exception further up
        # if this fails, so translate it into a plain text response.
try: return super().handle_uncaught_exception(request, resolver, exc_info) except Exception: return HttpResponseServerError( traceback.format_exc() if settings.DEBUG else 'Internal Server Error', content_type='text/plain', ) async def send_response(self, response, send): """Encode and send a response out over ASGI.""" # Collect cookies into headers. Have to preserve header case as there # are some non-RFC compliant clients that require e.g. Content-Type. response_headers = [] for header, value in response.items(): if isinstance(header, str): header = header.encode('ascii') if isinstance(value, str): value = value.encode('latin1') response_headers.append((bytes(header), bytes(value))) for c in response.cookies.values(): response_headers.append( (b'Set-Cookie', c.output(header='').encode('ascii').strip()) ) # Initial response message. await send({ 'type': 'http.response.start', 'status': response.status_code, 'headers': response_headers, }) # Streaming responses need to be pinned to their iterator. if response.streaming: # Access `__iter__` and not `streaming_content` directly in case # it has been overridden in a subclass. for part in response: for chunk, _ in self.chunk_bytes(part): await send({ 'type': 'http.response.body', 'body': chunk, # Ignore "more" as there may be more parts; instead, # use an empty final closing message with False. 'more_body': True, }) # Final closing message. await send({'type': 'http.response.body'}) # Other responses just need chunking. else: # Yield chunks of response. for chunk, last in self.chunk_bytes(response.content): await send({ 'type': 'http.response.body', 'body': chunk, 'more_body': not last, }) await sync_to_async(response.close, thread_sensitive=True)() @classmethod def chunk_bytes(cls, data): """ Chunks some data up so it can be sent in reasonable size messages. Yields (chunk, last_chunk) tuples. """ position = 0 if not data: yield data, True return while position < len(data): yield ( data[position:position + cls.chunk_size], (position + cls.chunk_size) >= len(data), ) position += cls.chunk_size def get_script_prefix(self, scope): """ Return the script prefix to use from either the scope or a setting. """ if settings.FORCE_SCRIPT_NAME: return settings.FORCE_SCRIPT_NAME return scope.get('root_path', '') or ''
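# Quick illustration (not part of Django): chunk_bytes() splits a payload into
# (chunk, last_chunk) pairs no larger than ASGIHandler.chunk_size, so the
# final pair flags the closing ASGI message. The payload size is an arbitrary
# example.
if __name__ == '__main__':
    payload = b'x' * (ASGIHandler.chunk_size + 10)
    for chunk, last in ASGIHandler.chunk_bytes(payload):
        print(len(chunk), last)  # 65536 False, then 10 True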
from io import BytesIO from django.conf import settings from django.core import signals from django.core.handlers import base from django.http import HttpRequest, QueryDict, parse_cookie from django.urls import set_script_prefix from django.utils.encoding import repercent_broken_unicode from django.utils.functional import cached_property from django.utils.regex_helper import _lazy_re_compile _slashes_re = _lazy_re_compile(br'/+') class LimitedStream: """Wrap another stream to disallow reading it past a number of bytes.""" def __init__(self, stream, limit, buf_size=64 * 1024 * 1024): self.stream = stream self.remaining = limit self.buffer = b'' self.buf_size = buf_size def _read_limited(self, size=None): if size is None or size > self.remaining: size = self.remaining if size == 0: return b'' result = self.stream.read(size) self.remaining -= len(result) return result def read(self, size=None): if size is None: result = self.buffer + self._read_limited() self.buffer = b'' elif size < len(self.buffer): result = self.buffer[:size] self.buffer = self.buffer[size:] else: # size >= len(self.buffer) result = self.buffer + self._read_limited(size - len(self.buffer)) self.buffer = b'' return result def readline(self, size=None): while b'\n' not in self.buffer and \ (size is None or len(self.buffer) < size): if size: # since size is not None here, len(self.buffer) < size chunk = self._read_limited(size - len(self.buffer)) else: chunk = self._read_limited() if not chunk: break self.buffer += chunk sio = BytesIO(self.buffer) if size: line = sio.readline(size) else: line = sio.readline() self.buffer = sio.read() return line class WSGIRequest(HttpRequest): def __init__(self, environ): script_name = get_script_name(environ) # If PATH_INFO is empty (e.g. accessing the SCRIPT_NAME URL without a # trailing slash), operate as if '/' was requested. path_info = get_path_info(environ) or '/' self.environ = environ self.path_info = path_info # be careful to only replace the first slash in the path because of # http://test/something and http://test//something being different as # stated in https://www.ietf.org/rfc/rfc2396.txt self.path = '%s/%s' % (script_name.rstrip('/'), path_info.replace('/', '', 1)) self.META = environ self.META['PATH_INFO'] = path_info self.META['SCRIPT_NAME'] = script_name self.method = environ['REQUEST_METHOD'].upper() # Set content_type, content_params, and encoding. self._set_content_type_params(environ) try: content_length = int(environ.get('CONTENT_LENGTH')) except (ValueError, TypeError): content_length = 0 self._stream = LimitedStream(self.environ['wsgi.input'], content_length) self._read_started = False self.resolver_match = None def _get_scheme(self): return self.environ.get('wsgi.url_scheme') @cached_property def GET(self): # The WSGI spec says 'QUERY_STRING' may be absent. 
raw_query_string = get_bytes_from_wsgi(self.environ, 'QUERY_STRING', '') return QueryDict(raw_query_string, encoding=self._encoding) def _get_post(self): if not hasattr(self, '_post'): self._load_post_and_files() return self._post def _set_post(self, post): self._post = post @cached_property def COOKIES(self): raw_cookie = get_str_from_wsgi(self.environ, 'HTTP_COOKIE', '') return parse_cookie(raw_cookie) @property def FILES(self): if not hasattr(self, '_files'): self._load_post_and_files() return self._files POST = property(_get_post, _set_post) class WSGIHandler(base.BaseHandler): request_class = WSGIRequest def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.load_middleware() def __call__(self, environ, start_response): set_script_prefix(get_script_name(environ)) signals.request_started.send(sender=self.__class__, environ=environ) request = self.request_class(environ) response = self.get_response(request) response._handler_class = self.__class__ status = '%d %s' % (response.status_code, response.reason_phrase) response_headers = [ *response.items(), *(('Set-Cookie', c.output(header='')) for c in response.cookies.values()), ] start_response(status, response_headers) if getattr(response, 'file_to_stream', None) is not None and environ.get('wsgi.file_wrapper'): # If `wsgi.file_wrapper` is used the WSGI server does not call # .close on the response, but on the file wrapper. Patch it to use # response.close instead which takes care of closing all files. response.file_to_stream.close = response.close response = environ['wsgi.file_wrapper'](response.file_to_stream, response.block_size) return response def get_path_info(environ): """Return the HTTP request's PATH_INFO as a string.""" path_info = get_bytes_from_wsgi(environ, 'PATH_INFO', '/') return repercent_broken_unicode(path_info).decode() def get_script_name(environ): """ Return the equivalent of the HTTP request's SCRIPT_NAME environment variable. If Apache mod_rewrite is used, return what would have been the script name prior to any rewriting (so it's the script name as seen from the client's perspective), unless the FORCE_SCRIPT_NAME setting is set (to anything). """ if settings.FORCE_SCRIPT_NAME is not None: return settings.FORCE_SCRIPT_NAME # If Apache's mod_rewrite had a whack at the URL, Apache set either # SCRIPT_URL or REDIRECT_URL to the full resource URL before applying any # rewrites. Unfortunately not every web server (lighttpd!) passes this # information through all the time, so FORCE_SCRIPT_NAME, above, is still # needed. script_url = get_bytes_from_wsgi(environ, 'SCRIPT_URL', '') or get_bytes_from_wsgi(environ, 'REDIRECT_URL', '') if script_url: if b'//' in script_url: # mod_wsgi squashes multiple successive slashes in PATH_INFO, # do the same with script_url before manipulating paths (#17133). script_url = _slashes_re.sub(b'/', script_url) path_info = get_bytes_from_wsgi(environ, 'PATH_INFO', '') script_name = script_url[:-len(path_info)] if path_info else script_url else: script_name = get_bytes_from_wsgi(environ, 'SCRIPT_NAME', '') return script_name.decode() def get_bytes_from_wsgi(environ, key, default): """ Get a value from the WSGI environ dictionary as bytes. key and default should be strings. """ value = environ.get(key, default) # Non-ASCII values in the WSGI environ are arbitrarily decoded with # ISO-8859-1. This is wrong for Django websites where UTF-8 is the default. # Re-encode to recover the original bytestring. 
    return value.encode('iso-8859-1')


def get_str_from_wsgi(environ, key, default):
    """
    Get a value from the WSGI environ dictionary as str. key and default
    should be str objects.
    """
    value = get_bytes_from_wsgi(environ, key, default)
    return value.decode(errors='replace')
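# Illustrative sketch (not part of Django): LimitedStream stops reading at the
# declared limit even when the wrapped stream holds more data, which is how a
# request body is capped to its CONTENT_LENGTH. The bytes and limit are
# arbitrary examples.
if __name__ == '__main__':
    stream = LimitedStream(BytesIO(b'abcdefghij'), 4)
    print(stream.read())  # b'abcd'
    print(stream.read())  # b'' -- the limit is exhausted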
from django.conf import settings from django.core.exceptions import ImproperlyConfigured from .. import Error, Tags, Warning, register CROSS_ORIGIN_OPENER_POLICY_VALUES = { 'same-origin', 'same-origin-allow-popups', 'unsafe-none', } REFERRER_POLICY_VALUES = { 'no-referrer', 'no-referrer-when-downgrade', 'origin', 'origin-when-cross-origin', 'same-origin', 'strict-origin', 'strict-origin-when-cross-origin', 'unsafe-url', } SECRET_KEY_INSECURE_PREFIX = 'django-insecure-' SECRET_KEY_MIN_LENGTH = 50 SECRET_KEY_MIN_UNIQUE_CHARACTERS = 5 W001 = Warning( "You do not have 'django.middleware.security.SecurityMiddleware' " "in your MIDDLEWARE so the SECURE_HSTS_SECONDS, " "SECURE_CONTENT_TYPE_NOSNIFF, SECURE_REFERRER_POLICY, " "SECURE_CROSS_ORIGIN_OPENER_POLICY, and SECURE_SSL_REDIRECT settings will " "have no effect.", id='security.W001', ) W002 = Warning( "You do not have " "'django.middleware.clickjacking.XFrameOptionsMiddleware' in your " "MIDDLEWARE, so your pages will not be served with an " "'x-frame-options' header. Unless there is a good reason for your " "site to be served in a frame, you should consider enabling this " "header to help prevent clickjacking attacks.", id='security.W002', ) W004 = Warning( "You have not set a value for the SECURE_HSTS_SECONDS setting. " "If your entire site is served only over SSL, you may want to consider " "setting a value and enabling HTTP Strict Transport Security. " "Be sure to read the documentation first; enabling HSTS carelessly " "can cause serious, irreversible problems.", id='security.W004', ) W005 = Warning( "You have not set the SECURE_HSTS_INCLUDE_SUBDOMAINS setting to True. " "Without this, your site is potentially vulnerable to attack " "via an insecure connection to a subdomain. Only set this to True if " "you are certain that all subdomains of your domain should be served " "exclusively via SSL.", id='security.W005', ) W006 = Warning( "Your SECURE_CONTENT_TYPE_NOSNIFF setting is not set to True, " "so your pages will not be served with an " "'X-Content-Type-Options: nosniff' header. " "You should consider enabling this header to prevent the " "browser from identifying content types incorrectly.", id='security.W006', ) W008 = Warning( "Your SECURE_SSL_REDIRECT setting is not set to True. " "Unless your site should be available over both SSL and non-SSL " "connections, you may want to either set this setting True " "or configure a load balancer or reverse-proxy server " "to redirect all connections to HTTPS.", id='security.W008', ) W009 = Warning( "Your SECRET_KEY has less than %(min_length)s characters, less than " "%(min_unique_chars)s unique characters, or it's prefixed with " "'%(insecure_prefix)s' indicating that it was generated automatically by " "Django. Please generate a long and random SECRET_KEY, otherwise many of " "Django's security-critical features will be vulnerable to attack." % { 'min_length': SECRET_KEY_MIN_LENGTH, 'min_unique_chars': SECRET_KEY_MIN_UNIQUE_CHARACTERS, 'insecure_prefix': SECRET_KEY_INSECURE_PREFIX, }, id='security.W009', ) W018 = Warning( "You should not have DEBUG set to True in deployment.", id='security.W018', ) W019 = Warning( "You have " "'django.middleware.clickjacking.XFrameOptionsMiddleware' in your " "MIDDLEWARE, but X_FRAME_OPTIONS is not set to 'DENY'. 
" "Unless there is a good reason for your site to serve other parts of " "itself in a frame, you should change it to 'DENY'.", id='security.W019', ) W020 = Warning( "ALLOWED_HOSTS must not be empty in deployment.", id='security.W020', ) W021 = Warning( "You have not set the SECURE_HSTS_PRELOAD setting to True. Without this, " "your site cannot be submitted to the browser preload list.", id='security.W021', ) W022 = Warning( 'You have not set the SECURE_REFERRER_POLICY setting. Without this, your ' 'site will not send a Referrer-Policy header. You should consider ' 'enabling this header to protect user privacy.', id='security.W022', ) E023 = Error( 'You have set the SECURE_REFERRER_POLICY setting to an invalid value.', hint='Valid values are: {}.'.format(', '.join(sorted(REFERRER_POLICY_VALUES))), id='security.E023', ) E024 = Error( 'You have set the SECURE_CROSS_ORIGIN_OPENER_POLICY setting to an invalid ' 'value.', hint='Valid values are: {}.'.format( ', '.join(sorted(CROSS_ORIGIN_OPENER_POLICY_VALUES)), ), id='security.E024', ) def _security_middleware(): return 'django.middleware.security.SecurityMiddleware' in settings.MIDDLEWARE def _xframe_middleware(): return 'django.middleware.clickjacking.XFrameOptionsMiddleware' in settings.MIDDLEWARE @register(Tags.security, deploy=True) def check_security_middleware(app_configs, **kwargs): passed_check = _security_middleware() return [] if passed_check else [W001] @register(Tags.security, deploy=True) def check_xframe_options_middleware(app_configs, **kwargs): passed_check = _xframe_middleware() return [] if passed_check else [W002] @register(Tags.security, deploy=True) def check_sts(app_configs, **kwargs): passed_check = not _security_middleware() or settings.SECURE_HSTS_SECONDS return [] if passed_check else [W004] @register(Tags.security, deploy=True) def check_sts_include_subdomains(app_configs, **kwargs): passed_check = ( not _security_middleware() or not settings.SECURE_HSTS_SECONDS or settings.SECURE_HSTS_INCLUDE_SUBDOMAINS is True ) return [] if passed_check else [W005] @register(Tags.security, deploy=True) def check_sts_preload(app_configs, **kwargs): passed_check = ( not _security_middleware() or not settings.SECURE_HSTS_SECONDS or settings.SECURE_HSTS_PRELOAD is True ) return [] if passed_check else [W021] @register(Tags.security, deploy=True) def check_content_type_nosniff(app_configs, **kwargs): passed_check = ( not _security_middleware() or settings.SECURE_CONTENT_TYPE_NOSNIFF is True ) return [] if passed_check else [W006] @register(Tags.security, deploy=True) def check_ssl_redirect(app_configs, **kwargs): passed_check = ( not _security_middleware() or settings.SECURE_SSL_REDIRECT is True ) return [] if passed_check else [W008] @register(Tags.security, deploy=True) def check_secret_key(app_configs, **kwargs): try: secret_key = settings.SECRET_KEY except (ImproperlyConfigured, AttributeError): passed_check = False else: passed_check = ( len(set(secret_key)) >= SECRET_KEY_MIN_UNIQUE_CHARACTERS and len(secret_key) >= SECRET_KEY_MIN_LENGTH and not secret_key.startswith(SECRET_KEY_INSECURE_PREFIX) ) return [] if passed_check else [W009] @register(Tags.security, deploy=True) def check_debug(app_configs, **kwargs): passed_check = not settings.DEBUG return [] if passed_check else [W018] @register(Tags.security, deploy=True) def check_xframe_deny(app_configs, **kwargs): passed_check = ( not _xframe_middleware() or settings.X_FRAME_OPTIONS == 'DENY' ) return [] if passed_check else [W019] @register(Tags.security, deploy=True) def 
check_allowed_hosts(app_configs, **kwargs): return [] if settings.ALLOWED_HOSTS else [W020] @register(Tags.security, deploy=True) def check_referrer_policy(app_configs, **kwargs): if _security_middleware(): if settings.SECURE_REFERRER_POLICY is None: return [W022] # Support a comma-separated string or iterable of values to allow fallback. if isinstance(settings.SECURE_REFERRER_POLICY, str): values = {v.strip() for v in settings.SECURE_REFERRER_POLICY.split(',')} else: values = set(settings.SECURE_REFERRER_POLICY) if not values <= REFERRER_POLICY_VALUES: return [E023] return [] @register(Tags.security, deploy=True) def check_cross_origin_opener_policy(app_configs, **kwargs): if ( _security_middleware() and settings.SECURE_CROSS_ORIGIN_OPENER_POLICY is not None and settings.SECURE_CROSS_ORIGIN_OPENER_POLICY not in CROSS_ORIGIN_OPENER_POLICY_VALUES ): return [E024] return []
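# Illustrative sketch (not part of Django): check_referrer_policy() splits a
# comma-separated SECURE_REFERRER_POLICY string and validates every token
# against REFERRER_POLICY_VALUES; an unknown token would produce E023 instead.
# The MIDDLEWARE and policy values below are arbitrary examples.
if __name__ == '__main__':
    settings.configure(
        MIDDLEWARE=['django.middleware.security.SecurityMiddleware'],
        SECURE_REFERRER_POLICY='origin, same-origin',
    )
    print(check_referrer_policy(None))  # [] -- both tokens are valid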
import sys import time from importlib import import_module from django.apps import apps from django.core.management.base import ( BaseCommand, CommandError, no_translations, ) from django.core.management.sql import ( emit_post_migrate_signal, emit_pre_migrate_signal, ) from django.db import DEFAULT_DB_ALIAS, connections, router from django.db.migrations.autodetector import MigrationAutodetector from django.db.migrations.executor import MigrationExecutor from django.db.migrations.loader import AmbiguityError from django.db.migrations.state import ModelState, ProjectState from django.utils.module_loading import module_has_submodule from django.utils.text import Truncator class Command(BaseCommand): help = "Updates database schema. Manages both apps with migrations and those without." requires_system_checks = [] def add_arguments(self, parser): parser.add_argument( '--skip-checks', action='store_true', help='Skip system checks.', ) parser.add_argument( 'app_label', nargs='?', help='App label of an application to synchronize the state.', ) parser.add_argument( 'migration_name', nargs='?', help='Database state will be brought to the state after that ' 'migration. Use the name "zero" to unapply all migrations.', ) parser.add_argument( '--noinput', '--no-input', action='store_false', dest='interactive', help='Tells Django to NOT prompt the user for input of any kind.', ) parser.add_argument( '--database', default=DEFAULT_DB_ALIAS, help='Nominates a database to synchronize. Defaults to the "default" database.', ) parser.add_argument( '--fake', action='store_true', help='Mark migrations as run without actually running them.', ) parser.add_argument( '--fake-initial', action='store_true', help='Detect if tables already exist and fake-apply initial migrations if so. Make sure ' 'that the current database schema matches your initial migration before using this ' 'flag. Django will only check for an existing table name.', ) parser.add_argument( '--plan', action='store_true', help='Shows a list of the migration actions that will be performed.', ) parser.add_argument( '--run-syncdb', action='store_true', help='Creates tables for apps without migrations.', ) parser.add_argument( '--check', action='store_true', dest='check_unapplied', help='Exits with a non-zero status if unapplied migrations exist.', ) @no_translations def handle(self, *args, **options): database = options['database'] if not options['skip_checks']: self.check(databases=[database]) self.verbosity = options['verbosity'] self.interactive = options['interactive'] # Import the 'management' module within each installed app, to register # dispatcher events. for app_config in apps.get_app_configs(): if module_has_submodule(app_config.module, "management"): import_module('.management', app_config.name) # Get the database we're operating from connection = connections[database] # Hook for backends needing any database preparation connection.prepare_database() # Work out which apps have migrations and which do not executor = MigrationExecutor(connection, self.migration_progress_callback) # Raise an error if any migrations are applied before their dependencies. 
executor.loader.check_consistent_history(connection) # Before anything else, see if there's conflicting apps and drop out # hard if there are any conflicts = executor.loader.detect_conflicts() if conflicts: name_str = "; ".join( "%s in %s" % (", ".join(names), app) for app, names in conflicts.items() ) raise CommandError( "Conflicting migrations detected; multiple leaf nodes in the " "migration graph: (%s).\nTo fix them run " "'python manage.py makemigrations --merge'" % name_str ) # If they supplied command line arguments, work out what they mean. run_syncdb = options['run_syncdb'] target_app_labels_only = True if options['app_label']: # Validate app_label. app_label = options['app_label'] try: apps.get_app_config(app_label) except LookupError as err: raise CommandError(str(err)) if run_syncdb: if app_label in executor.loader.migrated_apps: raise CommandError("Can't use run_syncdb with app '%s' as it has migrations." % app_label) elif app_label not in executor.loader.migrated_apps: raise CommandError("App '%s' does not have migrations." % app_label) if options['app_label'] and options['migration_name']: migration_name = options['migration_name'] if migration_name == "zero": targets = [(app_label, None)] else: try: migration = executor.loader.get_migration_by_prefix(app_label, migration_name) except AmbiguityError: raise CommandError( "More than one migration matches '%s' in app '%s'. " "Please be more specific." % (migration_name, app_label) ) except KeyError: raise CommandError("Cannot find a migration matching '%s' from app '%s'." % ( migration_name, app_label)) target = (app_label, migration.name) # Partially applied squashed migrations are not included in the # graph, use the last replacement instead. if ( target not in executor.loader.graph.nodes and target in executor.loader.replacements ): incomplete_migration = executor.loader.replacements[target] target = incomplete_migration.replaces[-1] targets = [target] target_app_labels_only = False elif options['app_label']: targets = [key for key in executor.loader.graph.leaf_nodes() if key[0] == app_label] else: targets = executor.loader.graph.leaf_nodes() plan = executor.migration_plan(targets) exit_dry = plan and options['check_unapplied'] if options['plan']: self.stdout.write('Planned operations:', self.style.MIGRATE_LABEL) if not plan: self.stdout.write(' No planned migration operations.') for migration, backwards in plan: self.stdout.write(str(migration), self.style.MIGRATE_HEADING) for operation in migration.operations: message, is_error = self.describe_operation(operation, backwards) style = self.style.WARNING if is_error else None self.stdout.write(' ' + message, style) if exit_dry: sys.exit(1) return if exit_dry: sys.exit(1) # At this point, ignore run_syncdb if there aren't any apps to sync. 
run_syncdb = options['run_syncdb'] and executor.loader.unmigrated_apps # Print some useful info if self.verbosity >= 1: self.stdout.write(self.style.MIGRATE_HEADING("Operations to perform:")) if run_syncdb: if options['app_label']: self.stdout.write( self.style.MIGRATE_LABEL(" Synchronize unmigrated app: %s" % app_label) ) else: self.stdout.write( self.style.MIGRATE_LABEL(" Synchronize unmigrated apps: ") + (", ".join(sorted(executor.loader.unmigrated_apps))) ) if target_app_labels_only: self.stdout.write( self.style.MIGRATE_LABEL(" Apply all migrations: ") + (", ".join(sorted({a for a, n in targets})) or "(none)") ) else: if targets[0][1] is None: self.stdout.write( self.style.MIGRATE_LABEL(' Unapply all migrations: ') + str(targets[0][0]) ) else: self.stdout.write(self.style.MIGRATE_LABEL( " Target specific migration: ") + "%s, from %s" % (targets[0][1], targets[0][0]) ) pre_migrate_state = executor._create_project_state(with_applied_migrations=True) pre_migrate_apps = pre_migrate_state.apps emit_pre_migrate_signal( self.verbosity, self.interactive, connection.alias, stdout=self.stdout, apps=pre_migrate_apps, plan=plan, ) # Run the syncdb phase. if run_syncdb: if self.verbosity >= 1: self.stdout.write(self.style.MIGRATE_HEADING("Synchronizing apps without migrations:")) if options['app_label']: self.sync_apps(connection, [app_label]) else: self.sync_apps(connection, executor.loader.unmigrated_apps) # Migrate! if self.verbosity >= 1: self.stdout.write(self.style.MIGRATE_HEADING("Running migrations:")) if not plan: if self.verbosity >= 1: self.stdout.write(" No migrations to apply.") # If there's changes that aren't in migrations yet, tell them how to fix it. autodetector = MigrationAutodetector( executor.loader.project_state(), ProjectState.from_apps(apps), ) changes = autodetector.changes(graph=executor.loader.graph) if changes: self.stdout.write(self.style.NOTICE( " Your models in app(s): %s have changes that are not " "yet reflected in a migration, and so won't be " "applied." % ", ".join(repr(app) for app in sorted(changes)) )) self.stdout.write(self.style.NOTICE( " Run 'manage.py makemigrations' to make new " "migrations, and then re-run 'manage.py migrate' to " "apply them." )) fake = False fake_initial = False else: fake = options['fake'] fake_initial = options['fake_initial'] post_migrate_state = executor.migrate( targets, plan=plan, state=pre_migrate_state.clone(), fake=fake, fake_initial=fake_initial, ) # post_migrate signals have access to all models. Ensure that all models # are reloaded in case any are delayed. post_migrate_state.clear_delayed_apps_cache() post_migrate_apps = post_migrate_state.apps # Re-render models of real apps to include relationships now that # we've got a final state. This wouldn't be necessary if real apps # models were rendered with relationships in the first place. with post_migrate_apps.bulk_update(): model_keys = [] for model_state in post_migrate_apps.real_models: model_key = model_state.app_label, model_state.name_lower model_keys.append(model_key) post_migrate_apps.unregister_model(*model_key) post_migrate_apps.render_multiple([ ModelState.from_model(apps.get_model(*model)) for model in model_keys ]) # Send the post_migrate signal, so individual apps can do whatever they need # to do at this point. 
emit_post_migrate_signal( self.verbosity, self.interactive, connection.alias, stdout=self.stdout, apps=post_migrate_apps, plan=plan, ) def migration_progress_callback(self, action, migration=None, fake=False): if self.verbosity >= 1: compute_time = self.verbosity > 1 if action == "apply_start": if compute_time: self.start = time.monotonic() self.stdout.write(" Applying %s..." % migration, ending="") self.stdout.flush() elif action == "apply_success": elapsed = " (%.3fs)" % (time.monotonic() - self.start) if compute_time else "" if fake: self.stdout.write(self.style.SUCCESS(" FAKED" + elapsed)) else: self.stdout.write(self.style.SUCCESS(" OK" + elapsed)) elif action == "unapply_start": if compute_time: self.start = time.monotonic() self.stdout.write(" Unapplying %s..." % migration, ending="") self.stdout.flush() elif action == "unapply_success": elapsed = " (%.3fs)" % (time.monotonic() - self.start) if compute_time else "" if fake: self.stdout.write(self.style.SUCCESS(" FAKED" + elapsed)) else: self.stdout.write(self.style.SUCCESS(" OK" + elapsed)) elif action == "render_start": if compute_time: self.start = time.monotonic() self.stdout.write(" Rendering model states...", ending="") self.stdout.flush() elif action == "render_success": elapsed = " (%.3fs)" % (time.monotonic() - self.start) if compute_time else "" self.stdout.write(self.style.SUCCESS(" DONE" + elapsed)) def sync_apps(self, connection, app_labels): """Run the old syncdb-style operation on a list of app_labels.""" with connection.cursor() as cursor: tables = connection.introspection.table_names(cursor) # Build the manifest of apps and models that are to be synchronized. all_models = [ ( app_config.label, router.get_migratable_models(app_config, connection.alias, include_auto_created=False), ) for app_config in apps.get_app_configs() if app_config.models_module is not None and app_config.label in app_labels ] def model_installed(model): opts = model._meta converter = connection.introspection.identifier_converter return not ( (converter(opts.db_table) in tables) or (opts.auto_created and converter(opts.auto_created._meta.db_table) in tables) ) manifest = { app_name: list(filter(model_installed, model_list)) for app_name, model_list in all_models } # Create the tables for each model if self.verbosity >= 1: self.stdout.write(' Creating tables...') with connection.schema_editor() as editor: for app_name, model_list in manifest.items(): for model in model_list: # Never install unmanaged models, etc. if not model._meta.can_migrate(connection): continue if self.verbosity >= 3: self.stdout.write( ' Processing %s.%s model' % (app_name, model._meta.object_name) ) if self.verbosity >= 1: self.stdout.write(' Creating table %s' % model._meta.db_table) editor.create_model(model) # Deferred SQL is executed when exiting the editor's context. 
        if self.verbosity >= 1:
            self.stdout.write('    Running deferred SQL...')

    @staticmethod
    def describe_operation(operation, backwards):
        """Return a string that describes a migration operation for --plan."""
        prefix = ''
        is_error = False
        if hasattr(operation, 'code'):
            code = operation.reverse_code if backwards else operation.code
            action = (code.__doc__ or '') if code else None
        elif hasattr(operation, 'sql'):
            action = operation.reverse_sql if backwards else operation.sql
        else:
            action = ''
        if backwards:
            prefix = 'Undo '
        if action is not None:
            action = str(action).replace('\n', '')
        elif backwards:
            action = 'IRREVERSIBLE'
            is_error = True
        if action:
            action = ' -> ' + action
        truncated = Truncator(action)
        return prefix + operation.describe() + truncated.chars(40), is_error
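# Quick illustration (not part of Django): how describe_operation() renders
# --plan entries. A RunSQL operation without reverse_sql is reported as
# IRREVERSIBLE when unapplying. The SQL statement is an arbitrary example.
if __name__ == '__main__':
    from django.db.migrations.operations import RunSQL

    operation = RunSQL('UPDATE demo SET flag = 1')  # no reverse_sql given
    print(Command.describe_operation(operation, backwards=False))
    # ('Raw SQL operation -> UPDATE demo SET flag = 1', False)
    print(Command.describe_operation(operation, backwards=True))
    # ('Undo Raw SQL operation -> IRREVERSIBLE', True)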
import errno import os import re import socket import sys from datetime import datetime from django.conf import settings from django.core.management.base import BaseCommand, CommandError from django.core.servers.basehttp import ( WSGIServer, get_internal_wsgi_application, run, ) from django.utils import autoreload from django.utils.regex_helper import _lazy_re_compile naiveip_re = _lazy_re_compile(r"""^(?: (?P<addr> (?P<ipv4>\d{1,3}(?:\.\d{1,3}){3}) | # IPv4 address (?P<ipv6>\[[a-fA-F0-9:]+\]) | # IPv6 address (?P<fqdn>[a-zA-Z0-9-]+(?:\.[a-zA-Z0-9-]+)*) # FQDN ):)?(?P<port>\d+)$""", re.X) class Command(BaseCommand): help = "Starts a lightweight web server for development." # Validation is called explicitly each time the server is reloaded. requires_system_checks = [] stealth_options = ('shutdown_message',) default_addr = '127.0.0.1' default_addr_ipv6 = '::1' default_port = '8000' protocol = 'http' server_cls = WSGIServer def add_arguments(self, parser): parser.add_argument( 'addrport', nargs='?', help='Optional port number, or ipaddr:port' ) parser.add_argument( '--ipv6', '-6', action='store_true', dest='use_ipv6', help='Tells Django to use an IPv6 address.', ) parser.add_argument( '--nothreading', action='store_false', dest='use_threading', help='Tells Django to NOT use threading.', ) parser.add_argument( '--noreload', action='store_false', dest='use_reloader', help='Tells Django to NOT use the auto-reloader.', ) parser.add_argument( '--skip-checks', action='store_true', help='Skip system checks.', ) def execute(self, *args, **options): if options['no_color']: # We rely on the environment because it's currently the only # way to reach WSGIRequestHandler. This seems an acceptable # compromise considering `runserver` runs indefinitely. os.environ["DJANGO_COLORS"] = "nocolor" super().execute(*args, **options) def get_handler(self, *args, **options): """Return the default WSGI handler for the runner.""" return get_internal_wsgi_application() def handle(self, *args, **options): if not settings.DEBUG and not settings.ALLOWED_HOSTS: raise CommandError('You must set settings.ALLOWED_HOSTS if DEBUG is False.') self.use_ipv6 = options['use_ipv6'] if self.use_ipv6 and not socket.has_ipv6: raise CommandError('Your Python does not support IPv6.') self._raw_ipv6 = False if not options['addrport']: self.addr = '' self.port = self.default_port else: m = re.match(naiveip_re, options['addrport']) if m is None: raise CommandError('"%s" is not a valid port number ' 'or address:port pair.' % options['addrport']) self.addr, _ipv4, _ipv6, _fqdn, self.port = m.groups() if not self.port.isdigit(): raise CommandError("%r is not a valid port number." % self.port) if self.addr: if _ipv6: self.addr = self.addr[1:-1] self.use_ipv6 = True self._raw_ipv6 = True elif self.use_ipv6 and not _fqdn: raise CommandError('"%s" is not a valid IPv6 address.' % self.addr) if not self.addr: self.addr = self.default_addr_ipv6 if self.use_ipv6 else self.default_addr self._raw_ipv6 = self.use_ipv6 self.run(**options) def run(self, **options): """Run the server, using the autoreloader if needed.""" use_reloader = options['use_reloader'] if use_reloader: autoreload.run_with_reloader(self.inner_run, **options) else: self.inner_run(None, **options) def inner_run(self, *args, **options): # If an exception was silenced in ManagementUtility.execute in order # to be raised in the child process, raise it now. autoreload.raise_last_exception() threading = options['use_threading'] # 'shutdown_message' is a stealth option. 
shutdown_message = options.get('shutdown_message', '') quit_command = 'CTRL-BREAK' if sys.platform == 'win32' else 'CONTROL-C' if not options['skip_checks']: self.stdout.write('Performing system checks...\n\n') self.check(display_num_errors=True) # Need to check migrations here, so can't use the # requires_migrations_check attribute. self.check_migrations() now = datetime.now().strftime('%B %d, %Y - %X') self.stdout.write(now) self.stdout.write(( "Django version %(version)s, using settings %(settings)r\n" "Starting development server at %(protocol)s://%(addr)s:%(port)s/\n" "Quit the server with %(quit_command)s." ) % { "version": self.get_version(), "settings": settings.SETTINGS_MODULE, "protocol": self.protocol, "addr": '[%s]' % self.addr if self._raw_ipv6 else self.addr, "port": self.port, "quit_command": quit_command, }) try: handler = self.get_handler(*args, **options) run(self.addr, int(self.port), handler, ipv6=self.use_ipv6, threading=threading, server_cls=self.server_cls) except OSError as e: # Use helpful error messages instead of ugly tracebacks. ERRORS = { errno.EACCES: "You don't have permission to access that port.", errno.EADDRINUSE: "That port is already in use.", errno.EADDRNOTAVAIL: "That IP address can't be assigned to.", } try: error_text = ERRORS[e.errno] except KeyError: error_text = e self.stderr.write("Error: %s" % error_text) # Need to use an OS exit because sys.exit doesn't work in a thread os._exit(1) except KeyboardInterrupt: if shutdown_message: self.stdout.write(shutdown_message) sys.exit(0)
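# Usage sketch -- not part of Django. It exercises the naiveip_re pattern
# defined above (assumption: a Django project on the path; importing the
# module does not itself require configured settings) to show which groups an
# addrport argument yields; the sample inputs are illustrative.
from django.core.management.commands.runserver import naiveip_re

for addrport in ('8000', '0.0.0.0:8080', '[::1]:8000', 'localhost:8000'):
    addr, _ipv4, _ipv6, _fqdn, port = naiveip_re.match(addrport).groups()
    print('%-16s -> addr=%r port=%r' % (addrport, addr, port))
# '8000' leaves addr as None, so handle() falls back to the default address;
# a bracketed IPv6 literal matches with its brackets, which handle() strips.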
# --- django/core/management/commands/loaddata.py ---
import functools import glob import gzip import os import sys import warnings import zipfile from itertools import product from django.apps import apps from django.conf import settings from django.core import serializers from django.core.exceptions import ImproperlyConfigured from django.core.management.base import BaseCommand, CommandError from django.core.management.color import no_style from django.core.management.utils import parse_apps_and_model_labels from django.db import ( DEFAULT_DB_ALIAS, DatabaseError, IntegrityError, connections, router, transaction, ) from django.utils.functional import cached_property try: import bz2 has_bz2 = True except ImportError: has_bz2 = False try: import lzma has_lzma = True except ImportError: has_lzma = False READ_STDIN = '-' class Command(BaseCommand): help = 'Installs the named fixture(s) in the database.' missing_args_message = ( "No database fixture specified. Please provide the path of at least " "one fixture in the command line." ) def add_arguments(self, parser): parser.add_argument('args', metavar='fixture', nargs='+', help='Fixture labels.') parser.add_argument( '--database', default=DEFAULT_DB_ALIAS, help='Nominates a specific database to load fixtures into. Defaults to the "default" database.', ) parser.add_argument( '--app', dest='app_label', help='Only look for fixtures in the specified app.', ) parser.add_argument( '--ignorenonexistent', '-i', action='store_true', dest='ignore', help='Ignores entries in the serialized data for fields that do not ' 'currently exist on the model.', ) parser.add_argument( '-e', '--exclude', action='append', default=[], help='An app_label or app_label.ModelName to exclude. Can be used multiple times.', ) parser.add_argument( '--format', help='Format of serialized data when reading from stdin.', ) def handle(self, *fixture_labels, **options): self.ignore = options['ignore'] self.using = options['database'] self.app_label = options['app_label'] self.verbosity = options['verbosity'] self.excluded_models, self.excluded_apps = parse_apps_and_model_labels(options['exclude']) self.format = options['format'] with transaction.atomic(using=self.using): self.loaddata(fixture_labels) # Close the DB connection -- unless we're still in a transaction. This # is required as a workaround for an edge case in MySQL: if the same # connection is used to create tables, load data, and query, the query # can return incorrect results. See Django #7572, MySQL #37735. 
if transaction.get_autocommit(self.using): connections[self.using].close() @cached_property def compression_formats(self): """A dict mapping format names to (open function, mode arg) tuples.""" # Forcing binary mode may be revisited after dropping Python 2 support (see #22399) compression_formats = { None: (open, 'rb'), 'gz': (gzip.GzipFile, 'rb'), 'zip': (SingleZipReader, 'r'), 'stdin': (lambda *args: sys.stdin, None), } if has_bz2: compression_formats['bz2'] = (bz2.BZ2File, 'r') if has_lzma: compression_formats['lzma'] = (lzma.LZMAFile, 'r') compression_formats['xz'] = (lzma.LZMAFile, 'r') return compression_formats def reset_sequences(self, connection, models): """Reset database sequences for the given connection and models.""" sequence_sql = connection.ops.sequence_reset_sql(no_style(), models) if sequence_sql: if self.verbosity >= 2: self.stdout.write('Resetting sequences') with connection.cursor() as cursor: for line in sequence_sql: cursor.execute(line) def loaddata(self, fixture_labels): connection = connections[self.using] # Keep a count of the installed objects and fixtures self.fixture_count = 0 self.loaded_object_count = 0 self.fixture_object_count = 0 self.models = set() self.serialization_formats = serializers.get_public_serializer_formats() # Django's test suite repeatedly tries to load initial_data fixtures # from apps that don't have any fixtures. Because disabling constraint # checks can be expensive on some database (especially MSSQL), bail # out early if no fixtures are found. for fixture_label in fixture_labels: if self.find_fixtures(fixture_label): break else: return self.objs_with_deferred_fields = [] with connection.constraint_checks_disabled(): for fixture_label in fixture_labels: self.load_label(fixture_label) for obj in self.objs_with_deferred_fields: obj.save_deferred_fields(using=self.using) # Since we disabled constraint checks, we must manually check for # any invalid keys that might have been added table_names = [model._meta.db_table for model in self.models] try: connection.check_constraints(table_names=table_names) except Exception as e: e.args = ("Problem installing fixtures: %s" % e,) raise # If we found even one object in a fixture, we need to reset the # database sequences. if self.loaded_object_count > 0: self.reset_sequences(connection, self.models) if self.verbosity >= 1: if self.fixture_object_count == self.loaded_object_count: self.stdout.write( "Installed %d object(s) from %d fixture(s)" % (self.loaded_object_count, self.fixture_count) ) else: self.stdout.write( "Installed %d object(s) (of %d) from %d fixture(s)" % (self.loaded_object_count, self.fixture_object_count, self.fixture_count) ) def save_obj(self, obj): """Save an object if permitted.""" if ( obj.object._meta.app_config in self.excluded_apps or type(obj.object) in self.excluded_models ): return False saved = False if router.allow_migrate_model(self.using, obj.object.__class__): saved = True self.models.add(obj.object.__class__) try: obj.save(using=self.using) # psycopg2 raises ValueError if data contains NUL chars. 
except (DatabaseError, IntegrityError, ValueError) as e: e.args = ('Could not load %(object_label)s(pk=%(pk)s): %(error_msg)s' % { 'object_label': obj.object._meta.label, 'pk': obj.object.pk, 'error_msg': e, },) raise if obj.deferred_fields: self.objs_with_deferred_fields.append(obj) return saved def load_label(self, fixture_label): """Load fixtures files for a given label.""" show_progress = self.verbosity >= 3 for fixture_file, fixture_dir, fixture_name in self.find_fixtures(fixture_label): _, ser_fmt, cmp_fmt = self.parse_name(os.path.basename(fixture_file)) open_method, mode = self.compression_formats[cmp_fmt] fixture = open_method(fixture_file, mode) self.fixture_count += 1 objects_in_fixture = 0 loaded_objects_in_fixture = 0 if self.verbosity >= 2: self.stdout.write( "Installing %s fixture '%s' from %s." % (ser_fmt, fixture_name, humanize(fixture_dir)) ) try: objects = serializers.deserialize( ser_fmt, fixture, using=self.using, ignorenonexistent=self.ignore, handle_forward_references=True, ) for obj in objects: objects_in_fixture += 1 if self.save_obj(obj): loaded_objects_in_fixture += 1 if show_progress: self.stdout.write( '\rProcessed %i object(s).' % loaded_objects_in_fixture, ending='' ) except Exception as e: if not isinstance(e, CommandError): e.args = ("Problem installing fixture '%s': %s" % (fixture_file, e),) raise finally: fixture.close() if objects_in_fixture and show_progress: self.stdout.write() # Add a newline after progress indicator. self.loaded_object_count += loaded_objects_in_fixture self.fixture_object_count += objects_in_fixture # Warn if the fixture we loaded contains 0 objects. if objects_in_fixture == 0: warnings.warn( "No fixture data found for '%s'. (File format may be " "invalid.)" % fixture_name, RuntimeWarning ) def get_fixture_name_and_dirs(self, fixture_name): dirname, basename = os.path.split(fixture_name) if os.path.isabs(fixture_name): fixture_dirs = [dirname] else: fixture_dirs = self.fixture_dirs if os.path.sep in os.path.normpath(fixture_name): fixture_dirs = [os.path.join(dir_, dirname) for dir_ in fixture_dirs] return basename, fixture_dirs def get_targets(self, fixture_name, ser_fmt, cmp_fmt): databases = [self.using, None] cmp_fmts = self.compression_formats if cmp_fmt is None else [cmp_fmt] ser_fmts = self.serialization_formats if ser_fmt is None else [ser_fmt] return { '%s.%s' % ( fixture_name, '.'.join([ext for ext in combo if ext]), ) for combo in product(databases, ser_fmts, cmp_fmts) } def find_fixture_files_in_dir(self, fixture_dir, fixture_name, targets): fixture_files_in_dir = [] path = os.path.join(fixture_dir, fixture_name) for candidate in glob.iglob(glob.escape(path) + '*'): if os.path.basename(candidate) in targets: # Save the fixture_dir and fixture_name for future error # messages. fixture_files_in_dir.append((candidate, fixture_dir, fixture_name)) return fixture_files_in_dir @functools.lru_cache(maxsize=None) def find_fixtures(self, fixture_label): """Find fixture files for a given label.""" if fixture_label == READ_STDIN: return [(READ_STDIN, None, READ_STDIN)] fixture_name, ser_fmt, cmp_fmt = self.parse_name(fixture_label) if self.verbosity >= 2: self.stdout.write("Loading '%s' fixtures..." % fixture_name) fixture_name, fixture_dirs = self.get_fixture_name_and_dirs(fixture_name) targets = self.get_targets(fixture_name, ser_fmt, cmp_fmt) fixture_files = [] for fixture_dir in fixture_dirs: if self.verbosity >= 2: self.stdout.write("Checking %s for fixtures..." 
% humanize(fixture_dir)) fixture_files_in_dir = self.find_fixture_files_in_dir( fixture_dir, fixture_name, targets, ) if self.verbosity >= 2 and not fixture_files_in_dir: self.stdout.write("No fixture '%s' in %s." % (fixture_name, humanize(fixture_dir))) # Check kept for backwards-compatibility; it isn't clear why # duplicates are only allowed in different directories. if len(fixture_files_in_dir) > 1: raise CommandError( "Multiple fixtures named '%s' in %s. Aborting." % (fixture_name, humanize(fixture_dir))) fixture_files.extend(fixture_files_in_dir) if not fixture_files: raise CommandError("No fixture named '%s' found." % fixture_name) return fixture_files @cached_property def fixture_dirs(self): """ Return a list of fixture directories. The list contains the 'fixtures' subdirectory of each installed application, if it exists, the directories in FIXTURE_DIRS, and the current directory. """ dirs = [] fixture_dirs = settings.FIXTURE_DIRS if len(fixture_dirs) != len(set(fixture_dirs)): raise ImproperlyConfigured("settings.FIXTURE_DIRS contains duplicates.") for app_config in apps.get_app_configs(): app_label = app_config.label app_dir = os.path.join(app_config.path, 'fixtures') if app_dir in fixture_dirs: raise ImproperlyConfigured( "'%s' is a default fixture directory for the '%s' app " "and cannot be listed in settings.FIXTURE_DIRS." % (app_dir, app_label) ) if self.app_label and app_label != self.app_label: continue if os.path.isdir(app_dir): dirs.append(app_dir) dirs.extend(fixture_dirs) dirs.append('') return [os.path.realpath(d) for d in dirs] def parse_name(self, fixture_name): """ Split fixture name in name, serialization format, compression format. """ if fixture_name == READ_STDIN: if not self.format: raise CommandError('--format must be specified when reading from stdin.') return READ_STDIN, self.format, 'stdin' parts = fixture_name.rsplit('.', 2) if len(parts) > 1 and parts[-1] in self.compression_formats: cmp_fmt = parts[-1] parts = parts[:-1] else: cmp_fmt = None if len(parts) > 1: if parts[-1] in self.serialization_formats: ser_fmt = parts[-1] parts = parts[:-1] else: raise CommandError( "Problem installing fixture '%s': %s is not a known " "serialization format." % ('.'.join(parts[:-1]), parts[-1])) else: ser_fmt = None name = '.'.join(parts) return name, ser_fmt, cmp_fmt class SingleZipReader(zipfile.ZipFile): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) if len(self.namelist()) != 1: raise ValueError("Zip-compressed fixtures must contain one file.") def read(self): return zipfile.ZipFile.read(self, self.namelist()[0]) def humanize(dirname): return "'%s'" % dirname if dirname else 'absolute path'
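# Fixture-naming sketch -- a simplified, standalone rendition of parse_name()
# above with the format tables hardcoded (assumption: the json/jsonl/xml/yaml
# serializers and the gz/zip/bz2/lzma/xz codecs are available; the real command
# discovers both sets at runtime and raises CommandError instead of ValueError).
SER_FMTS = {'json', 'jsonl', 'xml', 'yaml'}
CMP_FMTS = {'gz', 'zip', 'bz2', 'lzma', 'xz'}

def split_fixture_name(fixture_name):
    parts = fixture_name.rsplit('.', 2)
    cmp_fmt = parts.pop() if len(parts) > 1 and parts[-1] in CMP_FMTS else None
    ser_fmt = None
    if len(parts) > 1:
        if parts[-1] not in SER_FMTS:
            raise ValueError('%s is not a known serialization format.' % parts[-1])
        ser_fmt = parts.pop()
    return '.'.join(parts), ser_fmt, cmp_fmt

assert split_fixture_name('mydata') == ('mydata', None, None)
assert split_fixture_name('mydata.json.gz') == ('mydata', 'json', 'gz')
# Dots elsewhere stay in the name because only the last two parts are examined:
assert split_fixture_name('my.data.json') == ('my.data', 'json', None)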
# --- django/core/management/commands/makemessages.py ---
import glob import os import re import sys from functools import total_ordering from itertools import dropwhile import django from django.conf import settings from django.core.exceptions import ImproperlyConfigured from django.core.files.temp import NamedTemporaryFile from django.core.management.base import BaseCommand, CommandError from django.core.management.utils import ( find_command, handle_extensions, is_ignored_path, popen_wrapper, ) from django.utils.encoding import DEFAULT_LOCALE_ENCODING from django.utils.functional import cached_property from django.utils.jslex import prepare_js_for_gettext from django.utils.regex_helper import _lazy_re_compile from django.utils.text import get_text_list from django.utils.translation import templatize plural_forms_re = _lazy_re_compile(r'^(?P<value>"Plural-Forms.+?\\n")\s*$', re.MULTILINE | re.DOTALL) STATUS_OK = 0 NO_LOCALE_DIR = object() def check_programs(*programs): for program in programs: if find_command(program) is None: raise CommandError( "Can't find %s. Make sure you have GNU gettext tools 0.15 or " "newer installed." % program ) @total_ordering class TranslatableFile: def __init__(self, dirpath, file_name, locale_dir): self.file = file_name self.dirpath = dirpath self.locale_dir = locale_dir def __repr__(self): return "<%s: %s>" % ( self.__class__.__name__, os.sep.join([self.dirpath, self.file]), ) def __eq__(self, other): return self.path == other.path def __lt__(self, other): return self.path < other.path @property def path(self): return os.path.join(self.dirpath, self.file) class BuildFile: """ Represent the state of a translatable file during the build process. """ def __init__(self, command, domain, translatable): self.command = command self.domain = domain self.translatable = translatable @cached_property def is_templatized(self): if self.domain == 'djangojs': return self.command.gettext_version < (0, 18, 3) elif self.domain == 'django': file_ext = os.path.splitext(self.translatable.file)[1] return file_ext != '.py' return False @cached_property def path(self): return self.translatable.path @cached_property def work_path(self): """ Path to a file which is being fed into GNU gettext pipeline. This may be either a translatable or its preprocessed version. """ if not self.is_templatized: return self.path extension = { 'djangojs': 'c', 'django': 'py', }.get(self.domain) filename = '%s.%s' % (self.translatable.file, extension) return os.path.join(self.translatable.dirpath, filename) def preprocess(self): """ Preprocess (if necessary) a translatable file before passing it to xgettext GNU gettext utility. """ if not self.is_templatized: return with open(self.path, encoding='utf-8') as fp: src_data = fp.read() if self.domain == 'djangojs': content = prepare_js_for_gettext(src_data) elif self.domain == 'django': content = templatize(src_data, origin=self.path[2:]) with open(self.work_path, 'w', encoding='utf-8') as fp: fp.write(content) def postprocess_messages(self, msgs): """ Postprocess messages generated by xgettext GNU gettext utility. Transform paths as if these messages were generated from original translatable files rather than from preprocessed versions. 
""" if not self.is_templatized: return msgs # Remove '.py' suffix if os.name == 'nt': # Preserve '.\' prefix on Windows to respect gettext behavior old_path = self.work_path new_path = self.path else: old_path = self.work_path[2:] new_path = self.path[2:] return re.sub( r'^(#: .*)(' + re.escape(old_path) + r')', lambda match: match[0].replace(old_path, new_path), msgs, flags=re.MULTILINE ) def cleanup(self): """ Remove a preprocessed copy of a translatable file (if any). """ if self.is_templatized: # This check is needed for the case of a symlinked file and its # source being processed inside a single group (locale dir); # removing either of those two removes both. if os.path.exists(self.work_path): os.unlink(self.work_path) def normalize_eols(raw_contents): """ Take a block of raw text that will be passed through str.splitlines() to get universal newlines treatment. Return the resulting block of text with normalized `\n` EOL sequences ready to be written to disk using current platform's native EOLs. """ lines_list = raw_contents.splitlines() # Ensure last line has its EOL if lines_list and lines_list[-1]: lines_list.append('') return '\n'.join(lines_list) def write_pot_file(potfile, msgs): """ Write the `potfile` with the `msgs` contents, making sure its format is valid. """ pot_lines = msgs.splitlines() if os.path.exists(potfile): # Strip the header lines = dropwhile(len, pot_lines) else: lines = [] found, header_read = False, False for line in pot_lines: if not found and not header_read: if 'charset=CHARSET' in line: found = True line = line.replace('charset=CHARSET', 'charset=UTF-8') if not line and not found: header_read = True lines.append(line) msgs = '\n'.join(lines) # Force newlines of POT files to '\n' to work around # https://savannah.gnu.org/bugs/index.php?52395 with open(potfile, 'a', encoding='utf-8', newline='\n') as fp: fp.write(msgs) class Command(BaseCommand): help = ( "Runs over the entire source tree of the current directory and " "pulls out all strings marked for translation. It creates (or updates) a message " "file in the conf/locale (in the django tree) or locale (for projects and " "applications) directory.\n\nYou must run this command with one of either the " "--locale, --exclude, or --all options." ) translatable_file_class = TranslatableFile build_file_class = BuildFile requires_system_checks = [] msgmerge_options = ['-q', '--previous'] msguniq_options = ['--to-code=utf-8'] msgattrib_options = ['--no-obsolete'] xgettext_options = ['--from-code=UTF-8', '--add-comments=Translators'] def add_arguments(self, parser): parser.add_argument( '--locale', '-l', default=[], action='append', help='Creates or updates the message files for the given locale(s) (e.g. pt_BR). ' 'Can be used multiple times.', ) parser.add_argument( '--exclude', '-x', default=[], action='append', help='Locales to exclude. Default is none. Can be used multiple times.', ) parser.add_argument( '--domain', '-d', default='django', help='The domain of the message files (default: "django").', ) parser.add_argument( '--all', '-a', action='store_true', help='Updates the message files for all existing locales.', ) parser.add_argument( '--extension', '-e', dest='extensions', action='append', help='The file extension(s) to examine (default: "html,txt,py", or "js" ' 'if the domain is "djangojs"). 
Separate multiple extensions with ' 'commas, or use -e multiple times.', ) parser.add_argument( '--symlinks', '-s', action='store_true', help='Follows symlinks to directories when examining source code ' 'and templates for translation strings.', ) parser.add_argument( '--ignore', '-i', action='append', dest='ignore_patterns', default=[], metavar='PATTERN', help='Ignore files or directories matching this glob-style pattern. ' 'Use multiple times to ignore more.', ) parser.add_argument( '--no-default-ignore', action='store_false', dest='use_default_ignore_patterns', help="Don't ignore the common glob-style patterns 'CVS', '.*', '*~' and '*.pyc'.", ) parser.add_argument( '--no-wrap', action='store_true', help="Don't break long message lines into several lines.", ) parser.add_argument( '--no-location', action='store_true', help="Don't write '#: filename:line' lines.", ) parser.add_argument( '--add-location', choices=('full', 'file', 'never'), const='full', nargs='?', help=( "Controls '#: filename:line' lines. If the option is 'full' " "(the default if not given), the lines include both file name " "and line number. If it's 'file', the line number is omitted. If " "it's 'never', the lines are suppressed (same as --no-location). " "--add-location requires gettext 0.19 or newer." ), ) parser.add_argument( '--no-obsolete', action='store_true', help="Remove obsolete message strings.", ) parser.add_argument( '--keep-pot', action='store_true', help="Keep .pot file after making messages. Useful when debugging.", ) def handle(self, *args, **options): locale = options['locale'] exclude = options['exclude'] self.domain = options['domain'] self.verbosity = options['verbosity'] process_all = options['all'] extensions = options['extensions'] self.symlinks = options['symlinks'] ignore_patterns = options['ignore_patterns'] if options['use_default_ignore_patterns']: ignore_patterns += ['CVS', '.*', '*~', '*.pyc'] self.ignore_patterns = list(set(ignore_patterns)) # Avoid messing with mutable class variables if options['no_wrap']: self.msgmerge_options = self.msgmerge_options[:] + ['--no-wrap'] self.msguniq_options = self.msguniq_options[:] + ['--no-wrap'] self.msgattrib_options = self.msgattrib_options[:] + ['--no-wrap'] self.xgettext_options = self.xgettext_options[:] + ['--no-wrap'] if options['no_location']: self.msgmerge_options = self.msgmerge_options[:] + ['--no-location'] self.msguniq_options = self.msguniq_options[:] + ['--no-location'] self.msgattrib_options = self.msgattrib_options[:] + ['--no-location'] self.xgettext_options = self.xgettext_options[:] + ['--no-location'] if options['add_location']: if self.gettext_version < (0, 19): raise CommandError( "The --add-location option requires gettext 0.19 or later. " "You have %s." 
% '.'.join(str(x) for x in self.gettext_version) ) arg_add_location = "--add-location=%s" % options['add_location'] self.msgmerge_options = self.msgmerge_options[:] + [arg_add_location] self.msguniq_options = self.msguniq_options[:] + [arg_add_location] self.msgattrib_options = self.msgattrib_options[:] + [arg_add_location] self.xgettext_options = self.xgettext_options[:] + [arg_add_location] self.no_obsolete = options['no_obsolete'] self.keep_pot = options['keep_pot'] if self.domain not in ('django', 'djangojs'): raise CommandError("currently makemessages only supports domains " "'django' and 'djangojs'") if self.domain == 'djangojs': exts = extensions or ['js'] else: exts = extensions or ['html', 'txt', 'py'] self.extensions = handle_extensions(exts) if (not locale and not exclude and not process_all) or self.domain is None: raise CommandError( "Type '%s help %s' for usage information." % (os.path.basename(sys.argv[0]), sys.argv[1]) ) if self.verbosity > 1: self.stdout.write( 'examining files with the extensions: %s' % get_text_list(list(self.extensions), 'and') ) self.invoked_for_django = False self.locale_paths = [] self.default_locale_path = None if os.path.isdir(os.path.join('conf', 'locale')): self.locale_paths = [os.path.abspath(os.path.join('conf', 'locale'))] self.default_locale_path = self.locale_paths[0] self.invoked_for_django = True else: if self.settings_available: self.locale_paths.extend(settings.LOCALE_PATHS) # Allow to run makemessages inside an app dir if os.path.isdir('locale'): self.locale_paths.append(os.path.abspath('locale')) if self.locale_paths: self.default_locale_path = self.locale_paths[0] os.makedirs(self.default_locale_path, exist_ok=True) # Build locale list looks_like_locale = re.compile(r'[a-z]{2}') locale_dirs = filter(os.path.isdir, glob.glob('%s/*' % self.default_locale_path)) all_locales = [ lang_code for lang_code in map(os.path.basename, locale_dirs) if looks_like_locale.match(lang_code) ] # Account for excluded locales if process_all: locales = all_locales else: locales = locale or all_locales locales = set(locales).difference(exclude) if locales: check_programs('msguniq', 'msgmerge', 'msgattrib') check_programs('xgettext') try: potfiles = self.build_potfiles() # Build po files for each selected locale for locale in locales: if '-' in locale: self.stdout.write( 'invalid locale %s, did you mean %s?' % ( locale, locale.replace('-', '_'), ), ) continue if self.verbosity > 0: self.stdout.write('processing locale %s' % locale) for potfile in potfiles: self.write_po_file(potfile, locale) finally: if not self.keep_pot: self.remove_potfiles() @cached_property def gettext_version(self): # Gettext tools will output system-encoded bytestrings instead of UTF-8, # when looking up the version. It's especially a problem on Windows. out, err, status = popen_wrapper( ['xgettext', '--version'], stdout_encoding=DEFAULT_LOCALE_ENCODING, ) m = re.search(r'(\d+)\.(\d+)\.?(\d+)?', out) if m: return tuple(int(d) for d in m.groups() if d is not None) else: raise CommandError("Unable to get gettext version. Is it installed?") @cached_property def settings_available(self): try: settings.LOCALE_PATHS except ImproperlyConfigured: if self.verbosity > 1: self.stderr.write("Running without configured settings.") return False return True def build_potfiles(self): """ Build pot files and apply msguniq to them. 
""" file_list = self.find_files(".") self.remove_potfiles() self.process_files(file_list) potfiles = [] for path in self.locale_paths: potfile = os.path.join(path, '%s.pot' % self.domain) if not os.path.exists(potfile): continue args = ['msguniq'] + self.msguniq_options + [potfile] msgs, errors, status = popen_wrapper(args) if errors: if status != STATUS_OK: raise CommandError( "errors happened while running msguniq\n%s" % errors) elif self.verbosity > 0: self.stdout.write(errors) msgs = normalize_eols(msgs) with open(potfile, 'w', encoding='utf-8') as fp: fp.write(msgs) potfiles.append(potfile) return potfiles def remove_potfiles(self): for path in self.locale_paths: pot_path = os.path.join(path, '%s.pot' % self.domain) if os.path.exists(pot_path): os.unlink(pot_path) def find_files(self, root): """ Get all files in the given root. Also check that there is a matching locale dir for each file. """ all_files = [] ignored_roots = [] if self.settings_available: ignored_roots = [os.path.normpath(p) for p in (settings.MEDIA_ROOT, settings.STATIC_ROOT) if p] for dirpath, dirnames, filenames in os.walk(root, topdown=True, followlinks=self.symlinks): for dirname in dirnames[:]: if (is_ignored_path(os.path.normpath(os.path.join(dirpath, dirname)), self.ignore_patterns) or os.path.join(os.path.abspath(dirpath), dirname) in ignored_roots): dirnames.remove(dirname) if self.verbosity > 1: self.stdout.write('ignoring directory %s' % dirname) elif dirname == 'locale': dirnames.remove(dirname) self.locale_paths.insert(0, os.path.join(os.path.abspath(dirpath), dirname)) for filename in filenames: file_path = os.path.normpath(os.path.join(dirpath, filename)) file_ext = os.path.splitext(filename)[1] if file_ext not in self.extensions or is_ignored_path(file_path, self.ignore_patterns): if self.verbosity > 1: self.stdout.write('ignoring file %s in %s' % (filename, dirpath)) else: locale_dir = None for path in self.locale_paths: if os.path.abspath(dirpath).startswith(os.path.dirname(path)): locale_dir = path break locale_dir = locale_dir or self.default_locale_path or NO_LOCALE_DIR all_files.append(self.translatable_file_class(dirpath, filename, locale_dir)) return sorted(all_files) def process_files(self, file_list): """ Group translatable files by locale directory and run pot file build process for each group. """ file_groups = {} for translatable in file_list: file_group = file_groups.setdefault(translatable.locale_dir, []) file_group.append(translatable) for locale_dir, files in file_groups.items(): self.process_locale_dir(locale_dir, files) def process_locale_dir(self, locale_dir, files): """ Extract translatable literals from the specified files, creating or updating the POT file for a given locale directory. Use the xgettext GNU gettext utility. """ build_files = [] for translatable in files: if self.verbosity > 1: self.stdout.write('processing file %s in %s' % ( translatable.file, translatable.dirpath )) if self.domain not in ('djangojs', 'django'): continue build_file = self.build_file_class(self, self.domain, translatable) try: build_file.preprocess() except UnicodeDecodeError as e: self.stdout.write( 'UnicodeDecodeError: skipped file %s in %s (reason: %s)' % ( translatable.file, translatable.dirpath, e, ) ) continue except BaseException: # Cleanup before exit. 
for build_file in build_files: build_file.cleanup() raise build_files.append(build_file) if self.domain == 'djangojs': is_templatized = build_file.is_templatized args = [ 'xgettext', '-d', self.domain, '--language=%s' % ('C' if is_templatized else 'JavaScript',), '--keyword=gettext_noop', '--keyword=gettext_lazy', '--keyword=ngettext_lazy:1,2', '--keyword=pgettext:1c,2', '--keyword=npgettext:1c,2,3', '--output=-', ] elif self.domain == 'django': args = [ 'xgettext', '-d', self.domain, '--language=Python', '--keyword=gettext_noop', '--keyword=gettext_lazy', '--keyword=ngettext_lazy:1,2', '--keyword=pgettext:1c,2', '--keyword=npgettext:1c,2,3', '--keyword=pgettext_lazy:1c,2', '--keyword=npgettext_lazy:1c,2,3', '--output=-', ] else: return input_files = [bf.work_path for bf in build_files] with NamedTemporaryFile(mode='w+') as input_files_list: input_files_list.write('\n'.join(input_files)) input_files_list.flush() args.extend(['--files-from', input_files_list.name]) args.extend(self.xgettext_options) msgs, errors, status = popen_wrapper(args) if errors: if status != STATUS_OK: for build_file in build_files: build_file.cleanup() raise CommandError( 'errors happened while running xgettext on %s\n%s' % ('\n'.join(input_files), errors) ) elif self.verbosity > 0: # Print warnings self.stdout.write(errors) if msgs: if locale_dir is NO_LOCALE_DIR: for build_file in build_files: build_file.cleanup() file_path = os.path.normpath(build_files[0].path) raise CommandError( "Unable to find a locale path to store translations for " "file %s. Make sure the 'locale' directory exists in an " "app or LOCALE_PATHS setting is set." % file_path ) for build_file in build_files: msgs = build_file.postprocess_messages(msgs) potfile = os.path.join(locale_dir, '%s.pot' % self.domain) write_pot_file(potfile, msgs) for build_file in build_files: build_file.cleanup() def write_po_file(self, potfile, locale): """ Create or update the PO file for self.domain and `locale`. Use contents of the existing `potfile`. Use msgmerge and msgattrib GNU gettext utilities. """ basedir = os.path.join(os.path.dirname(potfile), locale, 'LC_MESSAGES') os.makedirs(basedir, exist_ok=True) pofile = os.path.join(basedir, '%s.po' % self.domain) if os.path.exists(pofile): args = ['msgmerge'] + self.msgmerge_options + [pofile, potfile] msgs, errors, status = popen_wrapper(args) if errors: if status != STATUS_OK: raise CommandError( "errors happened while running msgmerge\n%s" % errors) elif self.verbosity > 0: self.stdout.write(errors) else: with open(potfile, encoding='utf-8') as fp: msgs = fp.read() if not self.invoked_for_django: msgs = self.copy_plural_forms(msgs, locale) msgs = normalize_eols(msgs) msgs = msgs.replace( "#. #-#-#-#-# %s.pot (PACKAGE VERSION) #-#-#-#-#\n" % self.domain, "") with open(pofile, 'w', encoding='utf-8') as fp: fp.write(msgs) if self.no_obsolete: args = ['msgattrib'] + self.msgattrib_options + ['-o', pofile, pofile] msgs, errors, status = popen_wrapper(args) if errors: if status != STATUS_OK: raise CommandError( "errors happened while running msgattrib\n%s" % errors) elif self.verbosity > 0: self.stdout.write(errors) def copy_plural_forms(self, msgs, locale): """ Copy plural forms header contents from a Django catalog of locale to the msgs string, inserting it at the right place. msgs should be the contents of a newly created .po file. 
""" django_dir = os.path.normpath(os.path.join(os.path.dirname(django.__file__))) if self.domain == 'djangojs': domains = ('djangojs', 'django') else: domains = ('django',) for domain in domains: django_po = os.path.join(django_dir, 'conf', 'locale', locale, 'LC_MESSAGES', '%s.po' % domain) if os.path.exists(django_po): with open(django_po, encoding='utf-8') as fp: m = plural_forms_re.search(fp.read()) if m: plural_form_line = m['value'] if self.verbosity > 1: self.stdout.write('copying plural forms: %s' % plural_form_line) lines = [] found = False for line in msgs.splitlines(): if not found and (not line or plural_forms_re.search(line)): line = plural_form_line found = True lines.append(line) msgs = '\n'.join(lines) break return msgs
# --- django/core/management/commands/sqlmigrate.py ---
from django.apps import apps from django.core.management.base import BaseCommand, CommandError from django.db import DEFAULT_DB_ALIAS, connections from django.db.migrations.loader import AmbiguityError, MigrationLoader class Command(BaseCommand): help = "Prints the SQL statements for the named migration." output_transaction = True def add_arguments(self, parser): parser.add_argument('app_label', help='App label of the application containing the migration.') parser.add_argument('migration_name', help='Migration name to print the SQL for.') parser.add_argument( '--database', default=DEFAULT_DB_ALIAS, help='Nominates a database to create SQL for. Defaults to the "default" database.', ) parser.add_argument( '--backwards', action='store_true', help='Creates SQL to unapply the migration, rather than to apply it', ) def execute(self, *args, **options): # sqlmigrate doesn't support coloring its output but we need to force # no_color=True so that the BEGIN/COMMIT statements added by # output_transaction don't get colored either. options['no_color'] = True return super().execute(*args, **options) def handle(self, *args, **options): # Get the database we're operating from connection = connections[options['database']] # Load up a loader to get all the migration data, but don't replace # migrations. loader = MigrationLoader(connection, replace_migrations=False) # Resolve command-line arguments into a migration app_label, migration_name = options['app_label'], options['migration_name'] # Validate app_label try: apps.get_app_config(app_label) except LookupError as err: raise CommandError(str(err)) if app_label not in loader.migrated_apps: raise CommandError("App '%s' does not have migrations" % app_label) try: migration = loader.get_migration_by_prefix(app_label, migration_name) except AmbiguityError: raise CommandError("More than one migration matches '%s' in app '%s'. Please be more specific." % ( migration_name, app_label)) except KeyError: raise CommandError("Cannot find a migration matching '%s' from app '%s'. Is it in INSTALLED_APPS?" % ( migration_name, app_label)) target = (app_label, migration.name) # Show begin/end around output for atomic migrations, if the database # supports transactional DDL. self.output_transaction = migration.atomic and connection.features.can_rollback_ddl # Make a plan that represents just the requested migrations and show SQL # for it plan = [(loader.graph.nodes[target], options['backwards'])] sql_statements = loader.collect_sql(plan) if not sql_statements and options['verbosity'] >= 1: self.stderr.write('No operations found.') return '\n'.join(sql_statements)
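# Usage sketch (assumption: an installed app labeled "polls" with a migration
# whose name starts with "0001"; both labels are illustrative). The command can
# be driven programmatically through call_command just as from the CLI:
#
#   $ python manage.py sqlmigrate polls 0001
#   $ python manage.py sqlmigrate polls 0001 --backwards
from django.core.management import call_command

# handle() returns the statements joined by newlines; on a backend with
# transactional DDL, an atomic migration's SQL is wrapped in BEGIN/COMMIT.
sql = call_command('sqlmigrate', 'polls', '0001')
print(sql)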
# --- django/core/management/commands/showmigrations.py ---
import sys from django.apps import apps from django.core.management.base import BaseCommand from django.db import DEFAULT_DB_ALIAS, connections from django.db.migrations.loader import MigrationLoader from django.db.migrations.recorder import MigrationRecorder class Command(BaseCommand): help = "Shows all available migrations for the current project" def add_arguments(self, parser): parser.add_argument( 'app_label', nargs='*', help='App labels of applications to limit the output to.', ) parser.add_argument( '--database', default=DEFAULT_DB_ALIAS, help=( 'Nominates a database to show migrations for. Defaults to the ' '"default" database.' ), ) formats = parser.add_mutually_exclusive_group() formats.add_argument( '--list', '-l', action='store_const', dest='format', const='list', help=( 'Shows a list of all migrations and which are applied. ' 'With a verbosity level of 2 or above, the applied datetimes ' 'will be included.' ), ) formats.add_argument( '--plan', '-p', action='store_const', dest='format', const='plan', help=( 'Shows all migrations in the order they will be applied. ' 'With a verbosity level of 2 or above all direct migration dependencies ' 'and reverse dependencies (run_before) will be included.' ) ) parser.set_defaults(format='list') def handle(self, *args, **options): self.verbosity = options['verbosity'] # Get the database we're operating from db = options['database'] connection = connections[db] if options['format'] == "plan": return self.show_plan(connection, options['app_label']) else: return self.show_list(connection, options['app_label']) def _validate_app_names(self, loader, app_names): has_bad_names = False for app_name in app_names: try: apps.get_app_config(app_name) except LookupError as err: self.stderr.write(str(err)) has_bad_names = True if has_bad_names: sys.exit(2) def show_list(self, connection, app_names=None): """ Show a list of all migrations on the system, or only those of some named apps. """ # Load migrations from disk/DB loader = MigrationLoader(connection, ignore_no_migrations=True) recorder = MigrationRecorder(connection) recorded_migrations = recorder.applied_migrations() graph = loader.graph # If we were passed a list of apps, validate it if app_names: self._validate_app_names(loader, app_names) # Otherwise, show all apps in alphabetic order else: app_names = sorted(loader.migrated_apps) # For each app, print its migrations in order from oldest (roots) to # newest (leaves). for app_name in app_names: self.stdout.write(app_name, self.style.MIGRATE_LABEL) shown = set() for node in graph.leaf_nodes(app_name): for plan_node in graph.forwards_plan(node): if plan_node not in shown and plan_node[0] == app_name: # Give it a nice title if it's a squashed one title = plan_node[1] if graph.nodes[plan_node].replaces: title += " (%s squashed migrations)" % len(graph.nodes[plan_node].replaces) applied_migration = loader.applied_migrations.get(plan_node) # Mark it as applied/unapplied if applied_migration: if plan_node in recorded_migrations: output = ' [X] %s' % title else: title += " Run 'manage.py migrate' to finish recording." 
output = ' [-] %s' % title if self.verbosity >= 2 and hasattr(applied_migration, 'applied'): output += ' (applied at %s)' % applied_migration.applied.strftime('%Y-%m-%d %H:%M:%S') self.stdout.write(output) else: self.stdout.write(" [ ] %s" % title) shown.add(plan_node) # If we didn't print anything, then a small message if not shown: self.stdout.write(" (no migrations)", self.style.ERROR) def show_plan(self, connection, app_names=None): """ Show all known migrations (or only those of the specified app_names) in the order they will be applied. """ # Load migrations from disk/DB loader = MigrationLoader(connection) graph = loader.graph if app_names: self._validate_app_names(loader, app_names) targets = [key for key in graph.leaf_nodes() if key[0] in app_names] else: targets = graph.leaf_nodes() plan = [] seen = set() # Generate the plan for target in targets: for migration in graph.forwards_plan(target): if migration not in seen: node = graph.node_map[migration] plan.append(node) seen.add(migration) # Output def print_deps(node): out = [] for parent in sorted(node.parents): out.append("%s.%s" % parent.key) if out: return " ... (%s)" % ", ".join(out) return "" for node in plan: deps = "" if self.verbosity >= 2: deps = print_deps(node) if node.key in loader.applied_migrations: self.stdout.write("[X] %s.%s%s" % (node.key[0], node.key[1], deps)) else: self.stdout.write("[ ] %s.%s%s" % (node.key[0], node.key[1], deps)) if not plan: self.stdout.write('(no migrations)', self.style.ERROR)
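# Output sketch (hypothetical app label and migration names). In --list mode a
# migration is marked [X] when applied, [ ] when unapplied, and [-] when it was
# applied but not yet recorded (e.g. a squashed migration whose replaced
# migrations ran); --plan prints the global apply order, appending direct
# dependencies at verbosity 2 or higher.
#
#   $ python manage.py showmigrations --list polls
#   polls
#    [X] 0001_initial
#    [ ] 0002_add_question_votes
#
#   $ python manage.py showmigrations --plan -v 2
#   [X] polls.0001_initial
#   [ ] polls.0002_add_question_votes ... (polls.0001_initial)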
# --- django/core/management/commands/shell.py ---
import os import select import sys import traceback from django.core.management import BaseCommand, CommandError from django.utils.datastructures import OrderedSet class Command(BaseCommand): help = ( "Runs a Python interactive interpreter. Tries to use IPython or " "bpython, if one of them is available. Any standard input is executed " "as code." ) requires_system_checks = [] shells = ['ipython', 'bpython', 'python'] def add_arguments(self, parser): parser.add_argument( '--no-startup', action='store_true', help='When using plain Python, ignore the PYTHONSTARTUP environment variable and ~/.pythonrc.py script.', ) parser.add_argument( '-i', '--interface', choices=self.shells, help='Specify an interactive interpreter interface. Available options: "ipython", "bpython", and "python"', ) parser.add_argument( '-c', '--command', help='Instead of opening an interactive shell, run a command as Django and exit.', ) def ipython(self, options): from IPython import start_ipython start_ipython(argv=[]) def bpython(self, options): import bpython bpython.embed() def python(self, options): import code # Set up a dictionary to serve as the environment for the shell. imported_objects = {} # We want to honor both $PYTHONSTARTUP and .pythonrc.py, so follow system # conventions and get $PYTHONSTARTUP first then .pythonrc.py. if not options['no_startup']: for pythonrc in OrderedSet([os.environ.get("PYTHONSTARTUP"), os.path.expanduser('~/.pythonrc.py')]): if not pythonrc: continue if not os.path.isfile(pythonrc): continue with open(pythonrc) as handle: pythonrc_code = handle.read() # Match the behavior of the cpython shell where an error in # PYTHONSTARTUP prints an exception and continues. try: exec(compile(pythonrc_code, pythonrc, 'exec'), imported_objects) except Exception: traceback.print_exc() # By default, this will set up readline to do tab completion and to read and # write history to the .python_history file, but this can be overridden by # $PYTHONSTARTUP or ~/.pythonrc.py. try: sys.__interactivehook__() except Exception: # Match the behavior of the cpython shell where an error in # sys.__interactivehook__ prints a warning and the exception and continues. print("Failed calling sys.__interactivehook__") traceback.print_exc() # Set up tab completion for objects imported by $PYTHONSTARTUP or # ~/.pythonrc.py. try: import readline import rlcompleter readline.set_completer(rlcompleter.Completer(imported_objects).complete) except ImportError: pass # Start the interactive interpreter. code.interact(local=imported_objects) def handle(self, **options): # Execute the command and exit. if options['command']: exec(options['command'], globals()) return # Execute stdin if it has anything to read and exit. # Not supported on Windows due to select.select() limitations. if sys.platform != 'win32' and not sys.stdin.isatty() and select.select([sys.stdin], [], [], 0)[0]: exec(sys.stdin.read(), globals()) return available_shells = [options['interface']] if options['interface'] else self.shells for shell in available_shells: try: return getattr(self, shell)(options) except ImportError: pass raise CommandError("Couldn't import {} interface.".format(shell))
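# Usage sketch for the entry points above (commands are illustrative). Both -c
# and piped stdin execute with globals() as the namespace, after manage.py has
# called django.setup():
#
#   $ python manage.py shell                    # tries ipython, bpython, then python
#   $ python manage.py shell -i python --no-startup
#   $ python manage.py shell -c "from django.conf import settings; print(settings.DEBUG)"
#   $ echo "print(2 + 2)" | python manage.py shell   # stdin path; not supported on Windows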
# --- django/core/cache/backends/db.py ---
"Database cache backend." import base64 import pickle from datetime import datetime from django.conf import settings from django.core.cache.backends.base import DEFAULT_TIMEOUT, BaseCache from django.db import DatabaseError, connections, models, router, transaction from django.utils import timezone class Options: """A class that will quack like a Django model _meta class. This allows cache operations to be controlled by the router """ def __init__(self, table): self.db_table = table self.app_label = 'django_cache' self.model_name = 'cacheentry' self.verbose_name = 'cache entry' self.verbose_name_plural = 'cache entries' self.object_name = 'CacheEntry' self.abstract = False self.managed = True self.proxy = False self.swapped = False class BaseDatabaseCache(BaseCache): def __init__(self, table, params): super().__init__(params) self._table = table class CacheEntry: _meta = Options(table) self.cache_model_class = CacheEntry class DatabaseCache(BaseDatabaseCache): # This class uses cursors provided by the database connection. This means # it reads expiration values as aware or naive datetimes, depending on the # value of USE_TZ and whether the database supports time zones. The ORM's # conversion and adaptation infrastructure is then used to avoid comparing # aware and naive datetimes accidentally. pickle_protocol = pickle.HIGHEST_PROTOCOL def get(self, key, default=None, version=None): return self.get_many([key], version).get(key, default) def get_many(self, keys, version=None): if not keys: return {} key_map = {} for key in keys: self.validate_key(key) key_map[self.make_key(key, version)] = key db = router.db_for_read(self.cache_model_class) connection = connections[db] quote_name = connection.ops.quote_name table = quote_name(self._table) with connection.cursor() as cursor: cursor.execute( 'SELECT %s, %s, %s FROM %s WHERE %s IN (%s)' % ( quote_name('cache_key'), quote_name('value'), quote_name('expires'), table, quote_name('cache_key'), ', '.join(['%s'] * len(key_map)), ), list(key_map), ) rows = cursor.fetchall() result = {} expired_keys = [] expression = models.Expression(output_field=models.DateTimeField()) converters = (connection.ops.get_db_converters(expression) + expression.get_db_converters(connection)) for key, value, expires in rows: for converter in converters: expires = converter(expires, expression, connection) if expires < timezone.now(): expired_keys.append(key) else: value = connection.ops.process_clob(value) value = pickle.loads(base64.b64decode(value.encode())) result[key_map.get(key)] = value self._base_delete_many(expired_keys) return result def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): key = self.make_key(key, version=version) self.validate_key(key) self._base_set('set', key, value, timeout) def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): key = self.make_key(key, version=version) self.validate_key(key) return self._base_set('add', key, value, timeout) def touch(self, key, timeout=DEFAULT_TIMEOUT, version=None): key = self.make_key(key, version=version) self.validate_key(key) return self._base_set('touch', key, None, timeout) def _base_set(self, mode, key, value, timeout=DEFAULT_TIMEOUT): timeout = self.get_backend_timeout(timeout) db = router.db_for_write(self.cache_model_class) connection = connections[db] quote_name = connection.ops.quote_name table = quote_name(self._table) with connection.cursor() as cursor: cursor.execute("SELECT COUNT(*) FROM %s" % table) num = cursor.fetchone()[0] now = timezone.now() now = 
now.replace(microsecond=0) if timeout is None: exp = datetime.max else: tz = timezone.utc if settings.USE_TZ else None exp = datetime.fromtimestamp(timeout, tz=tz) exp = exp.replace(microsecond=0) if num > self._max_entries: self._cull(db, cursor, now, num) pickled = pickle.dumps(value, self.pickle_protocol) # The DB column is expecting a string, so make sure the value is a # string, not bytes. Refs #19274. b64encoded = base64.b64encode(pickled).decode('latin1') try: # Note: typecasting for datetimes is needed by some 3rd party # database backends. All core backends work without typecasting, # so be careful about changes here - test suite will NOT pick # regressions. with transaction.atomic(using=db): cursor.execute( 'SELECT %s, %s FROM %s WHERE %s = %%s' % ( quote_name('cache_key'), quote_name('expires'), table, quote_name('cache_key'), ), [key] ) result = cursor.fetchone() if result: current_expires = result[1] expression = models.Expression(output_field=models.DateTimeField()) for converter in (connection.ops.get_db_converters(expression) + expression.get_db_converters(connection)): current_expires = converter(current_expires, expression, connection) exp = connection.ops.adapt_datetimefield_value(exp) if result and mode == 'touch': cursor.execute( 'UPDATE %s SET %s = %%s WHERE %s = %%s' % ( table, quote_name('expires'), quote_name('cache_key') ), [exp, key] ) elif result and (mode == 'set' or (mode == 'add' and current_expires < now)): cursor.execute( 'UPDATE %s SET %s = %%s, %s = %%s WHERE %s = %%s' % ( table, quote_name('value'), quote_name('expires'), quote_name('cache_key'), ), [b64encoded, exp, key] ) elif mode != 'touch': cursor.execute( 'INSERT INTO %s (%s, %s, %s) VALUES (%%s, %%s, %%s)' % ( table, quote_name('cache_key'), quote_name('value'), quote_name('expires'), ), [key, b64encoded, exp] ) else: return False # touch failed. 
except DatabaseError: # To be threadsafe, updates/inserts are allowed to fail silently return False else: return True def delete(self, key, version=None): self.validate_key(key) return self._base_delete_many([self.make_key(key, version)]) def delete_many(self, keys, version=None): key_list = [] for key in keys: self.validate_key(key) key_list.append(self.make_key(key, version)) self._base_delete_many(key_list) def _base_delete_many(self, keys): if not keys: return False db = router.db_for_write(self.cache_model_class) connection = connections[db] quote_name = connection.ops.quote_name table = quote_name(self._table) with connection.cursor() as cursor: cursor.execute( 'DELETE FROM %s WHERE %s IN (%s)' % ( table, quote_name('cache_key'), ', '.join(['%s'] * len(keys)), ), keys, ) return bool(cursor.rowcount) def has_key(self, key, version=None): key = self.make_key(key, version=version) self.validate_key(key) db = router.db_for_read(self.cache_model_class) connection = connections[db] quote_name = connection.ops.quote_name now = timezone.now().replace(microsecond=0, tzinfo=None) with connection.cursor() as cursor: cursor.execute( 'SELECT %s FROM %s WHERE %s = %%s and expires > %%s' % ( quote_name('cache_key'), quote_name(self._table), quote_name('cache_key'), ), [key, connection.ops.adapt_datetimefield_value(now)] ) return cursor.fetchone() is not None def _cull(self, db, cursor, now, num): if self._cull_frequency == 0: self.clear() else: connection = connections[db] table = connection.ops.quote_name(self._table) cursor.execute("DELETE FROM %s WHERE expires < %%s" % table, [connection.ops.adapt_datetimefield_value(now)]) deleted_count = cursor.rowcount remaining_num = num - deleted_count if remaining_num > self._max_entries: cull_num = remaining_num // self._cull_frequency cursor.execute( connection.ops.cache_key_culling_sql() % table, [cull_num]) last_cache_key = cursor.fetchone() if last_cache_key: cursor.execute( 'DELETE FROM %s WHERE cache_key < %%s' % table, [last_cache_key[0]], ) def clear(self): db = router.db_for_write(self.cache_model_class) connection = connections[db] table = connection.ops.quote_name(self._table) with connection.cursor() as cursor: cursor.execute('DELETE FROM %s' % table)
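# Configuration sketch for settings.py (the table name "my_cache_table" is an
# arbitrary example). The backing table must be created first:
#   $ python manage.py createcachetable
CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.db.DatabaseCache',
        'LOCATION': 'my_cache_table',
    }
}

# With that in place, the standard cache API routes through the SQL above:
#   from django.core.cache import cache
#   cache.set('greeting', 'hello', timeout=30)  # INSERT, or UPDATE on a key hit
#   cache.get('greeting')  # SELECT; expired rows are deleted lazily on read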
# --- django/contrib/sitemaps/__init__.py ---
import warnings from urllib.parse import urlencode from urllib.request import urlopen from django.apps import apps as django_apps from django.conf import settings from django.core import paginator from django.core.exceptions import ImproperlyConfigured from django.urls import NoReverseMatch, reverse from django.utils import translation from django.utils.deprecation import RemovedInDjango50Warning PING_URL = "https://www.google.com/webmasters/tools/ping" class SitemapNotFound(Exception): pass def ping_google(sitemap_url=None, ping_url=PING_URL, sitemap_uses_https=True): """ Alert Google that the sitemap for the current site has been updated. If sitemap_url is provided, it should be an absolute path to the sitemap for this site -- e.g., '/sitemap.xml'. If sitemap_url is not provided, this function will attempt to deduce it by using urls.reverse(). """ sitemap_full_url = _get_sitemap_full_url(sitemap_url, sitemap_uses_https) params = urlencode({'sitemap': sitemap_full_url}) urlopen('%s?%s' % (ping_url, params)) def _get_sitemap_full_url(sitemap_url, sitemap_uses_https=True): if not django_apps.is_installed('django.contrib.sites'): raise ImproperlyConfigured("ping_google requires django.contrib.sites, which isn't installed.") if sitemap_url is None: try: # First, try to get the "index" sitemap URL. sitemap_url = reverse('django.contrib.sitemaps.views.index') except NoReverseMatch: try: # Next, try for the "global" sitemap URL. sitemap_url = reverse('django.contrib.sitemaps.views.sitemap') except NoReverseMatch: pass if sitemap_url is None: raise SitemapNotFound("You didn't provide a sitemap_url, and the sitemap URL couldn't be auto-detected.") Site = django_apps.get_model('sites.Site') current_site = Site.objects.get_current() scheme = 'https' if sitemap_uses_https else 'http' return '%s://%s%s' % (scheme, current_site.domain, sitemap_url) class Sitemap: # This limit is defined by Google. See the index documentation at # https://www.sitemaps.org/protocol.html#index. limit = 50000 # If protocol is None, the URLs in the sitemap will use the protocol # with which the sitemap was requested. protocol = None # Enables generating URLs for all languages. i18n = False # Override list of languages to use. languages = None # Enables generating alternate/hreflang links. alternates = False # Add an alternate/hreflang link with value 'x-default'. x_default = False def _get(self, name, item, default=None): try: attr = getattr(self, name) except AttributeError: return default if callable(attr): if self.i18n: # Split the (item, lang_code) tuples again for the location, # priority, lastmod and changefreq method calls. item, lang_code = item return attr(item) return attr def _languages(self): if self.languages is not None: return self.languages return [lang_code for lang_code, _ in settings.LANGUAGES] def _items(self): if self.i18n: # Create (item, lang_code) tuples for all items and languages. # This is necessary to paginate with all languages already considered. items = [ (item, lang_code) for lang_code in self._languages() for item in self.items() ] return items return self.items() def _location(self, item, force_lang_code=None): if self.i18n: obj, lang_code = item # Activate language from item-tuple or forced one before calling location. 
with translation.override(force_lang_code or lang_code): return self._get('location', item) return self._get('location', item) @property def paginator(self): return paginator.Paginator(self._items(), self.limit) def items(self): return [] def location(self, item): return item.get_absolute_url() def get_protocol(self, protocol=None): # Determine protocol if self.protocol is None and protocol is None: warnings.warn( "The default sitemap protocol will be changed from 'http' to " "'https' in Django 5.0. Set Sitemap.protocol to silence this " "warning.", category=RemovedInDjango50Warning, stacklevel=2, ) # RemovedInDjango50Warning: when the deprecation ends, replace 'http' # with 'https'. return self.protocol or protocol or 'http' def get_domain(self, site=None): # Determine domain if site is None: if django_apps.is_installed('django.contrib.sites'): Site = django_apps.get_model('sites.Site') try: site = Site.objects.get_current() except Site.DoesNotExist: pass if site is None: raise ImproperlyConfigured( "To use sitemaps, either enable the sites framework or pass " "a Site/RequestSite object in your view." ) return site.domain def get_urls(self, page=1, site=None, protocol=None): protocol = self.get_protocol(protocol) domain = self.get_domain(site) return self._urls(page, protocol, domain) def _urls(self, page, protocol, domain): urls = [] latest_lastmod = None all_items_lastmod = True # track if all items have a lastmod paginator_page = self.paginator.page(page) for item in paginator_page.object_list: loc = f'{protocol}://{domain}{self._location(item)}' priority = self._get('priority', item) lastmod = self._get('lastmod', item) if all_items_lastmod: all_items_lastmod = lastmod is not None if (all_items_lastmod and (latest_lastmod is None or lastmod > latest_lastmod)): latest_lastmod = lastmod url_info = { 'item': item, 'location': loc, 'lastmod': lastmod, 'changefreq': self._get('changefreq', item), 'priority': str(priority if priority is not None else ''), 'alternates': [], } if self.i18n and self.alternates: for lang_code in self._languages(): loc = f'{protocol}://{domain}{self._location(item, lang_code)}' url_info['alternates'].append({ 'location': loc, 'lang_code': lang_code, }) if self.x_default: lang_code = settings.LANGUAGE_CODE loc = f'{protocol}://{domain}{self._location(item, lang_code)}' loc = loc.replace(f'/{lang_code}/', '/', 1) url_info['alternates'].append({ 'location': loc, 'lang_code': 'x-default', }) urls.append(url_info) if all_items_lastmod and latest_lastmod: self.latest_lastmod = latest_lastmod return urls class GenericSitemap(Sitemap): priority = None changefreq = None def __init__(self, info_dict, priority=None, changefreq=None, protocol=None): self.queryset = info_dict['queryset'] self.date_field = info_dict.get('date_field') self.priority = self.priority or priority self.changefreq = self.changefreq or changefreq self.protocol = self.protocol or protocol def items(self): # Make sure to return a clone; we don't want premature evaluation. return self.queryset.filter() def lastmod(self, item): if self.date_field is not None: return getattr(item, self.date_field) return None
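# Subclass sketch (assumptions: an Article model with a published flag, an
# updated_at field, and get_absolute_url(); all of these names are
# illustrative, not part of Django).
from django.contrib.sitemaps import Sitemap

from myapp.models import Article  # hypothetical model


class ArticleSitemap(Sitemap):
    changefreq = 'weekly'
    priority = 0.6
    protocol = 'https'  # set explicitly to avoid the RemovedInDjango50Warning

    def items(self):
        return Article.objects.filter(published=True)

    def lastmod(self, item):
        # Used for <lastmod> entries and, via latest_lastmod, the Last-Modified header.
        return item.updated_at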
import datetime
from functools import wraps

from django.contrib.sites.shortcuts import get_current_site
from django.core.paginator import EmptyPage, PageNotAnInteger
from django.http import Http404
from django.template.response import TemplateResponse
from django.urls import reverse
from django.utils import timezone
from django.utils.http import http_date


def x_robots_tag(func):
    @wraps(func)
    def inner(request, *args, **kwargs):
        response = func(request, *args, **kwargs)
        response.headers['X-Robots-Tag'] = 'noindex, noodp, noarchive'
        return response
    return inner


@x_robots_tag
def index(request, sitemaps,
          template_name='sitemap_index.xml', content_type='application/xml',
          sitemap_url_name='django.contrib.sitemaps.views.sitemap'):

    req_protocol = request.scheme
    req_site = get_current_site(request)

    sites = []  # all sections' sitemap URLs
    for section, site in sitemaps.items():
        # For each section label, add links of all pages of its sitemap
        # (usually generated by the `sitemap` view).
        if callable(site):
            site = site()
        protocol = req_protocol if site.protocol is None else site.protocol
        sitemap_url = reverse(sitemap_url_name, kwargs={'section': section})
        absolute_url = '%s://%s%s' % (protocol, req_site.domain, sitemap_url)
        sites.append(absolute_url)
        # Add links to all pages of the sitemap.
        for page in range(2, site.paginator.num_pages + 1):
            sites.append('%s?p=%s' % (absolute_url, page))

    return TemplateResponse(request, template_name, {'sitemaps': sites},
                            content_type=content_type)


@x_robots_tag
def sitemap(request, sitemaps, section=None,
            template_name='sitemap.xml', content_type='application/xml'):

    req_protocol = request.scheme
    req_site = get_current_site(request)

    if section is not None:
        if section not in sitemaps:
            raise Http404("No sitemap available for section: %r" % section)
        maps = [sitemaps[section]]
    else:
        maps = sitemaps.values()
    page = request.GET.get("p", 1)

    lastmod = None
    all_sites_lastmod = True
    urls = []
    for site in maps:
        try:
            if callable(site):
                site = site()
            urls.extend(site.get_urls(page=page, site=req_site,
                                      protocol=req_protocol))
            if all_sites_lastmod:
                site_lastmod = getattr(site, 'latest_lastmod', None)
                if site_lastmod is not None:
                    if not isinstance(site_lastmod, datetime.datetime):
                        site_lastmod = datetime.datetime.combine(site_lastmod, datetime.time.min)
                    if timezone.is_naive(site_lastmod):
                        site_lastmod = timezone.make_aware(site_lastmod, timezone.utc)
                    lastmod = site_lastmod if lastmod is None else max(lastmod, site_lastmod)
                else:
                    all_sites_lastmod = False
        except EmptyPage:
            raise Http404("Page %s empty" % page)
        except PageNotAnInteger:
            raise Http404("No page '%s'" % page)
    response = TemplateResponse(request, template_name, {'urlset': urls},
                                content_type=content_type)
    if all_sites_lastmod and lastmod is not None:
        # If lastmod is defined for all sites, set the Last-Modified header
        # so that ConditionalGetMiddleware can send a 304 NOT MODIFIED
        # response.
        response.headers['Last-Modified'] = http_date(lastmod.timestamp())
    return response
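
# Usage sketch (assumption: this wiring lives in a project's URLconf, with
# `sitemaps` a dict mapping section labels to Sitemap classes or instances).
# The route name is the one index() reverses for each section:
#
#     from django.contrib.sitemaps import views
#     from django.urls import path
#
#     urlpatterns = [
#         path('sitemap.xml', views.index, {'sitemaps': sitemaps}),
#         path('sitemap-<section>.xml', views.sitemap, {'sitemaps': sitemaps},
#              name='django.contrib.sitemaps.views.sitemap'),
#     ]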
import inspect from importlib import import_module from inspect import cleandoc from pathlib import Path from django.apps import apps from django.contrib import admin from django.contrib.admin.views.decorators import staff_member_required from django.contrib.admindocs import utils from django.contrib.admindocs.utils import ( replace_named_groups, replace_unnamed_groups, ) from django.core.exceptions import ImproperlyConfigured, ViewDoesNotExist from django.db import models from django.http import Http404 from django.template.engine import Engine from django.urls import get_mod_func, get_resolver, get_urlconf from django.utils._os import safe_join from django.utils.decorators import method_decorator from django.utils.functional import cached_property from django.utils.inspect import ( func_accepts_kwargs, func_accepts_var_args, get_func_full_args, method_has_no_args, ) from django.utils.translation import gettext as _ from django.views.generic import TemplateView from .utils import get_view_name # Exclude methods starting with these strings from documentation MODEL_METHODS_EXCLUDE = ('_', 'add_', 'delete', 'save', 'set_') class BaseAdminDocsView(TemplateView): """ Base view for admindocs views. """ @method_decorator(staff_member_required) def dispatch(self, request, *args, **kwargs): if not utils.docutils_is_available: # Display an error message for people without docutils self.template_name = 'admin_doc/missing_docutils.html' return self.render_to_response(admin.site.each_context(request)) return super().dispatch(request, *args, **kwargs) def get_context_data(self, **kwargs): return super().get_context_data(**{ **kwargs, **admin.site.each_context(self.request), }) class BookmarkletsView(BaseAdminDocsView): template_name = 'admin_doc/bookmarklets.html' class TemplateTagIndexView(BaseAdminDocsView): template_name = 'admin_doc/template_tag_index.html' def get_context_data(self, **kwargs): tags = [] try: engine = Engine.get_default() except ImproperlyConfigured: # Non-trivial TEMPLATES settings aren't supported (#24125). pass else: app_libs = sorted(engine.template_libraries.items()) builtin_libs = [('', lib) for lib in engine.template_builtins] for module_name, library in builtin_libs + app_libs: for tag_name, tag_func in library.tags.items(): title, body, metadata = utils.parse_docstring(tag_func.__doc__) title = title and utils.parse_rst(title, 'tag', _('tag:') + tag_name) body = body and utils.parse_rst(body, 'tag', _('tag:') + tag_name) for key in metadata: metadata[key] = utils.parse_rst(metadata[key], 'tag', _('tag:') + tag_name) tag_library = module_name.split('.')[-1] tags.append({ 'name': tag_name, 'title': title, 'body': body, 'meta': metadata, 'library': tag_library, }) return super().get_context_data(**{**kwargs, 'tags': tags}) class TemplateFilterIndexView(BaseAdminDocsView): template_name = 'admin_doc/template_filter_index.html' def get_context_data(self, **kwargs): filters = [] try: engine = Engine.get_default() except ImproperlyConfigured: # Non-trivial TEMPLATES settings aren't supported (#24125). 
pass else: app_libs = sorted(engine.template_libraries.items()) builtin_libs = [('', lib) for lib in engine.template_builtins] for module_name, library in builtin_libs + app_libs: for filter_name, filter_func in library.filters.items(): title, body, metadata = utils.parse_docstring(filter_func.__doc__) title = title and utils.parse_rst(title, 'filter', _('filter:') + filter_name) body = body and utils.parse_rst(body, 'filter', _('filter:') + filter_name) for key in metadata: metadata[key] = utils.parse_rst(metadata[key], 'filter', _('filter:') + filter_name) tag_library = module_name.split('.')[-1] filters.append({ 'name': filter_name, 'title': title, 'body': body, 'meta': metadata, 'library': tag_library, }) return super().get_context_data(**{**kwargs, 'filters': filters}) class ViewIndexView(BaseAdminDocsView): template_name = 'admin_doc/view_index.html' def get_context_data(self, **kwargs): views = [] url_resolver = get_resolver(get_urlconf()) try: view_functions = extract_views_from_urlpatterns(url_resolver.url_patterns) except ImproperlyConfigured: view_functions = [] for (func, regex, namespace, name) in view_functions: views.append({ 'full_name': get_view_name(func), 'url': simplify_regex(regex), 'url_name': ':'.join((namespace or []) + (name and [name] or [])), 'namespace': ':'.join(namespace or []), 'name': name, }) return super().get_context_data(**{**kwargs, 'views': views}) class ViewDetailView(BaseAdminDocsView): template_name = 'admin_doc/view_detail.html' @staticmethod def _get_view_func(view): urlconf = get_urlconf() if get_resolver(urlconf)._is_callback(view): mod, func = get_mod_func(view) try: # Separate the module and function, e.g. # 'mymodule.views.myview' -> 'mymodule.views', 'myview'). return getattr(import_module(mod), func) except ImportError: # Import may fail because view contains a class name, e.g. # 'mymodule.views.ViewContainer.my_view', so mod takes the form # 'mymodule.views.ViewContainer'. Parse it again to separate # the module and class. mod, klass = get_mod_func(mod) return getattr(getattr(import_module(mod), klass), func) def get_context_data(self, **kwargs): view = self.kwargs['view'] view_func = self._get_view_func(view) if view_func is None: raise Http404 title, body, metadata = utils.parse_docstring(view_func.__doc__) title = title and utils.parse_rst(title, 'view', _('view:') + view) body = body and utils.parse_rst(body, 'view', _('view:') + view) for key in metadata: metadata[key] = utils.parse_rst(metadata[key], 'model', _('view:') + view) return super().get_context_data(**{ **kwargs, 'name': view, 'summary': title, 'body': body, 'meta': metadata, }) class ModelIndexView(BaseAdminDocsView): template_name = 'admin_doc/model_index.html' def get_context_data(self, **kwargs): m_list = [m._meta for m in apps.get_models()] return super().get_context_data(**{**kwargs, 'models': m_list}) class ModelDetailView(BaseAdminDocsView): template_name = 'admin_doc/model_detail.html' def get_context_data(self, **kwargs): model_name = self.kwargs['model_name'] # Get the model class. 
try: app_config = apps.get_app_config(self.kwargs['app_label']) except LookupError: raise Http404(_("App %(app_label)r not found") % self.kwargs) try: model = app_config.get_model(model_name) except LookupError: raise Http404(_("Model %(model_name)r not found in app %(app_label)r") % self.kwargs) opts = model._meta title, body, metadata = utils.parse_docstring(model.__doc__) title = title and utils.parse_rst(title, 'model', _('model:') + model_name) body = body and utils.parse_rst(body, 'model', _('model:') + model_name) # Gather fields/field descriptions. fields = [] for field in opts.fields: # ForeignKey is a special case since the field will actually be a # descriptor that returns the other object if isinstance(field, models.ForeignKey): data_type = field.remote_field.model.__name__ app_label = field.remote_field.model._meta.app_label verbose = utils.parse_rst( (_("the related `%(app_label)s.%(data_type)s` object") % { 'app_label': app_label, 'data_type': data_type, }), 'model', _('model:') + data_type, ) else: data_type = get_readable_field_data_type(field) verbose = field.verbose_name fields.append({ 'name': field.name, 'data_type': data_type, 'verbose': verbose or '', 'help_text': field.help_text, }) # Gather many-to-many fields. for field in opts.many_to_many: data_type = field.remote_field.model.__name__ app_label = field.remote_field.model._meta.app_label verbose = _("related `%(app_label)s.%(object_name)s` objects") % { 'app_label': app_label, 'object_name': data_type, } fields.append({ 'name': "%s.all" % field.name, "data_type": 'List', 'verbose': utils.parse_rst(_("all %s") % verbose, 'model', _('model:') + opts.model_name), }) fields.append({ 'name': "%s.count" % field.name, 'data_type': 'Integer', 'verbose': utils.parse_rst(_("number of %s") % verbose, 'model', _('model:') + opts.model_name), }) methods = [] # Gather model methods. for func_name, func in model.__dict__.items(): if inspect.isfunction(func) or isinstance(func, (cached_property, property)): try: for exclude in MODEL_METHODS_EXCLUDE: if func_name.startswith(exclude): raise StopIteration except StopIteration: continue verbose = func.__doc__ verbose = verbose and ( utils.parse_rst(cleandoc(verbose), 'model', _('model:') + opts.model_name) ) # Show properties, cached_properties, and methods without # arguments as fields. Otherwise, show as a 'method with # arguments'. if isinstance(func, (cached_property, property)): fields.append({ 'name': func_name, 'data_type': get_return_data_type(func_name), 'verbose': verbose or '' }) elif method_has_no_args(func) and not func_accepts_kwargs(func) and not func_accepts_var_args(func): fields.append({ 'name': func_name, 'data_type': get_return_data_type(func_name), 'verbose': verbose or '', }) else: arguments = get_func_full_args(func) # Join arguments with ', ' and in case of default value, # join it with '='. Use repr() so that strings will be # correctly displayed. 
                    print_arguments = ', '.join([
                        '='.join([arg_el[0], *map(repr, arg_el[1:])])
                        for arg_el in arguments
                    ])
                    methods.append({
                        'name': func_name,
                        'arguments': print_arguments,
                        'verbose': verbose or '',
                    })

        # Gather related objects
        for rel in opts.related_objects:
            verbose = _("related `%(app_label)s.%(object_name)s` objects") % {
                'app_label': rel.related_model._meta.app_label,
                'object_name': rel.related_model._meta.object_name,
            }
            accessor = rel.get_accessor_name()
            fields.append({
                'name': "%s.all" % accessor,
                'data_type': 'List',
                'verbose': utils.parse_rst(_("all %s") % verbose, 'model', _('model:') + opts.model_name),
            })
            fields.append({
                'name': "%s.count" % accessor,
                'data_type': 'Integer',
                'verbose': utils.parse_rst(_("number of %s") % verbose, 'model', _('model:') + opts.model_name),
            })
        return super().get_context_data(**{
            **kwargs,
            'name': opts.label,
            'summary': title,
            'description': body,
            'fields': fields,
            'methods': methods,
        })


class TemplateDetailView(BaseAdminDocsView):
    template_name = 'admin_doc/template_detail.html'

    def get_context_data(self, **kwargs):
        template = self.kwargs['template']
        templates = []
        try:
            default_engine = Engine.get_default()
        except ImproperlyConfigured:
            # Non-trivial TEMPLATES settings aren't supported (#24125).
            pass
        else:
            # This doesn't account for template loaders (#24128).
            for index, directory in enumerate(default_engine.dirs):
                template_file = Path(safe_join(directory, template))
                if template_file.exists():
                    template_contents = template_file.read_text()
                else:
                    template_contents = ''
                templates.append({
                    'file': template_file,
                    'exists': template_file.exists(),
                    'contents': template_contents,
                    'order': index,
                })
        return super().get_context_data(**{
            **kwargs,
            'name': template,
            'templates': templates,
        })


####################
# Helper functions #
####################


def get_return_data_type(func_name):
    """Return a somewhat-helpful data type given a function name"""
    if func_name.startswith('get_'):
        if func_name.endswith('_list'):
            return 'List'
        elif func_name.endswith('_count'):
            return 'Integer'
    return ''


def get_readable_field_data_type(field):
    """
    Return the description for a given field type, if it exists. Fields'
    descriptions can contain format strings, which will be interpolated with
    the values of field.__dict__ before being output.
    """
    return field.description % field.__dict__


def extract_views_from_urlpatterns(urlpatterns, base='', namespace=None):
    """
    Return a list of views from a list of urlpatterns.

    Each object in the returned list is a four-tuple:
    (view_func, regex, namespace, name)
    """
    views = []
    for p in urlpatterns:
        if hasattr(p, 'url_patterns'):
            try:
                patterns = p.url_patterns
            except ImportError:
                continue
            views.extend(extract_views_from_urlpatterns(
                patterns,
                base + str(p.pattern),
                (namespace or []) + (p.namespace and [p.namespace] or [])
            ))
        elif hasattr(p, 'callback'):
            try:
                views.append((p.callback, base + str(p.pattern), namespace, p.name))
            except ViewDoesNotExist:
                continue
        else:
            raise TypeError(_("%s does not appear to be a urlpattern object") % p)
    return views


def simplify_regex(pattern):
    r"""
    Clean up urlpattern regexes into something more readable by humans. For
    example, turn "^(?P<sport_slug>\w+)/athletes/(?P<athlete_slug>\w+)/$"
    into "/<sport_slug>/athletes/<athlete_slug>/".
    """
    pattern = replace_named_groups(pattern)
    pattern = replace_unnamed_groups(pattern)
    # Clean up any outstanding regex-y characters.
    pattern = pattern.replace('^', '').replace('$', '').replace('?', '')
    if not pattern.startswith('/'):
        pattern = '/' + pattern
    return pattern
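
# Behavior sketch for simplify_regex(), mirroring the example in its
# docstring (safe to evaluate; no Django setup required):
#
#     >>> simplify_regex(r'^(?P<sport_slug>\w+)/athletes/(?P<athlete_slug>\w+)/$')
#     '/<sport_slug>/athletes/<athlete_slug>/'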
from django.contrib.postgres.signals import ( get_citext_oids, get_hstore_oids, register_type_handlers, ) from django.db import NotSupportedError, router from django.db.migrations import AddConstraint, AddIndex, RemoveIndex from django.db.migrations.operations.base import Operation from django.db.models.constraints import CheckConstraint class CreateExtension(Operation): reversible = True def __init__(self, name): self.name = name def state_forwards(self, app_label, state): pass def database_forwards(self, app_label, schema_editor, from_state, to_state): if ( schema_editor.connection.vendor != 'postgresql' or not router.allow_migrate(schema_editor.connection.alias, app_label) ): return if not self.extension_exists(schema_editor, self.name): schema_editor.execute( 'CREATE EXTENSION IF NOT EXISTS %s' % schema_editor.quote_name(self.name) ) # Clear cached, stale oids. get_hstore_oids.cache_clear() get_citext_oids.cache_clear() # Registering new type handlers cannot be done before the extension is # installed, otherwise a subsequent data migration would use the same # connection. register_type_handlers(schema_editor.connection) def database_backwards(self, app_label, schema_editor, from_state, to_state): if not router.allow_migrate(schema_editor.connection.alias, app_label): return if self.extension_exists(schema_editor, self.name): schema_editor.execute( 'DROP EXTENSION IF EXISTS %s' % schema_editor.quote_name(self.name) ) # Clear cached, stale oids. get_hstore_oids.cache_clear() get_citext_oids.cache_clear() def extension_exists(self, schema_editor, extension): with schema_editor.connection.cursor() as cursor: cursor.execute( 'SELECT 1 FROM pg_extension WHERE extname = %s', [extension], ) return bool(cursor.fetchone()) def describe(self): return "Creates extension %s" % self.name @property def migration_name_fragment(self): return 'create_extension_%s' % self.name class BloomExtension(CreateExtension): def __init__(self): self.name = 'bloom' class BtreeGinExtension(CreateExtension): def __init__(self): self.name = 'btree_gin' class BtreeGistExtension(CreateExtension): def __init__(self): self.name = 'btree_gist' class CITextExtension(CreateExtension): def __init__(self): self.name = 'citext' class CryptoExtension(CreateExtension): def __init__(self): self.name = 'pgcrypto' class HStoreExtension(CreateExtension): def __init__(self): self.name = 'hstore' class TrigramExtension(CreateExtension): def __init__(self): self.name = 'pg_trgm' class UnaccentExtension(CreateExtension): def __init__(self): self.name = 'unaccent' class NotInTransactionMixin: def _ensure_not_in_transaction(self, schema_editor): if schema_editor.connection.in_atomic_block: raise NotSupportedError( 'The %s operation cannot be executed inside a transaction ' '(set atomic = False on the migration).' 
% self.__class__.__name__ ) class AddIndexConcurrently(NotInTransactionMixin, AddIndex): """Create an index using PostgreSQL's CREATE INDEX CONCURRENTLY syntax.""" atomic = False def describe(self): return 'Concurrently create index %s on field(s) %s of model %s' % ( self.index.name, ', '.join(self.index.fields), self.model_name, ) def database_forwards(self, app_label, schema_editor, from_state, to_state): self._ensure_not_in_transaction(schema_editor) model = to_state.apps.get_model(app_label, self.model_name) if self.allow_migrate_model(schema_editor.connection.alias, model): schema_editor.add_index(model, self.index, concurrently=True) def database_backwards(self, app_label, schema_editor, from_state, to_state): self._ensure_not_in_transaction(schema_editor) model = from_state.apps.get_model(app_label, self.model_name) if self.allow_migrate_model(schema_editor.connection.alias, model): schema_editor.remove_index(model, self.index, concurrently=True) class RemoveIndexConcurrently(NotInTransactionMixin, RemoveIndex): """Remove an index using PostgreSQL's DROP INDEX CONCURRENTLY syntax.""" atomic = False def describe(self): return 'Concurrently remove index %s from %s' % (self.name, self.model_name) def database_forwards(self, app_label, schema_editor, from_state, to_state): self._ensure_not_in_transaction(schema_editor) model = from_state.apps.get_model(app_label, self.model_name) if self.allow_migrate_model(schema_editor.connection.alias, model): from_model_state = from_state.models[app_label, self.model_name_lower] index = from_model_state.get_index_by_name(self.name) schema_editor.remove_index(model, index, concurrently=True) def database_backwards(self, app_label, schema_editor, from_state, to_state): self._ensure_not_in_transaction(schema_editor) model = to_state.apps.get_model(app_label, self.model_name) if self.allow_migrate_model(schema_editor.connection.alias, model): to_model_state = to_state.models[app_label, self.model_name_lower] index = to_model_state.get_index_by_name(self.name) schema_editor.add_index(model, index, concurrently=True) class CollationOperation(Operation): def __init__(self, name, locale, *, provider='libc', deterministic=True): self.name = name self.locale = locale self.provider = provider self.deterministic = deterministic def state_forwards(self, app_label, state): pass def deconstruct(self): kwargs = {'name': self.name, 'locale': self.locale} if self.provider and self.provider != 'libc': kwargs['provider'] = self.provider if self.deterministic is False: kwargs['deterministic'] = self.deterministic return ( self.__class__.__qualname__, [], kwargs, ) def create_collation(self, schema_editor): if ( self.deterministic is False and not schema_editor.connection.features.supports_non_deterministic_collations ): raise NotSupportedError( 'Non-deterministic collations require PostgreSQL 12+.' 
) args = {'locale': schema_editor.quote_name(self.locale)} if self.provider != 'libc': args['provider'] = schema_editor.quote_name(self.provider) if self.deterministic is False: args['deterministic'] = 'false' schema_editor.execute('CREATE COLLATION %(name)s (%(args)s)' % { 'name': schema_editor.quote_name(self.name), 'args': ', '.join(f'{option}={value}' for option, value in args.items()), }) def remove_collation(self, schema_editor): schema_editor.execute( 'DROP COLLATION %s' % schema_editor.quote_name(self.name), ) class CreateCollation(CollationOperation): """Create a collation.""" def database_forwards(self, app_label, schema_editor, from_state, to_state): if ( schema_editor.connection.vendor != 'postgresql' or not router.allow_migrate(schema_editor.connection.alias, app_label) ): return self.create_collation(schema_editor) def database_backwards(self, app_label, schema_editor, from_state, to_state): if not router.allow_migrate(schema_editor.connection.alias, app_label): return self.remove_collation(schema_editor) def describe(self): return f'Create collation {self.name}' @property def migration_name_fragment(self): return 'create_collation_%s' % self.name.lower() class RemoveCollation(CollationOperation): """Remove a collation.""" def database_forwards(self, app_label, schema_editor, from_state, to_state): if ( schema_editor.connection.vendor != 'postgresql' or not router.allow_migrate(schema_editor.connection.alias, app_label) ): return self.remove_collation(schema_editor) def database_backwards(self, app_label, schema_editor, from_state, to_state): if not router.allow_migrate(schema_editor.connection.alias, app_label): return self.create_collation(schema_editor) def describe(self): return f'Remove collation {self.name}' @property def migration_name_fragment(self): return 'remove_collation_%s' % self.name.lower() class AddConstraintNotValid(AddConstraint): """ Add a table constraint without enforcing validation, using PostgreSQL's NOT VALID syntax. """ def __init__(self, model_name, constraint): if not isinstance(constraint, CheckConstraint): raise TypeError( 'AddConstraintNotValid.constraint must be a check constraint.' ) super().__init__(model_name, constraint) def describe(self): return 'Create not valid constraint %s on model %s' % ( self.constraint.name, self.model_name, ) def database_forwards(self, app_label, schema_editor, from_state, to_state): model = from_state.apps.get_model(app_label, self.model_name) if self.allow_migrate_model(schema_editor.connection.alias, model): constraint_sql = self.constraint.create_sql(model, schema_editor) if constraint_sql: # Constraint.create_sql returns interpolated SQL which makes # params=None a necessity to avoid escaping attempts on # execution. 
schema_editor.execute(str(constraint_sql) + ' NOT VALID', params=None) @property def migration_name_fragment(self): return super().migration_name_fragment + '_not_valid' class ValidateConstraint(Operation): """Validate a table NOT VALID constraint.""" def __init__(self, model_name, name): self.model_name = model_name self.name = name def describe(self): return 'Validate constraint %s on model %s' % (self.name, self.model_name) def database_forwards(self, app_label, schema_editor, from_state, to_state): model = from_state.apps.get_model(app_label, self.model_name) if self.allow_migrate_model(schema_editor.connection.alias, model): schema_editor.execute('ALTER TABLE %s VALIDATE CONSTRAINT %s' % ( schema_editor.quote_name(model._meta.db_table), schema_editor.quote_name(self.name), )) def database_backwards(self, app_label, schema_editor, from_state, to_state): # PostgreSQL does not provide a way to make a constraint invalid. pass def state_forwards(self, app_label, state): pass @property def migration_name_fragment(self): return '%s_validate_%s' % (self.model_name.lower(), self.name.lower()) def deconstruct(self): return self.__class__.__name__, [], { 'model_name': self.model_name, 'name': self.name, }
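
# Usage sketch (assumptions: app label 'myapp' and a model 'Order' with an
# `amount` field; all names are illustrative). AddConstraintNotValid enforces
# the check only for new writes, skipping the full-table scan; a later
# ValidateConstraint verifies the existing rows in a separate step:
#
#     from django.contrib.postgres.operations import (
#         AddConstraintNotValid, ValidateConstraint,
#     )
#     from django.db import migrations, models
#
#     class Migration(migrations.Migration):
#         dependencies = [('myapp', '0001_initial')]
#         operations = [
#             AddConstraintNotValid(
#                 'order',
#                 models.CheckConstraint(
#                     check=models.Q(amount__gte=0),
#                     name='order_amount_gte_0',
#                 ),
#             ),
#             ValidateConstraint('order', name='order_amount_gte_0'),
#         ]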
from django.contrib.postgres.fields import ArrayField
from django.db.models import Subquery
from django.utils.functional import cached_property


class ArraySubquery(Subquery):
    """
    A Subquery wrapped in PostgreSQL's ARRAY() constructor, so the whole
    result set of the subquery is returned as a single array value.
    """
    template = 'ARRAY(%(subquery)s)'

    def __init__(self, queryset, **kwargs):
        super().__init__(queryset, **kwargs)

    @cached_property
    def output_field(self):
        return ArrayField(self.query.output_field)
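
# Usage sketch (assumptions: hypothetical `Author` and `Book` models where
# Book has an `author` foreign key). The correlated subquery is wrapped in
# ARRAY(...), so each author row is annotated with an array of book titles:
#
#     from django.db.models import OuterRef
#
#     titles = Book.objects.filter(author=OuterRef('pk')).values('title')
#     authors = Author.objects.annotate(book_titles=ArraySubquery(titles))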
from django.db import NotSupportedError from django.db.backends.ddl_references import Statement, Table from django.db.models import Deferrable, F, Q from django.db.models.constraints import BaseConstraint from django.db.models.expressions import Col from django.db.models.sql import Query __all__ = ['ExclusionConstraint'] class ExclusionConstraint(BaseConstraint): template = 'CONSTRAINT %(name)s EXCLUDE USING %(index_type)s (%(expressions)s)%(include)s%(where)s%(deferrable)s' def __init__( self, *, name, expressions, index_type=None, condition=None, deferrable=None, include=None, opclasses=(), ): if index_type and index_type.lower() not in {'gist', 'spgist'}: raise ValueError( 'Exclusion constraints only support GiST or SP-GiST indexes.' ) if not expressions: raise ValueError( 'At least one expression is required to define an exclusion ' 'constraint.' ) if not all( isinstance(expr, (list, tuple)) and len(expr) == 2 for expr in expressions ): raise ValueError('The expressions must be a list of 2-tuples.') if not isinstance(condition, (type(None), Q)): raise ValueError( 'ExclusionConstraint.condition must be a Q instance.' ) if condition and deferrable: raise ValueError( 'ExclusionConstraint with conditions cannot be deferred.' ) if not isinstance(deferrable, (type(None), Deferrable)): raise ValueError( 'ExclusionConstraint.deferrable must be a Deferrable instance.' ) if not isinstance(include, (type(None), list, tuple)): raise ValueError( 'ExclusionConstraint.include must be a list or tuple.' ) if include and index_type and index_type.lower() != 'gist': raise ValueError( 'Covering exclusion constraints only support GiST indexes.' ) if not isinstance(opclasses, (list, tuple)): raise ValueError( 'ExclusionConstraint.opclasses must be a list or tuple.' ) if opclasses and len(expressions) != len(opclasses): raise ValueError( 'ExclusionConstraint.expressions and ' 'ExclusionConstraint.opclasses must have the same number of ' 'elements.' 
) self.expressions = expressions self.index_type = index_type or 'GIST' self.condition = condition self.deferrable = deferrable self.include = tuple(include) if include else () self.opclasses = opclasses super().__init__(name=name) def _get_expression_sql(self, compiler, schema_editor, query): expressions = [] for idx, (expression, operator) in enumerate(self.expressions): if isinstance(expression, str): expression = F(expression) expression = expression.resolve_expression(query=query) sql, params = compiler.compile(expression) if not isinstance(expression, Col): sql = f'({sql})' try: opclass = self.opclasses[idx] if opclass: sql = '%s %s' % (sql, opclass) except IndexError: pass sql = sql % tuple(schema_editor.quote_value(p) for p in params) expressions.append('%s WITH %s' % (sql, operator)) return expressions def _get_condition_sql(self, compiler, schema_editor, query): if self.condition is None: return None where = query.build_where(self.condition) sql, params = where.as_sql(compiler, schema_editor.connection) return sql % tuple(schema_editor.quote_value(p) for p in params) def constraint_sql(self, model, schema_editor): query = Query(model, alias_cols=False) compiler = query.get_compiler(connection=schema_editor.connection) expressions = self._get_expression_sql(compiler, schema_editor, query) condition = self._get_condition_sql(compiler, schema_editor, query) include = [model._meta.get_field(field_name).column for field_name in self.include] return self.template % { 'name': schema_editor.quote_name(self.name), 'index_type': self.index_type, 'expressions': ', '.join(expressions), 'include': schema_editor._index_include_sql(model, include), 'where': ' WHERE (%s)' % condition if condition else '', 'deferrable': schema_editor._deferrable_constraint_sql(self.deferrable), } def create_sql(self, model, schema_editor): self.check_supported(schema_editor) return Statement( 'ALTER TABLE %(table)s ADD %(constraint)s', table=Table(model._meta.db_table, schema_editor.quote_name), constraint=self.constraint_sql(model, schema_editor), ) def remove_sql(self, model, schema_editor): return schema_editor._delete_constraint_sql( schema_editor.sql_delete_check, model, schema_editor.quote_name(self.name), ) def check_supported(self, schema_editor): if self.include and not schema_editor.connection.features.supports_covering_gist_indexes: raise NotSupportedError( 'Covering exclusion constraints requires PostgreSQL 12+.' 
) def deconstruct(self): path, args, kwargs = super().deconstruct() kwargs['expressions'] = self.expressions if self.condition is not None: kwargs['condition'] = self.condition if self.index_type.lower() != 'gist': kwargs['index_type'] = self.index_type if self.deferrable: kwargs['deferrable'] = self.deferrable if self.include: kwargs['include'] = self.include if self.opclasses: kwargs['opclasses'] = self.opclasses return path, args, kwargs def __eq__(self, other): if isinstance(other, self.__class__): return ( self.name == other.name and self.index_type == other.index_type and self.expressions == other.expressions and self.condition == other.condition and self.deferrable == other.deferrable and self.include == other.include and self.opclasses == other.opclasses ) return super().__eq__(other) def __repr__(self): return '<%s: index_type=%s expressions=%s name=%s%s%s%s%s>' % ( self.__class__.__qualname__, repr(self.index_type), repr(self.expressions), repr(self.name), '' if self.condition is None else ' condition=%s' % self.condition, '' if self.deferrable is None else ' deferrable=%r' % self.deferrable, '' if not self.include else ' include=%s' % repr(self.include), '' if not self.opclasses else ' opclasses=%s' % repr(self.opclasses), )
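
# Usage sketch (assumptions: a hypothetical `Reservation` model; the equality
# operator on a plain column needs the btree_gist extension). Each 2-tuple
# pairs an expression with a PostgreSQL operator; together these forbid two
# reservations of the same room with overlapping timespans:
#
#     from django.contrib.postgres.constraints import ExclusionConstraint
#     from django.contrib.postgres.fields import (
#         DateTimeRangeField, RangeOperators,
#     )
#     from django.db import models
#
#     class Reservation(models.Model):
#         room = models.ForeignKey('Room', on_delete=models.CASCADE)
#         timespan = DateTimeRangeField()
#
#         class Meta:
#             constraints = [
#                 ExclusionConstraint(
#                     name='exclude_overlapping_reservations',
#                     expressions=[
#                         ('timespan', RangeOperators.OVERLAPS),
#                         ('room', RangeOperators.EQUAL),
#                     ],
#                 ),
#             ]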
from django.contrib.sites.shortcuts import get_current_site
from django.core.exceptions import ImproperlyConfigured, ObjectDoesNotExist
from django.http import Http404, HttpResponse
from django.template import TemplateDoesNotExist, loader
from django.utils import feedgenerator
from django.utils.encoding import iri_to_uri
from django.utils.html import escape
from django.utils.http import http_date
from django.utils.timezone import get_default_timezone, is_naive, make_aware
from django.utils.translation import get_language


def add_domain(domain, url, secure=False):
    protocol = 'https' if secure else 'http'
    if url.startswith('//'):
        # Support network-path reference (see #16753) - RSS requires a protocol
        url = '%s:%s' % (protocol, url)
    elif not url.startswith(('http://', 'https://', 'mailto:')):
        url = iri_to_uri('%s://%s%s' % (protocol, domain, url))
    return url


class FeedDoesNotExist(ObjectDoesNotExist):
    pass


class Feed:
    feed_type = feedgenerator.DefaultFeed
    title_template = None
    description_template = None
    language = None

    def __call__(self, request, *args, **kwargs):
        try:
            obj = self.get_object(request, *args, **kwargs)
        except ObjectDoesNotExist:
            raise Http404('Feed object does not exist.')
        feedgen = self.get_feed(obj, request)
        response = HttpResponse(content_type=feedgen.content_type)
        if hasattr(self, 'item_pubdate') or hasattr(self, 'item_updateddate'):
            # If item_pubdate or item_updateddate is defined for the feed, set
            # the Last-Modified header so that ConditionalGetMiddleware can
            # send a 304 NOT MODIFIED response.
            response.headers['Last-Modified'] = http_date(feedgen.latest_post_date().timestamp())
        feedgen.write(response, 'utf-8')
        return response

    def item_title(self, item):
        # Titles should be double escaped by default (see #6533)
        return escape(str(item))

    def item_description(self, item):
        return str(item)

    def item_link(self, item):
        try:
            return item.get_absolute_url()
        except AttributeError:
            raise ImproperlyConfigured(
                'Give your %s class a get_absolute_url() method, or define an '
                'item_link() method in your Feed class.' % item.__class__.__name__
            )

    def item_enclosures(self, item):
        enc_url = self._get_dynamic_attr('item_enclosure_url', item)
        if enc_url:
            enc = feedgenerator.Enclosure(
                url=str(enc_url),
                length=str(self._get_dynamic_attr('item_enclosure_length', item)),
                mime_type=str(self._get_dynamic_attr('item_enclosure_mime_type', item)),
            )
            return [enc]
        return []

    def _get_dynamic_attr(self, attname, obj, default=None):
        try:
            attr = getattr(self, attname)
        except AttributeError:
            return default
        if callable(attr):
            # Check co_argcount rather than try/excepting the function and
            # catching the TypeError, because something inside the function
            # may raise the TypeError. This technique is more accurate.
            try:
                code = attr.__code__
            except AttributeError:
                code = attr.__call__.__code__
            if code.co_argcount == 2:  # one argument is 'self'
                return attr(obj)
            else:
                return attr()
        return attr

    def feed_extra_kwargs(self, obj):
        """
        Return an extra keyword arguments dictionary that is used when
        initializing the feed generator.
        """
        return {}

    def item_extra_kwargs(self, item):
        """
        Return an extra keyword arguments dictionary that is used with
        the `add_item` call of the feed generator.
        """
        return {}

    def get_object(self, request, *args, **kwargs):
        return None

    def get_context_data(self, **kwargs):
        """
        Return a dictionary to use as extra context if either
        ``self.description_template`` or ``self.title_template`` are used.

        Default implementation preserves the old behavior
        of using {'obj': item, 'site': current_site} as the context.
""" return {'obj': kwargs.get('item'), 'site': kwargs.get('site')} def get_feed(self, obj, request): """ Return a feedgenerator.DefaultFeed object, fully populated, for this feed. Raise FeedDoesNotExist for invalid parameters. """ current_site = get_current_site(request) link = self._get_dynamic_attr('link', obj) link = add_domain(current_site.domain, link, request.is_secure()) feed = self.feed_type( title=self._get_dynamic_attr('title', obj), subtitle=self._get_dynamic_attr('subtitle', obj), link=link, description=self._get_dynamic_attr('description', obj), language=self.language or get_language(), feed_url=add_domain( current_site.domain, self._get_dynamic_attr('feed_url', obj) or request.path, request.is_secure(), ), author_name=self._get_dynamic_attr('author_name', obj), author_link=self._get_dynamic_attr('author_link', obj), author_email=self._get_dynamic_attr('author_email', obj), categories=self._get_dynamic_attr('categories', obj), feed_copyright=self._get_dynamic_attr('feed_copyright', obj), feed_guid=self._get_dynamic_attr('feed_guid', obj), ttl=self._get_dynamic_attr('ttl', obj), **self.feed_extra_kwargs(obj) ) title_tmp = None if self.title_template is not None: try: title_tmp = loader.get_template(self.title_template) except TemplateDoesNotExist: pass description_tmp = None if self.description_template is not None: try: description_tmp = loader.get_template(self.description_template) except TemplateDoesNotExist: pass for item in self._get_dynamic_attr('items', obj): context = self.get_context_data(item=item, site=current_site, obj=obj, request=request) if title_tmp is not None: title = title_tmp.render(context, request) else: title = self._get_dynamic_attr('item_title', item) if description_tmp is not None: description = description_tmp.render(context, request) else: description = self._get_dynamic_attr('item_description', item) link = add_domain( current_site.domain, self._get_dynamic_attr('item_link', item), request.is_secure(), ) enclosures = self._get_dynamic_attr('item_enclosures', item) author_name = self._get_dynamic_attr('item_author_name', item) if author_name is not None: author_email = self._get_dynamic_attr('item_author_email', item) author_link = self._get_dynamic_attr('item_author_link', item) else: author_email = author_link = None tz = get_default_timezone() pubdate = self._get_dynamic_attr('item_pubdate', item) if pubdate and is_naive(pubdate): pubdate = make_aware(pubdate, tz) updateddate = self._get_dynamic_attr('item_updateddate', item) if updateddate and is_naive(updateddate): updateddate = make_aware(updateddate, tz) feed.add_item( title=title, link=link, description=description, unique_id=self._get_dynamic_attr('item_guid', item, link), unique_id_is_permalink=self._get_dynamic_attr( 'item_guid_is_permalink', item), enclosures=enclosures, pubdate=pubdate, updateddate=updateddate, author_name=author_name, author_email=author_email, author_link=author_link, comments=self._get_dynamic_attr('item_comments', item), categories=self._get_dynamic_attr('item_categories', item), item_copyright=self._get_dynamic_attr('item_copyright', item), **self.item_extra_kwargs(item) ) return feed
from datetime import datetime from django.conf import settings from django.utils.crypto import constant_time_compare, salted_hmac from django.utils.http import base36_to_int, int_to_base36 class PasswordResetTokenGenerator: """ Strategy object used to generate and check tokens for the password reset mechanism. """ key_salt = "django.contrib.auth.tokens.PasswordResetTokenGenerator" algorithm = None _secret = None def __init__(self): self.algorithm = self.algorithm or 'sha256' def _get_secret(self): return self._secret or settings.SECRET_KEY def _set_secret(self, secret): self._secret = secret secret = property(_get_secret, _set_secret) def make_token(self, user): """ Return a token that can be used once to do a password reset for the given user. """ return self._make_token_with_timestamp(user, self._num_seconds(self._now())) def check_token(self, user, token): """ Check that a password reset token is correct for a given user. """ if not (user and token): return False # Parse the token try: ts_b36, _ = token.split("-") except ValueError: return False try: ts = base36_to_int(ts_b36) except ValueError: return False # Check that the timestamp/uid has not been tampered with if not constant_time_compare(self._make_token_with_timestamp(user, ts), token): return False # Check the timestamp is within limit. if (self._num_seconds(self._now()) - ts) > settings.PASSWORD_RESET_TIMEOUT: return False return True def _make_token_with_timestamp(self, user, timestamp): # timestamp is number of seconds since 2001-1-1. Converted to base 36, # this gives us a 6 digit string until about 2069. ts_b36 = int_to_base36(timestamp) hash_string = salted_hmac( self.key_salt, self._make_hash_value(user, timestamp), secret=self.secret, algorithm=self.algorithm, ).hexdigest()[::2] # Limit to shorten the URL. return "%s-%s" % (ts_b36, hash_string) def _make_hash_value(self, user, timestamp): """ Hash the user's primary key, email (if available), and some user state that's sure to change after a password reset to produce a token that is invalidated when it's used: 1. The password field will change upon a password reset (even if the same password is chosen, due to password salting). 2. The last_login field will usually be updated very shortly after a password reset. Failing those things, settings.PASSWORD_RESET_TIMEOUT eventually invalidates the token. Running this data through salted_hmac() prevents password cracking attempts using the reset token, provided the secret isn't compromised. """ # Truncate microseconds so that tokens are consistent even if the # database doesn't support microseconds. login_timestamp = '' if user.last_login is None else user.last_login.replace(microsecond=0, tzinfo=None) email_field = user.get_email_field_name() email = getattr(user, email_field, '') or '' return f'{user.pk}{user.password}{login_timestamp}{timestamp}{email}' def _num_seconds(self, dt): return int((dt - datetime(2001, 1, 1)).total_seconds()) def _now(self): # Used for mocking in tests return datetime.now() default_token_generator = PasswordResetTokenGenerator()
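
# Usage sketch: a single-use token round trip. `user` is assumed to satisfy
# the contract of _make_hash_value() (pk, password, last_login and
# get_email_field_name()), e.g. a django.contrib.auth user instance.
def _example_token_roundtrip(user):
    token = default_token_generator.make_token(user)
    # The token is invalidated by a password change, a newer last_login, or
    # PASSWORD_RESET_TIMEOUT seconds elapsing.
    return default_token_generator.check_token(user, token)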
import base64 import binascii import functools import hashlib import importlib import math import warnings from django.conf import settings from django.core.exceptions import ImproperlyConfigured from django.core.signals import setting_changed from django.dispatch import receiver from django.utils.crypto import ( RANDOM_STRING_CHARS, constant_time_compare, get_random_string, pbkdf2, ) from django.utils.module_loading import import_string from django.utils.translation import gettext_noop as _ UNUSABLE_PASSWORD_PREFIX = '!' # This will never be a valid encoded hash UNUSABLE_PASSWORD_SUFFIX_LENGTH = 40 # number of random chars to add after UNUSABLE_PASSWORD_PREFIX def is_password_usable(encoded): """ Return True if this password wasn't generated by User.set_unusable_password(), i.e. make_password(None). """ return encoded is None or not encoded.startswith(UNUSABLE_PASSWORD_PREFIX) def check_password(password, encoded, setter=None, preferred='default'): """ Return a boolean of whether the raw password matches the three part encoded digest. If setter is specified, it'll be called when you need to regenerate the password. """ if password is None or not is_password_usable(encoded): return False preferred = get_hasher(preferred) try: hasher = identify_hasher(encoded) except ValueError: # encoded is gibberish or uses a hasher that's no longer installed. return False hasher_changed = hasher.algorithm != preferred.algorithm must_update = hasher_changed or preferred.must_update(encoded) is_correct = hasher.verify(password, encoded) # If the hasher didn't change (we don't protect against enumeration if it # does) and the password should get updated, try to close the timing gap # between the work factor of the current encoded password and the default # work factor. if not is_correct and not hasher_changed and must_update: hasher.harden_runtime(password, encoded) if setter and is_correct and must_update: setter(password) return is_correct def make_password(password, salt=None, hasher='default'): """ Turn a plain-text password into a hash for database storage Same as encode() but generate a new random salt. If password is None then return a concatenation of UNUSABLE_PASSWORD_PREFIX and a random string, which disallows logins. Additional random string reduces chances of gaining access to staff or superuser accounts. See ticket #20079 for more info. """ if password is None: return UNUSABLE_PASSWORD_PREFIX + get_random_string(UNUSABLE_PASSWORD_SUFFIX_LENGTH) if not isinstance(password, (bytes, str)): raise TypeError( 'Password must be a string or bytes, got %s.' % type(password).__qualname__ ) hasher = get_hasher(hasher) salt = salt or hasher.salt() return hasher.encode(password, salt) @functools.lru_cache() def get_hashers(): hashers = [] for hasher_path in settings.PASSWORD_HASHERS: hasher_cls = import_string(hasher_path) hasher = hasher_cls() if not getattr(hasher, 'algorithm'): raise ImproperlyConfigured("hasher doesn't specify an " "algorithm name: %s" % hasher_path) hashers.append(hasher) return hashers @functools.lru_cache() def get_hashers_by_algorithm(): return {hasher.algorithm: hasher for hasher in get_hashers()} @receiver(setting_changed) def reset_hashers(**kwargs): if kwargs['setting'] == 'PASSWORD_HASHERS': get_hashers.cache_clear() get_hashers_by_algorithm.cache_clear() def get_hasher(algorithm='default'): """ Return an instance of a loaded password hasher. If algorithm is 'default', return the default hasher. Lazily import hashers specified in the project's settings file if needed. 
""" if hasattr(algorithm, 'algorithm'): return algorithm elif algorithm == 'default': return get_hashers()[0] else: hashers = get_hashers_by_algorithm() try: return hashers[algorithm] except KeyError: raise ValueError("Unknown password hashing algorithm '%s'. " "Did you specify it in the PASSWORD_HASHERS " "setting?" % algorithm) def identify_hasher(encoded): """ Return an instance of a loaded password hasher. Identify hasher algorithm by examining encoded hash, and call get_hasher() to return hasher. Raise ValueError if algorithm cannot be identified, or if hasher is not loaded. """ # Ancient versions of Django created plain MD5 passwords and accepted # MD5 passwords with an empty salt. if ((len(encoded) == 32 and '$' not in encoded) or (len(encoded) == 37 and encoded.startswith('md5$$'))): algorithm = 'unsalted_md5' # Ancient versions of Django accepted SHA1 passwords with an empty salt. elif len(encoded) == 46 and encoded.startswith('sha1$$'): algorithm = 'unsalted_sha1' else: algorithm = encoded.split('$', 1)[0] return get_hasher(algorithm) def mask_hash(hash, show=6, char="*"): """ Return the given hash, with only the first ``show`` number shown. The rest are masked with ``char`` for security reasons. """ masked = hash[:show] masked += char * len(hash[show:]) return masked def must_update_salt(salt, expected_entropy): # Each character in the salt provides log_2(len(alphabet)) bits of entropy. return len(salt) * math.log2(len(RANDOM_STRING_CHARS)) < expected_entropy class BasePasswordHasher: """ Abstract base class for password hashers When creating your own hasher, you need to override algorithm, verify(), encode() and safe_summary(). PasswordHasher objects are immutable. """ algorithm = None library = None salt_entropy = 128 def _load_library(self): if self.library is not None: if isinstance(self.library, (tuple, list)): name, mod_path = self.library else: mod_path = self.library try: module = importlib.import_module(mod_path) except ImportError as e: raise ValueError("Couldn't load %r algorithm library: %s" % (self.__class__.__name__, e)) return module raise ValueError("Hasher %r doesn't specify a library attribute" % self.__class__.__name__) def salt(self): """ Generate a cryptographically secure nonce salt in ASCII with an entropy of at least `salt_entropy` bits. """ # Each character in the salt provides # log_2(len(alphabet)) bits of entropy. char_count = math.ceil(self.salt_entropy / math.log2(len(RANDOM_STRING_CHARS))) return get_random_string(char_count, allowed_chars=RANDOM_STRING_CHARS) def verify(self, password, encoded): """Check if the given password is correct.""" raise NotImplementedError('subclasses of BasePasswordHasher must provide a verify() method') def _check_encode_args(self, password, salt): if password is None: raise TypeError('password must be provided.') if not salt or '$' in salt: raise ValueError('salt must be provided and cannot contain $.') def encode(self, password, salt): """ Create an encoded database value. The result is normally formatted as "algorithm$salt$hash" and must be fewer than 128 characters. """ raise NotImplementedError('subclasses of BasePasswordHasher must provide an encode() method') def decode(self, encoded): """ Return a decoded database value. The result is a dictionary and should contain `algorithm`, `hash`, and `salt`. Extra keys can be algorithm specific like `iterations` or `work_factor`. """ raise NotImplementedError( 'subclasses of BasePasswordHasher must provide a decode() method.' 
) def safe_summary(self, encoded): """ Return a summary of safe values. The result is a dictionary and will be used where the password field must be displayed to construct a safe representation of the password. """ raise NotImplementedError('subclasses of BasePasswordHasher must provide a safe_summary() method') def must_update(self, encoded): return False def harden_runtime(self, password, encoded): """ Bridge the runtime gap between the work factor supplied in `encoded` and the work factor suggested by this hasher. Taking PBKDF2 as an example, if `encoded` contains 20000 iterations and `self.iterations` is 30000, this method should run password through another 10000 iterations of PBKDF2. Similar approaches should exist for any hasher that has a work factor. If not, this method should be defined as a no-op to silence the warning. """ warnings.warn('subclasses of BasePasswordHasher should provide a harden_runtime() method') class PBKDF2PasswordHasher(BasePasswordHasher): """ Secure password hashing using the PBKDF2 algorithm (recommended) Configured to use PBKDF2 + HMAC + SHA256. The result is a 64 byte binary string. Iterations may be changed safely but you must rename the algorithm if you change SHA256. """ algorithm = "pbkdf2_sha256" iterations = 320000 digest = hashlib.sha256 def encode(self, password, salt, iterations=None): self._check_encode_args(password, salt) iterations = iterations or self.iterations hash = pbkdf2(password, salt, iterations, digest=self.digest) hash = base64.b64encode(hash).decode('ascii').strip() return "%s$%d$%s$%s" % (self.algorithm, iterations, salt, hash) def decode(self, encoded): algorithm, iterations, salt, hash = encoded.split('$', 3) assert algorithm == self.algorithm return { 'algorithm': algorithm, 'hash': hash, 'iterations': int(iterations), 'salt': salt, } def verify(self, password, encoded): decoded = self.decode(encoded) encoded_2 = self.encode(password, decoded['salt'], decoded['iterations']) return constant_time_compare(encoded, encoded_2) def safe_summary(self, encoded): decoded = self.decode(encoded) return { _('algorithm'): decoded['algorithm'], _('iterations'): decoded['iterations'], _('salt'): mask_hash(decoded['salt']), _('hash'): mask_hash(decoded['hash']), } def must_update(self, encoded): decoded = self.decode(encoded) update_salt = must_update_salt(decoded['salt'], self.salt_entropy) return (decoded['iterations'] != self.iterations) or update_salt def harden_runtime(self, password, encoded): decoded = self.decode(encoded) extra_iterations = self.iterations - decoded['iterations'] if extra_iterations > 0: self.encode(password, decoded['salt'], extra_iterations) class PBKDF2SHA1PasswordHasher(PBKDF2PasswordHasher): """ Alternate PBKDF2 hasher which uses SHA1, the default PRF recommended by PKCS #5. This is compatible with other implementations of PBKDF2, such as openssl's PKCS5_PBKDF2_HMAC_SHA1(). """ algorithm = "pbkdf2_sha1" digest = hashlib.sha1 class Argon2PasswordHasher(BasePasswordHasher): """ Secure password hashing using the argon2 algorithm. This is the winner of the Password Hashing Competition 2013-2015 (https://password-hashing.net). It requires the argon2-cffi library which depends on native C code and might cause portability issues. 
""" algorithm = 'argon2' library = 'argon2' time_cost = 2 memory_cost = 102400 parallelism = 8 def encode(self, password, salt): argon2 = self._load_library() params = self.params() data = argon2.low_level.hash_secret( password.encode(), salt.encode(), time_cost=params.time_cost, memory_cost=params.memory_cost, parallelism=params.parallelism, hash_len=params.hash_len, type=params.type, ) return self.algorithm + data.decode('ascii') def decode(self, encoded): argon2 = self._load_library() algorithm, rest = encoded.split('$', 1) assert algorithm == self.algorithm params = argon2.extract_parameters('$' + rest) variety, *_, b64salt, hash = rest.split('$') # Add padding. b64salt += '=' * (-len(b64salt) % 4) salt = base64.b64decode(b64salt).decode('latin1') return { 'algorithm': algorithm, 'hash': hash, 'memory_cost': params.memory_cost, 'parallelism': params.parallelism, 'salt': salt, 'time_cost': params.time_cost, 'variety': variety, 'version': params.version, 'params': params, } def verify(self, password, encoded): argon2 = self._load_library() algorithm, rest = encoded.split('$', 1) assert algorithm == self.algorithm try: return argon2.PasswordHasher().verify('$' + rest, password) except argon2.exceptions.VerificationError: return False def safe_summary(self, encoded): decoded = self.decode(encoded) return { _('algorithm'): decoded['algorithm'], _('variety'): decoded['variety'], _('version'): decoded['version'], _('memory cost'): decoded['memory_cost'], _('time cost'): decoded['time_cost'], _('parallelism'): decoded['parallelism'], _('salt'): mask_hash(decoded['salt']), _('hash'): mask_hash(decoded['hash']), } def must_update(self, encoded): decoded = self.decode(encoded) current_params = decoded['params'] new_params = self.params() # Set salt_len to the salt_len of the current parameters because salt # is explicitly passed to argon2. new_params.salt_len = current_params.salt_len update_salt = must_update_salt(decoded['salt'], self.salt_entropy) return (current_params != new_params) or update_salt def harden_runtime(self, password, encoded): # The runtime for Argon2 is too complicated to implement a sensible # hardening algorithm. pass def params(self): argon2 = self._load_library() # salt_len is a noop, because we provide our own salt. return argon2.Parameters( type=argon2.low_level.Type.ID, version=argon2.low_level.ARGON2_VERSION, salt_len=argon2.DEFAULT_RANDOM_SALT_LENGTH, hash_len=argon2.DEFAULT_HASH_LENGTH, time_cost=self.time_cost, memory_cost=self.memory_cost, parallelism=self.parallelism, ) class BCryptSHA256PasswordHasher(BasePasswordHasher): """ Secure password hashing using the bcrypt algorithm (recommended) This is considered by many to be the most secure algorithm but you must first install the bcrypt library. Please be warned that this library depends on native C code and might cause portability issues. """ algorithm = "bcrypt_sha256" digest = hashlib.sha256 library = ("bcrypt", "bcrypt") rounds = 12 def salt(self): bcrypt = self._load_library() return bcrypt.gensalt(self.rounds) def encode(self, password, salt): bcrypt = self._load_library() password = password.encode() # Hash the password prior to using bcrypt to prevent password # truncation as described in #20138. if self.digest is not None: # Use binascii.hexlify() because a hex encoded bytestring is str. 
password = binascii.hexlify(self.digest(password).digest()) data = bcrypt.hashpw(password, salt) return "%s$%s" % (self.algorithm, data.decode('ascii')) def decode(self, encoded): algorithm, empty, algostr, work_factor, data = encoded.split('$', 4) assert algorithm == self.algorithm return { 'algorithm': algorithm, 'algostr': algostr, 'checksum': data[22:], 'salt': data[:22], 'work_factor': int(work_factor), } def verify(self, password, encoded): algorithm, data = encoded.split('$', 1) assert algorithm == self.algorithm encoded_2 = self.encode(password, data.encode('ascii')) return constant_time_compare(encoded, encoded_2) def safe_summary(self, encoded): decoded = self.decode(encoded) return { _('algorithm'): decoded['algorithm'], _('work factor'): decoded['work_factor'], _('salt'): mask_hash(decoded['salt']), _('checksum'): mask_hash(decoded['checksum']), } def must_update(self, encoded): decoded = self.decode(encoded) return decoded['work_factor'] != self.rounds def harden_runtime(self, password, encoded): _, data = encoded.split('$', 1) salt = data[:29] # Length of the salt in bcrypt. rounds = data.split('$')[2] # work factor is logarithmic, adding one doubles the load. diff = 2**(self.rounds - int(rounds)) - 1 while diff > 0: self.encode(password, salt.encode('ascii')) diff -= 1 class BCryptPasswordHasher(BCryptSHA256PasswordHasher): """ Secure password hashing using the bcrypt algorithm This is considered by many to be the most secure algorithm but you must first install the bcrypt library. Please be warned that this library depends on native C code and might cause portability issues. This hasher does not first hash the password which means it is subject to bcrypt's 72 bytes password truncation. Most use cases should prefer the BCryptSHA256PasswordHasher. """ algorithm = "bcrypt" digest = None class ScryptPasswordHasher(BasePasswordHasher): """ Secure password hashing using the Scrypt algorithm. 
""" algorithm = 'scrypt' block_size = 8 maxmem = 0 parallelism = 1 work_factor = 2 ** 14 def encode(self, password, salt, n=None, r=None, p=None): self._check_encode_args(password, salt) n = n or self.work_factor r = r or self.block_size p = p or self.parallelism hash_ = hashlib.scrypt( password.encode(), salt=salt.encode(), n=n, r=r, p=p, maxmem=self.maxmem, dklen=64, ) hash_ = base64.b64encode(hash_).decode('ascii').strip() return '%s$%d$%s$%d$%d$%s' % (self.algorithm, n, salt, r, p, hash_) def decode(self, encoded): algorithm, work_factor, salt, block_size, parallelism, hash_ = encoded.split('$', 6) assert algorithm == self.algorithm return { 'algorithm': algorithm, 'work_factor': int(work_factor), 'salt': salt, 'block_size': int(block_size), 'parallelism': int(parallelism), 'hash': hash_, } def verify(self, password, encoded): decoded = self.decode(encoded) encoded_2 = self.encode( password, decoded['salt'], decoded['work_factor'], decoded['block_size'], decoded['parallelism'], ) return constant_time_compare(encoded, encoded_2) def safe_summary(self, encoded): decoded = self.decode(encoded) return { _('algorithm'): decoded['algorithm'], _('work factor'): decoded['work_factor'], _('block size'): decoded['block_size'], _('parallelism'): decoded['parallelism'], _('salt'): mask_hash(decoded['salt']), _('hash'): mask_hash(decoded['hash']), } def must_update(self, encoded): decoded = self.decode(encoded) return ( decoded['work_factor'] != self.work_factor or decoded['block_size'] != self.block_size or decoded['parallelism'] != self.parallelism ) def harden_runtime(self, password, encoded): # The runtime for Scrypt is too complicated to implement a sensible # hardening algorithm. pass class SHA1PasswordHasher(BasePasswordHasher): """ The SHA1 password hashing algorithm (not recommended) """ algorithm = "sha1" def encode(self, password, salt): self._check_encode_args(password, salt) hash = hashlib.sha1((salt + password).encode()).hexdigest() return "%s$%s$%s" % (self.algorithm, salt, hash) def decode(self, encoded): algorithm, salt, hash = encoded.split('$', 2) assert algorithm == self.algorithm return { 'algorithm': algorithm, 'hash': hash, 'salt': salt, } def verify(self, password, encoded): decoded = self.decode(encoded) encoded_2 = self.encode(password, decoded['salt']) return constant_time_compare(encoded, encoded_2) def safe_summary(self, encoded): decoded = self.decode(encoded) return { _('algorithm'): decoded['algorithm'], _('salt'): mask_hash(decoded['salt'], show=2), _('hash'): mask_hash(decoded['hash']), } def must_update(self, encoded): decoded = self.decode(encoded) return must_update_salt(decoded['salt'], self.salt_entropy) def harden_runtime(self, password, encoded): pass class MD5PasswordHasher(BasePasswordHasher): """ The Salted MD5 password hashing algorithm (not recommended) """ algorithm = "md5" def encode(self, password, salt): self._check_encode_args(password, salt) hash = hashlib.md5((salt + password).encode()).hexdigest() return "%s$%s$%s" % (self.algorithm, salt, hash) def decode(self, encoded): algorithm, salt, hash = encoded.split('$', 2) assert algorithm == self.algorithm return { 'algorithm': algorithm, 'hash': hash, 'salt': salt, } def verify(self, password, encoded): decoded = self.decode(encoded) encoded_2 = self.encode(password, decoded['salt']) return constant_time_compare(encoded, encoded_2) def safe_summary(self, encoded): decoded = self.decode(encoded) return { _('algorithm'): decoded['algorithm'], _('salt'): mask_hash(decoded['salt'], show=2), _('hash'): 
mask_hash(decoded['hash']), } def must_update(self, encoded): decoded = self.decode(encoded) return must_update_salt(decoded['salt'], self.salt_entropy) def harden_runtime(self, password, encoded): pass class UnsaltedSHA1PasswordHasher(BasePasswordHasher): """ Very insecure algorithm that you should *never* use; store SHA1 hashes with an empty salt. This class is implemented because Django used to accept such password hashes. Some older Django installs still have these values lingering around so we need to handle and upgrade them properly. """ algorithm = "unsalted_sha1" def salt(self): return '' def encode(self, password, salt): assert salt == '' hash = hashlib.sha1(password.encode()).hexdigest() return 'sha1$$%s' % hash def decode(self, encoded): assert encoded.startswith('sha1$$') return { 'algorithm': self.algorithm, 'hash': encoded[6:], 'salt': None, } def verify(self, password, encoded): encoded_2 = self.encode(password, '') return constant_time_compare(encoded, encoded_2) def safe_summary(self, encoded): decoded = self.decode(encoded) return { _('algorithm'): decoded['algorithm'], _('hash'): mask_hash(decoded['hash']), } def harden_runtime(self, password, encoded): pass class UnsaltedMD5PasswordHasher(BasePasswordHasher): """ Incredibly insecure algorithm that you should *never* use; stores unsalted MD5 hashes without the algorithm prefix, also accepts MD5 hashes with an empty salt. This class is implemented because Django used to store passwords this way and to accept such password hashes. Some older Django installs still have these values lingering around so we need to handle and upgrade them properly. """ algorithm = "unsalted_md5" def salt(self): return '' def encode(self, password, salt): assert salt == '' return hashlib.md5(password.encode()).hexdigest() def decode(self, encoded): return { 'algorithm': self.algorithm, 'hash': encoded, 'salt': None, } def verify(self, password, encoded): if len(encoded) == 37 and encoded.startswith('md5$$'): encoded = encoded[5:] encoded_2 = self.encode(password, '') return constant_time_compare(encoded, encoded_2) def safe_summary(self, encoded): decoded = self.decode(encoded) return { _('algorithm'): decoded['algorithm'], _('hash'): mask_hash(decoded['hash'], show=3), } def harden_runtime(self, password, encoded): pass class CryptPasswordHasher(BasePasswordHasher): """ Password hashing using UNIX crypt (not recommended) The crypt module is not supported on all platforms. """ algorithm = "crypt" library = "crypt" def salt(self): return get_random_string(2) def encode(self, password, salt): crypt = self._load_library() assert len(salt) == 2 hash = crypt.crypt(password, salt) assert hash is not None # A platform like OpenBSD with a dummy crypt module. # we don't need to store the salt, but Django used to do this return '%s$%s$%s' % (self.algorithm, '', hash) def decode(self, encoded): algorithm, salt, hash = encoded.split('$', 2) assert algorithm == self.algorithm return { 'algorithm': algorithm, 'hash': hash, 'salt': salt, } def verify(self, password, encoded): crypt = self._load_library() decoded = self.decode(encoded) data = crypt.crypt(password, decoded['hash']) return constant_time_compare(decoded['hash'], data) def safe_summary(self, encoded): decoded = self.decode(encoded) return { _('algorithm'): decoded['algorithm'], _('salt'): decoded['salt'], _('hash'): mask_hash(decoded['hash'], show=3), } def harden_runtime(self, password, encoded): pass
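
# ---------------------------------------------------------------------------
# Usage sketch (illustrative, not part of Django): the hasher classes above
# are normally reached through the module-level helpers in
# django.contrib.auth.hashers rather than instantiated directly. This assumes
# a configured settings module with a PASSWORD_HASHERS list.
#
#     from django.contrib.auth.hashers import (
#         check_password, identify_hasher, make_password,
#     )
#
#     encoded = make_password('s3cret')       # uses the first configured hasher
#     identify_hasher(encoded).algorithm      # e.g. 'pbkdf2_sha256' or 'argon2'
#     check_password('s3cret', encoded)       # constant-time verify -> True
# ---------------------------------------------------------------------------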
from django.conf import settings from django.contrib import admin, messages from django.contrib.admin.options import IS_POPUP_VAR from django.contrib.admin.utils import unquote from django.contrib.auth import update_session_auth_hash from django.contrib.auth.forms import ( AdminPasswordChangeForm, UserChangeForm, UserCreationForm, ) from django.contrib.auth.models import Group, User from django.core.exceptions import PermissionDenied from django.db import router, transaction from django.http import Http404, HttpResponseRedirect from django.template.response import TemplateResponse from django.urls import path, reverse from django.utils.decorators import method_decorator from django.utils.html import escape from django.utils.translation import gettext, gettext_lazy as _ from django.views.decorators.csrf import csrf_protect from django.views.decorators.debug import sensitive_post_parameters csrf_protect_m = method_decorator(csrf_protect) sensitive_post_parameters_m = method_decorator(sensitive_post_parameters()) @admin.register(Group) class GroupAdmin(admin.ModelAdmin): search_fields = ('name',) ordering = ('name',) filter_horizontal = ('permissions',) def formfield_for_manytomany(self, db_field, request=None, **kwargs): if db_field.name == 'permissions': qs = kwargs.get('queryset', db_field.remote_field.model.objects) # Avoid a major performance hit resolving permission names which # triggers a content_type load: kwargs['queryset'] = qs.select_related('content_type') return super().formfield_for_manytomany(db_field, request=request, **kwargs) @admin.register(User) class UserAdmin(admin.ModelAdmin): add_form_template = 'admin/auth/user/add_form.html' change_user_password_template = None fieldsets = ( (None, {'fields': ('username', 'password')}), (_('Personal info'), {'fields': ('first_name', 'last_name', 'email')}), (_('Permissions'), { 'fields': ('is_active', 'is_staff', 'is_superuser', 'groups', 'user_permissions'), }), (_('Important dates'), {'fields': ('last_login', 'date_joined')}), ) add_fieldsets = ( (None, { 'classes': ('wide',), 'fields': ('username', 'password1', 'password2'), }), ) form = UserChangeForm add_form = UserCreationForm change_password_form = AdminPasswordChangeForm list_display = ('username', 'email', 'first_name', 'last_name', 'is_staff') list_filter = ('is_staff', 'is_superuser', 'is_active', 'groups') search_fields = ('username', 'first_name', 'last_name', 'email') ordering = ('username',) filter_horizontal = ('groups', 'user_permissions',) def get_fieldsets(self, request, obj=None): if not obj: return self.add_fieldsets return super().get_fieldsets(request, obj) def get_form(self, request, obj=None, **kwargs): """ Use special form during user creation """ defaults = {} if obj is None: defaults['form'] = self.add_form defaults.update(kwargs) return super().get_form(request, obj, **defaults) def get_urls(self): return [ path( '<id>/password/', self.admin_site.admin_view(self.user_change_password), name='auth_user_password_change', ), ] + super().get_urls() def lookup_allowed(self, lookup, value): # Don't allow lookups involving passwords. 
return not lookup.startswith('password') and super().lookup_allowed(lookup, value) @sensitive_post_parameters_m @csrf_protect_m def add_view(self, request, form_url='', extra_context=None): with transaction.atomic(using=router.db_for_write(self.model)): return self._add_view(request, form_url, extra_context) def _add_view(self, request, form_url='', extra_context=None): # It's an error for a user to have add permission but NOT change # permission for users. If we allowed such users to add users, they # could create superusers, which would mean they would essentially have # the permission to change users. To avoid the problem entirely, we # disallow users from adding users if they don't have change # permission. if not self.has_change_permission(request): if self.has_add_permission(request) and settings.DEBUG: # Raise Http404 in debug mode so that the user gets a helpful # error message. raise Http404( 'Your user does not have the "Change user" permission. In ' 'order to add users, Django requires that your user ' 'account have both the "Add user" and "Change user" ' 'permissions set.') raise PermissionDenied if extra_context is None: extra_context = {} username_field = self.model._meta.get_field(self.model.USERNAME_FIELD) defaults = { 'auto_populated_fields': (), 'username_help_text': username_field.help_text, } extra_context.update(defaults) return super().add_view(request, form_url, extra_context) @sensitive_post_parameters_m def user_change_password(self, request, id, form_url=''): user = self.get_object(request, unquote(id)) if not self.has_change_permission(request, user): raise PermissionDenied if user is None: raise Http404(_('%(name)s object with primary key %(key)r does not exist.') % { 'name': self.model._meta.verbose_name, 'key': escape(id), }) if request.method == 'POST': form = self.change_password_form(user, request.POST) if form.is_valid(): form.save() change_message = self.construct_change_message(request, form, None) self.log_change(request, user, change_message) msg = gettext('Password changed successfully.') messages.success(request, msg) update_session_auth_hash(request, form.user) return HttpResponseRedirect( reverse( '%s:%s_%s_change' % ( self.admin_site.name, user._meta.app_label, user._meta.model_name, ), args=(user.pk,), ) ) else: form = self.change_password_form(user) fieldsets = [(None, {'fields': list(form.base_fields)})] adminForm = admin.helpers.AdminForm(form, fieldsets, {}) context = { 'title': _('Change password: %s') % escape(user.get_username()), 'adminForm': adminForm, 'form_url': form_url, 'form': form, 'is_popup': (IS_POPUP_VAR in request.POST or IS_POPUP_VAR in request.GET), 'is_popup_var': IS_POPUP_VAR, 'add': True, 'change': False, 'has_delete_permission': False, 'has_change_permission': True, 'has_absolute_url': False, 'opts': self.model._meta, 'original': user, 'save_as': False, 'show_save': True, **self.admin_site.each_context(request), } request.current_app = self.admin_site.name return TemplateResponse( request, self.change_user_password_template or 'admin/auth/user/change_password.html', context, ) def response_add(self, request, obj, post_url_continue=None): """ Determine the HttpResponse for the add_view stage. It mostly defers to its superclass implementation but is customized because the User model has a slightly different workflow. """ # We should allow further modification of the user just added i.e. 
the # 'Save' button should behave like the 'Save and continue editing' # button except in two scenarios: # * The user has pressed the 'Save and add another' button # * We are adding a user in a popup if '_addanother' not in request.POST and IS_POPUP_VAR not in request.POST: request.POST = request.POST.copy() request.POST['_continue'] = 1 return super().response_add(request, obj, post_url_continue)
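
# ---------------------------------------------------------------------------
# Usage sketch (illustrative, not part of Django): projects customize the
# admin above by subclassing and re-registering. The subclass name below is
# hypothetical; unregister()/register() are the documented admin.site API,
# and registering at import time is why this stays commented here.
#
#     from django.contrib import admin
#     from django.contrib.auth.admin import UserAdmin
#     from django.contrib.auth.models import User
#
#     class ProjectUserAdmin(UserAdmin):
#         list_display = (*UserAdmin.list_display, 'is_active')
#
#     admin.site.unregister(User)
#     admin.site.register(User, ProjectUserAdmin)
# ---------------------------------------------------------------------------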
# PermWrapper and PermLookupDict proxy the permissions system into objects that # the template system can understand. class PermLookupDict: def __init__(self, user, app_label): self.user, self.app_label = user, app_label def __repr__(self): return str(self.user.get_all_permissions()) def __getitem__(self, perm_name): return self.user.has_perm("%s.%s" % (self.app_label, perm_name)) def __iter__(self): # To fix 'item in perms.someapp' and __getitem__ interaction we need to # define __iter__. See #18979 for details. raise TypeError("PermLookupDict is not iterable.") def __bool__(self): return self.user.has_module_perms(self.app_label) class PermWrapper: def __init__(self, user): self.user = user def __repr__(self): return f'{self.__class__.__qualname__}({self.user!r})' def __getitem__(self, app_label): return PermLookupDict(self.user, app_label) def __iter__(self): # I am large, I contain multitudes. raise TypeError("PermWrapper is not iterable.") def __contains__(self, perm_name): """ Lookup by "someapp" or "someapp.someperm" in perms. """ if '.' not in perm_name: # The name refers to module. return bool(self[perm_name]) app_label, perm_name = perm_name.split('.', 1) return self[app_label][perm_name] def auth(request): """ Return context variables required by apps that use Django's authentication system. If there is no 'user' attribute in the request, use AnonymousUser (from django.contrib.auth). """ if hasattr(request, 'user'): user = request.user else: from django.contrib.auth.models import AnonymousUser user = AnonymousUser() return { 'user': user, 'perms': PermWrapper(user), }
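
# ---------------------------------------------------------------------------
# Usage sketch (illustrative, not part of Django): with
# 'django.contrib.auth.context_processors.auth' listed under the template
# engine's 'context_processors' option, templates resolve permissions through
# the wrappers above ('polls' and 'add_choice' are hypothetical names):
#
#     {% if perms.polls %}                 -> PermWrapper.__getitem__ +
#                                             PermLookupDict.__bool__
#     {% if perms.polls.add_choice %}      -> PermLookupDict.__getitem__
#     {% if 'polls.add_choice' in perms %} -> PermWrapper.__contains__
# ---------------------------------------------------------------------------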
import unicodedata from django import forms from django.contrib.auth import ( authenticate, get_user_model, password_validation, ) from django.contrib.auth.hashers import ( UNUSABLE_PASSWORD_PREFIX, identify_hasher, ) from django.contrib.auth.models import User from django.contrib.auth.tokens import default_token_generator from django.contrib.sites.shortcuts import get_current_site from django.core.exceptions import ValidationError from django.core.mail import EmailMultiAlternatives from django.template import loader from django.utils.encoding import force_bytes from django.utils.http import urlsafe_base64_encode from django.utils.text import capfirst from django.utils.translation import gettext, gettext_lazy as _ UserModel = get_user_model() def _unicode_ci_compare(s1, s2): """ Perform case-insensitive comparison of two identifiers, using the recommended algorithm from Unicode Technical Report 36, section 2.11.2(B)(2). """ return unicodedata.normalize('NFKC', s1).casefold() == unicodedata.normalize('NFKC', s2).casefold() class ReadOnlyPasswordHashWidget(forms.Widget): template_name = 'auth/widgets/read_only_password_hash.html' read_only = True def get_context(self, name, value, attrs): context = super().get_context(name, value, attrs) summary = [] if not value or value.startswith(UNUSABLE_PASSWORD_PREFIX): summary.append({'label': gettext("No password set.")}) else: try: hasher = identify_hasher(value) except ValueError: summary.append({'label': gettext("Invalid password format or unknown hashing algorithm.")}) else: for key, value_ in hasher.safe_summary(value).items(): summary.append({'label': gettext(key), 'value': value_}) context['summary'] = summary return context def id_for_label(self, id_): return None class ReadOnlyPasswordHashField(forms.Field): widget = ReadOnlyPasswordHashWidget def __init__(self, *args, **kwargs): kwargs.setdefault("required", False) kwargs.setdefault('disabled', True) super().__init__(*args, **kwargs) class UsernameField(forms.CharField): def to_python(self, value): return unicodedata.normalize('NFKC', super().to_python(value)) def widget_attrs(self, widget): return { **super().widget_attrs(widget), 'autocapitalize': 'none', 'autocomplete': 'username', } class UserCreationForm(forms.ModelForm): """ A form that creates a user, with no privileges, from the given username and password. 
""" error_messages = { 'password_mismatch': _('The two password fields didn’t match.'), } password1 = forms.CharField( label=_("Password"), strip=False, widget=forms.PasswordInput(attrs={'autocomplete': 'new-password'}), help_text=password_validation.password_validators_help_text_html(), ) password2 = forms.CharField( label=_("Password confirmation"), widget=forms.PasswordInput(attrs={'autocomplete': 'new-password'}), strip=False, help_text=_("Enter the same password as before, for verification."), ) class Meta: model = User fields = ("username",) field_classes = {'username': UsernameField} def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) if self._meta.model.USERNAME_FIELD in self.fields: self.fields[self._meta.model.USERNAME_FIELD].widget.attrs['autofocus'] = True def clean_password2(self): password1 = self.cleaned_data.get("password1") password2 = self.cleaned_data.get("password2") if password1 and password2 and password1 != password2: raise ValidationError( self.error_messages['password_mismatch'], code='password_mismatch', ) return password2 def _post_clean(self): super()._post_clean() # Validate the password after self.instance is updated with form data # by super(). password = self.cleaned_data.get('password2') if password: try: password_validation.validate_password(password, self.instance) except ValidationError as error: self.add_error('password2', error) def save(self, commit=True): user = super().save(commit=False) user.set_password(self.cleaned_data["password1"]) if commit: user.save() return user class UserChangeForm(forms.ModelForm): password = ReadOnlyPasswordHashField( label=_("Password"), help_text=_( 'Raw passwords are not stored, so there is no way to see this ' 'user’s password, but you can change the password using ' '<a href="{}">this form</a>.' ), ) class Meta: model = User fields = '__all__' field_classes = {'username': UsernameField} def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) password = self.fields.get('password') if password: password.help_text = password.help_text.format('../password/') user_permissions = self.fields.get('user_permissions') if user_permissions: user_permissions.queryset = user_permissions.queryset.select_related('content_type') class AuthenticationForm(forms.Form): """ Base class for authenticating users. Extend this to get a form that accepts username/password logins. """ username = UsernameField(widget=forms.TextInput(attrs={'autofocus': True})) password = forms.CharField( label=_("Password"), strip=False, widget=forms.PasswordInput(attrs={'autocomplete': 'current-password'}), ) error_messages = { 'invalid_login': _( "Please enter a correct %(username)s and password. Note that both " "fields may be case-sensitive." ), 'inactive': _("This account is inactive."), } def __init__(self, request=None, *args, **kwargs): """ The 'request' parameter is set for custom auth use by subclasses. The form data comes in via the standard 'data' kwarg. """ self.request = request self.user_cache = None super().__init__(*args, **kwargs) # Set the max length and label for the "username" field. 
self.username_field = UserModel._meta.get_field(UserModel.USERNAME_FIELD) username_max_length = self.username_field.max_length or 254 self.fields['username'].max_length = username_max_length self.fields['username'].widget.attrs['maxlength'] = username_max_length if self.fields['username'].label is None: self.fields['username'].label = capfirst(self.username_field.verbose_name) def clean(self): username = self.cleaned_data.get('username') password = self.cleaned_data.get('password') if username is not None and password: self.user_cache = authenticate(self.request, username=username, password=password) if self.user_cache is None: raise self.get_invalid_login_error() else: self.confirm_login_allowed(self.user_cache) return self.cleaned_data def confirm_login_allowed(self, user): """ Controls whether the given User may log in. This is a policy setting, independent of end-user authentication. This default behavior is to allow login by active users, and reject login by inactive users. If the given user cannot log in, this method should raise a ``ValidationError``. If the given user may log in, this method should return None. """ if not user.is_active: raise ValidationError( self.error_messages['inactive'], code='inactive', ) def get_user(self): return self.user_cache def get_invalid_login_error(self): return ValidationError( self.error_messages['invalid_login'], code='invalid_login', params={'username': self.username_field.verbose_name}, ) class PasswordResetForm(forms.Form): email = forms.EmailField( label=_("Email"), max_length=254, widget=forms.EmailInput(attrs={'autocomplete': 'email'}) ) def send_mail(self, subject_template_name, email_template_name, context, from_email, to_email, html_email_template_name=None): """ Send a django.core.mail.EmailMultiAlternatives to `to_email`. """ subject = loader.render_to_string(subject_template_name, context) # Email subject *must not* contain newlines subject = ''.join(subject.splitlines()) body = loader.render_to_string(email_template_name, context) email_message = EmailMultiAlternatives(subject, body, from_email, [to_email]) if html_email_template_name is not None: html_email = loader.render_to_string(html_email_template_name, context) email_message.attach_alternative(html_email, 'text/html') email_message.send() def get_users(self, email): """Given an email, return matching user(s) who should receive a reset. This allows subclasses to more easily customize the default policies that prevent inactive users and users with unusable passwords from resetting their password. """ email_field_name = UserModel.get_email_field_name() active_users = UserModel._default_manager.filter(**{ '%s__iexact' % email_field_name: email, 'is_active': True, }) return ( u for u in active_users if u.has_usable_password() and _unicode_ci_compare(email, getattr(u, email_field_name)) ) def save(self, domain_override=None, subject_template_name='registration/password_reset_subject.txt', email_template_name='registration/password_reset_email.html', use_https=False, token_generator=default_token_generator, from_email=None, request=None, html_email_template_name=None, extra_email_context=None): """ Generate a one-use only link for resetting password and send it to the user. 
""" email = self.cleaned_data["email"] if not domain_override: current_site = get_current_site(request) site_name = current_site.name domain = current_site.domain else: site_name = domain = domain_override email_field_name = UserModel.get_email_field_name() for user in self.get_users(email): user_email = getattr(user, email_field_name) context = { 'email': user_email, 'domain': domain, 'site_name': site_name, 'uid': urlsafe_base64_encode(force_bytes(user.pk)), 'user': user, 'token': token_generator.make_token(user), 'protocol': 'https' if use_https else 'http', **(extra_email_context or {}), } self.send_mail( subject_template_name, email_template_name, context, from_email, user_email, html_email_template_name=html_email_template_name, ) class SetPasswordForm(forms.Form): """ A form that lets a user change set their password without entering the old password """ error_messages = { 'password_mismatch': _('The two password fields didn’t match.'), } new_password1 = forms.CharField( label=_("New password"), widget=forms.PasswordInput(attrs={'autocomplete': 'new-password'}), strip=False, help_text=password_validation.password_validators_help_text_html(), ) new_password2 = forms.CharField( label=_("New password confirmation"), strip=False, widget=forms.PasswordInput(attrs={'autocomplete': 'new-password'}), ) def __init__(self, user, *args, **kwargs): self.user = user super().__init__(*args, **kwargs) def clean_new_password2(self): password1 = self.cleaned_data.get('new_password1') password2 = self.cleaned_data.get('new_password2') if password1 and password2: if password1 != password2: raise ValidationError( self.error_messages['password_mismatch'], code='password_mismatch', ) password_validation.validate_password(password2, self.user) return password2 def save(self, commit=True): password = self.cleaned_data["new_password1"] self.user.set_password(password) if commit: self.user.save() return self.user class PasswordChangeForm(SetPasswordForm): """ A form that lets a user change their password by entering their old password. """ error_messages = { **SetPasswordForm.error_messages, 'password_incorrect': _("Your old password was entered incorrectly. Please enter it again."), } old_password = forms.CharField( label=_("Old password"), strip=False, widget=forms.PasswordInput(attrs={'autocomplete': 'current-password', 'autofocus': True}), ) field_order = ['old_password', 'new_password1', 'new_password2'] def clean_old_password(self): """ Validate that the old_password field is correct. """ old_password = self.cleaned_data["old_password"] if not self.user.check_password(old_password): raise ValidationError( self.error_messages['password_incorrect'], code='password_incorrect', ) return old_password class AdminPasswordChangeForm(forms.Form): """ A form used to change the password of a user in the admin interface. 
""" error_messages = { 'password_mismatch': _('The two password fields didn’t match.'), } required_css_class = 'required' password1 = forms.CharField( label=_("Password"), widget=forms.PasswordInput(attrs={'autocomplete': 'new-password', 'autofocus': True}), strip=False, help_text=password_validation.password_validators_help_text_html(), ) password2 = forms.CharField( label=_("Password (again)"), widget=forms.PasswordInput(attrs={'autocomplete': 'new-password'}), strip=False, help_text=_("Enter the same password as before, for verification."), ) def __init__(self, user, *args, **kwargs): self.user = user super().__init__(*args, **kwargs) def clean_password2(self): password1 = self.cleaned_data.get('password1') password2 = self.cleaned_data.get('password2') if password1 and password2 and password1 != password2: raise ValidationError( self.error_messages['password_mismatch'], code='password_mismatch', ) password_validation.validate_password(password2, self.user) return password2 def save(self, commit=True): """Save the new password.""" password = self.cleaned_data["password1"] self.user.set_password(password) if commit: self.user.save() return self.user @property def changed_data(self): data = super().changed_data for name in self.fields: if name not in data: return [] return ['password']
from django.contrib import auth from django.contrib.auth import load_backend from django.contrib.auth.backends import RemoteUserBackend from django.core.exceptions import ImproperlyConfigured from django.utils.deprecation import MiddlewareMixin from django.utils.functional import SimpleLazyObject def get_user(request): if not hasattr(request, '_cached_user'): request._cached_user = auth.get_user(request) return request._cached_user class AuthenticationMiddleware(MiddlewareMixin): def process_request(self, request): if not hasattr(request, 'session'): raise ImproperlyConfigured( "The Django authentication middleware requires session " "middleware to be installed. Edit your MIDDLEWARE setting to " "insert " "'django.contrib.sessions.middleware.SessionMiddleware' before " "'django.contrib.auth.middleware.AuthenticationMiddleware'." ) request.user = SimpleLazyObject(lambda: get_user(request)) class RemoteUserMiddleware(MiddlewareMixin): """ Middleware for utilizing web-server-provided authentication. If request.user is not authenticated, then this middleware attempts to authenticate the username passed in the ``REMOTE_USER`` request header. If authentication is successful, the user is automatically logged in to persist the user in the session. The header used is configurable and defaults to ``REMOTE_USER``. Subclass this class and change the ``header`` attribute if you need to use a different header. """ # Name of request header to grab username from. This will be the key as # used in the request.META dictionary, i.e. the normalization of headers to # all uppercase and the addition of "HTTP_" prefix apply. header = "REMOTE_USER" force_logout_if_no_header = True def process_request(self, request): # AuthenticationMiddleware is required so that request.user exists. if not hasattr(request, 'user'): raise ImproperlyConfigured( "The Django remote user auth middleware requires the" " authentication middleware to be installed. Edit your" " MIDDLEWARE setting to insert" " 'django.contrib.auth.middleware.AuthenticationMiddleware'" " before the RemoteUserMiddleware class.") try: username = request.META[self.header] except KeyError: # If specified header doesn't exist then remove any existing # authenticated remote-user, or return (leaving request.user set to # AnonymousUser by the AuthenticationMiddleware). if self.force_logout_if_no_header and request.user.is_authenticated: self._remove_invalid_user(request) return # If the user is already authenticated and that user is the user we are # getting passed in the headers, then the correct user is already # persisted in the session and we don't need to continue. if request.user.is_authenticated: if request.user.get_username() == self.clean_username(username, request): return else: # An authenticated user is associated with the request, but # it does not match the authorized user in the header. self._remove_invalid_user(request) # We are seeing this user for the first time in this session, attempt # to authenticate the user. user = auth.authenticate(request, remote_user=username) if user: # User is valid. Set request.user and persist user in the session # by logging the user in. request.user = user auth.login(request, user) def clean_username(self, username, request): """ Allow the backend to clean the username, if the backend defines a clean_username method. 
""" backend_str = request.session[auth.BACKEND_SESSION_KEY] backend = auth.load_backend(backend_str) try: username = backend.clean_username(username) except AttributeError: # Backend has no clean_username method. pass return username def _remove_invalid_user(self, request): """ Remove the current authenticated user in the request which is invalid but only if the user is authenticated via the RemoteUserBackend. """ try: stored_backend = load_backend(request.session.get(auth.BACKEND_SESSION_KEY, '')) except ImportError: # backend failed to load auth.logout(request) else: if isinstance(stored_backend, RemoteUserBackend): auth.logout(request) class PersistentRemoteUserMiddleware(RemoteUserMiddleware): """ Middleware for web-server provided authentication on logon pages. Like RemoteUserMiddleware but keeps the user authenticated even if the header (``REMOTE_USER``) is not found in the request. Useful for setups when the external authentication via ``REMOTE_USER`` is only expected to happen on some "logon" URL and the rest of the application wants to use Django's authentication mechanism. """ force_logout_if_no_header = False
from urllib.parse import urlparse, urlunparse from django.conf import settings # Avoid shadowing the login() and logout() views below. from django.contrib.auth import ( REDIRECT_FIELD_NAME, get_user_model, login as auth_login, logout as auth_logout, update_session_auth_hash, ) from django.contrib.auth.decorators import login_required from django.contrib.auth.forms import ( AuthenticationForm, PasswordChangeForm, PasswordResetForm, SetPasswordForm, ) from django.contrib.auth.tokens import default_token_generator from django.contrib.sites.shortcuts import get_current_site from django.core.exceptions import ImproperlyConfigured, ValidationError from django.http import HttpResponseRedirect, QueryDict from django.shortcuts import resolve_url from django.urls import reverse_lazy from django.utils.decorators import method_decorator from django.utils.http import ( url_has_allowed_host_and_scheme, urlsafe_base64_decode, ) from django.utils.translation import gettext_lazy as _ from django.views.decorators.cache import never_cache from django.views.decorators.csrf import csrf_protect from django.views.decorators.debug import sensitive_post_parameters from django.views.generic.base import TemplateView from django.views.generic.edit import FormView UserModel = get_user_model() class SuccessURLAllowedHostsMixin: success_url_allowed_hosts = set() def get_success_url_allowed_hosts(self): return {self.request.get_host(), *self.success_url_allowed_hosts} class LoginView(SuccessURLAllowedHostsMixin, FormView): """ Display the login form and handle the login action. """ form_class = AuthenticationForm authentication_form = None next_page = None redirect_field_name = REDIRECT_FIELD_NAME template_name = 'registration/login.html' redirect_authenticated_user = False extra_context = None @method_decorator(sensitive_post_parameters()) @method_decorator(csrf_protect) @method_decorator(never_cache) def dispatch(self, request, *args, **kwargs): if self.redirect_authenticated_user and self.request.user.is_authenticated: redirect_to = self.get_success_url() if redirect_to == self.request.path: raise ValueError( "Redirection loop for authenticated user detected. Check that " "your LOGIN_REDIRECT_URL doesn't point to a login page." ) return HttpResponseRedirect(redirect_to) return super().dispatch(request, *args, **kwargs) def get_success_url(self): return self.get_redirect_url() or self.get_default_redirect_url() def get_redirect_url(self): """Return the user-originating redirect URL if it's safe.""" redirect_to = self.request.POST.get( self.redirect_field_name, self.request.GET.get(self.redirect_field_name, '') ) url_is_safe = url_has_allowed_host_and_scheme( url=redirect_to, allowed_hosts=self.get_success_url_allowed_hosts(), require_https=self.request.is_secure(), ) return redirect_to if url_is_safe else '' def get_default_redirect_url(self): """Return the default redirect URL.""" return resolve_url(self.next_page or settings.LOGIN_REDIRECT_URL) def get_form_class(self): return self.authentication_form or self.form_class def get_form_kwargs(self): kwargs = super().get_form_kwargs() kwargs['request'] = self.request return kwargs def form_valid(self, form): """Security check complete. 
Log the user in.""" auth_login(self.request, form.get_user()) return HttpResponseRedirect(self.get_success_url()) def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) current_site = get_current_site(self.request) context.update({ self.redirect_field_name: self.get_redirect_url(), 'site': current_site, 'site_name': current_site.name, **(self.extra_context or {}) }) return context class LogoutView(SuccessURLAllowedHostsMixin, TemplateView): """ Log out the user and display the 'You are logged out' message. """ next_page = None redirect_field_name = REDIRECT_FIELD_NAME template_name = 'registration/logged_out.html' extra_context = None @method_decorator(never_cache) def dispatch(self, request, *args, **kwargs): auth_logout(request) next_page = self.get_next_page() if next_page: # Redirect to this page until the session has been cleared. return HttpResponseRedirect(next_page) return super().dispatch(request, *args, **kwargs) def post(self, request, *args, **kwargs): """Logout may be done via POST.""" return self.get(request, *args, **kwargs) def get_next_page(self): if self.next_page is not None: next_page = resolve_url(self.next_page) elif settings.LOGOUT_REDIRECT_URL: next_page = resolve_url(settings.LOGOUT_REDIRECT_URL) else: next_page = self.next_page if (self.redirect_field_name in self.request.POST or self.redirect_field_name in self.request.GET): next_page = self.request.POST.get( self.redirect_field_name, self.request.GET.get(self.redirect_field_name) ) url_is_safe = url_has_allowed_host_and_scheme( url=next_page, allowed_hosts=self.get_success_url_allowed_hosts(), require_https=self.request.is_secure(), ) # Security check -- Ensure the user-originating redirection URL is # safe. if not url_is_safe: next_page = self.request.path return next_page def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) current_site = get_current_site(self.request) context.update({ 'site': current_site, 'site_name': current_site.name, 'title': _('Logged out'), **(self.extra_context or {}) }) return context def logout_then_login(request, login_url=None): """ Log out the user if they are logged in. Then redirect to the login page. """ login_url = resolve_url(login_url or settings.LOGIN_URL) return LogoutView.as_view(next_page=login_url)(request) def redirect_to_login(next, login_url=None, redirect_field_name=REDIRECT_FIELD_NAME): """ Redirect the user to the login page, passing the given 'next' page. 
""" resolved_url = resolve_url(login_url or settings.LOGIN_URL) login_url_parts = list(urlparse(resolved_url)) if redirect_field_name: querystring = QueryDict(login_url_parts[4], mutable=True) querystring[redirect_field_name] = next login_url_parts[4] = querystring.urlencode(safe='/') return HttpResponseRedirect(urlunparse(login_url_parts)) # Class-based password reset views # - PasswordResetView sends the mail # - PasswordResetDoneView shows a success message for the above # - PasswordResetConfirmView checks the link the user clicked and # prompts for a new password # - PasswordResetCompleteView shows a success message for the above class PasswordContextMixin: extra_context = None def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) context.update({ 'title': self.title, **(self.extra_context or {}) }) return context class PasswordResetView(PasswordContextMixin, FormView): email_template_name = 'registration/password_reset_email.html' extra_email_context = None form_class = PasswordResetForm from_email = None html_email_template_name = None subject_template_name = 'registration/password_reset_subject.txt' success_url = reverse_lazy('password_reset_done') template_name = 'registration/password_reset_form.html' title = _('Password reset') token_generator = default_token_generator @method_decorator(csrf_protect) def dispatch(self, *args, **kwargs): return super().dispatch(*args, **kwargs) def form_valid(self, form): opts = { 'use_https': self.request.is_secure(), 'token_generator': self.token_generator, 'from_email': self.from_email, 'email_template_name': self.email_template_name, 'subject_template_name': self.subject_template_name, 'request': self.request, 'html_email_template_name': self.html_email_template_name, 'extra_email_context': self.extra_email_context, } form.save(**opts) return super().form_valid(form) INTERNAL_RESET_SESSION_TOKEN = '_password_reset_token' class PasswordResetDoneView(PasswordContextMixin, TemplateView): template_name = 'registration/password_reset_done.html' title = _('Password reset sent') class PasswordResetConfirmView(PasswordContextMixin, FormView): form_class = SetPasswordForm post_reset_login = False post_reset_login_backend = None reset_url_token = 'set-password' success_url = reverse_lazy('password_reset_complete') template_name = 'registration/password_reset_confirm.html' title = _('Enter new password') token_generator = default_token_generator @method_decorator(sensitive_post_parameters()) @method_decorator(never_cache) def dispatch(self, *args, **kwargs): if 'uidb64' not in kwargs or 'token' not in kwargs: raise ImproperlyConfigured( "The URL path must contain 'uidb64' and 'token' parameters." ) self.validlink = False self.user = self.get_user(kwargs['uidb64']) if self.user is not None: token = kwargs['token'] if token == self.reset_url_token: session_token = self.request.session.get(INTERNAL_RESET_SESSION_TOKEN) if self.token_generator.check_token(self.user, session_token): # If the token is valid, display the password reset form. self.validlink = True return super().dispatch(*args, **kwargs) else: if self.token_generator.check_token(self.user, token): # Store the token in the session and redirect to the # password reset form at a URL without the token. That # avoids the possibility of leaking the token in the # HTTP Referer header. 
self.request.session[INTERNAL_RESET_SESSION_TOKEN] = token redirect_url = self.request.path.replace(token, self.reset_url_token) return HttpResponseRedirect(redirect_url) # Display the "Password reset unsuccessful" page. return self.render_to_response(self.get_context_data()) def get_user(self, uidb64): try: # urlsafe_base64_decode() decodes to bytestring uid = urlsafe_base64_decode(uidb64).decode() user = UserModel._default_manager.get(pk=uid) except (TypeError, ValueError, OverflowError, UserModel.DoesNotExist, ValidationError): user = None return user def get_form_kwargs(self): kwargs = super().get_form_kwargs() kwargs['user'] = self.user return kwargs def form_valid(self, form): user = form.save() del self.request.session[INTERNAL_RESET_SESSION_TOKEN] if self.post_reset_login: auth_login(self.request, user, self.post_reset_login_backend) return super().form_valid(form) def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) if self.validlink: context['validlink'] = True else: context.update({ 'form': None, 'title': _('Password reset unsuccessful'), 'validlink': False, }) return context class PasswordResetCompleteView(PasswordContextMixin, TemplateView): template_name = 'registration/password_reset_complete.html' title = _('Password reset complete') def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) context['login_url'] = resolve_url(settings.LOGIN_URL) return context class PasswordChangeView(PasswordContextMixin, FormView): form_class = PasswordChangeForm success_url = reverse_lazy('password_change_done') template_name = 'registration/password_change_form.html' title = _('Password change') @method_decorator(sensitive_post_parameters()) @method_decorator(csrf_protect) @method_decorator(login_required) def dispatch(self, *args, **kwargs): return super().dispatch(*args, **kwargs) def get_form_kwargs(self): kwargs = super().get_form_kwargs() kwargs['user'] = self.request.user return kwargs def form_valid(self, form): form.save() # Updating the password logs out all other sessions for the user # except the current one. update_session_auth_hash(self.request, form.user) return super().form_valid(form) class PasswordChangeDoneView(PasswordContextMixin, TemplateView): template_name = 'registration/password_change_done.html' title = _('Password change successful') @method_decorator(login_required) def dispatch(self, *args, **kwargs): return super().dispatch(*args, **kwargs)
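
# ---------------------------------------------------------------------------
# URLconf sketch (illustrative, not part of Django): wiring a few of the
# views above by hand. The URL names must match the reverse_lazy() targets
# used internally (e.g. 'password_reset_done'); including
# 'django.contrib.auth.urls' instead provides the full set.
#
#     from django.contrib.auth import views as auth_views
#     from django.urls import path
#
#     urlpatterns = [
#         path('login/', auth_views.LoginView.as_view(), name='login'),
#         path('logout/', auth_views.LogoutView.as_view(), name='logout'),
#         path('password_reset/', auth_views.PasswordResetView.as_view(),
#              name='password_reset'),
#         path('password_reset/done/',
#              auth_views.PasswordResetDoneView.as_view(),
#              name='password_reset_done'),
#     ]
# ---------------------------------------------------------------------------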
import json from django import forms from django.contrib.admin.utils import ( display_for_field, flatten_fieldsets, help_text_for_field, label_for_field, lookup_field, quote, ) from django.core.exceptions import ObjectDoesNotExist from django.db.models.fields.related import ( ForeignObjectRel, ManyToManyRel, OneToOneField, ) from django.forms.utils import flatatt from django.template.defaultfilters import capfirst, linebreaksbr from django.urls import NoReverseMatch, reverse from django.utils.html import conditional_escape, format_html from django.utils.safestring import mark_safe from django.utils.translation import gettext, gettext_lazy as _ ACTION_CHECKBOX_NAME = '_selected_action' class ActionForm(forms.Form): action = forms.ChoiceField(label=_('Action:')) select_across = forms.BooleanField( label='', required=False, initial=0, widget=forms.HiddenInput({'class': 'select-across'}), ) checkbox = forms.CheckboxInput({'class': 'action-select'}, lambda value: False) class AdminForm: def __init__(self, form, fieldsets, prepopulated_fields, readonly_fields=None, model_admin=None): self.form, self.fieldsets = form, fieldsets self.prepopulated_fields = [{ 'field': form[field_name], 'dependencies': [form[f] for f in dependencies] } for field_name, dependencies in prepopulated_fields.items()] self.model_admin = model_admin if readonly_fields is None: readonly_fields = () self.readonly_fields = readonly_fields def __repr__(self): return ( f'<{self.__class__.__qualname__}: ' f'form={self.form.__class__.__qualname__} ' f'fieldsets={self.fieldsets!r}>' ) def __iter__(self): for name, options in self.fieldsets: yield Fieldset( self.form, name, readonly_fields=self.readonly_fields, model_admin=self.model_admin, **options ) @property def errors(self): return self.form.errors @property def non_field_errors(self): return self.form.non_field_errors @property def media(self): media = self.form.media for fs in self: media = media + fs.media return media class Fieldset: def __init__(self, form, name=None, readonly_fields=(), fields=(), classes=(), description=None, model_admin=None): self.form = form self.name, self.fields = name, fields self.classes = ' '.join(classes) self.description = description self.model_admin = model_admin self.readonly_fields = readonly_fields @property def media(self): if 'collapse' in self.classes: return forms.Media(js=['admin/js/collapse.js']) return forms.Media() def __iter__(self): for field in self.fields: yield Fieldline(self.form, field, self.readonly_fields, model_admin=self.model_admin) class Fieldline: def __init__(self, form, field, readonly_fields=None, model_admin=None): self.form = form # A django.forms.Form instance if not hasattr(field, "__iter__") or isinstance(field, str): self.fields = [field] else: self.fields = field self.has_visible_field = not all( field in self.form.fields and self.form.fields[field].widget.is_hidden for field in self.fields ) self.model_admin = model_admin if readonly_fields is None: readonly_fields = () self.readonly_fields = readonly_fields def __iter__(self): for i, field in enumerate(self.fields): if field in self.readonly_fields: yield AdminReadonlyField(self.form, field, is_first=(i == 0), model_admin=self.model_admin) else: yield AdminField(self.form, field, is_first=(i == 0)) def errors(self): return mark_safe( '\n'.join( self.form[f].errors.as_ul() for f in self.fields if f not in self.readonly_fields ).strip('\n') ) class AdminField: def __init__(self, form, field, is_first): self.field = form[field] # A django.forms.BoundField 
instance self.is_first = is_first # Whether this field is first on the line self.is_checkbox = isinstance(self.field.field.widget, forms.CheckboxInput) self.is_readonly = False def label_tag(self): classes = [] contents = conditional_escape(self.field.label) if self.is_checkbox: classes.append('vCheckboxLabel') if self.field.field.required: classes.append('required') if not self.is_first: classes.append('inline') attrs = {'class': ' '.join(classes)} if classes else {} # checkboxes should not have a label suffix as the checkbox appears # to the left of the label. return self.field.label_tag( contents=mark_safe(contents), attrs=attrs, label_suffix='' if self.is_checkbox else None, ) def errors(self): return mark_safe(self.field.errors.as_ul()) class AdminReadonlyField: def __init__(self, form, field, is_first, model_admin=None): # Make self.field look a little bit like a field. This means that # {{ field.name }} must be a useful class name to identify the field. # For convenience, store other field-related data here too. if callable(field): class_name = field.__name__ if field.__name__ != '<lambda>' else '' else: class_name = field if form._meta.labels and class_name in form._meta.labels: label = form._meta.labels[class_name] else: label = label_for_field(field, form._meta.model, model_admin, form=form) if form._meta.help_texts and class_name in form._meta.help_texts: help_text = form._meta.help_texts[class_name] else: help_text = help_text_for_field(class_name, form._meta.model) self.field = { 'name': class_name, 'label': label, 'help_text': help_text, 'field': field, } self.form = form self.model_admin = model_admin self.is_first = is_first self.is_checkbox = False self.is_readonly = True self.empty_value_display = model_admin.get_empty_value_display() def label_tag(self): attrs = {} if not self.is_first: attrs["class"] = "inline" label = self.field['label'] return format_html('<label{}>{}{}</label>', flatatt(attrs), capfirst(label), self.form.label_suffix) def get_admin_url(self, remote_field, remote_obj): url_name = 'admin:%s_%s_change' % ( remote_field.model._meta.app_label, remote_field.model._meta.model_name, ) try: url = reverse(url_name, args=[quote(remote_obj.pk)]) return format_html('<a href="{}">{}</a>', url, remote_obj) except NoReverseMatch: return str(remote_obj) def contents(self): from django.contrib.admin.templatetags.admin_list import _boolean_icon field, obj, model_admin = self.field['field'], self.form.instance, self.model_admin try: f, attr, value = lookup_field(field, obj, model_admin) except (AttributeError, ValueError, ObjectDoesNotExist): result_repr = self.empty_value_display else: if field in self.form.fields: widget = self.form[field].field.widget # This isn't elegant but suffices for contrib.auth's # ReadOnlyPasswordHashWidget. 
if getattr(widget, 'read_only', False): return widget.render(field, value) if f is None: if getattr(attr, 'boolean', False): result_repr = _boolean_icon(value) else: if hasattr(value, "__html__"): result_repr = value else: result_repr = linebreaksbr(value) else: if isinstance(f.remote_field, ManyToManyRel) and value is not None: result_repr = ", ".join(map(str, value.all())) elif ( isinstance(f.remote_field, (ForeignObjectRel, OneToOneField)) and value is not None ): result_repr = self.get_admin_url(f.remote_field, value) else: result_repr = display_for_field(value, f, self.empty_value_display) result_repr = linebreaksbr(result_repr) return conditional_escape(result_repr) class InlineAdminFormSet: """ A wrapper around an inline formset for use in the admin system. """ def __init__(self, inline, formset, fieldsets, prepopulated_fields=None, readonly_fields=None, model_admin=None, has_add_permission=True, has_change_permission=True, has_delete_permission=True, has_view_permission=True): self.opts = inline self.formset = formset self.fieldsets = fieldsets self.model_admin = model_admin if readonly_fields is None: readonly_fields = () self.readonly_fields = readonly_fields if prepopulated_fields is None: prepopulated_fields = {} self.prepopulated_fields = prepopulated_fields self.classes = ' '.join(inline.classes) if inline.classes else '' self.has_add_permission = has_add_permission self.has_change_permission = has_change_permission self.has_delete_permission = has_delete_permission self.has_view_permission = has_view_permission def __iter__(self): if self.has_change_permission: readonly_fields_for_editing = self.readonly_fields else: readonly_fields_for_editing = self.readonly_fields + flatten_fieldsets(self.fieldsets) for form, original in zip(self.formset.initial_forms, self.formset.get_queryset()): view_on_site_url = self.opts.get_view_on_site_url(original) yield InlineAdminForm( self.formset, form, self.fieldsets, self.prepopulated_fields, original, readonly_fields_for_editing, model_admin=self.opts, view_on_site_url=view_on_site_url, ) for form in self.formset.extra_forms: yield InlineAdminForm( self.formset, form, self.fieldsets, self.prepopulated_fields, None, self.readonly_fields, model_admin=self.opts, ) if self.has_add_permission: yield InlineAdminForm( self.formset, self.formset.empty_form, self.fieldsets, self.prepopulated_fields, None, self.readonly_fields, model_admin=self.opts, ) def fields(self): fk = getattr(self.formset, "fk", None) empty_form = self.formset.empty_form meta_labels = empty_form._meta.labels or {} meta_help_texts = empty_form._meta.help_texts or {} for i, field_name in enumerate(flatten_fieldsets(self.fieldsets)): if fk and fk.name == field_name: continue if not self.has_change_permission or field_name in self.readonly_fields: yield { 'name': field_name, 'label': meta_labels.get(field_name) or label_for_field( field_name, self.opts.model, self.opts, form=empty_form, ), 'widget': {'is_hidden': False}, 'required': False, 'help_text': meta_help_texts.get(field_name) or help_text_for_field(field_name, self.opts.model), } else: form_field = empty_form.fields[field_name] label = form_field.label if label is None: label = label_for_field(field_name, self.opts.model, self.opts, form=empty_form) yield { 'name': field_name, 'label': label, 'widget': form_field.widget, 'required': form_field.required, 'help_text': form_field.help_text, } def inline_formset_data(self): verbose_name = self.opts.verbose_name return json.dumps({ 'name': '#%s' % self.formset.prefix, 
'options': { 'prefix': self.formset.prefix, 'addText': gettext('Add another %(verbose_name)s') % { 'verbose_name': capfirst(verbose_name), }, 'deleteText': gettext('Remove'), } }) @property def forms(self): return self.formset.forms @property def non_form_errors(self): return self.formset.non_form_errors @property def media(self): media = self.opts.media + self.formset.media for fs in self: media = media + fs.media return media class InlineAdminForm(AdminForm): """ A wrapper around an inline form for use in the admin system. """ def __init__(self, formset, form, fieldsets, prepopulated_fields, original, readonly_fields=None, model_admin=None, view_on_site_url=None): self.formset = formset self.model_admin = model_admin self.original = original self.show_url = original and view_on_site_url is not None self.absolute_url = view_on_site_url super().__init__(form, fieldsets, prepopulated_fields, readonly_fields, model_admin) def __iter__(self): for name, options in self.fieldsets: yield InlineFieldset( self.formset, self.form, name, self.readonly_fields, model_admin=self.model_admin, **options ) def needs_explicit_pk_field(self): return ( # Auto fields are editable, so check for auto or non-editable pk. self.form._meta.model._meta.auto_field or not self.form._meta.model._meta.pk.editable or # Also search any parents for an auto field. (The pk info is # propagated to child models so that does not need to be checked # in parents.) any(parent._meta.auto_field or not parent._meta.model._meta.pk.editable for parent in self.form._meta.model._meta.get_parent_list()) ) def pk_field(self): return AdminField(self.form, self.formset._pk_field.name, False) def fk_field(self): fk = getattr(self.formset, "fk", None) if fk: return AdminField(self.form, fk.name, False) else: return "" def deletion_field(self): from django.forms.formsets import DELETION_FIELD_NAME return AdminField(self.form, DELETION_FIELD_NAME, False) def ordering_field(self): from django.forms.formsets import ORDERING_FIELD_NAME return AdminField(self.form, ORDERING_FIELD_NAME, False) class InlineFieldset(Fieldset): def __init__(self, formset, *args, **kwargs): self.formset = formset super().__init__(*args, **kwargs) def __iter__(self): fk = getattr(self.formset, "fk", None) for field in self.fields: if not fk or fk.name != field: yield Fieldline(self.form, field, self.readonly_fields, model_admin=self.model_admin) class AdminErrorList(forms.utils.ErrorList): """Store errors for the form/formsets in an add/change view.""" def __init__(self, form, inline_formsets): super().__init__() if form.is_bound: self.extend(form.errors.values()) for inline_formset in inline_formsets: self.extend(inline_formset.non_form_errors()) for errors_in_inline_form in inline_formset.errors: self.extend(errors_in_inline_form.values())
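
# ---------------------------------------------------------------------------
# Usage sketch (illustrative, not part of Django): the kind of ModelAdmin
# declaration these helpers render. 'collapse' in a fieldset's classes
# triggers Fieldset.media above, and names listed in readonly_fields are
# routed to AdminReadonlyField by Fieldline. The model admin below is
# hypothetical.
#
#     class ArticleAdmin(admin.ModelAdmin):
#         fieldsets = (
#             (None, {'fields': ('title', 'body')}),
#             ('Advanced', {
#                 'classes': ('collapse',),
#                 'fields': ('slug', 'created'),
#             }),
#         )
#         readonly_fields = ('created',)
# ---------------------------------------------------------------------------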
import copy import json import re from functools import partial, update_wrapper from urllib.parse import quote as urlquote from django import forms from django.conf import settings from django.contrib import messages from django.contrib.admin import helpers, widgets from django.contrib.admin.checks import ( BaseModelAdminChecks, InlineModelAdminChecks, ModelAdminChecks, ) from django.contrib.admin.decorators import display from django.contrib.admin.exceptions import DisallowedModelAdminToField from django.contrib.admin.templatetags.admin_urls import add_preserved_filters from django.contrib.admin.utils import ( NestedObjects, construct_change_message, flatten_fieldsets, get_deleted_objects, lookup_spawns_duplicates, model_format_dict, model_ngettext, quote, unquote, ) from django.contrib.admin.widgets import ( AutocompleteSelect, AutocompleteSelectMultiple, ) from django.contrib.auth import get_permission_codename from django.core.exceptions import ( FieldDoesNotExist, FieldError, PermissionDenied, ValidationError, ) from django.core.paginator import Paginator from django.db import models, router, transaction from django.db.models.constants import LOOKUP_SEP from django.forms.formsets import DELETION_FIELD_NAME, all_valid from django.forms.models import ( BaseInlineFormSet, inlineformset_factory, modelform_defines_fields, modelform_factory, modelformset_factory, ) from django.forms.widgets import CheckboxSelectMultiple, SelectMultiple from django.http import HttpResponseRedirect from django.http.response import HttpResponseBase from django.template.response import SimpleTemplateResponse, TemplateResponse from django.urls import reverse from django.utils.decorators import method_decorator from django.utils.html import format_html from django.utils.http import urlencode from django.utils.safestring import mark_safe from django.utils.text import ( capfirst, format_lazy, get_text_list, smart_split, unescape_string_literal, ) from django.utils.translation import gettext as _, ngettext from django.views.decorators.csrf import csrf_protect from django.views.generic import RedirectView IS_POPUP_VAR = '_popup' TO_FIELD_VAR = '_to_field' HORIZONTAL, VERTICAL = 1, 2 def get_content_type_for_model(obj): # Since this module gets imported in the application's root package, # it cannot import models from other applications at the module level. from django.contrib.contenttypes.models import ContentType return ContentType.objects.get_for_model(obj, for_concrete_model=False) def get_ul_class(radio_style): return 'radiolist' if radio_style == VERTICAL else 'radiolist inline' class IncorrectLookupParameters(Exception): pass # Defaults for formfield_overrides. ModelAdmin subclasses can change this # by adding to ModelAdmin.formfield_overrides. 
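# Editor's note -- an illustrative sketch, not Django source. A subclass only
# declares the field classes it wants to change; BaseModelAdmin.__init__()
# merges its ``formfield_overrides`` into the defaults below instead of
# replacing them. For example (``ArticleAdmin`` is hypothetical):
#
#     class ArticleAdmin(admin.ModelAdmin):
#         formfield_overrides = {
#             models.TextField: {'widget': forms.Textarea(attrs={'rows': 4})},
#         }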
FORMFIELD_FOR_DBFIELD_DEFAULTS = { models.DateTimeField: { 'form_class': forms.SplitDateTimeField, 'widget': widgets.AdminSplitDateTime }, models.DateField: {'widget': widgets.AdminDateWidget}, models.TimeField: {'widget': widgets.AdminTimeWidget}, models.TextField: {'widget': widgets.AdminTextareaWidget}, models.URLField: {'widget': widgets.AdminURLFieldWidget}, models.IntegerField: {'widget': widgets.AdminIntegerFieldWidget}, models.BigIntegerField: {'widget': widgets.AdminBigIntegerFieldWidget}, models.CharField: {'widget': widgets.AdminTextInputWidget}, models.ImageField: {'widget': widgets.AdminFileWidget}, models.FileField: {'widget': widgets.AdminFileWidget}, models.EmailField: {'widget': widgets.AdminEmailInputWidget}, models.UUIDField: {'widget': widgets.AdminUUIDInputWidget}, } csrf_protect_m = method_decorator(csrf_protect) class BaseModelAdmin(metaclass=forms.MediaDefiningClass): """Functionality common to both ModelAdmin and InlineAdmin.""" autocomplete_fields = () raw_id_fields = () fields = None exclude = None fieldsets = None form = forms.ModelForm filter_vertical = () filter_horizontal = () radio_fields = {} prepopulated_fields = {} formfield_overrides = {} readonly_fields = () ordering = None sortable_by = None view_on_site = True show_full_result_count = True checks_class = BaseModelAdminChecks def check(self, **kwargs): return self.checks_class().check(self, **kwargs) def __init__(self): # Merge FORMFIELD_FOR_DBFIELD_DEFAULTS with the formfield_overrides # rather than simply overwriting. overrides = copy.deepcopy(FORMFIELD_FOR_DBFIELD_DEFAULTS) for k, v in self.formfield_overrides.items(): overrides.setdefault(k, {}).update(v) self.formfield_overrides = overrides def formfield_for_dbfield(self, db_field, request, **kwargs): """ Hook for specifying the form Field instance for a given database Field instance. If kwargs are given, they're passed to the form Field's constructor. """ # If the field specifies choices, we don't need to look for special # admin widgets - we just need to use a select widget of some kind. if db_field.choices: return self.formfield_for_choice_field(db_field, request, **kwargs) # ForeignKey or ManyToManyFields if isinstance(db_field, (models.ForeignKey, models.ManyToManyField)): # Combine the field kwargs with any options for formfield_overrides. # Make sure the passed in **kwargs override anything in # formfield_overrides because **kwargs is more specific, and should # always win. if db_field.__class__ in self.formfield_overrides: kwargs = {**self.formfield_overrides[db_field.__class__], **kwargs} # Get the correct formfield. if isinstance(db_field, models.ForeignKey): formfield = self.formfield_for_foreignkey(db_field, request, **kwargs) elif isinstance(db_field, models.ManyToManyField): formfield = self.formfield_for_manytomany(db_field, request, **kwargs) # For non-raw_id fields, wrap the widget with a wrapper that adds # extra HTML -- the "add other" interface -- to the end of the # rendered output. formfield can be None if it came from a # OneToOneField with parent_link=True or a M2M intermediary. 
if formfield and db_field.name not in self.raw_id_fields: related_modeladmin = self.admin_site._registry.get(db_field.remote_field.model) wrapper_kwargs = {} if related_modeladmin: wrapper_kwargs.update( can_add_related=related_modeladmin.has_add_permission(request), can_change_related=related_modeladmin.has_change_permission(request), can_delete_related=related_modeladmin.has_delete_permission(request), can_view_related=related_modeladmin.has_view_permission(request), ) formfield.widget = widgets.RelatedFieldWidgetWrapper( formfield.widget, db_field.remote_field, self.admin_site, **wrapper_kwargs ) return formfield # If we've got overrides for the formfield defined, use 'em. **kwargs # passed to formfield_for_dbfield override the defaults. for klass in db_field.__class__.mro(): if klass in self.formfield_overrides: kwargs = {**copy.deepcopy(self.formfield_overrides[klass]), **kwargs} return db_field.formfield(**kwargs) # For any other type of field, just call its formfield() method. return db_field.formfield(**kwargs) def formfield_for_choice_field(self, db_field, request, **kwargs): """ Get a form Field for a database Field that has declared choices. """ # If the field is named as a radio_field, use a RadioSelect if db_field.name in self.radio_fields: # Avoid stomping on custom widget/choices arguments. if 'widget' not in kwargs: kwargs['widget'] = widgets.AdminRadioSelect(attrs={ 'class': get_ul_class(self.radio_fields[db_field.name]), }) if 'choices' not in kwargs: kwargs['choices'] = db_field.get_choices( include_blank=db_field.blank, blank_choice=[('', _('None'))] ) return db_field.formfield(**kwargs) def get_field_queryset(self, db, db_field, request): """ If the ModelAdmin specifies ordering, the queryset should respect that ordering. Otherwise don't specify the queryset, let the field decide (return None in that case). """ related_admin = self.admin_site._registry.get(db_field.remote_field.model) if related_admin is not None: ordering = related_admin.get_ordering(request) if ordering is not None and ordering != (): return db_field.remote_field.model._default_manager.using(db).order_by(*ordering) return None def formfield_for_foreignkey(self, db_field, request, **kwargs): """ Get a form Field for a ForeignKey. """ db = kwargs.get('using') if 'widget' not in kwargs: if db_field.name in self.get_autocomplete_fields(request): kwargs['widget'] = AutocompleteSelect(db_field, self.admin_site, using=db) elif db_field.name in self.raw_id_fields: kwargs['widget'] = widgets.ForeignKeyRawIdWidget(db_field.remote_field, self.admin_site, using=db) elif db_field.name in self.radio_fields: kwargs['widget'] = widgets.AdminRadioSelect(attrs={ 'class': get_ul_class(self.radio_fields[db_field.name]), }) kwargs['empty_label'] = _('None') if db_field.blank else None if 'queryset' not in kwargs: queryset = self.get_field_queryset(db, db_field, request) if queryset is not None: kwargs['queryset'] = queryset return db_field.formfield(**kwargs) def formfield_for_manytomany(self, db_field, request, **kwargs): """ Get a form Field for a ManyToManyField. """ # If it uses an intermediary model that isn't auto created, don't show # a field in admin. 
if not db_field.remote_field.through._meta.auto_created: return None db = kwargs.get('using') if 'widget' not in kwargs: autocomplete_fields = self.get_autocomplete_fields(request) if db_field.name in autocomplete_fields: kwargs['widget'] = AutocompleteSelectMultiple( db_field, self.admin_site, using=db, ) elif db_field.name in self.raw_id_fields: kwargs['widget'] = widgets.ManyToManyRawIdWidget( db_field.remote_field, self.admin_site, using=db, ) elif db_field.name in [*self.filter_vertical, *self.filter_horizontal]: kwargs['widget'] = widgets.FilteredSelectMultiple( db_field.verbose_name, db_field.name in self.filter_vertical ) if 'queryset' not in kwargs: queryset = self.get_field_queryset(db, db_field, request) if queryset is not None: kwargs['queryset'] = queryset form_field = db_field.formfield(**kwargs) if (isinstance(form_field.widget, SelectMultiple) and not isinstance(form_field.widget, (CheckboxSelectMultiple, AutocompleteSelectMultiple))): msg = _('Hold down “Control”, or “Command” on a Mac, to select more than one.') help_text = form_field.help_text form_field.help_text = format_lazy('{} {}', help_text, msg) if help_text else msg return form_field def get_autocomplete_fields(self, request): """ Return a list of ForeignKey and/or ManyToMany fields which should use an autocomplete widget. """ return self.autocomplete_fields def get_view_on_site_url(self, obj=None): if obj is None or not self.view_on_site: return None if callable(self.view_on_site): return self.view_on_site(obj) elif hasattr(obj, 'get_absolute_url'): # use the ContentType lookup if view_on_site is True return reverse('admin:view_on_site', kwargs={ 'content_type_id': get_content_type_for_model(obj).pk, 'object_id': obj.pk }) def get_empty_value_display(self): """ Return the empty_value_display set on ModelAdmin or AdminSite. """ try: return mark_safe(self.empty_value_display) except AttributeError: return mark_safe(self.admin_site.empty_value_display) def get_exclude(self, request, obj=None): """ Hook for specifying exclude. """ return self.exclude def get_fields(self, request, obj=None): """ Hook for specifying fields. """ if self.fields: return self.fields # _get_form_for_get_fields() is implemented in subclasses. form = self._get_form_for_get_fields(request, obj) return [*form.base_fields, *self.get_readonly_fields(request, obj)] def get_fieldsets(self, request, obj=None): """ Hook for specifying fieldsets. """ if self.fieldsets: return self.fieldsets return [(None, {'fields': self.get_fields(request, obj)})] def get_inlines(self, request, obj): """Hook for specifying custom inlines.""" return self.inlines def get_ordering(self, request): """ Hook for specifying field ordering. """ return self.ordering or () # otherwise we might try to *None, which is bad ;) def get_readonly_fields(self, request, obj=None): """ Hook for specifying custom readonly fields. """ return self.readonly_fields def get_prepopulated_fields(self, request, obj=None): """ Hook for specifying custom prepopulated fields. """ return self.prepopulated_fields def get_queryset(self, request): """ Return a QuerySet of all model instances that can be edited by the admin site. This is used by changelist_view. """ qs = self.model._default_manager.get_queryset() # TODO: this should be handled by some parameter to the ChangeList. 
ordering = self.get_ordering(request) if ordering: qs = qs.order_by(*ordering) return qs def get_sortable_by(self, request): """Hook for specifying which fields can be sorted in the changelist.""" return self.sortable_by if self.sortable_by is not None else self.get_list_display(request) def lookup_allowed(self, lookup, value): from django.contrib.admin.filters import SimpleListFilter model = self.model # Check FKey lookups that are allowed, so that popups produced by # ForeignKeyRawIdWidget, on the basis of ForeignKey.limit_choices_to, # are allowed to work. for fk_lookup in model._meta.related_fkey_lookups: # As ``limit_choices_to`` can be a callable, invoke it here. if callable(fk_lookup): fk_lookup = fk_lookup() if (lookup, value) in widgets.url_params_from_lookup_dict(fk_lookup).items(): return True relation_parts = [] prev_field = None for part in lookup.split(LOOKUP_SEP): try: field = model._meta.get_field(part) except FieldDoesNotExist: # Lookups on nonexistent fields are ok, since they're ignored # later. break # It is allowed to filter on values that would be found from local # model anyways. For example, if you filter on employee__department__id, # then the id value would be found already from employee__department_id. if not prev_field or (prev_field.is_relation and field not in prev_field.get_path_info()[-1].target_fields): relation_parts.append(part) if not getattr(field, 'get_path_info', None): # This is not a relational field, so further parts # must be transforms. break prev_field = field model = field.get_path_info()[-1].to_opts.model if len(relation_parts) <= 1: # Either a local field filter, or no fields at all. return True valid_lookups = {self.date_hierarchy} for filter_item in self.list_filter: if isinstance(filter_item, type) and issubclass(filter_item, SimpleListFilter): valid_lookups.add(filter_item.parameter_name) elif isinstance(filter_item, (list, tuple)): valid_lookups.add(filter_item[0]) else: valid_lookups.add(filter_item) # Is it a valid relational lookup? return not { LOOKUP_SEP.join(relation_parts), LOOKUP_SEP.join(relation_parts + [part]) }.isdisjoint(valid_lookups) def to_field_allowed(self, request, to_field): """ Return True if the model associated with this admin should be allowed to be referenced by the specified field. """ opts = self.model._meta try: field = opts.get_field(to_field) except FieldDoesNotExist: return False # Always allow referencing the primary key since it's already possible # to get this information from the change view URL. if field.primary_key: return True # Allow reverse relationships to models defining m2m fields if they # target the specified field. for many_to_many in opts.many_to_many: if many_to_many.m2m_target_field_name() == to_field: return True # Make sure at least one of the models registered for this site # references this field through a FK or a M2M relationship. 
        registered_models = set()
        for model, admin in self.admin_site._registry.items():
            registered_models.add(model)
            for inline in admin.inlines:
                registered_models.add(inline.model)

        related_objects = (
            f for f in opts.get_fields(include_hidden=True)
            if (f.auto_created and not f.concrete)
        )
        for related_object in related_objects:
            related_model = related_object.related_model
            remote_field = related_object.field.remote_field

            if (any(issubclass(model, related_model) for model in registered_models) and
                    hasattr(remote_field, 'get_related_field') and
                    remote_field.get_related_field() == field):
                return True

        return False

    def has_add_permission(self, request):
        """
        Return True if the given request has permission to add an object.
        Can be overridden by the user in subclasses.
        """
        opts = self.opts
        codename = get_permission_codename('add', opts)
        return request.user.has_perm("%s.%s" % (opts.app_label, codename))

    def has_change_permission(self, request, obj=None):
        """
        Return True if the given request has permission to change the given
        Django model instance; the default implementation doesn't examine the
        `obj` parameter.

        Can be overridden by the user in subclasses. In such case it should
        return True if the given request has permission to change the `obj`
        model instance. If `obj` is None, this should return True if the given
        request has permission to change *any* object of the given type.
        """
        opts = self.opts
        codename = get_permission_codename('change', opts)
        return request.user.has_perm("%s.%s" % (opts.app_label, codename))

    def has_delete_permission(self, request, obj=None):
        """
        Return True if the given request has permission to delete the given
        Django model instance; the default implementation doesn't examine the
        `obj` parameter.

        Can be overridden by the user in subclasses. In such case it should
        return True if the given request has permission to delete the `obj`
        model instance. If `obj` is None, this should return True if the given
        request has permission to delete *any* object of the given type.
        """
        opts = self.opts
        codename = get_permission_codename('delete', opts)
        return request.user.has_perm("%s.%s" % (opts.app_label, codename))

    def has_view_permission(self, request, obj=None):
        """
        Return True if the given request has permission to view the given
        Django model instance. The default implementation doesn't examine the
        `obj` parameter.

        If overridden by the user in subclasses, it should return True if the
        given request has permission to view the `obj` model instance. If `obj`
        is None, it should return True if the request has permission to view
        any object of the given type.
        """
        opts = self.opts
        codename_view = get_permission_codename('view', opts)
        codename_change = get_permission_codename('change', opts)
        return (
            request.user.has_perm('%s.%s' % (opts.app_label, codename_view)) or
            request.user.has_perm('%s.%s' % (opts.app_label, codename_change))
        )

    def has_view_or_change_permission(self, request, obj=None):
        return self.has_view_permission(request, obj) or self.has_change_permission(request, obj)

    def has_module_permission(self, request):
        """
        Return True if the given request has any permission in the given
        app label.

        Can be overridden by the user in subclasses. In such case it should
        return True if the given request has permission to view the module on
        the admin index page and access the module's index page. Overriding it
        does not restrict access to the add, change or delete views. Use
        `ModelAdmin.has_(add|change|delete)_permission` for that.
""" return request.user.has_module_perms(self.opts.app_label) class ModelAdmin(BaseModelAdmin): """Encapsulate all admin options and functionality for a given model.""" list_display = ('__str__',) list_display_links = () list_filter = () list_select_related = False list_per_page = 100 list_max_show_all = 200 list_editable = () search_fields = () search_help_text = None date_hierarchy = None save_as = False save_as_continue = True save_on_top = False paginator = Paginator preserve_filters = True inlines = [] # Custom templates (designed to be over-ridden in subclasses) add_form_template = None change_form_template = None change_list_template = None delete_confirmation_template = None delete_selected_confirmation_template = None object_history_template = None popup_response_template = None # Actions actions = [] action_form = helpers.ActionForm actions_on_top = True actions_on_bottom = False actions_selection_counter = True checks_class = ModelAdminChecks def __init__(self, model, admin_site): self.model = model self.opts = model._meta self.admin_site = admin_site super().__init__() def __str__(self): return "%s.%s" % (self.model._meta.app_label, self.__class__.__name__) def __repr__(self): return ( f'<{self.__class__.__qualname__}: model={self.model.__qualname__} ' f'site={self.admin_site!r}>' ) def get_inline_instances(self, request, obj=None): inline_instances = [] for inline_class in self.get_inlines(request, obj): inline = inline_class(self.model, self.admin_site) if request: if not (inline.has_view_or_change_permission(request, obj) or inline.has_add_permission(request, obj) or inline.has_delete_permission(request, obj)): continue if not inline.has_add_permission(request, obj): inline.max_num = 0 inline_instances.append(inline) return inline_instances def get_urls(self): from django.urls import path def wrap(view): def wrapper(*args, **kwargs): return self.admin_site.admin_view(view)(*args, **kwargs) wrapper.model_admin = self return update_wrapper(wrapper, view) info = self.model._meta.app_label, self.model._meta.model_name return [ path('', wrap(self.changelist_view), name='%s_%s_changelist' % info), path('add/', wrap(self.add_view), name='%s_%s_add' % info), path('<path:object_id>/history/', wrap(self.history_view), name='%s_%s_history' % info), path('<path:object_id>/delete/', wrap(self.delete_view), name='%s_%s_delete' % info), path('<path:object_id>/change/', wrap(self.change_view), name='%s_%s_change' % info), # For backwards compatibility (was the change url before 1.9) path('<path:object_id>/', wrap(RedirectView.as_view( pattern_name='%s:%s_%s_change' % ((self.admin_site.name,) + info) ))), ] @property def urls(self): return self.get_urls() @property def media(self): extra = '' if settings.DEBUG else '.min' js = [ 'vendor/jquery/jquery%s.js' % extra, 'jquery.init.js', 'core.js', 'admin/RelatedObjectLookups.js', 'actions.js', 'urlify.js', 'prepopulate.js', 'vendor/xregexp/xregexp%s.js' % extra, ] return forms.Media(js=['admin/js/%s' % url for url in js]) def get_model_perms(self, request): """ Return a dict of all perms for this model. This dict has the keys ``add``, ``change``, ``delete``, and ``view`` mapping to the True/False for each of those actions. 
""" return { 'add': self.has_add_permission(request), 'change': self.has_change_permission(request), 'delete': self.has_delete_permission(request), 'view': self.has_view_permission(request), } def _get_form_for_get_fields(self, request, obj): return self.get_form(request, obj, fields=None) def get_form(self, request, obj=None, change=False, **kwargs): """ Return a Form class for use in the admin add view. This is used by add_view and change_view. """ if 'fields' in kwargs: fields = kwargs.pop('fields') else: fields = flatten_fieldsets(self.get_fieldsets(request, obj)) excluded = self.get_exclude(request, obj) exclude = [] if excluded is None else list(excluded) readonly_fields = self.get_readonly_fields(request, obj) exclude.extend(readonly_fields) # Exclude all fields if it's a change form and the user doesn't have # the change permission. if change and hasattr(request, 'user') and not self.has_change_permission(request, obj): exclude.extend(fields) if excluded is None and hasattr(self.form, '_meta') and self.form._meta.exclude: # Take the custom ModelForm's Meta.exclude into account only if the # ModelAdmin doesn't define its own. exclude.extend(self.form._meta.exclude) # if exclude is an empty list we pass None to be consistent with the # default on modelform_factory exclude = exclude or None # Remove declared form fields which are in readonly_fields. new_attrs = dict.fromkeys(f for f in readonly_fields if f in self.form.declared_fields) form = type(self.form.__name__, (self.form,), new_attrs) defaults = { 'form': form, 'fields': fields, 'exclude': exclude, 'formfield_callback': partial(self.formfield_for_dbfield, request=request), **kwargs, } if defaults['fields'] is None and not modelform_defines_fields(defaults['form']): defaults['fields'] = forms.ALL_FIELDS try: return modelform_factory(self.model, **defaults) except FieldError as e: raise FieldError( '%s. Check fields/fieldsets/exclude attributes of class %s.' % (e, self.__class__.__name__) ) def get_changelist(self, request, **kwargs): """ Return the ChangeList class for use on the changelist page. """ from django.contrib.admin.views.main import ChangeList return ChangeList def get_changelist_instance(self, request): """ Return a `ChangeList` instance based on `request`. May raise `IncorrectLookupParameters`. """ list_display = self.get_list_display(request) list_display_links = self.get_list_display_links(request, list_display) # Add the action checkboxes if any actions are available. if self.get_actions(request): list_display = ['action_checkbox', *list_display] sortable_by = self.get_sortable_by(request) ChangeList = self.get_changelist(request) return ChangeList( request, self.model, list_display, list_display_links, self.get_list_filter(request), self.date_hierarchy, self.get_search_fields(request), self.get_list_select_related(request), self.list_per_page, self.list_max_show_all, self.list_editable, self, sortable_by, self.search_help_text, ) def get_object(self, request, object_id, from_field=None): """ Return an instance matching the field and value provided, the primary key is used if no field is provided. Return ``None`` if no match is found or the object_id fails validation. 
""" queryset = self.get_queryset(request) model = queryset.model field = model._meta.pk if from_field is None else model._meta.get_field(from_field) try: object_id = field.to_python(object_id) return queryset.get(**{field.name: object_id}) except (model.DoesNotExist, ValidationError, ValueError): return None def get_changelist_form(self, request, **kwargs): """ Return a Form class for use in the Formset on the changelist page. """ defaults = { 'formfield_callback': partial(self.formfield_for_dbfield, request=request), **kwargs, } if defaults.get('fields') is None and not modelform_defines_fields(defaults.get('form')): defaults['fields'] = forms.ALL_FIELDS return modelform_factory(self.model, **defaults) def get_changelist_formset(self, request, **kwargs): """ Return a FormSet class for use on the changelist page if list_editable is used. """ defaults = { 'formfield_callback': partial(self.formfield_for_dbfield, request=request), **kwargs, } return modelformset_factory( self.model, self.get_changelist_form(request), extra=0, fields=self.list_editable, **defaults ) def get_formsets_with_inlines(self, request, obj=None): """ Yield formsets and the corresponding inlines. """ for inline in self.get_inline_instances(request, obj): yield inline.get_formset(request, obj), inline def get_paginator(self, request, queryset, per_page, orphans=0, allow_empty_first_page=True): return self.paginator(queryset, per_page, orphans, allow_empty_first_page) def log_addition(self, request, obj, message): """ Log that an object has been successfully added. The default implementation creates an admin LogEntry object. """ from django.contrib.admin.models import ADDITION, LogEntry return LogEntry.objects.log_action( user_id=request.user.pk, content_type_id=get_content_type_for_model(obj).pk, object_id=obj.pk, object_repr=str(obj), action_flag=ADDITION, change_message=message, ) def log_change(self, request, obj, message): """ Log that an object has been successfully changed. The default implementation creates an admin LogEntry object. """ from django.contrib.admin.models import CHANGE, LogEntry return LogEntry.objects.log_action( user_id=request.user.pk, content_type_id=get_content_type_for_model(obj).pk, object_id=obj.pk, object_repr=str(obj), action_flag=CHANGE, change_message=message, ) def log_deletion(self, request, obj, object_repr): """ Log that an object will be deleted. Note that this method must be called before the deletion. The default implementation creates an admin LogEntry object. """ from django.contrib.admin.models import DELETION, LogEntry return LogEntry.objects.log_action( user_id=request.user.pk, content_type_id=get_content_type_for_model(obj).pk, object_id=obj.pk, object_repr=object_repr, action_flag=DELETION, ) @display(description=mark_safe('<input type="checkbox" id="action-toggle">')) def action_checkbox(self, obj): """ A list_display column containing a checkbox widget. """ return helpers.checkbox.render(helpers.ACTION_CHECKBOX_NAME, str(obj.pk)) @staticmethod def _get_action_description(func, name): return getattr(func, 'short_description', capfirst(name.replace('_', ' '))) def _get_base_actions(self): """Return the list of actions, prior to any request-based filtering.""" actions = [] base_actions = (self.get_action(action) for action in self.actions or []) # get_action might have returned None, so filter any of those out. 
base_actions = [action for action in base_actions if action] base_action_names = {name for _, name, _ in base_actions} # Gather actions from the admin site first for (name, func) in self.admin_site.actions: if name in base_action_names: continue description = self._get_action_description(func, name) actions.append((func, name, description)) # Add actions from this ModelAdmin. actions.extend(base_actions) return actions def _filter_actions_by_permissions(self, request, actions): """Filter out any actions that the user doesn't have access to.""" filtered_actions = [] for action in actions: callable = action[0] if not hasattr(callable, 'allowed_permissions'): filtered_actions.append(action) continue permission_checks = ( getattr(self, 'has_%s_permission' % permission) for permission in callable.allowed_permissions ) if any(has_permission(request) for has_permission in permission_checks): filtered_actions.append(action) return filtered_actions def get_actions(self, request): """ Return a dictionary mapping the names of all actions for this ModelAdmin to a tuple of (callable, name, description) for each action. """ # If self.actions is set to None that means actions are disabled on # this page. if self.actions is None or IS_POPUP_VAR in request.GET: return {} actions = self._filter_actions_by_permissions(request, self._get_base_actions()) return {name: (func, name, desc) for func, name, desc in actions} def get_action_choices(self, request, default_choices=models.BLANK_CHOICE_DASH): """ Return a list of choices for use in a form object. Each choice is a tuple (name, description). """ choices = [] + default_choices for func, name, description in self.get_actions(request).values(): choice = (name, description % model_format_dict(self.opts)) choices.append(choice) return choices def get_action(self, action): """ Return a given action from a parameter, which can either be a callable, or the name of a method on the ModelAdmin. Return is a tuple of (callable, name, description). """ # If the action is a callable, just use it. if callable(action): func = action action = action.__name__ # Next, look for a method. Grab it off self.__class__ to get an unbound # method instead of a bound one; this ensures that the calling # conventions are the same for functions and methods. elif hasattr(self.__class__, action): func = getattr(self.__class__, action) # Finally, look for a named method on the admin site else: try: func = self.admin_site.get_action(action) except KeyError: return None description = self._get_action_description(func, action) return func, action, description def get_list_display(self, request): """ Return a sequence containing the fields to be displayed on the changelist. """ return self.list_display def get_list_display_links(self, request, list_display): """ Return a sequence containing the fields to be displayed as links on the changelist. The list_display parameter is the list of fields returned by get_list_display(). """ if self.list_display_links or self.list_display_links is None or not list_display: return self.list_display_links else: # Use only the first item in list_display as link return list(list_display)[:1] def get_list_filter(self, request): """ Return a sequence containing the fields to be displayed as filters in the right sidebar of the changelist page. """ return self.list_filter def get_list_select_related(self, request): """ Return a list of fields to add to the select_related() part of the changelist items query. 
""" return self.list_select_related def get_search_fields(self, request): """ Return a sequence containing the fields to be searched whenever somebody submits a search query. """ return self.search_fields def get_search_results(self, request, queryset, search_term): """ Return a tuple containing a queryset to implement the search and a boolean indicating if the results may contain duplicates. """ # Apply keyword searches. def construct_search(field_name): if field_name.startswith('^'): return "%s__istartswith" % field_name[1:] elif field_name.startswith('='): return "%s__iexact" % field_name[1:] elif field_name.startswith('@'): return "%s__search" % field_name[1:] # Use field_name if it includes a lookup. opts = queryset.model._meta lookup_fields = field_name.split(LOOKUP_SEP) # Go through the fields, following all relations. prev_field = None for path_part in lookup_fields: if path_part == 'pk': path_part = opts.pk.name try: field = opts.get_field(path_part) except FieldDoesNotExist: # Use valid query lookups. if prev_field and prev_field.get_lookup(path_part): return field_name else: prev_field = field if hasattr(field, 'get_path_info'): # Update opts to follow the relation. opts = field.get_path_info()[-1].to_opts # Otherwise, use the field with icontains. return "%s__icontains" % field_name may_have_duplicates = False search_fields = self.get_search_fields(request) if search_fields and search_term: orm_lookups = [construct_search(str(search_field)) for search_field in search_fields] for bit in smart_split(search_term): if bit.startswith(('"', "'")) and bit[0] == bit[-1]: bit = unescape_string_literal(bit) or_queries = models.Q( *((orm_lookup, bit) for orm_lookup in orm_lookups), _connector=models.Q.OR, ) queryset = queryset.filter(or_queries) may_have_duplicates |= any( lookup_spawns_duplicates(self.opts, search_spec) for search_spec in orm_lookups ) return queryset, may_have_duplicates def get_preserved_filters(self, request): """ Return the preserved filters querystring. """ match = request.resolver_match if self.preserve_filters and match: opts = self.model._meta current_url = '%s:%s' % (match.app_name, match.url_name) changelist_url = 'admin:%s_%s_changelist' % (opts.app_label, opts.model_name) if current_url == changelist_url: preserved_filters = request.GET.urlencode() else: preserved_filters = request.GET.get('_changelist_filters') if preserved_filters: return urlencode({'_changelist_filters': preserved_filters}) return '' def construct_change_message(self, request, form, formsets, add=False): """ Construct a JSON structure describing changes from a changed object. """ return construct_change_message(form, formsets, add) def message_user(self, request, message, level=messages.INFO, extra_tags='', fail_silently=False): """ Send a message to the user. The default implementation posts a message using the django.contrib.messages backend. Exposes almost the same API as messages.add_message(), but accepts the positional arguments in a different order to maintain backwards compatibility. For convenience, it accepts the `level` argument as a string rather than the usual level number. """ if not isinstance(level, int): # attempt to get the level if passed a string try: level = getattr(messages.constants, level.upper()) except AttributeError: levels = messages.constants.DEFAULT_TAGS.values() levels_repr = ', '.join('`%s`' % level for level in levels) raise ValueError( 'Bad message level string: `%s`. 
Possible values are: %s' % (level, levels_repr) ) messages.add_message(request, level, message, extra_tags=extra_tags, fail_silently=fail_silently) def save_form(self, request, form, change): """ Given a ModelForm return an unsaved instance. ``change`` is True if the object is being changed, and False if it's being added. """ return form.save(commit=False) def save_model(self, request, obj, form, change): """ Given a model instance save it to the database. """ obj.save() def delete_model(self, request, obj): """ Given a model instance delete it from the database. """ obj.delete() def delete_queryset(self, request, queryset): """Given a queryset, delete it from the database.""" queryset.delete() def save_formset(self, request, form, formset, change): """ Given an inline formset save it to the database. """ formset.save() def save_related(self, request, form, formsets, change): """ Given the ``HttpRequest``, the parent ``ModelForm`` instance, the list of inline formsets and a boolean value based on whether the parent is being added or changed, save the related objects to the database. Note that at this point save_form() and save_model() have already been called. """ form.save_m2m() for formset in formsets: self.save_formset(request, form, formset, change=change) def render_change_form(self, request, context, add=False, change=False, form_url='', obj=None): opts = self.model._meta app_label = opts.app_label preserved_filters = self.get_preserved_filters(request) form_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, form_url) view_on_site_url = self.get_view_on_site_url(obj) has_editable_inline_admin_formsets = False for inline in context['inline_admin_formsets']: if inline.has_add_permission or inline.has_change_permission or inline.has_delete_permission: has_editable_inline_admin_formsets = True break context.update({ 'add': add, 'change': change, 'has_view_permission': self.has_view_permission(request, obj), 'has_add_permission': self.has_add_permission(request), 'has_change_permission': self.has_change_permission(request, obj), 'has_delete_permission': self.has_delete_permission(request, obj), 'has_editable_inline_admin_formsets': has_editable_inline_admin_formsets, 'has_file_field': context['adminform'].form.is_multipart() or any( admin_formset.formset.is_multipart() for admin_formset in context['inline_admin_formsets'] ), 'has_absolute_url': view_on_site_url is not None, 'absolute_url': view_on_site_url, 'form_url': form_url, 'opts': opts, 'content_type_id': get_content_type_for_model(self.model).pk, 'save_as': self.save_as, 'save_on_top': self.save_on_top, 'to_field_var': TO_FIELD_VAR, 'is_popup_var': IS_POPUP_VAR, 'app_label': app_label, }) if add and self.add_form_template is not None: form_template = self.add_form_template else: form_template = self.change_form_template request.current_app = self.admin_site.name return TemplateResponse(request, form_template or [ "admin/%s/%s/change_form.html" % (app_label, opts.model_name), "admin/%s/change_form.html" % app_label, "admin/change_form.html" ], context) def response_add(self, request, obj, post_url_continue=None): """ Determine the HttpResponse for the add_view stage. """ opts = obj._meta preserved_filters = self.get_preserved_filters(request) obj_url = reverse( 'admin:%s_%s_change' % (opts.app_label, opts.model_name), args=(quote(obj.pk),), current_app=self.admin_site.name, ) # Add a link to the object's change form if the user can edit the obj. 
        if self.has_change_permission(request, obj):
            obj_repr = format_html('<a href="{}">{}</a>', urlquote(obj_url), obj)
        else:
            obj_repr = str(obj)
        msg_dict = {
            'name': opts.verbose_name,
            'obj': obj_repr,
        }
        # Here, we distinguish between different save types by checking for
        # the presence of keys in request.POST.

        if IS_POPUP_VAR in request.POST:
            to_field = request.POST.get(TO_FIELD_VAR)
            if to_field:
                attr = str(to_field)
            else:
                attr = obj._meta.pk.attname
            value = obj.serializable_value(attr)
            popup_response_data = json.dumps({
                'value': str(value),
                'obj': str(obj),
            })
            return TemplateResponse(request, self.popup_response_template or [
                'admin/%s/%s/popup_response.html' % (opts.app_label, opts.model_name),
                'admin/%s/popup_response.html' % opts.app_label,
                'admin/popup_response.html',
            ], {
                'popup_response_data': popup_response_data,
            })

        elif "_continue" in request.POST or (
                # Redirecting after "Save as new".
                "_saveasnew" in request.POST and self.save_as_continue and
                self.has_change_permission(request, obj)
        ):
            msg = _('The {name} “{obj}” was added successfully.')
            if self.has_change_permission(request, obj):
                msg += ' ' + _('You may edit it again below.')
            self.message_user(request, format_html(msg, **msg_dict), messages.SUCCESS)
            if post_url_continue is None:
                post_url_continue = obj_url
            post_url_continue = add_preserved_filters(
                {'preserved_filters': preserved_filters, 'opts': opts},
                post_url_continue
            )
            return HttpResponseRedirect(post_url_continue)

        elif "_addanother" in request.POST:
            msg = format_html(
                _('The {name} “{obj}” was added successfully. You may add another {name} below.'),
                **msg_dict
            )
            self.message_user(request, msg, messages.SUCCESS)
            redirect_url = request.path
            redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url)
            return HttpResponseRedirect(redirect_url)

        else:
            msg = format_html(
                _('The {name} “{obj}” was added successfully.'),
                **msg_dict
            )
            self.message_user(request, msg, messages.SUCCESS)
            return self.response_post_save_add(request, obj)

    def response_change(self, request, obj):
        """
        Determine the HttpResponse for the change_view stage.
        """
        if IS_POPUP_VAR in request.POST:
            opts = obj._meta
            to_field = request.POST.get(TO_FIELD_VAR)
            attr = str(to_field) if to_field else opts.pk.attname
            value = request.resolver_match.kwargs['object_id']
            new_value = obj.serializable_value(attr)
            popup_response_data = json.dumps({
                'action': 'change',
                'value': str(value),
                'obj': str(obj),
                'new_value': str(new_value),
            })
            return TemplateResponse(request, self.popup_response_template or [
                'admin/%s/%s/popup_response.html' % (opts.app_label, opts.model_name),
                'admin/%s/popup_response.html' % opts.app_label,
                'admin/popup_response.html',
            ], {
                'popup_response_data': popup_response_data,
            })

        opts = self.model._meta
        preserved_filters = self.get_preserved_filters(request)
        msg_dict = {
            'name': opts.verbose_name,
            'obj': format_html('<a href="{}">{}</a>', urlquote(request.path), obj),
        }
        if "_continue" in request.POST:
            msg = format_html(
                _('The {name} “{obj}” was changed successfully. You may edit it again below.'),
                **msg_dict
            )
            self.message_user(request, msg, messages.SUCCESS)
            redirect_url = request.path
            redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url)
            return HttpResponseRedirect(redirect_url)

        elif "_saveasnew" in request.POST:
            msg = format_html(
                _('The {name} “{obj}” was added successfully. You may edit it again below.'),
                **msg_dict
            )
            self.message_user(request, msg, messages.SUCCESS)
            redirect_url = reverse('admin:%s_%s_change' %
                                   (opts.app_label, opts.model_name),
                                   args=(obj.pk,),
                                   current_app=self.admin_site.name)
            redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url)
            return HttpResponseRedirect(redirect_url)

        elif "_addanother" in request.POST:
            msg = format_html(
                _('The {name} “{obj}” was changed successfully. You may add another {name} below.'),
                **msg_dict
            )
            self.message_user(request, msg, messages.SUCCESS)
            redirect_url = reverse('admin:%s_%s_add' %
                                   (opts.app_label, opts.model_name),
                                   current_app=self.admin_site.name)
            redirect_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, redirect_url)
            return HttpResponseRedirect(redirect_url)

        else:
            msg = format_html(
                _('The {name} “{obj}” was changed successfully.'),
                **msg_dict
            )
            self.message_user(request, msg, messages.SUCCESS)
            return self.response_post_save_change(request, obj)

    def _response_post_save(self, request, obj):
        opts = self.model._meta
        if self.has_view_or_change_permission(request):
            post_url = reverse('admin:%s_%s_changelist' %
                               (opts.app_label, opts.model_name),
                               current_app=self.admin_site.name)
            preserved_filters = self.get_preserved_filters(request)
            post_url = add_preserved_filters({'preserved_filters': preserved_filters, 'opts': opts}, post_url)
        else:
            post_url = reverse('admin:index',
                               current_app=self.admin_site.name)
        return HttpResponseRedirect(post_url)

    def response_post_save_add(self, request, obj):
        """
        Figure out where to redirect after the 'Save' button has been pressed
        when adding a new object.
        """
        return self._response_post_save(request, obj)

    def response_post_save_change(self, request, obj):
        """
        Figure out where to redirect after the 'Save' button has been pressed
        when editing an existing object.
        """
        return self._response_post_save(request, obj)

    def response_action(self, request, queryset):
        """
        Handle an admin action. This is called if a request is POSTed to the
        changelist; it returns an HttpResponse if the action was handled, and
        None otherwise.
        """
        # There can be multiple action forms on the page (at the top
        # and bottom of the change list, for example). Get the action
        # whose button was pushed.
        try:
            action_index = int(request.POST.get('index', 0))
        except ValueError:
            action_index = 0

        # Construct the action form.
        data = request.POST.copy()
        data.pop(helpers.ACTION_CHECKBOX_NAME, None)
        data.pop("index", None)

        # Use the action whose button was pushed
        try:
            data.update({'action': data.getlist('action')[action_index]})
        except IndexError:
            # If we didn't get an action from the chosen form that's invalid
            # POST data, so by deleting action it'll fail the validation check
            # below. So no need to do anything here
            pass

        action_form = self.action_form(data, auto_id=None)
        action_form.fields['action'].choices = self.get_action_choices(request)

        # If the form's valid we can handle the action.
        if action_form.is_valid():
            action = action_form.cleaned_data['action']
            select_across = action_form.cleaned_data['select_across']
            func = self.get_actions(request)[action][0]

            # Get the list of selected PKs. If nothing's selected, we can't
            # perform an action on it, so bail. Except we want to perform
            # the action explicitly on all objects.
            selected = request.POST.getlist(helpers.ACTION_CHECKBOX_NAME)
            if not selected and not select_across:
                # Reminder that something needs to be selected or nothing will happen
                msg = _("Items must be selected in order to perform "
                        "actions on them. No items have been changed.")
                self.message_user(request, msg, messages.WARNING)
                return None

            if not select_across:
                # Perform the action only on the selected objects
                queryset = queryset.filter(pk__in=selected)

            response = func(self, request, queryset)

            # Actions may return an HttpResponse-like object, which will be
            # used as the response from the POST. If not, we'll be a good
            # little HTTP citizen and redirect back to the changelist page.
            if isinstance(response, HttpResponseBase):
                return response
            else:
                return HttpResponseRedirect(request.get_full_path())
        else:
            msg = _("No action selected.")
            self.message_user(request, msg, messages.WARNING)
            return None

    def response_delete(self, request, obj_display, obj_id):
        """
        Determine the HttpResponse for the delete_view stage.
        """
        opts = self.model._meta

        if IS_POPUP_VAR in request.POST:
            popup_response_data = json.dumps({
                'action': 'delete',
                'value': str(obj_id),
            })
            return TemplateResponse(request, self.popup_response_template or [
                'admin/%s/%s/popup_response.html' % (opts.app_label, opts.model_name),
                'admin/%s/popup_response.html' % opts.app_label,
                'admin/popup_response.html',
            ], {
                'popup_response_data': popup_response_data,
            })

        self.message_user(
            request,
            _('The %(name)s “%(obj)s” was deleted successfully.') % {
                'name': opts.verbose_name,
                'obj': obj_display,
            },
            messages.SUCCESS,
        )

        if self.has_change_permission(request, None):
            post_url = reverse(
                'admin:%s_%s_changelist' % (opts.app_label, opts.model_name),
                current_app=self.admin_site.name,
            )
            preserved_filters = self.get_preserved_filters(request)
            post_url = add_preserved_filters(
                {'preserved_filters': preserved_filters, 'opts': opts}, post_url
            )
        else:
            post_url = reverse('admin:index', current_app=self.admin_site.name)
        return HttpResponseRedirect(post_url)

    def render_delete_form(self, request, context):
        opts = self.model._meta
        app_label = opts.app_label

        request.current_app = self.admin_site.name
        context.update(
            to_field_var=TO_FIELD_VAR,
            is_popup_var=IS_POPUP_VAR,
            media=self.media,
        )

        return TemplateResponse(
            request,
            self.delete_confirmation_template or [
                "admin/{}/{}/delete_confirmation.html".format(app_label, opts.model_name),
                "admin/{}/delete_confirmation.html".format(app_label),
                "admin/delete_confirmation.html",
            ],
            context,
        )

    def get_inline_formsets(self, request, formsets, inline_instances, obj=None):
        # Edit permissions on parent model are required for editable inlines.
        can_edit_parent = self.has_change_permission(request, obj) if obj else self.has_add_permission(request)
        inline_admin_formsets = []
        for inline, formset in zip(inline_instances, formsets):
            fieldsets = list(inline.get_fieldsets(request, obj))
            readonly = list(inline.get_readonly_fields(request, obj))
            if can_edit_parent:
                has_add_permission = inline.has_add_permission(request, obj)
                has_change_permission = inline.has_change_permission(request, obj)
                has_delete_permission = inline.has_delete_permission(request, obj)
            else:
                # Disable all edit-permissions, and override formset settings.
has_add_permission = has_change_permission = has_delete_permission = False formset.extra = formset.max_num = 0 has_view_permission = inline.has_view_permission(request, obj) prepopulated = dict(inline.get_prepopulated_fields(request, obj)) inline_admin_formset = helpers.InlineAdminFormSet( inline, formset, fieldsets, prepopulated, readonly, model_admin=self, has_add_permission=has_add_permission, has_change_permission=has_change_permission, has_delete_permission=has_delete_permission, has_view_permission=has_view_permission, ) inline_admin_formsets.append(inline_admin_formset) return inline_admin_formsets def get_changeform_initial_data(self, request): """ Get the initial form data from the request's GET params. """ initial = dict(request.GET.items()) for k in initial: try: f = self.model._meta.get_field(k) except FieldDoesNotExist: continue # We have to special-case M2Ms as a list of comma-separated PKs. if isinstance(f, models.ManyToManyField): initial[k] = initial[k].split(",") return initial def _get_obj_does_not_exist_redirect(self, request, opts, object_id): """ Create a message informing the user that the object doesn't exist and return a redirect to the admin index page. """ msg = _('%(name)s with ID “%(key)s” doesn’t exist. Perhaps it was deleted?') % { 'name': opts.verbose_name, 'key': unquote(object_id), } self.message_user(request, msg, messages.WARNING) url = reverse('admin:index', current_app=self.admin_site.name) return HttpResponseRedirect(url) @csrf_protect_m def changeform_view(self, request, object_id=None, form_url='', extra_context=None): with transaction.atomic(using=router.db_for_write(self.model)): return self._changeform_view(request, object_id, form_url, extra_context) def _changeform_view(self, request, object_id, form_url, extra_context): to_field = request.POST.get(TO_FIELD_VAR, request.GET.get(TO_FIELD_VAR)) if to_field and not self.to_field_allowed(request, to_field): raise DisallowedModelAdminToField("The field %s cannot be referenced." 
% to_field) model = self.model opts = model._meta if request.method == 'POST' and '_saveasnew' in request.POST: object_id = None add = object_id is None if add: if not self.has_add_permission(request): raise PermissionDenied obj = None else: obj = self.get_object(request, unquote(object_id), to_field) if request.method == 'POST': if not self.has_change_permission(request, obj): raise PermissionDenied else: if not self.has_view_or_change_permission(request, obj): raise PermissionDenied if obj is None: return self._get_obj_does_not_exist_redirect(request, opts, object_id) fieldsets = self.get_fieldsets(request, obj) ModelForm = self.get_form( request, obj, change=not add, fields=flatten_fieldsets(fieldsets) ) if request.method == 'POST': form = ModelForm(request.POST, request.FILES, instance=obj) form_validated = form.is_valid() if form_validated: new_object = self.save_form(request, form, change=not add) else: new_object = form.instance formsets, inline_instances = self._create_formsets(request, new_object, change=not add) if all_valid(formsets) and form_validated: self.save_model(request, new_object, form, not add) self.save_related(request, form, formsets, not add) change_message = self.construct_change_message(request, form, formsets, add) if add: self.log_addition(request, new_object, change_message) return self.response_add(request, new_object) else: self.log_change(request, new_object, change_message) return self.response_change(request, new_object) else: form_validated = False else: if add: initial = self.get_changeform_initial_data(request) form = ModelForm(initial=initial) formsets, inline_instances = self._create_formsets(request, form.instance, change=False) else: form = ModelForm(instance=obj) formsets, inline_instances = self._create_formsets(request, obj, change=True) if not add and not self.has_change_permission(request, obj): readonly_fields = flatten_fieldsets(fieldsets) else: readonly_fields = self.get_readonly_fields(request, obj) adminForm = helpers.AdminForm( form, list(fieldsets), # Clear prepopulated fields on a view-only form to avoid a crash. self.get_prepopulated_fields(request, obj) if add or self.has_change_permission(request, obj) else {}, readonly_fields, model_admin=self) media = self.media + adminForm.media inline_formsets = self.get_inline_formsets(request, formsets, inline_instances, obj) for inline_formset in inline_formsets: media = media + inline_formset.media if add: title = _('Add %s') elif self.has_change_permission(request, obj): title = _('Change %s') else: title = _('View %s') context = { **self.admin_site.each_context(request), 'title': title % opts.verbose_name, 'subtitle': str(obj) if obj else None, 'adminform': adminForm, 'object_id': object_id, 'original': obj, 'is_popup': IS_POPUP_VAR in request.POST or IS_POPUP_VAR in request.GET, 'to_field': to_field, 'media': media, 'inline_admin_formsets': inline_formsets, 'errors': helpers.AdminErrorList(form, formsets), 'preserved_filters': self.get_preserved_filters(request), } # Hide the "Save" and "Save and continue" buttons if "Save as New" was # previously chosen to prevent the interface from getting confusing. if request.method == 'POST' and not form_validated and "_saveasnew" in request.POST: context['show_save'] = False context['show_save_and_continue'] = False # Use the change template instead of the add template. 
add = False context.update(extra_context or {}) return self.render_change_form(request, context, add=add, change=not add, obj=obj, form_url=form_url) def add_view(self, request, form_url='', extra_context=None): return self.changeform_view(request, None, form_url, extra_context) def change_view(self, request, object_id, form_url='', extra_context=None): return self.changeform_view(request, object_id, form_url, extra_context) def _get_edited_object_pks(self, request, prefix): """Return POST data values of list_editable primary keys.""" pk_pattern = re.compile( r'{}-\d+-{}$'.format(re.escape(prefix), self.model._meta.pk.name) ) return [value for key, value in request.POST.items() if pk_pattern.match(key)] def _get_list_editable_queryset(self, request, prefix): """ Based on POST data, return a queryset of the objects that were edited via list_editable. """ object_pks = self._get_edited_object_pks(request, prefix) queryset = self.get_queryset(request) validate = queryset.model._meta.pk.to_python try: for pk in object_pks: validate(pk) except ValidationError: # Disable the optimization if the POST data was tampered with. return queryset return queryset.filter(pk__in=object_pks) @csrf_protect_m def changelist_view(self, request, extra_context=None): """ The 'change list' admin view for this model. """ from django.contrib.admin.views.main import ERROR_FLAG opts = self.model._meta app_label = opts.app_label if not self.has_view_or_change_permission(request): raise PermissionDenied try: cl = self.get_changelist_instance(request) except IncorrectLookupParameters: # Wacky lookup parameters were given, so redirect to the main # changelist page, without parameters, and pass an 'invalid=1' # parameter via the query string. If wacky parameters were given # and the 'invalid=1' parameter was already in the query string, # something is screwed up with the database, so display an error # page. if ERROR_FLAG in request.GET: return SimpleTemplateResponse('admin/invalid_setup.html', { 'title': _('Database error'), }) return HttpResponseRedirect(request.path + '?' + ERROR_FLAG + '=1') # If the request was POSTed, this might be a bulk action or a bulk # edit. Try to look up an action or confirmation first, but if this # isn't an action the POST will fall through to the bulk edit check, # below. action_failed = False selected = request.POST.getlist(helpers.ACTION_CHECKBOX_NAME) actions = self.get_actions(request) # Actions with no confirmation if (actions and request.method == 'POST' and 'index' in request.POST and '_save' not in request.POST): if selected: response = self.response_action(request, queryset=cl.get_queryset(request)) if response: return response else: action_failed = True else: msg = _("Items must be selected in order to perform " "actions on them. No items have been changed.") self.message_user(request, msg, messages.WARNING) action_failed = True # Actions with confirmation if (actions and request.method == 'POST' and helpers.ACTION_CHECKBOX_NAME in request.POST and 'index' not in request.POST and '_save' not in request.POST): if selected: response = self.response_action(request, queryset=cl.get_queryset(request)) if response: return response else: action_failed = True if action_failed: # Redirect back to the changelist page to avoid resubmitting the # form if the user refreshes the browser or uses the "No, take # me back" button on the action confirmation page. 
return HttpResponseRedirect(request.get_full_path()) # If we're allowing changelist editing, we need to construct a formset # for the changelist given all the fields to be edited. Then we'll # use the formset to validate/process POSTed data. formset = cl.formset = None # Handle POSTed bulk-edit data. if request.method == 'POST' and cl.list_editable and '_save' in request.POST: if not self.has_change_permission(request): raise PermissionDenied FormSet = self.get_changelist_formset(request) modified_objects = self._get_list_editable_queryset(request, FormSet.get_default_prefix()) formset = cl.formset = FormSet(request.POST, request.FILES, queryset=modified_objects) if formset.is_valid(): changecount = 0 for form in formset.forms: if form.has_changed(): obj = self.save_form(request, form, change=True) self.save_model(request, obj, form, change=True) self.save_related(request, form, formsets=[], change=True) change_msg = self.construct_change_message(request, form, None) self.log_change(request, obj, change_msg) changecount += 1 if changecount: msg = ngettext( "%(count)s %(name)s was changed successfully.", "%(count)s %(name)s were changed successfully.", changecount ) % { 'count': changecount, 'name': model_ngettext(opts, changecount), } self.message_user(request, msg, messages.SUCCESS) return HttpResponseRedirect(request.get_full_path()) # Handle GET -- construct a formset for display. elif cl.list_editable and self.has_change_permission(request): FormSet = self.get_changelist_formset(request) formset = cl.formset = FormSet(queryset=cl.result_list) # Build the list of media to be used by the formset. if formset: media = self.media + formset.media else: media = self.media # Build the action form and populate it with available actions. if actions: action_form = self.action_form(auto_id=None) action_form.fields['action'].choices = self.get_action_choices(request) media += action_form.media else: action_form = None selection_note_all = ngettext( '%(total_count)s selected', 'All %(total_count)s selected', cl.result_count ) context = { **self.admin_site.each_context(request), 'module_name': str(opts.verbose_name_plural), 'selection_note': _('0 of %(cnt)s selected') % {'cnt': len(cl.result_list)}, 'selection_note_all': selection_note_all % {'total_count': cl.result_count}, 'title': cl.title, 'subtitle': None, 'is_popup': cl.is_popup, 'to_field': cl.to_field, 'cl': cl, 'media': media, 'has_add_permission': self.has_add_permission(request), 'opts': cl.opts, 'action_form': action_form, 'actions_on_top': self.actions_on_top, 'actions_on_bottom': self.actions_on_bottom, 'actions_selection_counter': self.actions_selection_counter, 'preserved_filters': self.get_preserved_filters(request), **(extra_context or {}), } request.current_app = self.admin_site.name return TemplateResponse(request, self.change_list_template or [ 'admin/%s/%s/change_list.html' % (app_label, opts.model_name), 'admin/%s/change_list.html' % app_label, 'admin/change_list.html' ], context) def get_deleted_objects(self, objs, request): """ Hook for customizing the delete process for the delete view and the "delete selected" action. """ return get_deleted_objects(objs, request, self.admin_site) @csrf_protect_m def delete_view(self, request, object_id, extra_context=None): with transaction.atomic(using=router.db_for_write(self.model)): return self._delete_view(request, object_id, extra_context) def _delete_view(self, request, object_id, extra_context): "The 'delete' admin view for this model." 
opts = self.model._meta app_label = opts.app_label to_field = request.POST.get(TO_FIELD_VAR, request.GET.get(TO_FIELD_VAR)) if to_field and not self.to_field_allowed(request, to_field): raise DisallowedModelAdminToField("The field %s cannot be referenced." % to_field) obj = self.get_object(request, unquote(object_id), to_field) if not self.has_delete_permission(request, obj): raise PermissionDenied if obj is None: return self._get_obj_does_not_exist_redirect(request, opts, object_id) # Populate deleted_objects, a data structure of all related objects that # will also be deleted. deleted_objects, model_count, perms_needed, protected = self.get_deleted_objects([obj], request) if request.POST and not protected: # The user has confirmed the deletion. if perms_needed: raise PermissionDenied obj_display = str(obj) attr = str(to_field) if to_field else opts.pk.attname obj_id = obj.serializable_value(attr) self.log_deletion(request, obj, obj_display) self.delete_model(request, obj) return self.response_delete(request, obj_display, obj_id) object_name = str(opts.verbose_name) if perms_needed or protected: title = _("Cannot delete %(name)s") % {"name": object_name} else: title = _("Are you sure?") context = { **self.admin_site.each_context(request), 'title': title, 'subtitle': None, 'object_name': object_name, 'object': obj, 'deleted_objects': deleted_objects, 'model_count': dict(model_count).items(), 'perms_lacking': perms_needed, 'protected': protected, 'opts': opts, 'app_label': app_label, 'preserved_filters': self.get_preserved_filters(request), 'is_popup': IS_POPUP_VAR in request.POST or IS_POPUP_VAR in request.GET, 'to_field': to_field, **(extra_context or {}), } return self.render_delete_form(request, context) def history_view(self, request, object_id, extra_context=None): "The 'history' admin view for this model." from django.contrib.admin.models import LogEntry # First check if the user can see this history. model = self.model obj = self.get_object(request, unquote(object_id)) if obj is None: return self._get_obj_does_not_exist_redirect(request, model._meta, object_id) if not self.has_view_or_change_permission(request, obj): raise PermissionDenied # Then get the history for this object. opts = model._meta app_label = opts.app_label action_list = LogEntry.objects.filter( object_id=unquote(object_id), content_type=get_content_type_for_model(model) ).select_related().order_by('action_time') context = { **self.admin_site.each_context(request), 'title': _('Change history: %s') % obj, 'subtitle': None, 'action_list': action_list, 'module_name': str(capfirst(opts.verbose_name_plural)), 'object': obj, 'opts': opts, 'preserved_filters': self.get_preserved_filters(request), **(extra_context or {}), } request.current_app = self.admin_site.name return TemplateResponse(request, self.object_history_template or [ "admin/%s/%s/object_history.html" % (app_label, opts.model_name), "admin/%s/object_history.html" % app_label, "admin/object_history.html" ], context) def get_formset_kwargs(self, request, obj, inline, prefix): formset_params = { 'instance': obj, 'prefix': prefix, 'queryset': inline.get_queryset(request), } if request.method == 'POST': formset_params.update({ 'data': request.POST.copy(), 'files': request.FILES, 'save_as_new': '_saveasnew' in request.POST }) return formset_params def _create_formsets(self, request, obj, change): "Helper function to generate formsets for add/change_view." 
formsets = [] inline_instances = [] prefixes = {} get_formsets_args = [request] if change: get_formsets_args.append(obj) for FormSet, inline in self.get_formsets_with_inlines(*get_formsets_args): prefix = FormSet.get_default_prefix() prefixes[prefix] = prefixes.get(prefix, 0) + 1 if prefixes[prefix] != 1 or not prefix: prefix = "%s-%s" % (prefix, prefixes[prefix]) formset_params = self.get_formset_kwargs(request, obj, inline, prefix) formset = FormSet(**formset_params) def user_deleted_form(request, obj, formset, index): """Return whether or not the user deleted the form.""" return ( inline.has_delete_permission(request, obj) and '{}-{}-DELETE'.format(formset.prefix, index) in request.POST ) # Bypass validation of each view-only inline form (since the form's # data won't be in request.POST), unless the form was deleted. if not inline.has_change_permission(request, obj if change else None): for index, form in enumerate(formset.initial_forms): if user_deleted_form(request, obj, formset, index): continue form._errors = {} form.cleaned_data = form.initial formsets.append(formset) inline_instances.append(inline) return formsets, inline_instances class InlineModelAdmin(BaseModelAdmin): """ Options for inline editing of ``model`` instances. Provide ``fk_name`` to specify the attribute name of the ``ForeignKey`` from ``model`` to its parent. This is required if ``model`` has more than one ``ForeignKey`` to its parent. """ model = None fk_name = None formset = BaseInlineFormSet extra = 3 min_num = None max_num = None template = None verbose_name = None verbose_name_plural = None can_delete = True show_change_link = False checks_class = InlineModelAdminChecks classes = None def __init__(self, parent_model, admin_site): self.admin_site = admin_site self.parent_model = parent_model self.opts = self.model._meta self.has_registered_model = admin_site.is_registered(self.model) super().__init__() if self.verbose_name is None: self.verbose_name = self.model._meta.verbose_name if self.verbose_name_plural is None: self.verbose_name_plural = self.model._meta.verbose_name_plural @property def media(self): extra = '' if settings.DEBUG else '.min' js = ['vendor/jquery/jquery%s.js' % extra, 'jquery.init.js', 'inlines.js'] if self.filter_vertical or self.filter_horizontal: js.extend(['SelectBox.js', 'SelectFilter2.js']) if self.classes and 'collapse' in self.classes: js.append('collapse.js') return forms.Media(js=['admin/js/%s' % url for url in js]) def get_extra(self, request, obj=None, **kwargs): """Hook for customizing the number of extra inline forms.""" return self.extra def get_min_num(self, request, obj=None, **kwargs): """Hook for customizing the min number of inline forms.""" return self.min_num def get_max_num(self, request, obj=None, **kwargs): """Hook for customizing the max number of extra inline forms.""" return self.max_num def get_formset(self, request, obj=None, **kwargs): """Return a BaseInlineFormSet class for use in admin add/change views.""" if 'fields' in kwargs: fields = kwargs.pop('fields') else: fields = flatten_fieldsets(self.get_fieldsets(request, obj)) excluded = self.get_exclude(request, obj) exclude = [] if excluded is None else list(excluded) exclude.extend(self.get_readonly_fields(request, obj)) if excluded is None and hasattr(self.form, '_meta') and self.form._meta.exclude: # Take the custom ModelForm's Meta.exclude into account only if the # InlineModelAdmin doesn't define its own. 
exclude.extend(self.form._meta.exclude) # If exclude is an empty list we use None, since that's the actual # default. exclude = exclude or None can_delete = self.can_delete and self.has_delete_permission(request, obj) defaults = { 'form': self.form, 'formset': self.formset, 'fk_name': self.fk_name, 'fields': fields, 'exclude': exclude, 'formfield_callback': partial(self.formfield_for_dbfield, request=request), 'extra': self.get_extra(request, obj, **kwargs), 'min_num': self.get_min_num(request, obj, **kwargs), 'max_num': self.get_max_num(request, obj, **kwargs), 'can_delete': can_delete, **kwargs, } base_model_form = defaults['form'] can_change = self.has_change_permission(request, obj) if request else True can_add = self.has_add_permission(request, obj) if request else True class DeleteProtectedModelForm(base_model_form): def hand_clean_DELETE(self): """ We don't validate the 'DELETE' field itself because on templates it's not rendered using the field information, but just using a generic "deletion_field" of the InlineModelAdmin. """ if self.cleaned_data.get(DELETION_FIELD_NAME, False): using = router.db_for_write(self._meta.model) collector = NestedObjects(using=using) if self.instance._state.adding: return collector.collect([self.instance]) if collector.protected: objs = [] for p in collector.protected: objs.append( # Translators: Model verbose name and instance representation, # suitable to be an item in a list. _('%(class_name)s %(instance)s') % { 'class_name': p._meta.verbose_name, 'instance': p} ) params = { 'class_name': self._meta.model._meta.verbose_name, 'instance': self.instance, 'related_objects': get_text_list(objs, _('and')), } msg = _("Deleting %(class_name)s %(instance)s would require " "deleting the following protected related objects: " "%(related_objects)s") raise ValidationError(msg, code='deleting_protected', params=params) def is_valid(self): result = super().is_valid() self.hand_clean_DELETE() return result def has_changed(self): # Protect against unauthorized edits. if not can_change and not self.instance._state.adding: return False if not can_add and self.instance._state.adding: return False return super().has_changed() defaults['form'] = DeleteProtectedModelForm if defaults['fields'] is None and not modelform_defines_fields(defaults['form']): defaults['fields'] = forms.ALL_FIELDS return inlineformset_factory(self.parent_model, self.model, **defaults) def _get_form_for_get_fields(self, request, obj=None): return self.get_formset(request, obj, fields=None).form def get_queryset(self, request): queryset = super().get_queryset(request) if not self.has_view_or_change_permission(request): queryset = queryset.none() return queryset def _has_any_perms_for_target_model(self, request, perms): """ This method is called only when the ModelAdmin's model is for an ManyToManyField's implicit through model (if self.opts.auto_created). Return True if the user has any of the given permissions ('add', 'change', etc.) for the model that points to the through model. """ opts = self.opts # Find the target model of an auto-created many-to-many relationship. for field in opts.fields: if field.remote_field and field.remote_field.model != self.parent_model: opts = field.remote_field.model._meta break return any( request.user.has_perm('%s.%s' % (opts.app_label, get_permission_codename(perm, opts))) for perm in perms ) def has_add_permission(self, request, obj): if self.opts.auto_created: # Auto-created intermediate models don't have their own # permissions. 
The user needs to have the change permission for the # related model in order to be able to do anything with the # intermediate model. return self._has_any_perms_for_target_model(request, ['change']) return super().has_add_permission(request) def has_change_permission(self, request, obj=None): if self.opts.auto_created: # Same comment as has_add_permission(). return self._has_any_perms_for_target_model(request, ['change']) return super().has_change_permission(request) def has_delete_permission(self, request, obj=None): if self.opts.auto_created: # Same comment as has_add_permission(). return self._has_any_perms_for_target_model(request, ['change']) return super().has_delete_permission(request, obj) def has_view_permission(self, request, obj=None): if self.opts.auto_created: # Same comment as has_add_permission(). The 'change' permission # also implies the 'view' permission. return self._has_any_perms_for_target_model(request, ['view', 'change']) return super().has_view_permission(request) class StackedInline(InlineModelAdmin): template = 'admin/edit_inline/stacked.html' class TabularInline(InlineModelAdmin): template = 'admin/edit_inline/tabular.html'
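# ---------------------------------------------------------------------------
# Usage sketch (illustrative, not part of this module): how the inline
# classes above are typically wired into a project's admin.py. ``Author``
# and ``Book`` are hypothetical models assumed only for this example; the
# admin APIs used (TabularInline, ModelAdmin, admin.register) are the ones
# defined in this module.
#
#     from django.contrib import admin
#     from myapp.models import Author, Book  # hypothetical app and models
#
#     class BookInline(admin.TabularInline):
#         model = Book               # assumed to have a ForeignKey to Author
#         extra = 1                  # one blank form instead of the default 3
#         show_change_link = True    # link each inline row to its change form
#
#     @admin.register(Author)
#     class AuthorAdmin(admin.ModelAdmin):
#         inlines = [BookInline]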
import re from functools import update_wrapper from weakref import WeakSet from django.apps import apps from django.conf import settings from django.contrib.admin import ModelAdmin, actions from django.contrib.admin.views.autocomplete import AutocompleteJsonView from django.contrib.auth import REDIRECT_FIELD_NAME from django.core.exceptions import ImproperlyConfigured from django.db.models.base import ModelBase from django.http import ( Http404, HttpResponsePermanentRedirect, HttpResponseRedirect, ) from django.template.response import TemplateResponse from django.urls import NoReverseMatch, Resolver404, resolve, reverse from django.utils.decorators import method_decorator from django.utils.functional import LazyObject from django.utils.module_loading import import_string from django.utils.text import capfirst from django.utils.translation import gettext as _, gettext_lazy from django.views.decorators.cache import never_cache from django.views.decorators.common import no_append_slash from django.views.decorators.csrf import csrf_protect from django.views.i18n import JavaScriptCatalog all_sites = WeakSet() class AlreadyRegistered(Exception): pass class NotRegistered(Exception): pass class AdminSite: """ An AdminSite object encapsulates an instance of the Django admin application, ready to be hooked in to your URLconf. Models are registered with the AdminSite using the register() method, and the get_urls() method can then be used to access Django view functions that present a full admin interface for the collection of registered models. """ # Text to put at the end of each page's <title>. site_title = gettext_lazy('Django site admin') # Text to put in each page's <h1>. site_header = gettext_lazy('Django administration') # Text to put at the top of the admin index page. index_title = gettext_lazy('Site administration') # URL for the "View site" link at the top of each admin page. site_url = '/' enable_nav_sidebar = True empty_value_display = '-' login_form = None index_template = None app_index_template = None login_template = None logout_template = None password_change_template = None password_change_done_template = None final_catch_all_view = True def __init__(self, name='admin'): self._registry = {} # model_class class -> admin_class instance self.name = name self._actions = {'delete_selected': actions.delete_selected} self._global_actions = self._actions.copy() all_sites.add(self) def __repr__(self): return f'{self.__class__.__name__}(name={self.name!r})' def check(self, app_configs): """ Run the system checks on all ModelAdmins, except if they aren't customized at all. """ if app_configs is None: app_configs = apps.get_app_configs() app_configs = set(app_configs) # Speed up lookups below errors = [] modeladmins = (o for o in self._registry.values() if o.__class__ is not ModelAdmin) for modeladmin in modeladmins: if modeladmin.model._meta.app_config in app_configs: errors.extend(modeladmin.check()) return errors def register(self, model_or_iterable, admin_class=None, **options): """ Register the given model(s) with the given admin class. The model(s) should be Model classes, not instances. If an admin class isn't given, use ModelAdmin (the default admin options). If keyword arguments are given -- e.g., list_display -- apply them as options to the admin class. If a model is already registered, raise AlreadyRegistered. If a model is abstract, raise ImproperlyConfigured. 
""" admin_class = admin_class or ModelAdmin if isinstance(model_or_iterable, ModelBase): model_or_iterable = [model_or_iterable] for model in model_or_iterable: if model._meta.abstract: raise ImproperlyConfigured( 'The model %s is abstract, so it cannot be registered with admin.' % model.__name__ ) if model in self._registry: registered_admin = str(self._registry[model]) msg = 'The model %s is already registered ' % model.__name__ if registered_admin.endswith('.ModelAdmin'): # Most likely registered without a ModelAdmin subclass. msg += 'in app %r.' % re.sub(r'\.ModelAdmin$', '', registered_admin) else: msg += 'with %r.' % registered_admin raise AlreadyRegistered(msg) # Ignore the registration if the model has been # swapped out. if not model._meta.swapped: # If we got **options then dynamically construct a subclass of # admin_class with those **options. if options: # For reasons I don't quite understand, without a __module__ # the created class appears to "live" in the wrong place, # which causes issues later on. options['__module__'] = __name__ admin_class = type("%sAdmin" % model.__name__, (admin_class,), options) # Instantiate the admin class to save in the registry self._registry[model] = admin_class(model, self) def unregister(self, model_or_iterable): """ Unregister the given model(s). If a model isn't already registered, raise NotRegistered. """ if isinstance(model_or_iterable, ModelBase): model_or_iterable = [model_or_iterable] for model in model_or_iterable: if model not in self._registry: raise NotRegistered('The model %s is not registered' % model.__name__) del self._registry[model] def is_registered(self, model): """ Check if a model class is registered with this `AdminSite`. """ return model in self._registry def add_action(self, action, name=None): """ Register an action to be available globally. """ name = name or action.__name__ self._actions[name] = action self._global_actions[name] = action def disable_action(self, name): """ Disable a globally-registered action. Raise KeyError for invalid names. """ del self._actions[name] def get_action(self, name): """ Explicitly get a registered global action whether it's enabled or not. Raise KeyError for invalid names. """ return self._global_actions[name] @property def actions(self): """ Get all the enabled actions as an iterable of (name, func). """ return self._actions.items() def has_permission(self, request): """ Return True if the given HttpRequest has permission to view *at least one* page in the admin site. """ return request.user.is_active and request.user.is_staff def admin_view(self, view, cacheable=False): """ Decorator to create an admin view attached to this ``AdminSite``. This wraps the view and provides permission checking by calling ``self.has_permission``. You'll want to use this from within ``AdminSite.get_urls()``: class MyAdminSite(AdminSite): def get_urls(self): from django.urls import path urls = super().get_urls() urls += [ path('my_view/', self.admin_view(some_view)) ] return urls By default, admin_views are marked non-cacheable using the ``never_cache`` decorator. If the view can be safely cached, set cacheable=True. """ def inner(request, *args, **kwargs): if not self.has_permission(request): if request.path == reverse('admin:logout', current_app=self.name): index_path = reverse('admin:index', current_app=self.name) return HttpResponseRedirect(index_path) # Inner import to prevent django.contrib.admin (app) from # importing django.contrib.auth.models.User (unrelated model). 
from django.contrib.auth.views import redirect_to_login return redirect_to_login( request.get_full_path(), reverse('admin:login', current_app=self.name) ) return view(request, *args, **kwargs) if not cacheable: inner = never_cache(inner) # We add csrf_protect here so this function can be used as a utility # function for any view, without having to repeat 'csrf_protect'. if not getattr(view, 'csrf_exempt', False): inner = csrf_protect(inner) return update_wrapper(inner, view) def get_urls(self): # Since this module gets imported in the application's root package, # it cannot import models from other applications at the module level, # and django.contrib.contenttypes.views imports ContentType. from django.contrib.contenttypes import views as contenttype_views from django.urls import include, path, re_path def wrap(view, cacheable=False): def wrapper(*args, **kwargs): return self.admin_view(view, cacheable)(*args, **kwargs) wrapper.admin_site = self return update_wrapper(wrapper, view) # Admin-site-wide views. urlpatterns = [ path('', wrap(self.index), name='index'), path('login/', self.login, name='login'), path('logout/', wrap(self.logout), name='logout'), path('password_change/', wrap(self.password_change, cacheable=True), name='password_change'), path( 'password_change/done/', wrap(self.password_change_done, cacheable=True), name='password_change_done', ), path('autocomplete/', wrap(self.autocomplete_view), name='autocomplete'), path('jsi18n/', wrap(self.i18n_javascript, cacheable=True), name='jsi18n'), path( 'r/<int:content_type_id>/<path:object_id>/', wrap(contenttype_views.shortcut), name='view_on_site', ), ] # Add in each model's views, and create a list of valid URLS for the # app_index valid_app_labels = [] for model, model_admin in self._registry.items(): urlpatterns += [ path('%s/%s/' % (model._meta.app_label, model._meta.model_name), include(model_admin.urls)), ] if model._meta.app_label not in valid_app_labels: valid_app_labels.append(model._meta.app_label) # If there were ModelAdmins registered, we should have a list of app # labels for which we need to allow access to the app_index view, if valid_app_labels: regex = r'^(?P<app_label>' + '|'.join(valid_app_labels) + ')/$' urlpatterns += [ re_path(regex, wrap(self.app_index), name='app_list'), ] if self.final_catch_all_view: urlpatterns.append(re_path(r'(?P<url>.*)$', wrap(self.catch_all_view))) return urlpatterns @property def urls(self): return self.get_urls(), 'admin', self.name def each_context(self, request): """ Return a dictionary of variables to put in the template context for *every* page in the admin site. For sites running on a subpath, use the SCRIPT_NAME value if site_url hasn't been customized. """ script_name = request.META['SCRIPT_NAME'] site_url = script_name if self.site_url == '/' and script_name else self.site_url return { 'site_title': self.site_title, 'site_header': self.site_header, 'site_url': site_url, 'has_permission': self.has_permission(request), 'available_apps': self.get_app_list(request), 'is_popup': False, 'is_nav_sidebar_enabled': self.enable_nav_sidebar, } def password_change(self, request, extra_context=None): """ Handle the "change password" task -- both form display and validation. 
""" from django.contrib.admin.forms import AdminPasswordChangeForm from django.contrib.auth.views import PasswordChangeView url = reverse('admin:password_change_done', current_app=self.name) defaults = { 'form_class': AdminPasswordChangeForm, 'success_url': url, 'extra_context': {**self.each_context(request), **(extra_context or {})}, } if self.password_change_template is not None: defaults['template_name'] = self.password_change_template request.current_app = self.name return PasswordChangeView.as_view(**defaults)(request) def password_change_done(self, request, extra_context=None): """ Display the "success" page after a password change. """ from django.contrib.auth.views import PasswordChangeDoneView defaults = { 'extra_context': {**self.each_context(request), **(extra_context or {})}, } if self.password_change_done_template is not None: defaults['template_name'] = self.password_change_done_template request.current_app = self.name return PasswordChangeDoneView.as_view(**defaults)(request) def i18n_javascript(self, request, extra_context=None): """ Display the i18n JavaScript that the Django admin requires. `extra_context` is unused but present for consistency with the other admin views. """ return JavaScriptCatalog.as_view(packages=['django.contrib.admin'])(request) def logout(self, request, extra_context=None): """ Log out the user for the given HttpRequest. This should *not* assume the user is already logged in. """ from django.contrib.auth.views import LogoutView defaults = { 'extra_context': { **self.each_context(request), # Since the user isn't logged out at this point, the value of # has_permission must be overridden. 'has_permission': False, **(extra_context or {}) }, } if self.logout_template is not None: defaults['template_name'] = self.logout_template request.current_app = self.name return LogoutView.as_view(**defaults)(request) @method_decorator(never_cache) def login(self, request, extra_context=None): """ Display the login form for the given HttpRequest. """ if request.method == 'GET' and self.has_permission(request): # Already logged-in, redirect to admin index index_path = reverse('admin:index', current_app=self.name) return HttpResponseRedirect(index_path) # Since this module gets imported in the application's root package, # it cannot import models from other applications at the module level, # and django.contrib.admin.forms eventually imports User. 
from django.contrib.admin.forms import AdminAuthenticationForm from django.contrib.auth.views import LoginView context = { **self.each_context(request), 'title': _('Log in'), 'app_path': request.get_full_path(), 'username': request.user.get_username(), } if (REDIRECT_FIELD_NAME not in request.GET and REDIRECT_FIELD_NAME not in request.POST): context[REDIRECT_FIELD_NAME] = reverse('admin:index', current_app=self.name) context.update(extra_context or {}) defaults = { 'extra_context': context, 'authentication_form': self.login_form or AdminAuthenticationForm, 'template_name': self.login_template or 'admin/login.html', } request.current_app = self.name return LoginView.as_view(**defaults)(request) def autocomplete_view(self, request): return AutocompleteJsonView.as_view(admin_site=self)(request) @no_append_slash def catch_all_view(self, request, url): if settings.APPEND_SLASH and not url.endswith('/'): urlconf = getattr(request, 'urlconf', None) try: match = resolve('%s/' % request.path_info, urlconf) except Resolver404: pass else: if getattr(match.func, 'should_append_slash', True): return HttpResponsePermanentRedirect('%s/' % request.path) raise Http404 def _build_app_dict(self, request, label=None): """ Build the app dictionary. The optional `label` parameter filters models of a specific app. """ app_dict = {} if label: models = { m: m_a for m, m_a in self._registry.items() if m._meta.app_label == label } else: models = self._registry for model, model_admin in models.items(): app_label = model._meta.app_label has_module_perms = model_admin.has_module_permission(request) if not has_module_perms: continue perms = model_admin.get_model_perms(request) # Check whether user has any perm for this module. # If so, add the module to the model_list. if True not in perms.values(): continue info = (app_label, model._meta.model_name) model_dict = { 'model': model, 'name': capfirst(model._meta.verbose_name_plural), 'object_name': model._meta.object_name, 'perms': perms, 'admin_url': None, 'add_url': None, } if perms.get('change') or perms.get('view'): model_dict['view_only'] = not perms.get('change') try: model_dict['admin_url'] = reverse('admin:%s_%s_changelist' % info, current_app=self.name) except NoReverseMatch: pass if perms.get('add'): try: model_dict['add_url'] = reverse('admin:%s_%s_add' % info, current_app=self.name) except NoReverseMatch: pass if app_label in app_dict: app_dict[app_label]['models'].append(model_dict) else: app_dict[app_label] = { 'name': apps.get_app_config(app_label).verbose_name, 'app_label': app_label, 'app_url': reverse( 'admin:app_list', kwargs={'app_label': app_label}, current_app=self.name, ), 'has_module_perms': has_module_perms, 'models': [model_dict], } if label: return app_dict.get(label) return app_dict def get_app_list(self, request): """ Return a sorted list of all the installed apps that have been registered in this site. """ app_dict = self._build_app_dict(request) # Sort the apps alphabetically. app_list = sorted(app_dict.values(), key=lambda x: x['name'].lower()) # Sort the models alphabetically within each app. for app in app_list: app['models'].sort(key=lambda x: x['name']) return app_list def index(self, request, extra_context=None): """ Display the main admin index page, which lists all of the installed apps that have been registered in this site. 
""" app_list = self.get_app_list(request) context = { **self.each_context(request), 'title': self.index_title, 'subtitle': None, 'app_list': app_list, **(extra_context or {}), } request.current_app = self.name return TemplateResponse(request, self.index_template or 'admin/index.html', context) def app_index(self, request, app_label, extra_context=None): app_dict = self._build_app_dict(request, app_label) if not app_dict: raise Http404('The requested admin page does not exist.') # Sort the models alphabetically within each app. app_dict['models'].sort(key=lambda x: x['name']) context = { **self.each_context(request), 'title': _('%(app)s administration') % {'app': app_dict['name']}, 'subtitle': None, 'app_list': [app_dict], 'app_label': app_label, **(extra_context or {}), } request.current_app = self.name return TemplateResponse(request, self.app_index_template or [ 'admin/%s/app_index.html' % app_label, 'admin/app_index.html' ], context) class DefaultAdminSite(LazyObject): def _setup(self): AdminSiteClass = import_string(apps.get_app_config('admin').default_site) self._wrapped = AdminSiteClass() def __repr__(self): return repr(self._wrapped) # This global object represents the default admin site, for the common case. # You can provide your own AdminSite using the (Simple)AdminConfig.default_site # attribute. You can also instantiate AdminSite in your own code to create a # custom admin site. site = DefaultAdminSite()
import datetime import decimal import json from collections import defaultdict from django.core.exceptions import FieldDoesNotExist from django.db import models, router from django.db.models.constants import LOOKUP_SEP from django.db.models.deletion import Collector from django.forms.utils import pretty_name from django.urls import NoReverseMatch, reverse from django.utils import formats, timezone from django.utils.html import format_html from django.utils.regex_helper import _lazy_re_compile from django.utils.text import capfirst from django.utils.translation import ngettext, override as translation_override QUOTE_MAP = {i: '_%02X' % i for i in b'":/_#?;@&=+$,"[]<>%\n\\'} UNQUOTE_MAP = {v: chr(k) for k, v in QUOTE_MAP.items()} UNQUOTE_RE = _lazy_re_compile('_(?:%s)' % '|'.join([x[1:] for x in UNQUOTE_MAP])) class FieldIsAForeignKeyColumnName(Exception): """A field is a foreign key attname, i.e. <FK>_id.""" pass def lookup_spawns_duplicates(opts, lookup_path): """ Return True if the given lookup path spawns duplicates. """ lookup_fields = lookup_path.split(LOOKUP_SEP) # Go through the fields (following all relations) and look for an m2m. for field_name in lookup_fields: if field_name == 'pk': field_name = opts.pk.name try: field = opts.get_field(field_name) except FieldDoesNotExist: # Ignore query lookups. continue else: if hasattr(field, 'get_path_info'): # This field is a relation; update opts to follow the relation. path_info = field.get_path_info() opts = path_info[-1].to_opts if any(path.m2m for path in path_info): # This field is a m2m relation so duplicates must be # handled. return True return False def prepare_lookup_value(key, value): """ Return a lookup value prepared to be used in queryset filtering. """ # if key ends with __in, split parameter into separate values if key.endswith('__in'): value = value.split(',') # if key ends with __isnull, special case '' and the string literals 'false' and '0' elif key.endswith('__isnull'): value = value.lower() not in ('', 'false', '0') return value def quote(s): """ Ensure that primary key values do not confuse the admin URLs by escaping any '/', '_' and ':' and similarly problematic characters. Similar to urllib.parse.quote(), except that the quoting is slightly different so that it doesn't get automatically unquoted by the web browser. """ return s.translate(QUOTE_MAP) if isinstance(s, str) else s def unquote(s): """Undo the effects of quote().""" return UNQUOTE_RE.sub(lambda m: UNQUOTE_MAP[m[0]], s) def flatten(fields): """ Return a list which is a single level of flattening of the original list. """ flat = [] for field in fields: if isinstance(field, (list, tuple)): flat.extend(field) else: flat.append(field) return flat def flatten_fieldsets(fieldsets): """Return a list of field names from an admin fieldsets structure.""" field_names = [] for name, opts in fieldsets: field_names.extend( flatten(opts['fields']) ) return field_names def get_deleted_objects(objs, request, admin_site): """ Find all objects related to ``objs`` that should also be deleted. ``objs`` must be a homogeneous iterable of objects (e.g. a QuerySet). Return a nested list of strings suitable for display in the template with the ``unordered_list`` filter. 
""" try: obj = objs[0] except IndexError: return [], {}, set(), [] else: using = router.db_for_write(obj._meta.model) collector = NestedObjects(using=using) collector.collect(objs) perms_needed = set() def format_callback(obj): model = obj.__class__ has_admin = model in admin_site._registry opts = obj._meta no_edit_link = '%s: %s' % (capfirst(opts.verbose_name), obj) if has_admin: if not admin_site._registry[model].has_delete_permission(request, obj): perms_needed.add(opts.verbose_name) try: admin_url = reverse('%s:%s_%s_change' % (admin_site.name, opts.app_label, opts.model_name), None, (quote(obj.pk),)) except NoReverseMatch: # Change url doesn't exist -- don't display link to edit return no_edit_link # Display a link to the admin page. return format_html('{}: <a href="{}">{}</a>', capfirst(opts.verbose_name), admin_url, obj) else: # Don't display link to edit, because it either has no # admin or is edited inline. return no_edit_link to_delete = collector.nested(format_callback) protected = [format_callback(obj) for obj in collector.protected] model_count = {model._meta.verbose_name_plural: len(objs) for model, objs in collector.model_objs.items()} return to_delete, model_count, perms_needed, protected class NestedObjects(Collector): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.edges = {} # {from_instance: [to_instances]} self.protected = set() self.model_objs = defaultdict(set) def add_edge(self, source, target): self.edges.setdefault(source, []).append(target) def collect(self, objs, source=None, source_attr=None, **kwargs): for obj in objs: if source_attr and not source_attr.endswith('+'): related_name = source_attr % { 'class': source._meta.model_name, 'app_label': source._meta.app_label, } self.add_edge(getattr(obj, related_name), obj) else: self.add_edge(None, obj) self.model_objs[obj._meta.model].add(obj) try: return super().collect(objs, source_attr=source_attr, **kwargs) except models.ProtectedError as e: self.protected.update(e.protected_objects) except models.RestrictedError as e: self.protected.update(e.restricted_objects) def related_objects(self, related_model, related_fields, objs): qs = super().related_objects(related_model, related_fields, objs) return qs.select_related(*[related_field.name for related_field in related_fields]) def _nested(self, obj, seen, format_callback): if obj in seen: return [] seen.add(obj) children = [] for child in self.edges.get(obj, ()): children.extend(self._nested(child, seen, format_callback)) if format_callback: ret = [format_callback(obj)] else: ret = [obj] if children: ret.append(children) return ret def nested(self, format_callback=None): """ Return the graph as a nested list. """ seen = set() roots = [] for root in self.edges.get(None, ()): roots.extend(self._nested(root, seen, format_callback)) return roots def can_fast_delete(self, *args, **kwargs): """ We always want to load the objects into memory so that we can display them to the user in confirm page. """ return False def model_format_dict(obj): """ Return a `dict` with keys 'verbose_name' and 'verbose_name_plural', typically for use with string formatting. `obj` may be a `Model` instance, `Model` subclass, or `QuerySet` instance. 
""" if isinstance(obj, (models.Model, models.base.ModelBase)): opts = obj._meta elif isinstance(obj, models.query.QuerySet): opts = obj.model._meta else: opts = obj return { 'verbose_name': opts.verbose_name, 'verbose_name_plural': opts.verbose_name_plural, } def model_ngettext(obj, n=None): """ Return the appropriate `verbose_name` or `verbose_name_plural` value for `obj` depending on the count `n`. `obj` may be a `Model` instance, `Model` subclass, or `QuerySet` instance. If `obj` is a `QuerySet` instance, `n` is optional and the length of the `QuerySet` is used. """ if isinstance(obj, models.query.QuerySet): if n is None: n = obj.count() obj = obj.model d = model_format_dict(obj) singular, plural = d["verbose_name"], d["verbose_name_plural"] return ngettext(singular, plural, n or 0) def lookup_field(name, obj, model_admin=None): opts = obj._meta try: f = _get_non_gfk_field(opts, name) except (FieldDoesNotExist, FieldIsAForeignKeyColumnName): # For non-field values, the value is either a method, property or # returned via a callable. if callable(name): attr = name value = attr(obj) elif hasattr(model_admin, name) and name != '__str__': attr = getattr(model_admin, name) value = attr(obj) else: attr = getattr(obj, name) if callable(attr): value = attr() else: value = attr f = None else: attr = None value = getattr(obj, name) return f, attr, value def _get_non_gfk_field(opts, name): """ For historical reasons, the admin app relies on GenericForeignKeys as being "not found" by get_field(). This could likely be cleaned up. Reverse relations should also be excluded as these aren't attributes of the model (rather something like `foo_set`). """ field = opts.get_field(name) if (field.is_relation and # Generic foreign keys OR reverse relations ((field.many_to_one and not field.related_model) or field.one_to_many)): raise FieldDoesNotExist() # Avoid coercing <FK>_id fields to FK if field.is_relation and not field.many_to_many and hasattr(field, 'attname') and field.attname == name: raise FieldIsAForeignKeyColumnName() return field def label_for_field(name, model, model_admin=None, return_attr=False, form=None): """ Return a sensible label for a field name. The name can be a callable, property (but not created with @property decorator), or the name of an object's attribute, as well as a model field. If return_attr is True, also return the resolved attribute (which could be a callable). This will be None if (and only if) the name refers to a field. 
""" attr = None try: field = _get_non_gfk_field(model._meta, name) try: label = field.verbose_name except AttributeError: # field is likely a ForeignObjectRel label = field.related_model._meta.verbose_name except FieldDoesNotExist: if name == "__str__": label = str(model._meta.verbose_name) attr = str else: if callable(name): attr = name elif hasattr(model_admin, name): attr = getattr(model_admin, name) elif hasattr(model, name): attr = getattr(model, name) elif form and name in form.fields: attr = form.fields[name] else: message = "Unable to lookup '%s' on %s" % (name, model._meta.object_name) if model_admin: message += " or %s" % model_admin.__class__.__name__ if form: message += " or %s" % form.__class__.__name__ raise AttributeError(message) if hasattr(attr, "short_description"): label = attr.short_description elif (isinstance(attr, property) and hasattr(attr, "fget") and hasattr(attr.fget, "short_description")): label = attr.fget.short_description elif callable(attr): if attr.__name__ == "<lambda>": label = "--" else: label = pretty_name(attr.__name__) else: label = pretty_name(name) except FieldIsAForeignKeyColumnName: label = pretty_name(name) attr = name if return_attr: return (label, attr) else: return label def help_text_for_field(name, model): help_text = "" try: field = _get_non_gfk_field(model._meta, name) except (FieldDoesNotExist, FieldIsAForeignKeyColumnName): pass else: if hasattr(field, 'help_text'): help_text = field.help_text return help_text def display_for_field(value, field, empty_value_display): from django.contrib.admin.templatetags.admin_list import _boolean_icon if getattr(field, 'flatchoices', None): return dict(field.flatchoices).get(value, empty_value_display) # BooleanField needs special-case null-handling, so it comes before the # general null test. 
elif isinstance(field, models.BooleanField): return _boolean_icon(value) elif value is None: return empty_value_display elif isinstance(field, models.DateTimeField): return formats.localize(timezone.template_localtime(value)) elif isinstance(field, (models.DateField, models.TimeField)): return formats.localize(value) elif isinstance(field, models.DecimalField): return formats.number_format(value, field.decimal_places) elif isinstance(field, (models.IntegerField, models.FloatField)): return formats.number_format(value) elif isinstance(field, models.FileField) and value: return format_html('<a href="{}">{}</a>', value.url, value) elif isinstance(field, models.JSONField) and value: try: return json.dumps(value, ensure_ascii=False, cls=field.encoder) except TypeError: return display_for_value(value, empty_value_display) else: return display_for_value(value, empty_value_display) def display_for_value(value, empty_value_display, boolean=False): from django.contrib.admin.templatetags.admin_list import _boolean_icon if boolean: return _boolean_icon(value) elif value is None: return empty_value_display elif isinstance(value, bool): return str(value) elif isinstance(value, datetime.datetime): return formats.localize(timezone.template_localtime(value)) elif isinstance(value, (datetime.date, datetime.time)): return formats.localize(value) elif isinstance(value, (int, decimal.Decimal, float)): return formats.number_format(value) elif isinstance(value, (list, tuple)): return ', '.join(str(v) for v in value) else: return str(value) class NotRelationField(Exception): pass def get_model_from_relation(field): if hasattr(field, 'get_path_info'): return field.get_path_info()[-1].to_opts.model else: raise NotRelationField def reverse_field_path(model, path): """ Create a reversed field path. E.g. Given (Order, "user__groups"), return (Group, "user__order"). Final field must be a related model, not a data field. """ reversed_path = [] parent = model pieces = path.split(LOOKUP_SEP) for piece in pieces: field = parent._meta.get_field(piece) # skip trailing data field if extant: if len(reversed_path) == len(pieces) - 1: # final iteration try: get_model_from_relation(field) except NotRelationField: break # Field should point to another model if field.is_relation and not (field.auto_created and not field.concrete): related_name = field.related_query_name() parent = field.remote_field.model else: related_name = field.field.name parent = field.related_model reversed_path.insert(0, related_name) return (parent, LOOKUP_SEP.join(reversed_path)) def get_fields_from_path(model, path): """ Return list of Fields given path relative to model. e.g. (ModelX, "user__groups__name") -> [ <django.db.models.fields.related.ForeignKey object at 0x...>, <django.db.models.fields.related.ManyToManyField object at 0x...>, <django.db.models.fields.CharField object at 0x...>, ] """ pieces = path.split(LOOKUP_SEP) fields = [] for piece in pieces: if fields: parent = get_model_from_relation(fields[-1]) else: parent = model fields.append(parent._meta.get_field(piece)) return fields def construct_change_message(form, formsets, add): """ Construct a JSON structure describing changes from a changed object. Translations are deactivated so that strings are stored untranslated. Translation happens later on LogEntry access. """ # Evaluating `form.changed_data` prior to disabling translations is required # to avoid fields affected by localization from being included incorrectly, # e.g. where date formats differ such as MM/DD/YYYY vs DD/MM/YYYY. 
changed_data = form.changed_data with translation_override(None): # Deactivate translations while fetching verbose_name for form # field labels and using `field_name`, if verbose_name is not provided. # Translations will happen later on LogEntry access. changed_field_labels = _get_changed_field_labels_from_form(form, changed_data) change_message = [] if add: change_message.append({'added': {}}) elif form.changed_data: change_message.append({'changed': {'fields': changed_field_labels}}) if formsets: with translation_override(None): for formset in formsets: for added_object in formset.new_objects: change_message.append({ 'added': { 'name': str(added_object._meta.verbose_name), 'object': str(added_object), } }) for changed_object, changed_fields in formset.changed_objects: change_message.append({ 'changed': { 'name': str(changed_object._meta.verbose_name), 'object': str(changed_object), 'fields': _get_changed_field_labels_from_form(formset.forms[0], changed_fields), } }) for deleted_object in formset.deleted_objects: change_message.append({ 'deleted': { 'name': str(deleted_object._meta.verbose_name), 'object': str(deleted_object), } }) return change_message def _get_changed_field_labels_from_form(form, changed_data): changed_field_labels = [] for field_name in changed_data: try: verbose_field_name = form.fields[field_name].label or field_name except KeyError: verbose_field_name = field_name changed_field_labels.append(str(verbose_field_name)) return changed_field_labels
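# ---------------------------------------------------------------------------
# Usage sketch (illustrative): quote()/unquote() above escape characters
# that would confuse admin URLs using '_%02X' sequences, which -- unlike
# urllib.parse.quote() escapes -- browsers won't transparently decode. The
# sample value below is made up for the example.
#
#     >>> from django.contrib.admin.utils import quote, unquote
#     >>> quote('some/pk:with_odd#chars')
#     'some_2Fpk_3Awith_5Fodd_23chars'
#     >>> unquote(quote('some/pk:with_odd#chars'))
#     'some/pk:with_odd#chars'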
""" This encapsulates the logic for displaying filters in the Django admin. Filters are specified in models with the "list_filter" option. Each filter subclass knows how to display a filter for a field that passes a certain test -- e.g. being a DateField or ForeignKey. """ import datetime from django.contrib.admin.options import IncorrectLookupParameters from django.contrib.admin.utils import ( get_model_from_relation, prepare_lookup_value, reverse_field_path, ) from django.core.exceptions import ImproperlyConfigured, ValidationError from django.db import models from django.utils import timezone from django.utils.translation import gettext_lazy as _ class ListFilter: title = None # Human-readable title to appear in the right sidebar. template = 'admin/filter.html' def __init__(self, request, params, model, model_admin): # This dictionary will eventually contain the request's query string # parameters actually used by this filter. self.used_parameters = {} if self.title is None: raise ImproperlyConfigured( "The list filter '%s' does not specify a 'title'." % self.__class__.__name__ ) def has_output(self): """ Return True if some choices would be output for this filter. """ raise NotImplementedError('subclasses of ListFilter must provide a has_output() method') def choices(self, changelist): """ Return choices ready to be output in the template. `changelist` is the ChangeList to be displayed. """ raise NotImplementedError('subclasses of ListFilter must provide a choices() method') def queryset(self, request, queryset): """ Return the filtered queryset. """ raise NotImplementedError('subclasses of ListFilter must provide a queryset() method') def expected_parameters(self): """ Return the list of parameter names that are expected from the request's query string and that will be used by this filter. """ raise NotImplementedError('subclasses of ListFilter must provide an expected_parameters() method') class SimpleListFilter(ListFilter): # The parameter that should be used in the query string for that filter. parameter_name = None def __init__(self, request, params, model, model_admin): super().__init__(request, params, model, model_admin) if self.parameter_name is None: raise ImproperlyConfigured( "The list filter '%s' does not specify a 'parameter_name'." % self.__class__.__name__ ) if self.parameter_name in params: value = params.pop(self.parameter_name) self.used_parameters[self.parameter_name] = value lookup_choices = self.lookups(request, model_admin) if lookup_choices is None: lookup_choices = () self.lookup_choices = list(lookup_choices) def has_output(self): return len(self.lookup_choices) > 0 def value(self): """ Return the value (in string format) provided in the request's query string for this filter, if any, or None if the value wasn't provided. """ return self.used_parameters.get(self.parameter_name) def lookups(self, request, model_admin): """ Must be overridden to return a list of tuples (value, verbose value) """ raise NotImplementedError( 'The SimpleListFilter.lookups() method must be overridden to ' 'return a list of tuples (value, verbose value).' 
) def expected_parameters(self): return [self.parameter_name] def choices(self, changelist): yield { 'selected': self.value() is None, 'query_string': changelist.get_query_string(remove=[self.parameter_name]), 'display': _('All'), } for lookup, title in self.lookup_choices: yield { 'selected': self.value() == str(lookup), 'query_string': changelist.get_query_string({self.parameter_name: lookup}), 'display': title, } class FieldListFilter(ListFilter): _field_list_filters = [] _take_priority_index = 0 def __init__(self, field, request, params, model, model_admin, field_path): self.field = field self.field_path = field_path self.title = getattr(field, 'verbose_name', field_path) super().__init__(request, params, model, model_admin) for p in self.expected_parameters(): if p in params: value = params.pop(p) self.used_parameters[p] = prepare_lookup_value(p, value) def has_output(self): return True def queryset(self, request, queryset): try: return queryset.filter(**self.used_parameters) except (ValueError, ValidationError) as e: # Fields may raise a ValueError or ValidationError when converting # the parameters to the correct type. raise IncorrectLookupParameters(e) @classmethod def register(cls, test, list_filter_class, take_priority=False): if take_priority: # This is to allow overriding the default filters for certain types # of fields with some custom filters. The first found in the list # is used in priority. cls._field_list_filters.insert( cls._take_priority_index, (test, list_filter_class)) cls._take_priority_index += 1 else: cls._field_list_filters.append((test, list_filter_class)) @classmethod def create(cls, field, request, params, model, model_admin, field_path): for test, list_filter_class in cls._field_list_filters: if test(field): return list_filter_class(field, request, params, model, model_admin, field_path=field_path) class RelatedFieldListFilter(FieldListFilter): def __init__(self, field, request, params, model, model_admin, field_path): other_model = get_model_from_relation(field) self.lookup_kwarg = '%s__%s__exact' % (field_path, field.target_field.name) self.lookup_kwarg_isnull = '%s__isnull' % field_path self.lookup_val = params.get(self.lookup_kwarg) self.lookup_val_isnull = params.get(self.lookup_kwarg_isnull) super().__init__(field, request, params, model, model_admin, field_path) self.lookup_choices = self.field_choices(field, request, model_admin) if hasattr(field, 'verbose_name'): self.lookup_title = field.verbose_name else: self.lookup_title = other_model._meta.verbose_name self.title = self.lookup_title self.empty_value_display = model_admin.get_empty_value_display() @property def include_empty_choice(self): """ Return True if a "(None)" choice should be included, which filters out everything except empty relationships. """ return self.field.null or (self.field.is_relation and self.field.many_to_many) def has_output(self): if self.include_empty_choice: extra = 1 else: extra = 0 return len(self.lookup_choices) + extra > 1 def expected_parameters(self): return [self.lookup_kwarg, self.lookup_kwarg_isnull] def field_admin_ordering(self, field, request, model_admin): """ Return the model admin's ordering for related field, if provided. 
""" related_admin = model_admin.admin_site._registry.get(field.remote_field.model) if related_admin is not None: return related_admin.get_ordering(request) return () def field_choices(self, field, request, model_admin): ordering = self.field_admin_ordering(field, request, model_admin) return field.get_choices(include_blank=False, ordering=ordering) def choices(self, changelist): yield { 'selected': self.lookup_val is None and not self.lookup_val_isnull, 'query_string': changelist.get_query_string(remove=[self.lookup_kwarg, self.lookup_kwarg_isnull]), 'display': _('All'), } for pk_val, val in self.lookup_choices: yield { 'selected': self.lookup_val == str(pk_val), 'query_string': changelist.get_query_string({self.lookup_kwarg: pk_val}, [self.lookup_kwarg_isnull]), 'display': val, } if self.include_empty_choice: yield { 'selected': bool(self.lookup_val_isnull), 'query_string': changelist.get_query_string({self.lookup_kwarg_isnull: 'True'}, [self.lookup_kwarg]), 'display': self.empty_value_display, } FieldListFilter.register(lambda f: f.remote_field, RelatedFieldListFilter) class BooleanFieldListFilter(FieldListFilter): def __init__(self, field, request, params, model, model_admin, field_path): self.lookup_kwarg = '%s__exact' % field_path self.lookup_kwarg2 = '%s__isnull' % field_path self.lookup_val = params.get(self.lookup_kwarg) self.lookup_val2 = params.get(self.lookup_kwarg2) super().__init__(field, request, params, model, model_admin, field_path) if (self.used_parameters and self.lookup_kwarg in self.used_parameters and self.used_parameters[self.lookup_kwarg] in ('1', '0')): self.used_parameters[self.lookup_kwarg] = bool(int(self.used_parameters[self.lookup_kwarg])) def expected_parameters(self): return [self.lookup_kwarg, self.lookup_kwarg2] def choices(self, changelist): field_choices = dict(self.field.flatchoices) for lookup, title in ( (None, _('All')), ('1', field_choices.get(True, _('Yes'))), ('0', field_choices.get(False, _('No'))), ): yield { 'selected': self.lookup_val == lookup and not self.lookup_val2, 'query_string': changelist.get_query_string({self.lookup_kwarg: lookup}, [self.lookup_kwarg2]), 'display': title, } if self.field.null: yield { 'selected': self.lookup_val2 == 'True', 'query_string': changelist.get_query_string({self.lookup_kwarg2: 'True'}, [self.lookup_kwarg]), 'display': field_choices.get(None, _('Unknown')), } FieldListFilter.register(lambda f: isinstance(f, models.BooleanField), BooleanFieldListFilter) class ChoicesFieldListFilter(FieldListFilter): def __init__(self, field, request, params, model, model_admin, field_path): self.lookup_kwarg = '%s__exact' % field_path self.lookup_kwarg_isnull = '%s__isnull' % field_path self.lookup_val = params.get(self.lookup_kwarg) self.lookup_val_isnull = params.get(self.lookup_kwarg_isnull) super().__init__(field, request, params, model, model_admin, field_path) def expected_parameters(self): return [self.lookup_kwarg, self.lookup_kwarg_isnull] def choices(self, changelist): yield { 'selected': self.lookup_val is None, 'query_string': changelist.get_query_string(remove=[self.lookup_kwarg, self.lookup_kwarg_isnull]), 'display': _('All') } none_title = '' for lookup, title in self.field.flatchoices: if lookup is None: none_title = title continue yield { 'selected': str(lookup) == self.lookup_val, 'query_string': changelist.get_query_string({self.lookup_kwarg: lookup}, [self.lookup_kwarg_isnull]), 'display': title, } if none_title: yield { 'selected': bool(self.lookup_val_isnull), 'query_string': 
changelist.get_query_string({self.lookup_kwarg_isnull: 'True'}, [self.lookup_kwarg]), 'display': none_title, } FieldListFilter.register(lambda f: bool(f.choices), ChoicesFieldListFilter) class DateFieldListFilter(FieldListFilter): def __init__(self, field, request, params, model, model_admin, field_path): self.field_generic = '%s__' % field_path self.date_params = {k: v for k, v in params.items() if k.startswith(self.field_generic)} now = timezone.now() # When time zone support is enabled, convert "now" to the user's time # zone so Django's definition of "Today" matches what the user expects. if timezone.is_aware(now): now = timezone.localtime(now) if isinstance(field, models.DateTimeField): today = now.replace(hour=0, minute=0, second=0, microsecond=0) else: # field is a models.DateField today = now.date() tomorrow = today + datetime.timedelta(days=1) if today.month == 12: next_month = today.replace(year=today.year + 1, month=1, day=1) else: next_month = today.replace(month=today.month + 1, day=1) next_year = today.replace(year=today.year + 1, month=1, day=1) self.lookup_kwarg_since = '%s__gte' % field_path self.lookup_kwarg_until = '%s__lt' % field_path self.links = ( (_('Any date'), {}), (_('Today'), { self.lookup_kwarg_since: str(today), self.lookup_kwarg_until: str(tomorrow), }), (_('Past 7 days'), { self.lookup_kwarg_since: str(today - datetime.timedelta(days=7)), self.lookup_kwarg_until: str(tomorrow), }), (_('This month'), { self.lookup_kwarg_since: str(today.replace(day=1)), self.lookup_kwarg_until: str(next_month), }), (_('This year'), { self.lookup_kwarg_since: str(today.replace(month=1, day=1)), self.lookup_kwarg_until: str(next_year), }), ) if field.null: self.lookup_kwarg_isnull = '%s__isnull' % field_path self.links += ( (_('No date'), {self.field_generic + 'isnull': 'True'}), (_('Has date'), {self.field_generic + 'isnull': 'False'}), ) super().__init__(field, request, params, model, model_admin, field_path) def expected_parameters(self): params = [self.lookup_kwarg_since, self.lookup_kwarg_until] if self.field.null: params.append(self.lookup_kwarg_isnull) return params def choices(self, changelist): for title, param_dict in self.links: yield { 'selected': self.date_params == param_dict, 'query_string': changelist.get_query_string(param_dict, [self.field_generic]), 'display': title, } FieldListFilter.register( lambda f: isinstance(f, models.DateField), DateFieldListFilter) # This should be registered last, because it's a last resort. For example, # if a field is eligible to use the BooleanFieldListFilter, that'd be much # more appropriate, and the AllValuesFieldListFilter won't get used for it. 
class AllValuesFieldListFilter(FieldListFilter): def __init__(self, field, request, params, model, model_admin, field_path): self.lookup_kwarg = field_path self.lookup_kwarg_isnull = '%s__isnull' % field_path self.lookup_val = params.get(self.lookup_kwarg) self.lookup_val_isnull = params.get(self.lookup_kwarg_isnull) self.empty_value_display = model_admin.get_empty_value_display() parent_model, reverse_path = reverse_field_path(model, field_path) # Obey parent ModelAdmin queryset when deciding which options to show if model == parent_model: queryset = model_admin.get_queryset(request) else: queryset = parent_model._default_manager.all() self.lookup_choices = queryset.distinct().order_by(field.name).values_list(field.name, flat=True) super().__init__(field, request, params, model, model_admin, field_path) def expected_parameters(self): return [self.lookup_kwarg, self.lookup_kwarg_isnull] def choices(self, changelist): yield { 'selected': self.lookup_val is None and self.lookup_val_isnull is None, 'query_string': changelist.get_query_string(remove=[self.lookup_kwarg, self.lookup_kwarg_isnull]), 'display': _('All'), } include_none = False for val in self.lookup_choices: if val is None: include_none = True continue val = str(val) yield { 'selected': self.lookup_val == val, 'query_string': changelist.get_query_string({self.lookup_kwarg: val}, [self.lookup_kwarg_isnull]), 'display': val, } if include_none: yield { 'selected': bool(self.lookup_val_isnull), 'query_string': changelist.get_query_string({self.lookup_kwarg_isnull: 'True'}, [self.lookup_kwarg]), 'display': self.empty_value_display, } FieldListFilter.register(lambda f: True, AllValuesFieldListFilter) class RelatedOnlyFieldListFilter(RelatedFieldListFilter): def field_choices(self, field, request, model_admin): pk_qs = model_admin.get_queryset(request).distinct().values_list('%s__pk' % self.field_path, flat=True) ordering = self.field_admin_ordering(field, request, model_admin) return field.get_choices(include_blank=False, limit_choices_to={'pk__in': pk_qs}, ordering=ordering) class EmptyFieldListFilter(FieldListFilter): def __init__(self, field, request, params, model, model_admin, field_path): if not field.empty_strings_allowed and not field.null: raise ImproperlyConfigured( "The list filter '%s' cannot be used with field '%s' which " "doesn't allow empty strings and nulls." % ( self.__class__.__name__, field.name, ) ) self.lookup_kwarg = '%s__isempty' % field_path self.lookup_val = params.get(self.lookup_kwarg) super().__init__(field, request, params, model, model_admin, field_path) def queryset(self, request, queryset): if self.lookup_kwarg not in self.used_parameters: return queryset if self.lookup_val not in ('0', '1'): raise IncorrectLookupParameters lookup_conditions = [] if self.field.empty_strings_allowed: lookup_conditions.append((self.field_path, '')) if self.field.null: lookup_conditions.append((f'{self.field_path}__isnull', True)) lookup_condition = models.Q(*lookup_conditions, _connector=models.Q.OR) if self.lookup_val == '1': return queryset.filter(lookup_condition) return queryset.exclude(lookup_condition) def expected_parameters(self): return [self.lookup_kwarg] def choices(self, changelist): for lookup, title in ( (None, _('All')), ('1', _('Empty')), ('0', _('Not empty')), ): yield { 'selected': self.lookup_val == lookup, 'query_string': changelist.get_query_string({self.lookup_kwarg: lookup}), 'display': title, }
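# Editor's note: a minimal sketch of a custom filter built on the
# SimpleListFilter API above. The lookup values and the ``published`` field
# referenced in queryset() are hypothetical, and the class is illustrative
# only; it is not registered with any ModelAdmin here.
class ExampleDecadeListFilter(SimpleListFilter):
    title = _('publication decade')
    parameter_name = 'decade'

    def lookups(self, request, model_admin):
        # Pairs of (query-string value, human-readable title); choices()
        # above compares str(lookup) against self.value().
        return [('1990s', _('the 1990s')), ('2000s', _('the 2000s'))]

    def queryset(self, request, queryset):
        # value() returns the used_parameters entry for parameter_name,
        # or None when the filter is not active.
        if self.value() == '1990s':
            return queryset.filter(published__year__gte=1990, published__year__lte=1999)
        if self.value() == '2000s':
            return queryset.filter(published__year__gte=2000, published__year__lte=2009)
        return queryset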
6edc64d989ad9f9df546af5a2bb8a51ea95bf017254c981a7536a74f5f7fd356
import hashlib import json import os import posixpath import re from urllib.parse import unquote, urldefrag, urlsplit, urlunsplit from django.conf import settings from django.contrib.staticfiles.utils import check_settings, matches_patterns from django.core.exceptions import ImproperlyConfigured from django.core.files.base import ContentFile from django.core.files.storage import FileSystemStorage, get_storage_class from django.utils.functional import LazyObject class StaticFilesStorage(FileSystemStorage): """ Standard file system storage for static files. The defaults for ``location`` and ``base_url`` are ``STATIC_ROOT`` and ``STATIC_URL``. """ def __init__(self, location=None, base_url=None, *args, **kwargs): if location is None: location = settings.STATIC_ROOT if base_url is None: base_url = settings.STATIC_URL check_settings(base_url) super().__init__(location, base_url, *args, **kwargs) # FileSystemStorage fallbacks to MEDIA_ROOT when location # is empty, so we restore the empty value. if not location: self.base_location = None self.location = None def path(self, name): if not self.location: raise ImproperlyConfigured("You're using the staticfiles app " "without having set the STATIC_ROOT " "setting to a filesystem path.") return super().path(name) class HashedFilesMixin: default_template = """url("%(url)s")""" max_post_process_passes = 5 patterns = ( ("*.css", ( r"""(?P<matched>url\(['"]{0,1}\s*(?P<url>.*?)["']{0,1}\))""", ( r"""(?P<matched>@import\s*["']\s*(?P<url>.*?)["'])""", """@import url("%(url)s")""", ), )), ('*.js', ( ( r'(?P<matched>)^(//# (?-i:sourceMappingURL)=(?P<url>.*))$', '//# sourceMappingURL=%(url)s', ), ( r"""(?P<matched>import\s+(?s:(?P<imports>.*?))\s*from\s*["'](?P<url>.*?)["'])""", 'import %(imports)s from "%(url)s"', ), ( r"""(?P<matched>export\s+(?s:(?P<exports>.*?))\s*from\s*["'](?P<url>.*?)["'])""", 'export %(exports)s from "%(url)s"', ), (r"""(?P<matched>import\(["'](?P<url>.*?)["']\))""", 'import("%(url)s")'), )), ) keep_intermediate_files = True def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self._patterns = {} self.hashed_files = {} for extension, patterns in self.patterns: for pattern in patterns: if isinstance(pattern, (tuple, list)): pattern, template = pattern else: template = self.default_template compiled = re.compile(pattern, re.IGNORECASE) self._patterns.setdefault(extension, []).append((compiled, template)) def file_hash(self, name, content=None): """ Return a hash of the file with the given name and optional content. """ if content is None: return None md5 = hashlib.md5() for chunk in content.chunks(): md5.update(chunk) return md5.hexdigest()[:12] def hashed_name(self, name, content=None, filename=None): # `filename` is the name of file to hash if `content` isn't given. # `name` is the base name to construct the new hashed filename from. parsed_name = urlsplit(unquote(name)) clean_name = parsed_name.path.strip() filename = (filename and urlsplit(unquote(filename)).path.strip()) or clean_name opened = content is None if opened: if not self.exists(filename): raise ValueError("The file '%s' could not be found with %r." 
% (filename, self)) try: content = self.open(filename) except OSError: # Handle directory paths and fragments return name try: file_hash = self.file_hash(clean_name, content) finally: if opened: content.close() path, filename = os.path.split(clean_name) root, ext = os.path.splitext(filename) file_hash = ('.%s' % file_hash) if file_hash else '' hashed_name = os.path.join(path, "%s%s%s" % (root, file_hash, ext)) unparsed_name = list(parsed_name) unparsed_name[2] = hashed_name # Special casing for a @font-face hack, like url(myfont.eot?#iefix") # http://www.fontspring.com/blog/the-new-bulletproof-font-face-syntax if '?#' in name and not unparsed_name[3]: unparsed_name[2] += '?' return urlunsplit(unparsed_name) def _url(self, hashed_name_func, name, force=False, hashed_files=None): """ Return the non-hashed URL in DEBUG mode. """ if settings.DEBUG and not force: hashed_name, fragment = name, '' else: clean_name, fragment = urldefrag(name) if urlsplit(clean_name).path.endswith('/'): # don't hash paths hashed_name = name else: args = (clean_name,) if hashed_files is not None: args += (hashed_files,) hashed_name = hashed_name_func(*args) final_url = super().url(hashed_name) # Special casing for a @font-face hack, like url(myfont.eot?#iefix") # http://www.fontspring.com/blog/the-new-bulletproof-font-face-syntax query_fragment = '?#' in name # [sic!] if fragment or query_fragment: urlparts = list(urlsplit(final_url)) if fragment and not urlparts[4]: urlparts[4] = fragment if query_fragment and not urlparts[3]: urlparts[2] += '?' final_url = urlunsplit(urlparts) return unquote(final_url) def url(self, name, force=False): """ Return the non-hashed URL in DEBUG mode. """ return self._url(self.stored_name, name, force) def url_converter(self, name, hashed_files, template=None): """ Return the custom URL converter for the given file name. """ if template is None: template = self.default_template def converter(matchobj): """ Convert the matched URL to a normalized and hashed URL. This requires figuring out which files the matched URL resolves to and calling the url() method of the storage. """ matches = matchobj.groupdict() matched = matches['matched'] url = matches['url'] # Ignore absolute/protocol-relative and data-uri URLs. if re.match(r'^[a-z]+:', url): return matched # Ignore absolute URLs that don't point to a static file (dynamic # CSS / JS?). Note that STATIC_URL cannot be empty. if url.startswith('/') and not url.startswith(settings.STATIC_URL): return matched # Strip off the fragment so a path-like fragment won't interfere. url_path, fragment = urldefrag(url) if url_path.startswith('/'): # Otherwise the condition above would have returned prematurely. assert url_path.startswith(settings.STATIC_URL) target_name = url_path[len(settings.STATIC_URL):] else: # We're using the posixpath module to mix paths and URLs conveniently. source_name = name if os.sep == '/' else name.replace(os.sep, '/') target_name = posixpath.join(posixpath.dirname(source_name), url_path) # Determine the hashed name of the target file with the storage backend. hashed_url = self._url( self._stored_name, unquote(target_name), force=True, hashed_files=hashed_files, ) transformed_url = '/'.join(url_path.split('/')[:-1] + hashed_url.split('/')[-1:]) # Restore the fragment that was stripped off earlier. 
if fragment: transformed_url += ('?#' if '?#' in url else '#') + fragment # Return the hashed version to the file matches['url'] = unquote(transformed_url) return template % matches return converter def post_process(self, paths, dry_run=False, **options): """ Post process the given dictionary of files (called from collectstatic). Processing is actually two separate operations: 1. renaming files to include a hash of their content for cache-busting, and copying those files to the target storage. 2. adjusting files which contain references to other files so they refer to the cache-busting filenames. If either of these are performed on a file, then that file is considered post-processed. """ # don't even dare to process the files if we're in dry run mode if dry_run: return # where to store the new paths hashed_files = {} # build a list of adjustable files adjustable_paths = [ path for path in paths if matches_patterns(path, self._patterns) ] # Adjustable files to yield at end, keyed by the original path. processed_adjustable_paths = {} # Do a single pass first. Post-process all files once, yielding not # adjustable files and exceptions, and collecting adjustable files. for name, hashed_name, processed, _ in self._post_process(paths, adjustable_paths, hashed_files): if name not in adjustable_paths or isinstance(processed, Exception): yield name, hashed_name, processed else: processed_adjustable_paths[name] = (name, hashed_name, processed) paths = {path: paths[path] for path in adjustable_paths} substitutions = False for i in range(self.max_post_process_passes): substitutions = False for name, hashed_name, processed, subst in self._post_process(paths, adjustable_paths, hashed_files): # Overwrite since hashed_name may be newer. processed_adjustable_paths[name] = (name, hashed_name, processed) substitutions = substitutions or subst if not substitutions: break if substitutions: yield 'All', None, RuntimeError('Max post-process passes exceeded.') # Store the processed paths self.hashed_files.update(hashed_files) # Yield adjustable files with final, hashed name. yield from processed_adjustable_paths.values() def _post_process(self, paths, adjustable_paths, hashed_files): # Sort the files by directory level def path_level(name): return len(name.split(os.sep)) for name in sorted(paths, key=path_level, reverse=True): substitutions = True # use the original, local file, not the copied-but-unprocessed # file, which might be somewhere far away, like S3 storage, path = paths[name] with storage.open(path) as original_file: cleaned_name = self.clean_name(name) hash_key = self.hash_key(cleaned_name) # generate the hash with the original content, even for # adjustable files. if hash_key not in hashed_files: hashed_name = self.hashed_name(name, original_file) else: hashed_name = hashed_files[hash_key] # then get the original's file content.. 
if hasattr(original_file, 'seek'): original_file.seek(0) hashed_file_exists = self.exists(hashed_name) processed = False # ..to apply each replacement pattern to the content if name in adjustable_paths: old_hashed_name = hashed_name content = original_file.read().decode('utf-8') for extension, patterns in self._patterns.items(): if matches_patterns(path, (extension,)): for pattern, template in patterns: converter = self.url_converter(name, hashed_files, template) try: content = pattern.sub(converter, content) except ValueError as exc: yield name, None, exc, False if hashed_file_exists: self.delete(hashed_name) # then save the processed result content_file = ContentFile(content.encode()) if self.keep_intermediate_files: # Save intermediate file for reference self._save(hashed_name, content_file) hashed_name = self.hashed_name(name, content_file) if self.exists(hashed_name): self.delete(hashed_name) saved_name = self._save(hashed_name, content_file) hashed_name = self.clean_name(saved_name) # If the file hash stayed the same, this file didn't change if old_hashed_name == hashed_name: substitutions = False processed = True if not processed: # or handle the case in which neither processing nor # a change to the original file happened if not hashed_file_exists: processed = True saved_name = self._save(hashed_name, original_file) hashed_name = self.clean_name(saved_name) # and then set the cache accordingly hashed_files[hash_key] = hashed_name yield name, hashed_name, processed, substitutions def clean_name(self, name): return name.replace('\\', '/') def hash_key(self, name): return name def _stored_name(self, name, hashed_files): # Normalize the path to avoid multiple names for the same file like # ../foo/bar.css and ../foo/../foo/bar.css which normalize to the same # path. name = posixpath.normpath(name) cleaned_name = self.clean_name(name) hash_key = self.hash_key(cleaned_name) cache_name = hashed_files.get(hash_key) if cache_name is None: cache_name = self.clean_name(self.hashed_name(name)) return cache_name def stored_name(self, name): cleaned_name = self.clean_name(name) hash_key = self.hash_key(cleaned_name) cache_name = self.hashed_files.get(hash_key) if cache_name: return cache_name # No cached name found, recalculate it from the files. intermediate_name = name for i in range(self.max_post_process_passes + 1): cache_name = self.clean_name( self.hashed_name(name, content=None, filename=intermediate_name) ) if intermediate_name == cache_name: # Store the hashed name if there was a miss. self.hashed_files[hash_key] = cache_name return cache_name else: # Move on to the next intermediate file. intermediate_name = cache_name # If the cache name can't be determined after the max number of passes, # the intermediate files on disk may be corrupt; avoid an infinite loop. raise ValueError("The name '%s' could not be hashed with %r." 
% (name, self)) class ManifestFilesMixin(HashedFilesMixin): manifest_version = '1.0' # the manifest format standard manifest_name = 'staticfiles.json' manifest_strict = True keep_intermediate_files = False def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.hashed_files = self.load_manifest() def read_manifest(self): try: with self.open(self.manifest_name) as manifest: return manifest.read().decode() except FileNotFoundError: return None def load_manifest(self): content = self.read_manifest() if content is None: return {} try: stored = json.loads(content) except json.JSONDecodeError: pass else: version = stored.get('version') if version == '1.0': return stored.get('paths', {}) raise ValueError("Couldn't load manifest '%s' (version %s)" % (self.manifest_name, self.manifest_version)) def post_process(self, *args, **kwargs): self.hashed_files = {} yield from super().post_process(*args, **kwargs) if not kwargs.get('dry_run'): self.save_manifest() def save_manifest(self): payload = {'paths': self.hashed_files, 'version': self.manifest_version} if self.exists(self.manifest_name): self.delete(self.manifest_name) contents = json.dumps(payload).encode() self._save(self.manifest_name, ContentFile(contents)) def stored_name(self, name): parsed_name = urlsplit(unquote(name)) clean_name = parsed_name.path.strip() hash_key = self.hash_key(clean_name) cache_name = self.hashed_files.get(hash_key) if cache_name is None: if self.manifest_strict: raise ValueError("Missing staticfiles manifest entry for '%s'" % clean_name) cache_name = self.clean_name(self.hashed_name(name)) unparsed_name = list(parsed_name) unparsed_name[2] = cache_name # Special casing for a @font-face hack, like url(myfont.eot?#iefix") # http://www.fontspring.com/blog/the-new-bulletproof-font-face-syntax if '?#' in name and not unparsed_name[3]: unparsed_name[2] += '?' return urlunsplit(unparsed_name) class ManifestStaticFilesStorage(ManifestFilesMixin, StaticFilesStorage): """ A static file system storage backend which also saves hashed copies of the files it saves. """ pass class ConfiguredStorage(LazyObject): def _setup(self): self._wrapped = get_storage_class(settings.STATICFILES_STORAGE)() staticfiles_storage = ConfiguredStorage()
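# Editor's note: two small sketches of the customization points above; both
# class names are hypothetical. Setting manifest_strict = False makes
# stored_name() fall back to hashing the file on disk instead of raising
# ValueError for names missing from staticfiles.json, and overriding
# file_hash() changes the fingerprint embedded in hashed filenames.
class LenientManifestStaticFilesStorage(ManifestStaticFilesStorage):
    manifest_strict = False


class LongHashManifestStaticFilesStorage(ManifestStaticFilesStorage):
    def file_hash(self, name, content=None):
        # Same digest as HashedFilesMixin.file_hash(), truncated less
        # aggressively (16 hex characters instead of 12).
        if content is None:
            return None
        md5 = hashlib.md5()
        for chunk in content.chunks():
            md5.update(chunk)
        return md5.hexdigest()[:16]

# Either class would be enabled through the STATICFILES_STORAGE setting,
# e.g. STATICFILES_STORAGE = 'myproject.storage.LenientManifestStaticFilesStorage'
# (the dotted path is, of course, project-specific).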
1dfb701c83a90b3844e49e3251cf062c05a731b07f6eb1cdac5ea1f6f5a1c6ae
import functools import itertools from collections import defaultdict from django.contrib.contenttypes.models import ContentType from django.core import checks from django.core.exceptions import FieldDoesNotExist, ObjectDoesNotExist from django.db import DEFAULT_DB_ALIAS, models, router, transaction from django.db.models import DO_NOTHING, ForeignObject, ForeignObjectRel from django.db.models.base import ModelBase, make_foreign_order_accessors from django.db.models.fields.mixins import FieldCacheMixin from django.db.models.fields.related import ( ReverseManyToOneDescriptor, lazy_related_operation, ) from django.db.models.query_utils import PathInfo from django.db.models.sql import AND from django.db.models.sql.where import WhereNode from django.utils.functional import cached_property class GenericForeignKey(FieldCacheMixin): """ Provide a generic many-to-one relation through the ``content_type`` and ``object_id`` fields. This class also doubles as an accessor to the related object (similar to ForwardManyToOneDescriptor) by adding itself as a model attribute. """ # Field flags auto_created = False concrete = False editable = False hidden = False is_relation = True many_to_many = False many_to_one = True one_to_many = False one_to_one = False related_model = None remote_field = None def __init__(self, ct_field='content_type', fk_field='object_id', for_concrete_model=True): self.ct_field = ct_field self.fk_field = fk_field self.for_concrete_model = for_concrete_model self.editable = False self.rel = None self.column = None def contribute_to_class(self, cls, name, **kwargs): self.name = name self.model = cls cls._meta.add_field(self, private=True) setattr(cls, name, self) def get_filter_kwargs_for_object(self, obj): """See corresponding method on Field""" return { self.fk_field: getattr(obj, self.fk_field), self.ct_field: getattr(obj, self.ct_field), } def get_forward_related_filter(self, obj): """See corresponding method on RelatedField""" return { self.fk_field: obj.pk, self.ct_field: ContentType.objects.get_for_model(obj).pk, } def __str__(self): model = self.model return '%s.%s' % (model._meta.label, self.name) def check(self, **kwargs): return [ *self._check_field_name(), *self._check_object_id_field(), *self._check_content_type_field(), ] def _check_field_name(self): if self.name.endswith("_"): return [ checks.Error( 'Field names must not end with an underscore.', obj=self, id='fields.E001', ) ] else: return [] def _check_object_id_field(self): try: self.model._meta.get_field(self.fk_field) except FieldDoesNotExist: return [ checks.Error( "The GenericForeignKey object ID references the " "nonexistent field '%s'." % self.fk_field, obj=self, id='contenttypes.E001', ) ] else: return [] def _check_content_type_field(self): """ Check if field named `field_name` in model `model` exists and is a valid content_type field (is a ForeignKey to ContentType). """ try: field = self.model._meta.get_field(self.ct_field) except FieldDoesNotExist: return [ checks.Error( "The GenericForeignKey content type references the " "nonexistent field '%s.%s'." % ( self.model._meta.object_name, self.ct_field ), obj=self, id='contenttypes.E002', ) ] else: if not isinstance(field, models.ForeignKey): return [ checks.Error( "'%s.%s' is not a ForeignKey." % ( self.model._meta.object_name, self.ct_field ), hint=( "GenericForeignKeys must use a ForeignKey to " "'contenttypes.ContentType' as the 'content_type' field." 
), obj=self, id='contenttypes.E003', ) ] elif field.remote_field.model != ContentType: return [ checks.Error( "'%s.%s' is not a ForeignKey to 'contenttypes.ContentType'." % ( self.model._meta.object_name, self.ct_field ), hint=( "GenericForeignKeys must use a ForeignKey to " "'contenttypes.ContentType' as the 'content_type' field." ), obj=self, id='contenttypes.E004', ) ] else: return [] def get_cache_name(self): return self.name def get_content_type(self, obj=None, id=None, using=None): if obj is not None: return ContentType.objects.db_manager(obj._state.db).get_for_model( obj, for_concrete_model=self.for_concrete_model) elif id is not None: return ContentType.objects.db_manager(using).get_for_id(id) else: # This should never happen. I love comments like this, don't you? raise Exception("Impossible arguments to GFK.get_content_type!") def get_prefetch_queryset(self, instances, queryset=None): if queryset is not None: raise ValueError("Custom queryset can't be used for this lookup.") # For efficiency, group the instances by content type and then do one # query per model fk_dict = defaultdict(set) # We need one instance for each group in order to get the right db: instance_dict = {} ct_attname = self.model._meta.get_field(self.ct_field).get_attname() for instance in instances: # We avoid looking for values if either ct_id or fkey value is None ct_id = getattr(instance, ct_attname) if ct_id is not None: fk_val = getattr(instance, self.fk_field) if fk_val is not None: fk_dict[ct_id].add(fk_val) instance_dict[ct_id] = instance ret_val = [] for ct_id, fkeys in fk_dict.items(): instance = instance_dict[ct_id] ct = self.get_content_type(id=ct_id, using=instance._state.db) ret_val.extend(ct.get_all_objects_for_this_type(pk__in=fkeys)) # For doing the join in Python, we have to match both the FK val and the # content type, so we use a callable that returns a (fk, class) pair. def gfk_key(obj): ct_id = getattr(obj, ct_attname) if ct_id is None: return None else: model = self.get_content_type(id=ct_id, using=obj._state.db).model_class() return (model._meta.pk.get_prep_value(getattr(obj, self.fk_field)), model) return ( ret_val, lambda obj: (obj.pk, obj.__class__), gfk_key, True, self.name, True, ) def __get__(self, instance, cls=None): if instance is None: return self # Don't use getattr(instance, self.ct_field) here because that might # reload the same ContentType over and over (#5570). Instead, get the # content type ID here, and later when the actual instance is needed, # use ContentType.objects.get_for_id(), which has a global cache. 
f = self.model._meta.get_field(self.ct_field) ct_id = getattr(instance, f.get_attname(), None) pk_val = getattr(instance, self.fk_field) rel_obj = self.get_cached_value(instance, default=None) if rel_obj is not None: ct_match = ct_id == self.get_content_type(obj=rel_obj, using=instance._state.db).id pk_match = rel_obj._meta.pk.to_python(pk_val) == rel_obj.pk if ct_match and pk_match: return rel_obj else: rel_obj = None if ct_id is not None: ct = self.get_content_type(id=ct_id, using=instance._state.db) try: rel_obj = ct.get_object_for_this_type(pk=pk_val) except ObjectDoesNotExist: pass self.set_cached_value(instance, rel_obj) return rel_obj def __set__(self, instance, value): ct = None fk = None if value is not None: ct = self.get_content_type(obj=value) fk = value.pk setattr(instance, self.ct_field, ct) setattr(instance, self.fk_field, fk) self.set_cached_value(instance, value) class GenericRel(ForeignObjectRel): """ Used by GenericRelation to store information about the relation. """ def __init__(self, field, to, related_name=None, related_query_name=None, limit_choices_to=None): super().__init__( field, to, related_name=related_query_name or '+', related_query_name=related_query_name, limit_choices_to=limit_choices_to, on_delete=DO_NOTHING, ) class GenericRelation(ForeignObject): """ Provide a reverse to a relation created by a GenericForeignKey. """ # Field flags auto_created = False empty_strings_allowed = False many_to_many = False many_to_one = False one_to_many = True one_to_one = False rel_class = GenericRel mti_inherited = False def __init__(self, to, object_id_field='object_id', content_type_field='content_type', for_concrete_model=True, related_query_name=None, limit_choices_to=None, **kwargs): kwargs['rel'] = self.rel_class( self, to, related_query_name=related_query_name, limit_choices_to=limit_choices_to, ) # Reverse relations are always nullable (Django can't enforce that a # foreign key on the related model points to this model). kwargs['null'] = True kwargs['blank'] = True kwargs['on_delete'] = models.CASCADE kwargs['editable'] = False kwargs['serialize'] = False # This construct is somewhat of an abuse of ForeignObject. This field # represents a relation from pk to object_id field. But, this relation # isn't direct, the join is generated reverse along foreign key. So, # the from_field is object_id field, to_field is pk because of the # reverse join. super().__init__(to, from_fields=[object_id_field], to_fields=[], **kwargs) self.object_id_field_name = object_id_field self.content_type_field_name = content_type_field self.for_concrete_model = for_concrete_model def check(self, **kwargs): return [ *super().check(**kwargs), *self._check_generic_foreign_key_existence(), ] def _is_matching_generic_foreign_key(self, field): """ Return True if field is a GenericForeignKey whose content type and object id fields correspond to the equivalent attributes on this GenericRelation. """ return ( isinstance(field, GenericForeignKey) and field.ct_field == self.content_type_field_name and field.fk_field == self.object_id_field_name ) def _check_generic_foreign_key_existence(self): target = self.remote_field.model if isinstance(target, ModelBase): fields = target._meta.private_fields if any(self._is_matching_generic_foreign_key(field) for field in fields): return [] else: return [ checks.Error( "The GenericRelation defines a relation with the model " "'%s', but that model does not have a GenericForeignKey." 
% target._meta.label, obj=self, id='contenttypes.E004', ) ] else: return [] def resolve_related_fields(self): self.to_fields = [self.model._meta.pk.name] return [(self.remote_field.model._meta.get_field(self.object_id_field_name), self.model._meta.pk)] def _get_path_info_with_parent(self, filtered_relation): """ Return the path that joins the current model through any parent models. The idea is that if you have a GFK defined on a parent model then we need to join the parent model first, then the child model. """ # With an inheritance chain ChildTag -> Tag and Tag defines the # GenericForeignKey, and a TaggedItem model has a GenericRelation to # ChildTag, then we need to generate a join from TaggedItem to Tag # (as Tag.object_id == TaggedItem.pk), and another join from Tag to # ChildTag (as that is where the relation is to). Do this by first # generating a join to the parent model, then generating joins to the # child models. path = [] opts = self.remote_field.model._meta.concrete_model._meta parent_opts = opts.get_field(self.object_id_field_name).model._meta target = parent_opts.pk path.append(PathInfo( from_opts=self.model._meta, to_opts=parent_opts, target_fields=(target,), join_field=self.remote_field, m2m=True, direct=False, filtered_relation=filtered_relation, )) # Collect joins needed for the parent -> child chain. This is easiest # to do if we collect joins for the child -> parent chain and then # reverse the direction (call to reverse() and use of # field.remote_field.get_path_info()). parent_field_chain = [] while parent_opts != opts: field = opts.get_ancestor_link(parent_opts.model) parent_field_chain.append(field) opts = field.remote_field.model._meta parent_field_chain.reverse() for field in parent_field_chain: path.extend(field.remote_field.get_path_info()) return path def get_path_info(self, filtered_relation=None): opts = self.remote_field.model._meta object_id_field = opts.get_field(self.object_id_field_name) if object_id_field.model != opts.model: return self._get_path_info_with_parent(filtered_relation) else: target = opts.pk return [PathInfo( from_opts=self.model._meta, to_opts=opts, target_fields=(target,), join_field=self.remote_field, m2m=True, direct=False, filtered_relation=filtered_relation, )] def get_reverse_path_info(self, filtered_relation=None): opts = self.model._meta from_opts = self.remote_field.model._meta return [PathInfo( from_opts=from_opts, to_opts=opts, target_fields=(opts.pk,), join_field=self, m2m=not self.unique, direct=False, filtered_relation=filtered_relation, )] def value_to_string(self, obj): qs = getattr(obj, self.name).all() return str([instance.pk for instance in qs]) def contribute_to_class(self, cls, name, **kwargs): kwargs['private_only'] = True super().contribute_to_class(cls, name, **kwargs) self.model = cls # Disable the reverse relation for fields inherited by subclasses of a # model in multi-table inheritance. The reverse relation points to the # field of the base model. if self.mti_inherited: self.remote_field.related_name = '+' self.remote_field.related_query_name = None setattr(cls, self.name, ReverseGenericManyToOneDescriptor(self.remote_field)) # Add get_RELATED_order() and set_RELATED_order() to the model this # field belongs to, if the model on the other end of this relation # is ordered with respect to its corresponding GenericForeignKey. 
if not cls._meta.abstract: def make_generic_foreign_order_accessors(related_model, model): if self._is_matching_generic_foreign_key(model._meta.order_with_respect_to): make_foreign_order_accessors(model, related_model) lazy_related_operation(make_generic_foreign_order_accessors, self.model, self.remote_field.model) def set_attributes_from_rel(self): pass def get_internal_type(self): return "ManyToManyField" def get_content_type(self): """ Return the content type associated with this field's model. """ return ContentType.objects.get_for_model(self.model, for_concrete_model=self.for_concrete_model) def get_extra_restriction(self, alias, remote_alias): field = self.remote_field.model._meta.get_field(self.content_type_field_name) contenttype_pk = self.get_content_type().pk lookup = field.get_lookup('exact')(field.get_col(remote_alias), contenttype_pk) return WhereNode([lookup], connector=AND) def bulk_related_objects(self, objs, using=DEFAULT_DB_ALIAS): """ Return all objects related to ``objs`` via this ``GenericRelation``. """ return self.remote_field.model._base_manager.db_manager(using).filter(**{ "%s__pk" % self.content_type_field_name: ContentType.objects.db_manager(using).get_for_model( self.model, for_concrete_model=self.for_concrete_model).pk, "%s__in" % self.object_id_field_name: [obj.pk for obj in objs] }) class ReverseGenericManyToOneDescriptor(ReverseManyToOneDescriptor): """ Accessor to the related objects manager on the one-to-many relation created by GenericRelation. In the example:: class Post(Model): comments = GenericRelation(Comment) ``post.comments`` is a ReverseGenericManyToOneDescriptor instance. """ @cached_property def related_manager_cls(self): return create_generic_related_manager( self.rel.model._default_manager.__class__, self.rel, ) def create_generic_related_manager(superclass, rel): """ Factory function to create a manager that subclasses another manager (generally the default manager of a given model) and adds behaviors specific to generic relations. """ class GenericRelatedObjectManager(superclass): def __init__(self, instance=None): super().__init__() self.instance = instance self.model = rel.model self.get_content_type = functools.partial( ContentType.objects.db_manager(instance._state.db).get_for_model, for_concrete_model=rel.field.for_concrete_model, ) self.content_type = self.get_content_type(instance) self.content_type_field_name = rel.field.content_type_field_name self.object_id_field_name = rel.field.object_id_field_name self.prefetch_cache_name = rel.field.attname self.pk_val = instance.pk self.core_filters = { '%s__pk' % self.content_type_field_name: self.content_type.id, self.object_id_field_name: self.pk_val, } def __call__(self, *, manager): manager = getattr(self.model, manager) manager_class = create_generic_related_manager(manager.__class__, rel) return manager_class(instance=self.instance) do_not_call_in_templates = True def __str__(self): return repr(self) def _apply_rel_filters(self, queryset): """ Filter the queryset for the instance this manager is bound to. 
""" db = self._db or router.db_for_read(self.model, instance=self.instance) return queryset.using(db).filter(**self.core_filters) def _remove_prefetched_objects(self): try: self.instance._prefetched_objects_cache.pop(self.prefetch_cache_name) except (AttributeError, KeyError): pass # nothing to clear from cache def get_queryset(self): try: return self.instance._prefetched_objects_cache[self.prefetch_cache_name] except (AttributeError, KeyError): queryset = super().get_queryset() return self._apply_rel_filters(queryset) def get_prefetch_queryset(self, instances, queryset=None): if queryset is None: queryset = super().get_queryset() queryset._add_hints(instance=instances[0]) queryset = queryset.using(queryset._db or self._db) # Group instances by content types. content_type_queries = ( models.Q( (f'{self.content_type_field_name}__pk', content_type_id), (f'{self.object_id_field_name}__in', {obj.pk for obj in objs}), ) for content_type_id, objs in itertools.groupby( sorted(instances, key=lambda obj: self.get_content_type(obj).pk), lambda obj: self.get_content_type(obj).pk, ) ) query = models.Q(*content_type_queries, _connector=models.Q.OR) # We (possibly) need to convert object IDs to the type of the # instances' PK in order to match up instances: object_id_converter = instances[0]._meta.pk.to_python content_type_id_field_name = '%s_id' % self.content_type_field_name return ( queryset.filter(query), lambda relobj: ( object_id_converter(getattr(relobj, self.object_id_field_name)), getattr(relobj, content_type_id_field_name), ), lambda obj: (obj.pk, self.get_content_type(obj).pk), False, self.prefetch_cache_name, False, ) def add(self, *objs, bulk=True): self._remove_prefetched_objects() db = router.db_for_write(self.model, instance=self.instance) def check_and_update_obj(obj): if not isinstance(obj, self.model): raise TypeError("'%s' instance expected, got %r" % ( self.model._meta.object_name, obj )) setattr(obj, self.content_type_field_name, self.content_type) setattr(obj, self.object_id_field_name, self.pk_val) if bulk: pks = [] for obj in objs: if obj._state.adding or obj._state.db != db: raise ValueError( "%r instance isn't saved. Use bulk=False or save " "the object first." % obj ) check_and_update_obj(obj) pks.append(obj.pk) self.model._base_manager.using(db).filter(pk__in=pks).update(**{ self.content_type_field_name: self.content_type, self.object_id_field_name: self.pk_val, }) else: with transaction.atomic(using=db, savepoint=False): for obj in objs: check_and_update_obj(obj) obj.save() add.alters_data = True def remove(self, *objs, bulk=True): if not objs: return self._clear(self.filter(pk__in=[o.pk for o in objs]), bulk) remove.alters_data = True def clear(self, *, bulk=True): self._clear(self, bulk) clear.alters_data = True def _clear(self, queryset, bulk): self._remove_prefetched_objects() db = router.db_for_write(self.model, instance=self.instance) queryset = queryset.using(db) if bulk: # `QuerySet.delete()` creates its own atomic block which # contains the `pre_delete` and `post_delete` signal handlers. queryset.delete() else: with transaction.atomic(using=db, savepoint=False): for obj in queryset: obj.delete() _clear.alters_data = True def set(self, objs, *, bulk=True, clear=False): # Force evaluation of `objs` in case it's a queryset whose value # could be affected by `manager.clear()`. Refs #19816. 
objs = tuple(objs) db = router.db_for_write(self.model, instance=self.instance) with transaction.atomic(using=db, savepoint=False): if clear: self.clear() self.add(*objs, bulk=bulk) else: old_objs = set(self.using(db).all()) new_objs = [] for obj in objs: if obj in old_objs: old_objs.remove(obj) else: new_objs.append(obj) self.remove(*old_objs) self.add(*new_objs, bulk=bulk) set.alters_data = True def create(self, **kwargs): self._remove_prefetched_objects() kwargs[self.content_type_field_name] = self.content_type kwargs[self.object_id_field_name] = self.pk_val db = router.db_for_write(self.model, instance=self.instance) return super().using(db).create(**kwargs) create.alters_data = True def get_or_create(self, **kwargs): kwargs[self.content_type_field_name] = self.content_type kwargs[self.object_id_field_name] = self.pk_val db = router.db_for_write(self.model, instance=self.instance) return super().using(db).get_or_create(**kwargs) get_or_create.alters_data = True def update_or_create(self, **kwargs): kwargs[self.content_type_field_name] = self.content_type kwargs[self.object_id_field_name] = self.pk_val db = router.db_for_write(self.model, instance=self.instance) return super().using(db).update_or_create(**kwargs) update_or_create.alters_data = True return GenericRelatedObjectManager
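# Editor's note: the canonical shape of models using the two field classes
# above, kept in a comment because model classes can't be declared in this
# module. ``TaggedItem`` and ``Bookmark`` are hypothetical.
#
#     class TaggedItem(models.Model):
#         tag = models.SlugField()
#         content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
#         object_id = models.PositiveIntegerField()
#         content_object = GenericForeignKey('content_type', 'object_id')
#
#     class Bookmark(models.Model):
#         url = models.URLField()
#         tags = GenericRelation(TaggedItem)
#
# ``bookmark.tags`` is then handled by the manager class produced by
# create_generic_related_manager(), e.g. ``bookmark.tags.create(tag='django')``
# or ``bookmark.tags.all()``.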
91d87a909e223ac9576adb9dcf20964a93582ce98f330c33d24eb5d4332eadda
from django.contrib.sessions.base_session import (
    AbstractBaseSession, BaseSessionManager,
)


class SessionManager(BaseSessionManager):
    use_in_migrations = True


class Session(AbstractBaseSession):
    """
    Django provides full support for anonymous sessions. The session
    framework lets you store and retrieve arbitrary data on a
    per-site-visitor basis. It stores data on the server side and
    abstracts the sending and receiving of cookies. Cookies contain a
    session ID -- not the data itself.

    The Django sessions framework is entirely cookie-based. It does
    not fall back to putting session IDs in URLs. This is an intentional
    design decision. Not only does that behavior make URLs ugly, it makes
    your site vulnerable to session-ID theft via the "Referer" header.

    For complete documentation on using Sessions in your code, consult
    the sessions documentation that is shipped with Django (also available
    on the Django web site).
    """
    objects = SessionManager()

    @classmethod
    def get_session_store_class(cls):
        from django.contrib.sessions.backends.db import SessionStore
        return SessionStore

    class Meta(AbstractBaseSession.Meta):
        db_table = 'django_session'
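# Editor's note: a sketch of the SessionStore round trip backing this model
# (it requires configured settings and a database, hence shown as a comment;
# the 'cart' key and its value are illustrative):
#
#     store = Session.get_session_store_class()()
#     store['cart'] = [3, 14]
#     store.create()  # inserts a django_session row with a new session key
#     restored = Session.get_session_store_class()(session_key=store.session_key)
#     restored['cart']  # -> [3, 14]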
b5be15dd1c1cb5383f9786ea3c675bdb4680d88c4a6e39aaec65b9b41b1cbafd
import re

from django.utils.regex_helper import _lazy_re_compile

# Regular expression for recognizing HEXEWKB and WKT. A prophylactic measure
# to prevent potentially malicious input from reaching the underlying C
# library. Not a substitute for good web security programming practices.
hex_regex = _lazy_re_compile(r'^[0-9A-F]+$', re.I)
wkt_regex = _lazy_re_compile(
    r'^(SRID=(?P<srid>\-?\d+);)?'
    r'(?P<wkt>'
    r'(?P<type>POINT|LINESTRING|LINEARRING|POLYGON|MULTIPOINT|'
    r'MULTILINESTRING|MULTIPOLYGON|GEOMETRYCOLLECTION)'
    r'[ACEGIMLONPSRUTYZ\d,\.\-\+\(\) ]+)$',
    re.I
)
json_regex = _lazy_re_compile(r'^(\s+)?\{.*}(\s+)?$', re.DOTALL)
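# Editor's note: a small illustration (hypothetical helper, not Django API)
# of the named groups the WKT regex captures.
def _example_parse_wkt(value='SRID=4326;POINT(5 23)'):
    match = wkt_regex.match(value)
    if match is None:
        return None
    # For the default value: ('4326', 'POINT', 'POINT(5 23)')
    return match['srid'], match['type'], match['wkt']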
d44db33b5f0f817371cadab314e31489ffc3840fde66ed08d681202913719a59
import binascii
import json

from django.conf import settings
from django.contrib.messages.storage.base import BaseStorage, Message
from django.core import signing
from django.http import SimpleCookie
from django.utils.safestring import SafeData, mark_safe


class MessageEncoder(json.JSONEncoder):
    """
    Compactly serialize instances of the ``Message`` class as JSON.
    """
    message_key = '__json_message'

    def default(self, obj):
        if isinstance(obj, Message):
            # Using 0/1 here instead of False/True to produce more compact json
            is_safedata = 1 if isinstance(obj.message, SafeData) else 0
            message = [self.message_key, is_safedata, obj.level, obj.message]
            if obj.extra_tags:
                message.append(obj.extra_tags)
            return message
        return super().default(obj)


class MessageDecoder(json.JSONDecoder):
    """
    Decode JSON that includes serialized ``Message`` instances.
    """

    def process_messages(self, obj):
        if isinstance(obj, list) and obj:
            if obj[0] == MessageEncoder.message_key:
                if obj[1]:
                    obj[3] = mark_safe(obj[3])
                return Message(*obj[2:])
            return [self.process_messages(item) for item in obj]
        if isinstance(obj, dict):
            return {key: self.process_messages(value)
                    for key, value in obj.items()}
        return obj

    def decode(self, s, **kwargs):
        decoded = super().decode(s, **kwargs)
        return self.process_messages(decoded)


class MessageSerializer:
    def dumps(self, obj):
        return json.dumps(
            obj,
            separators=(',', ':'),
            cls=MessageEncoder,
        ).encode('latin-1')

    def loads(self, data):
        return json.loads(data.decode('latin-1'), cls=MessageDecoder)


class CookieStorage(BaseStorage):
    """
    Store messages in a cookie.
    """
    cookie_name = 'messages'
    # uwsgi's default configuration enforces a maximum size of 4kb for all the
    # HTTP headers. In order to leave some room for other cookies and headers,
    # restrict the messages cookie to 1/2 of 4kb. See #18781.
    max_cookie_size = 2048
    not_finished = '__messagesnotfinished__'
    key_salt = 'django.contrib.messages'

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.signer = signing.get_cookie_signer(salt=self.key_salt)

    def _get(self, *args, **kwargs):
        """
        Retrieve a list of messages from the messages cookie. If the
        not_finished sentinel value is found at the end of the message list,
        remove it and return a result indicating that not all messages were
        retrieved by this storage.
        """
        data = self.request.COOKIES.get(self.cookie_name)
        messages = self._decode(data)
        all_retrieved = not (messages and messages[-1] == self.not_finished)
        if messages and not all_retrieved:
            # remove the sentinel value
            messages.pop()
        return messages, all_retrieved

    def _update_cookie(self, encoded_data, response):
        """
        Either set the cookie with the encoded data if there is any data to
        store, or delete the cookie.
        """
        if encoded_data:
            response.set_cookie(
                self.cookie_name, encoded_data,
                domain=settings.SESSION_COOKIE_DOMAIN,
                secure=settings.SESSION_COOKIE_SECURE or None,
                httponly=settings.SESSION_COOKIE_HTTPONLY or None,
                samesite=settings.SESSION_COOKIE_SAMESITE,
            )
        else:
            response.delete_cookie(
                self.cookie_name,
                domain=settings.SESSION_COOKIE_DOMAIN,
                samesite=settings.SESSION_COOKIE_SAMESITE,
            )

    def _store(self, messages, response, remove_oldest=True, *args, **kwargs):
        """
        Store the messages to a cookie and return a list of any messages which
        could not be stored.

        If the encoded data is larger than ``max_cookie_size``, remove
        messages until the data fits (these are the messages which are
        returned), and add the not_finished sentinel value to indicate as
        much.
        """
        unstored_messages = []
        encoded_data = self._encode(messages)
        if self.max_cookie_size:
            # data is going to be stored eventually by SimpleCookie, which
            # adds its own overhead, which we must account for.
            cookie = SimpleCookie()  # create outside the loop

            def stored_length(val):
                return len(cookie.value_encode(val)[1])

            while encoded_data and stored_length(encoded_data) > self.max_cookie_size:
                if remove_oldest:
                    unstored_messages.append(messages.pop(0))
                else:
                    unstored_messages.insert(0, messages.pop())
                encoded_data = self._encode(messages + [self.not_finished],
                                            encode_empty=unstored_messages)
        self._update_cookie(encoded_data, response)
        return unstored_messages

    def _encode(self, messages, encode_empty=False):
        """
        Return an encoded version of the messages list which can be stored as
        plain text.

        Since the data will be retrieved from the client-side, the encoded
        data also contains a hash to ensure that the data was not tampered
        with.
        """
        if messages or encode_empty:
            return self.signer.sign_object(messages, serializer=MessageSerializer, compress=True)

    def _decode(self, data):
        """
        Safely decode an encoded text stream back into a list of messages.

        If the encoded text stream contained an invalid hash or was in an
        invalid format, return None.
        """
        if not data:
            return None
        try:
            return self.signer.unsign_object(data, serializer=MessageSerializer)
        # RemovedInDjango41Warning: when the deprecation ends, replace with:
        #
        # except (signing.BadSignature, json.JSONDecodeError):
        #     pass
        except signing.BadSignature:
            decoded = None
        except (binascii.Error, json.JSONDecodeError):
            decoded = self.signer.unsign(data)

        if decoded:
            # RemovedInDjango41Warning.
            try:
                return json.loads(decoded, cls=MessageDecoder)
            except json.JSONDecodeError:
                pass
        # Mark the data as used (so it gets removed) since something was wrong
        # with the data.
        self.used = True
        return None
eea9ed0787201e047c540c136505cb109e9d201f3f76bd1db8d8f76539cef004
from django.conf import settings
from django.contrib.messages import constants, utils

LEVEL_TAGS = utils.get_level_tags()


class Message:
    """
    Represent an actual message that can be stored in any of the supported
    storage classes (typically session- or cookie-based) and rendered in a
    view or template.
    """

    def __init__(self, level, message, extra_tags=None):
        self.level = int(level)
        self.message = message
        self.extra_tags = extra_tags

    def _prepare(self):
        """
        Prepare the message for serialization by forcing the ``message``
        and ``extra_tags`` to str in case they are lazy translations.
        """
        self.message = str(self.message)
        self.extra_tags = str(self.extra_tags) if self.extra_tags is not None else None

    def __eq__(self, other):
        if not isinstance(other, Message):
            return NotImplemented
        return self.level == other.level and self.message == other.message

    def __str__(self):
        return str(self.message)

    @property
    def tags(self):
        return ' '.join(tag for tag in [self.extra_tags, self.level_tag] if tag)

    @property
    def level_tag(self):
        return LEVEL_TAGS.get(self.level, '')


class BaseStorage:
    """
    This is the base backend for temporary message storage.

    This is not a complete class; to be a usable storage backend, it must be
    subclassed and the two methods ``_get`` and ``_store`` overridden.
    """

    def __init__(self, request, *args, **kwargs):
        self.request = request
        self._queued_messages = []
        self.used = False
        self.added_new = False
        super().__init__(*args, **kwargs)

    def __len__(self):
        return len(self._loaded_messages) + len(self._queued_messages)

    def __iter__(self):
        self.used = True
        if self._queued_messages:
            self._loaded_messages.extend(self._queued_messages)
            self._queued_messages = []
        return iter(self._loaded_messages)

    def __contains__(self, item):
        return item in self._loaded_messages or item in self._queued_messages

    def __repr__(self):
        return f'<{self.__class__.__qualname__}: request={self.request!r}>'

    @property
    def _loaded_messages(self):
        """
        Return a list of loaded messages, retrieving them first if they have
        not been loaded yet.
        """
        if not hasattr(self, '_loaded_data'):
            messages, all_retrieved = self._get()
            self._loaded_data = messages or []
        return self._loaded_data

    def _get(self, *args, **kwargs):
        """
        Retrieve a list of stored messages. Return a tuple of the messages
        and a flag indicating whether or not all the messages originally
        intended to be stored in this storage were, in fact, stored and
        retrieved; e.g., ``(messages, all_retrieved)``.

        **This method must be implemented by a subclass.**

        If it is possible to tell if the backend was not used (as opposed to
        just containing no messages) then ``None`` should be returned in
        place of ``messages``.
        """
        raise NotImplementedError('subclasses of BaseStorage must provide a _get() method')

    def _store(self, messages, response, *args, **kwargs):
        """
        Store a list of messages and return a list of any messages which could
        not be stored.

        One type of object must be able to be stored, ``Message``.

        **This method must be implemented by a subclass.**
        """
        raise NotImplementedError('subclasses of BaseStorage must provide a _store() method')

    def _prepare_messages(self, messages):
        """
        Prepare a list of messages for storage.
        """
        for message in messages:
            message._prepare()

    def update(self, response):
        """
        Store all unread messages.

        If the backend has yet to be iterated, store previously stored
        messages again. Otherwise, only store messages added after the last
        iteration.
        """
        self._prepare_messages(self._queued_messages)
        if self.used:
            return self._store(self._queued_messages, response)
        elif self.added_new:
            messages = self._loaded_messages + self._queued_messages
            return self._store(messages, response)

    def add(self, level, message, extra_tags=''):
        """
        Queue a message to be stored.

        The message is only queued if it contained something and its level is
        not less than the recording level (``self.level``).
        """
        if not message:
            return
        # Check that the message level is not less than the recording level.
        level = int(level)
        if level < self.level:
            return
        # Add the message.
        self.added_new = True
        message = Message(level, message, extra_tags=extra_tags)
        self._queued_messages.append(message)

    def _get_level(self):
        """
        Return the minimum recorded level.

        The default level is the ``MESSAGE_LEVEL`` setting. If this is not
        found, the ``INFO`` level is used.
        """
        if not hasattr(self, '_level'):
            self._level = getattr(settings, 'MESSAGE_LEVEL', constants.INFO)
        return self._level

    def _set_level(self, value=None):
        """
        Set a custom minimum recorded level.

        If set to ``None``, the default level will be used (see the
        ``_get_level`` method).
        """
        if value is None and hasattr(self, '_level'):
            del self._level
        else:
            self._level = int(value)

    level = property(_get_level, _set_level, _set_level)
5034514d48196735e139b82b84826740ff639771bfa0df95c6f7ad4865caeee0
import warnings

from django.contrib.postgres.fields import ArrayField
from django.db.models import Aggregate, BooleanField, JSONField, Value
from django.utils.deprecation import RemovedInDjango50Warning

from .mixins import OrderableAggMixin

__all__ = [
    'ArrayAgg', 'BitAnd', 'BitOr', 'BoolAnd', 'BoolOr', 'JSONBAgg', 'StringAgg',
]


# RemovedInDjango50Warning
NOT_PROVIDED = object()


class DeprecatedConvertValueMixin:
    def __init__(self, *expressions, default=NOT_PROVIDED, **extra):
        if default is NOT_PROVIDED:
            default = None
            self._default_provided = False
        else:
            self._default_provided = True
        super().__init__(*expressions, default=default, **extra)

    def convert_value(self, value, expression, connection):
        if value is None and not self._default_provided:
            warnings.warn(self.deprecation_msg, category=RemovedInDjango50Warning)
            return self.deprecation_value
        return value


class ArrayAgg(DeprecatedConvertValueMixin, OrderableAggMixin, Aggregate):
    function = 'ARRAY_AGG'
    template = '%(function)s(%(distinct)s%(expressions)s %(ordering)s)'
    allow_distinct = True

    # RemovedInDjango50Warning
    deprecation_value = property(lambda self: [])
    deprecation_msg = (
        'In Django 5.0, ArrayAgg() will return None instead of an empty list '
        'if there are no rows. Pass default=None to opt into the new behavior '
        'and silence this warning or default=Value([]) to keep the previous '
        'behavior.'
    )

    @property
    def output_field(self):
        return ArrayField(self.source_expressions[0].output_field)


class BitAnd(Aggregate):
    function = 'BIT_AND'


class BitOr(Aggregate):
    function = 'BIT_OR'


class BoolAnd(Aggregate):
    function = 'BOOL_AND'
    output_field = BooleanField()


class BoolOr(Aggregate):
    function = 'BOOL_OR'
    output_field = BooleanField()


class JSONBAgg(DeprecatedConvertValueMixin, OrderableAggMixin, Aggregate):
    function = 'JSONB_AGG'
    template = '%(function)s(%(distinct)s%(expressions)s %(ordering)s)'
    allow_distinct = True
    output_field = JSONField()

    # RemovedInDjango50Warning
    deprecation_value = '[]'
    deprecation_msg = (
        "In Django 5.0, JSONBAgg() will return None instead of an empty list "
        "if there are no rows. Pass default=None to opt into the new behavior "
        "and silence this warning or default=Value('[]') to keep the previous "
        "behavior."
    )


class StringAgg(DeprecatedConvertValueMixin, OrderableAggMixin, Aggregate):
    function = 'STRING_AGG'
    template = '%(function)s(%(distinct)s%(expressions)s %(ordering)s)'
    allow_distinct = True

    # RemovedInDjango50Warning
    deprecation_value = ''
    deprecation_msg = (
        "In Django 5.0, StringAgg() will return None instead of an empty "
        "string if there are no rows. Pass default=None to opt into the new "
        "behavior and silence this warning or default=Value('') to keep the "
        "previous behavior."
    )

    def __init__(self, expression, delimiter, **extra):
        delimiter_expr = Value(str(delimiter))
        super().__init__(expression, delimiter_expr, **extra)
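# Editor's note: a usage sketch for the default handling above, mirroring
# the deprecation messages; ``Book`` is a hypothetical model.
#
#     Book.objects.aggregate(names=ArrayAgg('name'))
#     # warns on empty querysets: returns [] now, None in Django 5.0
#     Book.objects.aggregate(names=ArrayAgg('name', default=Value([])))  # keeps []
#     Book.objects.aggregate(names=ArrayAgg('name', default=None))  # opts in, no warning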
7099caac0ab21e83e09988e2e2a5eef2b097bc8be7bc0e6f77c57f0ab8c6e5df
from django.db.models import Aggregate, FloatField, IntegerField

__all__ = [
    'CovarPop', 'Corr', 'RegrAvgX', 'RegrAvgY', 'RegrCount', 'RegrIntercept',
    'RegrR2', 'RegrSlope', 'RegrSXX', 'RegrSXY', 'RegrSYY', 'StatAggregate',
]


class StatAggregate(Aggregate):
    output_field = FloatField()

    def __init__(self, y, x, output_field=None, filter=None, default=None):
        if not x or not y:
            raise ValueError('Both y and x must be provided.')
        super().__init__(y, x, output_field=output_field, filter=filter, default=default)


class Corr(StatAggregate):
    function = 'CORR'


class CovarPop(StatAggregate):
    def __init__(self, y, x, sample=False, filter=None, default=None):
        self.function = 'COVAR_SAMP' if sample else 'COVAR_POP'
        super().__init__(y, x, filter=filter, default=default)


class RegrAvgX(StatAggregate):
    function = 'REGR_AVGX'


class RegrAvgY(StatAggregate):
    function = 'REGR_AVGY'


class RegrCount(StatAggregate):
    function = 'REGR_COUNT'
    output_field = IntegerField()
    empty_aggregate_value = 0


class RegrIntercept(StatAggregate):
    function = 'REGR_INTERCEPT'


class RegrR2(StatAggregate):
    function = 'REGR_R2'


class RegrSlope(StatAggregate):
    function = 'REGR_SLOPE'


class RegrSXX(StatAggregate):
    function = 'REGR_SXX'


class RegrSXY(StatAggregate):
    function = 'REGR_SXY'


class RegrSYY(StatAggregate):
    function = 'REGR_SYY'
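# Editor's note: a usage sketch; ``Measurement`` is a hypothetical model.
# Each aggregate takes the dependent column first (y), then the independent
# one (x), matching the StatAggregate signature above.
#
#     Measurement.objects.aggregate(
#         corr=Corr(y='height', x='weight'),
#         slope=RegrSlope(y='height', x='weight'),
#         n=RegrCount(y='height', x='weight'),
#     )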
a0fcfaaf242a27e903ca5da8b5336465b4bbb12f7f83467b023cf5a1d2ada0e7
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('sites', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Redirect',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('site', models.ForeignKey(
                    to='sites.Site',
                    on_delete=models.CASCADE,
                    verbose_name='site',
                )),
                ('old_path', models.CharField(
                    help_text=(
                        'This should be an absolute path, excluding the domain name. Example: “/events/search/”.'
                    ),
                    max_length=200, verbose_name='redirect from', db_index=True
                )),
                ('new_path', models.CharField(
                    help_text='This can be either an absolute path (as above) or a full URL starting with “http://”.',
                    max_length=200, verbose_name='redirect to', blank=True
                )),
            ],
            options={
                'ordering': ['old_path'],
                'unique_together': {('site', 'old_path')},
                'db_table': 'django_redirect',
                'verbose_name': 'redirect',
                'verbose_name_plural': 'redirects',
            },
            bases=(models.Model,),
        ),
    ]
560269dc9771b194e33448fb2d1124fa7df3016633c3c2089814d9f7e7dc18b0
import django.contrib.auth.models
from django.contrib.auth import validators
from django.db import migrations, models
from django.utils import timezone


class Migration(migrations.Migration):

    dependencies = [
        ('contenttypes', '__first__'),
    ]

    operations = [
        migrations.CreateModel(
            name='Permission',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=50, verbose_name='name')),
                ('content_type', models.ForeignKey(
                    to='contenttypes.ContentType',
                    on_delete=models.CASCADE,
                    verbose_name='content type',
                )),
                ('codename', models.CharField(max_length=100, verbose_name='codename')),
            ],
            options={
                'ordering': ['content_type__app_label', 'content_type__model', 'codename'],
                'unique_together': {('content_type', 'codename')},
                'verbose_name': 'permission',
                'verbose_name_plural': 'permissions',
            },
            managers=[
                ('objects', django.contrib.auth.models.PermissionManager()),
            ],
        ),
        migrations.CreateModel(
            name='Group',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(unique=True, max_length=80, verbose_name='name')),
                ('permissions', models.ManyToManyField(to='auth.Permission', verbose_name='permissions', blank=True)),
            ],
            options={
                'verbose_name': 'group',
                'verbose_name_plural': 'groups',
            },
            managers=[
                ('objects', django.contrib.auth.models.GroupManager()),
            ],
        ),
        migrations.CreateModel(
            name='User',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('password', models.CharField(max_length=128, verbose_name='password')),
                ('last_login', models.DateTimeField(default=timezone.now, verbose_name='last login')),
                ('is_superuser', models.BooleanField(
                    default=False,
                    help_text='Designates that this user has all permissions without explicitly assigning them.',
                    verbose_name='superuser status'
                )),
                ('username', models.CharField(
                    help_text='Required. 30 characters or fewer. Letters, digits and @/./+/-/_ only.',
                    unique=True, max_length=30, verbose_name='username',
                    validators=[validators.UnicodeUsernameValidator()],
                )),
                ('first_name', models.CharField(max_length=30, verbose_name='first name', blank=True)),
                ('last_name', models.CharField(max_length=30, verbose_name='last name', blank=True)),
                ('email', models.EmailField(max_length=75, verbose_name='email address', blank=True)),
                ('is_staff', models.BooleanField(
                    default=False, help_text='Designates whether the user can log into this admin site.',
                    verbose_name='staff status'
                )),
                ('is_active', models.BooleanField(
                    default=True, verbose_name='active', help_text=(
                        'Designates whether this user should be treated as active. Unselect this instead of deleting '
                        'accounts.'
                    )
                )),
                ('date_joined', models.DateTimeField(default=timezone.now, verbose_name='date joined')),
                ('groups', models.ManyToManyField(
                    to='auth.Group', verbose_name='groups', blank=True, related_name='user_set',
                    related_query_name='user', help_text=(
                        'The groups this user belongs to. A user will get all permissions granted to each of their '
                        'groups.'
                    )
                )),
                ('user_permissions', models.ManyToManyField(
                    to='auth.Permission', verbose_name='user permissions', blank=True,
                    help_text='Specific permissions for this user.', related_name='user_set',
                    related_query_name='user')
                 ),
            ],
            options={
                'swappable': 'AUTH_USER_MODEL',
                'verbose_name': 'user',
                'verbose_name_plural': 'users',
            },
            managers=[
                ('objects', django.contrib.auth.models.UserManager()),
            ],
        ),
    ]
b02048e8768cfe70a839ff863fda10e8b3bcdd43f8dbcfaa7b5d1e379543f11a
import django.contrib.admin.models
from django.conf import settings
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('contenttypes', '__first__'),
    ]

    operations = [
        migrations.CreateModel(
            name='LogEntry',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('action_time', models.DateTimeField(auto_now=True, verbose_name='action time')),
                ('object_id', models.TextField(null=True, verbose_name='object id', blank=True)),
                ('object_repr', models.CharField(max_length=200, verbose_name='object repr')),
                ('action_flag', models.PositiveSmallIntegerField(verbose_name='action flag')),
                ('change_message', models.TextField(verbose_name='change message', blank=True)),
                ('content_type', models.ForeignKey(
                    on_delete=models.SET_NULL,
                    blank=True, null=True,
                    to='contenttypes.ContentType',
                    verbose_name='content type',
                )),
                ('user', models.ForeignKey(
                    to=settings.AUTH_USER_MODEL,
                    on_delete=models.CASCADE,
                    verbose_name='user',
                )),
            ],
            options={
                'ordering': ['-action_time'],
                'db_table': 'django_admin_log',
                'verbose_name': 'log entry',
                'verbose_name_plural': 'log entries',
            },
            bases=(models.Model,),
            managers=[
                ('objects', django.contrib.admin.models.LogEntryManager()),
            ],
        ),
    ]
f1333ed9517378f9d6f80c9ca70290c718b464b9e92207d70e0cec64f11fccf6
from datetime import datetime, timedelta

from django import forms
from django.conf import settings
from django.contrib import messages
from django.contrib.admin import FieldListFilter
from django.contrib.admin.exceptions import (
    DisallowedModelAdminLookup, DisallowedModelAdminToField,
)
from django.contrib.admin.options import (
    IS_POPUP_VAR, TO_FIELD_VAR, IncorrectLookupParameters,
)
from django.contrib.admin.utils import (
    get_fields_from_path, lookup_spawns_duplicates, prepare_lookup_value,
    quote,
)
from django.core.exceptions import (
    FieldDoesNotExist, ImproperlyConfigured, SuspiciousOperation,
)
from django.core.paginator import InvalidPage
from django.db.models import Exists, F, Field, ManyToOneRel, OrderBy, OuterRef
from django.db.models.expressions import Combinable
from django.urls import reverse
from django.utils.http import urlencode
from django.utils.timezone import make_aware
from django.utils.translation import gettext

# Changelist settings
ALL_VAR = 'all'
ORDER_VAR = 'o'
PAGE_VAR = 'p'
SEARCH_VAR = 'q'
ERROR_FLAG = 'e'

IGNORED_PARAMS = (ALL_VAR, ORDER_VAR, SEARCH_VAR, IS_POPUP_VAR, TO_FIELD_VAR)


class ChangeListSearchForm(forms.Form):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Populate "fields" dynamically because SEARCH_VAR is a variable:
        self.fields = {
            SEARCH_VAR: forms.CharField(required=False, strip=False),
        }


class ChangeList:
    search_form_class = ChangeListSearchForm

    def __init__(self, request, model, list_display, list_display_links,
                 list_filter, date_hierarchy, search_fields, list_select_related,
                 list_per_page, list_max_show_all, list_editable, model_admin, sortable_by,
                 search_help_text):
        self.model = model
        self.opts = model._meta
        self.lookup_opts = self.opts
        self.root_queryset = model_admin.get_queryset(request)
        self.list_display = list_display
        self.list_display_links = list_display_links
        self.list_filter = list_filter
        self.has_filters = None
        self.has_active_filters = None
        self.clear_all_filters_qs = None
        self.date_hierarchy = date_hierarchy
        self.search_fields = search_fields
        self.list_select_related = list_select_related
        self.list_per_page = list_per_page
        self.list_max_show_all = list_max_show_all
        self.model_admin = model_admin
        self.preserved_filters = model_admin.get_preserved_filters(request)
        self.sortable_by = sortable_by
        self.search_help_text = search_help_text

        # Get search parameters from the query string.
        _search_form = self.search_form_class(request.GET)
        if not _search_form.is_valid():
            for error in _search_form.errors.values():
                messages.error(request, ', '.join(error))
        self.query = _search_form.cleaned_data.get(SEARCH_VAR) or ''
        try:
            self.page_num = int(request.GET.get(PAGE_VAR, 1))
        except ValueError:
            self.page_num = 1
        self.show_all = ALL_VAR in request.GET
        self.is_popup = IS_POPUP_VAR in request.GET
        to_field = request.GET.get(TO_FIELD_VAR)
        if to_field and not model_admin.to_field_allowed(request, to_field):
            raise DisallowedModelAdminToField("The field %s cannot be referenced." % to_field)
        self.to_field = to_field
        self.params = dict(request.GET.items())
        if PAGE_VAR in self.params:
            del self.params[PAGE_VAR]
        if ERROR_FLAG in self.params:
            del self.params[ERROR_FLAG]

        if self.is_popup:
            self.list_editable = ()
        else:
            self.list_editable = list_editable
        self.queryset = self.get_queryset(request)
        self.get_results(request)
        if self.is_popup:
            title = gettext('Select %s')
        elif self.model_admin.has_change_permission(request):
            title = gettext('Select %s to change')
        else:
            title = gettext('Select %s to view')
        self.title = title % self.opts.verbose_name
        self.pk_attname = self.lookup_opts.pk.attname

    def __repr__(self):
        return '<%s: model=%s model_admin=%s>' % (
            self.__class__.__qualname__,
            self.model.__qualname__,
            self.model_admin.__class__.__qualname__,
        )

    def get_filters_params(self, params=None):
        """
        Return all params except IGNORED_PARAMS.
        """
        params = params or self.params
        lookup_params = params.copy()  # a dictionary of the query string
        # Remove all the parameters that are globally and systematically
        # ignored.
        for ignored in IGNORED_PARAMS:
            if ignored in lookup_params:
                del lookup_params[ignored]
        return lookup_params

    def get_filters(self, request):
        lookup_params = self.get_filters_params()
        may_have_duplicates = False
        has_active_filters = False

        for key, value in lookup_params.items():
            if not self.model_admin.lookup_allowed(key, value):
                raise DisallowedModelAdminLookup("Filtering by %s not allowed" % key)

        filter_specs = []
        for list_filter in self.list_filter:
            lookup_params_count = len(lookup_params)
            if callable(list_filter):
                # This is simply a custom list filter class.
                spec = list_filter(request, lookup_params, self.model, self.model_admin)
            else:
                field_path = None
                if isinstance(list_filter, (tuple, list)):
                    # This is a custom FieldListFilter class for a given field.
                    field, field_list_filter_class = list_filter
                else:
                    # This is simply a field name, so use the default
                    # FieldListFilter class that has been registered for the
                    # type of the given field.
                    field, field_list_filter_class = list_filter, FieldListFilter.create
                if not isinstance(field, Field):
                    field_path = field
                    field = get_fields_from_path(self.model, field_path)[-1]

                spec = field_list_filter_class(
                    field, request, lookup_params,
                    self.model, self.model_admin, field_path=field_path,
                )
                # field_list_filter_class removes any lookup_params it
                # processes. If that happened, check if duplicates should be
                # removed.
                if lookup_params_count > len(lookup_params):
                    may_have_duplicates |= lookup_spawns_duplicates(
                        self.lookup_opts, field_path,
                    )
            if spec and spec.has_output():
                filter_specs.append(spec)
                if lookup_params_count > len(lookup_params):
                    has_active_filters = True

        if self.date_hierarchy:
            # Create bounded lookup parameters so that the query is more
            # efficient.
            year = lookup_params.pop('%s__year' % self.date_hierarchy, None)
            if year is not None:
                month = lookup_params.pop('%s__month' % self.date_hierarchy, None)
                day = lookup_params.pop('%s__day' % self.date_hierarchy, None)
                try:
                    from_date = datetime(
                        int(year),
                        int(month if month is not None else 1),
                        int(day if day is not None else 1),
                    )
                except ValueError as e:
                    raise IncorrectLookupParameters(e) from e
                if day:
                    to_date = from_date + timedelta(days=1)
                elif month:
                    # In this branch, from_date will always be the first of a
                    # month, so advancing 32 days gives the next month.
                    to_date = (from_date + timedelta(days=32)).replace(day=1)
                else:
                    to_date = from_date.replace(year=from_date.year + 1)
                if settings.USE_TZ:
                    from_date = make_aware(from_date)
                    to_date = make_aware(to_date)
                lookup_params.update({
                    '%s__gte' % self.date_hierarchy: from_date,
                    '%s__lt' % self.date_hierarchy: to_date,
                })

        # At this point, all the parameters used by the various ListFilters
        # have been removed from lookup_params, which now only contains other
        # parameters passed via the query string. We now loop through the
        # remaining parameters both to ensure that all the parameters are valid
        # fields and to determine if at least one of them spawns duplicates. If
        # the lookup parameters aren't real fields, then bail out.
        try:
            for key, value in lookup_params.items():
                lookup_params[key] = prepare_lookup_value(key, value)
                may_have_duplicates |= lookup_spawns_duplicates(self.lookup_opts, key)
            return (
                filter_specs, bool(filter_specs), lookup_params,
                may_have_duplicates, has_active_filters,
            )
        except FieldDoesNotExist as e:
            raise IncorrectLookupParameters(e) from e

    def get_query_string(self, new_params=None, remove=None):
        if new_params is None:
            new_params = {}
        if remove is None:
            remove = []
        p = self.params.copy()
        for r in remove:
            for k in list(p):
                if k.startswith(r):
                    del p[k]
        for k, v in new_params.items():
            if v is None:
                if k in p:
                    del p[k]
            else:
                p[k] = v
        return '?%s' % urlencode(sorted(p.items()))

    def get_results(self, request):
        paginator = self.model_admin.get_paginator(request, self.queryset, self.list_per_page)
        # Get the number of objects, with admin filters applied.
        result_count = paginator.count

        # Get the total number of objects, with no admin filters applied.
        if self.model_admin.show_full_result_count:
            full_result_count = self.root_queryset.count()
        else:
            full_result_count = None
        can_show_all = result_count <= self.list_max_show_all
        multi_page = result_count > self.list_per_page

        # Get the list of objects to display on this page.
        if (self.show_all and can_show_all) or not multi_page:
            result_list = self.queryset._clone()
        else:
            try:
                result_list = paginator.page(self.page_num).object_list
            except InvalidPage:
                raise IncorrectLookupParameters

        self.result_count = result_count
        self.show_full_result_count = self.model_admin.show_full_result_count
        # Admin actions are shown if there is at least one entry
        # or if entries are not counted because show_full_result_count is disabled
        self.show_admin_actions = not self.show_full_result_count or bool(full_result_count)
        self.full_result_count = full_result_count
        self.result_list = result_list
        self.can_show_all = can_show_all
        self.multi_page = multi_page
        self.paginator = paginator

    def _get_default_ordering(self):
        ordering = []
        if self.model_admin.ordering:
            ordering = self.model_admin.ordering
        elif self.lookup_opts.ordering:
            ordering = self.lookup_opts.ordering
        return ordering

    def get_ordering_field(self, field_name):
        """
        Return the proper model field name corresponding to the given
        field_name to use for ordering. field_name may either be the name of a
        proper model field or the name of a method (on the admin or model) or a
        callable with the 'admin_order_field' attribute. Return None if no
        proper model field name can be matched.
        """
        try:
            field = self.lookup_opts.get_field(field_name)
            return field.name
        except FieldDoesNotExist:
            # See whether field_name is a name of a non-field
            # that allows sorting.
            if callable(field_name):
                attr = field_name
            elif hasattr(self.model_admin, field_name):
                attr = getattr(self.model_admin, field_name)
            else:
                attr = getattr(self.model, field_name)
            if isinstance(attr, property) and hasattr(attr, 'fget'):
                attr = attr.fget
            return getattr(attr, 'admin_order_field', None)

    def get_ordering(self, request, queryset):
        """
        Return the list of ordering fields for the change list.
        First check the get_ordering() method in model admin, then check
        the object's default ordering. Then, any manually-specified ordering
        from the query string overrides anything. Finally, a deterministic
        order is guaranteed by calling _get_deterministic_ordering() with the
        constructed ordering.
        """
        params = self.params
        ordering = list(self.model_admin.get_ordering(request) or self._get_default_ordering())
        if ORDER_VAR in params:
            # Clear ordering and used params
            ordering = []
            order_params = params[ORDER_VAR].split('.')
            for p in order_params:
                try:
                    none, pfx, idx = p.rpartition('-')
                    field_name = self.list_display[int(idx)]
                    order_field = self.get_ordering_field(field_name)
                    if not order_field:
                        continue  # No 'admin_order_field', skip it
                    if isinstance(order_field, OrderBy):
                        if pfx == '-':
                            order_field = order_field.copy()
                            order_field.reverse_ordering()
                        ordering.append(order_field)
                    elif hasattr(order_field, 'resolve_expression'):
                        # order_field is an expression.
                        ordering.append(order_field.desc() if pfx == '-' else order_field.asc())
                    # reverse order if order_field has already "-" as prefix
                    elif order_field.startswith('-') and pfx == '-':
                        ordering.append(order_field[1:])
                    else:
                        ordering.append(pfx + order_field)
                except (IndexError, ValueError):
                    continue  # Invalid ordering specified, skip it.

        # Add the given query's ordering fields, if any.
        ordering.extend(queryset.query.order_by)

        return self._get_deterministic_ordering(ordering)

    def _get_deterministic_ordering(self, ordering):
        """
        Ensure a deterministic order across all database backends. Search for a
        single field or unique together set of fields providing total
        ordering. If these are missing, augment the ordering with a descending
        primary key.
        """
        ordering = list(ordering)
        ordering_fields = set()
        total_ordering_fields = {'pk'} | {
            field.attname for field in self.lookup_opts.fields
            if field.unique and not field.null
        }
        for part in ordering:
            # Search for single field providing a total ordering.
            field_name = None
            if isinstance(part, str):
                field_name = part.lstrip('-')
            elif isinstance(part, F):
                field_name = part.name
            elif isinstance(part, OrderBy) and isinstance(part.expression, F):
                field_name = part.expression.name
            if field_name:
                # Normalize attname references by using get_field().
                try:
                    field = self.lookup_opts.get_field(field_name)
                except FieldDoesNotExist:
                    # Could be "?" for random ordering or a related field
                    # lookup. Skip this part of introspection for now.
                    continue
                # Ordering by a related field name orders by the referenced
                # model's ordering. Skip this part of introspection for now.
                if field.remote_field and field_name == field.name:
                    continue
                if field.attname in total_ordering_fields:
                    break
                ordering_fields.add(field.attname)
        else:
            # No single total ordering field, try unique_together and total
            # unique constraints.
            constraint_field_names = (
                *self.lookup_opts.unique_together,
                *(
                    constraint.fields
                    for constraint in self.lookup_opts.total_unique_constraints
                ),
            )
            for field_names in constraint_field_names:
                # Normalize attname references by using get_field().
                fields = [self.lookup_opts.get_field(field_name) for field_name in field_names]
                # Composite unique constraints containing a nullable column
                # cannot ensure total ordering.
                if any(field.null for field in fields):
                    continue
                if ordering_fields.issuperset(field.attname for field in fields):
                    break
            else:
                # If no set of unique fields is present in the ordering, rely
                # on the primary key to provide total ordering.
                ordering.append('-pk')
        return ordering

    def get_ordering_field_columns(self):
        """
        Return a dictionary of ordering field column numbers and asc/desc.
        """
        # We must cope with more than one column having the same underlying
        # sort field, so we base things on column numbers.
        ordering = self._get_default_ordering()
        ordering_fields = {}
        if ORDER_VAR not in self.params:
            # for ordering specified on ModelAdmin or model Meta, we don't know
            # the right column numbers absolutely, because there might be more
            # than one column associated with that ordering, so we guess.
            for field in ordering:
                if isinstance(field, (Combinable, OrderBy)):
                    if not isinstance(field, OrderBy):
                        field = field.asc()
                    if isinstance(field.expression, F):
                        order_type = 'desc' if field.descending else 'asc'
                        field = field.expression.name
                    else:
                        continue
                elif field.startswith('-'):
                    field = field[1:]
                    order_type = 'desc'
                else:
                    order_type = 'asc'
                for index, attr in enumerate(self.list_display):
                    if self.get_ordering_field(attr) == field:
                        ordering_fields[index] = order_type
                        break
        else:
            for p in self.params[ORDER_VAR].split('.'):
                none, pfx, idx = p.rpartition('-')
                try:
                    idx = int(idx)
                except ValueError:
                    continue  # skip it
                ordering_fields[idx] = 'desc' if pfx == '-' else 'asc'
        return ordering_fields

    def get_queryset(self, request):
        # First, we collect all the declared list filters.
        (
            self.filter_specs,
            self.has_filters,
            remaining_lookup_params,
            filters_may_have_duplicates,
            self.has_active_filters,
        ) = self.get_filters(request)
        # Then, we let every list filter modify the queryset to its liking.
        qs = self.root_queryset
        for filter_spec in self.filter_specs:
            new_qs = filter_spec.queryset(request, qs)
            if new_qs is not None:
                qs = new_qs

        try:
            # Finally, we apply the remaining lookup parameters from the query
            # string (i.e. those that haven't already been processed by the
            # filters).
            qs = qs.filter(**remaining_lookup_params)
        except (SuspiciousOperation, ImproperlyConfigured):
            # Allow certain types of errors to be re-raised as-is so that the
            # caller can treat them in a special way.
            raise
        except Exception as e:
            # Every other error is caught with a naked except, because we don't
            # have any other way of validating lookup parameters. They might be
            # invalid if the keyword arguments are incorrect, or if the values
            # are not in the correct type, so we might get FieldError,
            # ValueError, ValidationError, or ?.
            raise IncorrectLookupParameters(e)

        # Apply search results
        qs, search_may_have_duplicates = self.model_admin.get_search_results(
            request, qs, self.query,
        )

        # Set query string for clearing all filters.
        self.clear_all_filters_qs = self.get_query_string(
            new_params=remaining_lookup_params,
            remove=self.get_filters_params(),
        )
        # Remove duplicates from results, if necessary
        if filters_may_have_duplicates | search_may_have_duplicates:
            qs = qs.filter(pk=OuterRef('pk'))
            qs = self.root_queryset.filter(Exists(qs))

        # Set ordering.
        ordering = self.get_ordering(request, qs)
        qs = qs.order_by(*ordering)

        if not qs.query.select_related:
            qs = self.apply_select_related(qs)

        return qs

    def apply_select_related(self, qs):
        if self.list_select_related is True:
            return qs.select_related()

        if self.list_select_related is False:
            if self.has_related_field_in_list_display():
                return qs.select_related()

        if self.list_select_related:
            return qs.select_related(*self.list_select_related)
        return qs

    def has_related_field_in_list_display(self):
        for field_name in self.list_display:
            try:
                field = self.lookup_opts.get_field(field_name)
            except FieldDoesNotExist:
                pass
            else:
                if isinstance(field.remote_field, ManyToOneRel):
                    # <FK>_id field names don't require a join.
                    if field_name != field.get_attname():
                        return True
        return False

    def url_for_result(self, result):
        pk = getattr(result, self.pk_attname)
        return reverse('admin:%s_%s_change' % (self.opts.app_label,
                                               self.opts.model_name),
                       args=(quote(pk),),
                       current_app=self.model_admin.admin_site.name)
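# Standalone sketch (not part of the module above): how ORDER_VAR values are
# decoded. A query string such as ?o=2.-1 means "sort by column 2 ascending,
# then column 1 descending"; rpartition('-') splits each segment into an
# optional '-' prefix and a list_display column index, mirroring
# ChangeList.get_ordering() above.
def _parse_order_var(value):
    parsed = []
    for p in value.split('.'):
        _, pfx, idx = p.rpartition('-')
        parsed.append((int(idx), 'desc' if pfx == '-' else 'asc'))
    return parsed


assert _parse_order_var('2.-1') == [(2, 'asc'), (1, 'desc')]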
dfef8bec39532675796ccb0e1f63056d8e916e8ac0abe02266e2e5af08ffcc76
import datetime

from django.contrib.admin.templatetags.admin_urls import add_preserved_filters
from django.contrib.admin.utils import (
    display_for_field, display_for_value, get_fields_from_path,
    label_for_field, lookup_field,
)
from django.contrib.admin.views.main import (
    ALL_VAR, IS_POPUP_VAR, ORDER_VAR, PAGE_VAR, SEARCH_VAR,
)
from django.core.exceptions import ObjectDoesNotExist
from django.db import models
from django.template import Library
from django.template.loader import get_template
from django.templatetags.static import static
from django.urls import NoReverseMatch
from django.utils import formats, timezone
from django.utils.html import format_html
from django.utils.safestring import mark_safe
from django.utils.text import capfirst
from django.utils.translation import gettext as _

from .base import InclusionAdminNode

register = Library()


@register.simple_tag
def paginator_number(cl, i):
    """
    Generate an individual page index link in a paginated list.
    """
    if i == cl.paginator.ELLIPSIS:
        return format_html('{} ', cl.paginator.ELLIPSIS)
    elif i == cl.page_num:
        return format_html('<span class="this-page">{}</span> ', i)
    else:
        return format_html(
            '<a href="{}"{}>{}</a> ',
            cl.get_query_string({PAGE_VAR: i}),
            mark_safe(' class="end"' if i == cl.paginator.num_pages else ''),
            i,
        )


def pagination(cl):
    """
    Generate the series of links to the pages in a paginated list.
    """
    pagination_required = (not cl.show_all or not cl.can_show_all) and cl.multi_page
    page_range = cl.paginator.get_elided_page_range(cl.page_num) if pagination_required else []
    need_show_all_link = cl.can_show_all and not cl.show_all and cl.multi_page
    return {
        'cl': cl,
        'pagination_required': pagination_required,
        'show_all_url': need_show_all_link and cl.get_query_string({ALL_VAR: ''}),
        'page_range': page_range,
        'ALL_VAR': ALL_VAR,
        '1': 1,
    }


@register.tag(name='pagination')
def pagination_tag(parser, token):
    return InclusionAdminNode(
        parser, token,
        func=pagination,
        template_name='pagination.html',
        takes_context=False,
    )


def result_headers(cl):
    """
    Generate the list column headers.
    """
    ordering_field_columns = cl.get_ordering_field_columns()
    for i, field_name in enumerate(cl.list_display):
        text, attr = label_for_field(
            field_name, cl.model,
            model_admin=cl.model_admin,
            return_attr=True
        )
        is_field_sortable = cl.sortable_by is None or field_name in cl.sortable_by
        if attr:
            field_name = _coerce_field_name(field_name, i)
            # Potentially not sortable

            # if the field is the action checkbox: no sorting and special class
            if field_name == 'action_checkbox':
                yield {
                    "text": text,
                    "class_attrib": mark_safe(' class="action-checkbox-column"'),
                    "sortable": False,
                }
                continue

            admin_order_field = getattr(attr, "admin_order_field", None)
            # Set ordering for attr that is a property, if defined.
            if isinstance(attr, property) and hasattr(attr, 'fget'):
                admin_order_field = getattr(attr.fget, 'admin_order_field', None)
            if not admin_order_field:
                is_field_sortable = False

        if not is_field_sortable:
            # Not sortable
            yield {
                'text': text,
                'class_attrib': format_html(' class="column-{}"', field_name),
                'sortable': False,
            }
            continue

        # OK, it is sortable if we got this far
        th_classes = ['sortable', 'column-{}'.format(field_name)]
        order_type = ''
        new_order_type = 'asc'
        sort_priority = 0
        # Is it currently being sorted on?
        is_sorted = i in ordering_field_columns
        if is_sorted:
            order_type = ordering_field_columns.get(i).lower()
            sort_priority = list(ordering_field_columns).index(i) + 1
            th_classes.append('sorted %sending' % order_type)
            new_order_type = {'asc': 'desc', 'desc': 'asc'}[order_type]

        # build new ordering param
        o_list_primary = []  # URL for making this field the primary sort
        o_list_remove = []  # URL for removing this field from sort
        o_list_toggle = []  # URL for toggling order type for this field

        def make_qs_param(t, n):
            return ('-' if t == 'desc' else '') + str(n)

        for j, ot in ordering_field_columns.items():
            if j == i:  # Same column
                param = make_qs_param(new_order_type, j)
                # We want clicking on this header to bring the ordering to the
                # front
                o_list_primary.insert(0, param)
                o_list_toggle.append(param)
                # o_list_remove - omit
            else:
                param = make_qs_param(ot, j)
                o_list_primary.append(param)
                o_list_toggle.append(param)
                o_list_remove.append(param)

        if i not in ordering_field_columns:
            o_list_primary.insert(0, make_qs_param(new_order_type, i))

        yield {
            "text": text,
            "sortable": True,
            "sorted": is_sorted,
            "ascending": order_type == "asc",
            "sort_priority": sort_priority,
            "url_primary": cl.get_query_string({ORDER_VAR: '.'.join(o_list_primary)}),
            "url_remove": cl.get_query_string({ORDER_VAR: '.'.join(o_list_remove)}),
            "url_toggle": cl.get_query_string({ORDER_VAR: '.'.join(o_list_toggle)}),
            "class_attrib": format_html(' class="{}"', ' '.join(th_classes)) if th_classes else '',
        }


def _boolean_icon(field_val):
    icon_url = static('admin/img/icon-%s.svg' % {True: 'yes', False: 'no', None: 'unknown'}[field_val])
    return format_html('<img src="{}" alt="{}">', icon_url, field_val)


def _coerce_field_name(field_name, field_index):
    """
    Coerce a field_name (which may be a callable) to a string.
    """
    if callable(field_name):
        if field_name.__name__ == '<lambda>':
            return 'lambda' + str(field_index)
        else:
            return field_name.__name__
    return field_name


def items_for_result(cl, result, form):
    """
    Generate the actual list of data.
    """

    def link_in_col(is_first, field_name, cl):
        if cl.list_display_links is None:
            return False
        if is_first and not cl.list_display_links:
            return True
        return field_name in cl.list_display_links

    first = True
    pk = cl.lookup_opts.pk.attname
    for field_index, field_name in enumerate(cl.list_display):
        empty_value_display = cl.model_admin.get_empty_value_display()
        row_classes = ['field-%s' % _coerce_field_name(field_name, field_index)]
        try:
            f, attr, value = lookup_field(field_name, result, cl.model_admin)
        except ObjectDoesNotExist:
            result_repr = empty_value_display
        else:
            empty_value_display = getattr(attr, 'empty_value_display', empty_value_display)
            if f is None or f.auto_created:
                if field_name == 'action_checkbox':
                    row_classes = ['action-checkbox']
                boolean = getattr(attr, 'boolean', False)
                result_repr = display_for_value(value, empty_value_display, boolean)
                if isinstance(value, (datetime.date, datetime.time)):
                    row_classes.append('nowrap')
            else:
                if isinstance(f.remote_field, models.ManyToOneRel):
                    field_val = getattr(result, f.name)
                    if field_val is None:
                        result_repr = empty_value_display
                    else:
                        result_repr = field_val
                else:
                    result_repr = display_for_field(value, f, empty_value_display)
                if isinstance(f, (models.DateField, models.TimeField, models.ForeignKey)):
                    row_classes.append('nowrap')
        row_class = mark_safe(' class="%s"' % ' '.join(row_classes))
        # If list_display_links not defined, add the link tag to the first field
        if link_in_col(first, field_name, cl):
            table_tag = 'th' if first else 'td'
            first = False

            # Display link to the result's change_view if the url exists, else
            # display just the result's representation.
            try:
                url = cl.url_for_result(result)
            except NoReverseMatch:
                link_or_text = result_repr
            else:
                url = add_preserved_filters({'preserved_filters': cl.preserved_filters, 'opts': cl.opts}, url)
                # Convert the pk to something that can be used in JavaScript.
                # Problem cases are non-ASCII strings.
                if cl.to_field:
                    attr = str(cl.to_field)
                else:
                    attr = pk
                value = result.serializable_value(attr)
                link_or_text = format_html(
                    '<a href="{}"{}>{}</a>',
                    url,
                    format_html(' data-popup-opener="{}"', value) if cl.is_popup else '',
                    result_repr)

            yield format_html('<{}{}>{}</{}>', table_tag, row_class, link_or_text, table_tag)
        else:
            # By default the fields come from ModelAdmin.list_editable, but if we pull
            # the fields out of the form instead of list_editable custom admins
            # can provide fields on a per request basis
            if (form and field_name in form.fields and not (
                    field_name == cl.model._meta.pk.name and
                    form[cl.model._meta.pk.name].is_hidden)):
                bf = form[field_name]
                result_repr = mark_safe(str(bf.errors) + str(bf))
            yield format_html('<td{}>{}</td>', row_class, result_repr)
    if form and not form[cl.model._meta.pk.name].is_hidden:
        yield format_html('<td>{}</td>', form[cl.model._meta.pk.name])


class ResultList(list):
    """
    Wrapper class used to return items in a list_editable changelist, annotated
    with the form object for error reporting purposes. Needed to maintain
    backwards compatibility with existing admin templates.
    """
    def __init__(self, form, *items):
        self.form = form
        super().__init__(*items)


def results(cl):
    if cl.formset:
        for res, form in zip(cl.result_list, cl.formset.forms):
            yield ResultList(form, items_for_result(cl, res, form))
    else:
        for res in cl.result_list:
            yield ResultList(None, items_for_result(cl, res, None))


def result_hidden_fields(cl):
    if cl.formset:
        for res, form in zip(cl.result_list, cl.formset.forms):
            if form[cl.model._meta.pk.name].is_hidden:
                yield mark_safe(form[cl.model._meta.pk.name])


def result_list(cl):
    """
    Display the headers and data list together.
    """
    headers = list(result_headers(cl))
    num_sorted_fields = 0
    for h in headers:
        if h['sortable'] and h['sorted']:
            num_sorted_fields += 1
    return {
        'cl': cl,
        'result_hidden_fields': list(result_hidden_fields(cl)),
        'result_headers': headers,
        'num_sorted_fields': num_sorted_fields,
        'results': list(results(cl)),
    }


@register.tag(name='result_list')
def result_list_tag(parser, token):
    return InclusionAdminNode(
        parser, token,
        func=result_list,
        template_name='change_list_results.html',
        takes_context=False,
    )


def date_hierarchy(cl):
    """
    Display the date hierarchy for date drill-down functionality.
    """
    if cl.date_hierarchy:
        field_name = cl.date_hierarchy
        field = get_fields_from_path(cl.model, field_name)[-1]
        if isinstance(field, models.DateTimeField):
            dates_or_datetimes = 'datetimes'
            qs_kwargs = {'is_dst': True}
        else:
            dates_or_datetimes = 'dates'
            qs_kwargs = {}
        year_field = '%s__year' % field_name
        month_field = '%s__month' % field_name
        day_field = '%s__day' % field_name
        field_generic = '%s__' % field_name
        year_lookup = cl.params.get(year_field)
        month_lookup = cl.params.get(month_field)
        day_lookup = cl.params.get(day_field)

        def link(filters):
            return cl.get_query_string(filters, [field_generic])

        if not (year_lookup or month_lookup or day_lookup):
            # select appropriate start level
            date_range = cl.queryset.aggregate(first=models.Min(field_name),
                                               last=models.Max(field_name))
            if date_range['first'] and date_range['last']:
                if dates_or_datetimes == 'datetimes':
                    date_range = {
                        k: timezone.localtime(v) if timezone.is_aware(v) else v
                        for k, v in date_range.items()
                    }
                if date_range['first'].year == date_range['last'].year:
                    year_lookup = date_range['first'].year
                    if date_range['first'].month == date_range['last'].month:
                        month_lookup = date_range['first'].month

        if year_lookup and month_lookup and day_lookup:
            day = datetime.date(int(year_lookup), int(month_lookup), int(day_lookup))
            return {
                'show': True,
                'back': {
                    'link': link({year_field: year_lookup, month_field: month_lookup}),
                    'title': capfirst(formats.date_format(day, 'YEAR_MONTH_FORMAT'))
                },
                'choices': [{'title': capfirst(formats.date_format(day, 'MONTH_DAY_FORMAT'))}]
            }
        elif year_lookup and month_lookup:
            days = getattr(cl.queryset, dates_or_datetimes)(field_name, 'day', **qs_kwargs)
            return {
                'show': True,
                'back': {
                    'link': link({year_field: year_lookup}),
                    'title': str(year_lookup)
                },
                'choices': [{
                    'link': link({year_field: year_lookup, month_field: month_lookup, day_field: day.day}),
                    'title': capfirst(formats.date_format(day, 'MONTH_DAY_FORMAT'))
                } for day in days]
            }
        elif year_lookup:
            months = getattr(cl.queryset, dates_or_datetimes)(field_name, 'month', **qs_kwargs)
            return {
                'show': True,
                'back': {
                    'link': link({}),
                    'title': _('All dates')
                },
                'choices': [{
                    'link': link({year_field: year_lookup, month_field: month.month}),
                    'title': capfirst(formats.date_format(month, 'YEAR_MONTH_FORMAT'))
                } for month in months]
            }
        else:
            years = getattr(cl.queryset, dates_or_datetimes)(field_name, 'year', **qs_kwargs)
            return {
                'show': True,
                'back': None,
                'choices': [{
                    'link': link({year_field: str(year.year)}),
                    'title': str(year.year),
                } for year in years]
            }


@register.tag(name='date_hierarchy')
def date_hierarchy_tag(parser, token):
    return InclusionAdminNode(
        parser, token,
        func=date_hierarchy,
        template_name='date_hierarchy.html',
        takes_context=False,
    )


def search_form(cl):
    """
    Display a search form for searching the list.
    """
    return {
        'cl': cl,
        'show_result_count': cl.result_count != cl.full_result_count,
        'search_var': SEARCH_VAR,
        'is_popup_var': IS_POPUP_VAR,
    }


@register.tag(name='search_form')
def search_form_tag(parser, token):
    return InclusionAdminNode(parser, token, func=search_form, template_name='search_form.html', takes_context=False)


@register.simple_tag
def admin_list_filter(cl, spec):
    tpl = get_template(spec.template)
    return tpl.render({
        'title': spec.title,
        'choices': list(spec.choices(cl)),
        'spec': spec,
    })


def admin_actions(context):
    """
    Track the number of times the action field has been rendered on the page,
    so we know which value to use.
    """
    context['action_index'] = context.get('action_index', -1) + 1
    return context


@register.tag(name='admin_actions')
def admin_actions_tag(parser, token):
    return InclusionAdminNode(parser, token, func=admin_actions, template_name='actions.html')


@register.tag(name='change_list_object_tools')
def change_list_object_tools_tag(parser, token):
    """Display the row of change list object tools."""
    return InclusionAdminNode(
        parser, token,
        func=lambda context: context,
        template_name='change_list_object_tools.html',
    )
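# Standalone sketch (mirrors the loop in result_headers above): building the
# three ORDER_VAR candidates for a header when column 1 is currently the
# primary, descending sort and the user interacts with column 1 itself.
def make_qs_param(t, n):
    return ('-' if t == 'desc' else '') + str(n)


ordering_field_columns = {1: 'desc', 0: 'asc'}  # column 1 is the primary sort
i, new_order_type = 1, 'asc'                    # clicking toggles desc -> asc
o_list_primary, o_list_toggle, o_list_remove = [], [], []
for j, ot in ordering_field_columns.items():
    if j == i:  # Same column: re-emit with the toggled direction
        param = make_qs_param(new_order_type, j)
        o_list_primary.insert(0, param)
        o_list_toggle.append(param)
    else:
        param = make_qs_param(ot, j)
        o_list_primary.append(param)
        o_list_toggle.append(param)
        o_list_remove.append(param)

assert '.'.join(o_list_primary) == '1.0'  # make column 1 the primary (asc) sort
assert '.'.join(o_list_toggle) == '1.0'
assert '.'.join(o_list_remove) == '0'     # drop column 1 from the ordering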
f3d1b714e047aa387449e12fa01859d2bf61338a600c3b8d937a56534b9820a1
import json

from django import template
from django.template.context import Context

from .base import InclusionAdminNode

register = template.Library()


def prepopulated_fields_js(context):
    """
    Create a list of prepopulated_fields that should render JavaScript for
    the prepopulated fields for both the admin form and inlines.
    """
    prepopulated_fields = []
    if 'adminform' in context:
        prepopulated_fields.extend(context['adminform'].prepopulated_fields)
    if 'inline_admin_formsets' in context:
        for inline_admin_formset in context['inline_admin_formsets']:
            for inline_admin_form in inline_admin_formset:
                if inline_admin_form.original is None:
                    prepopulated_fields.extend(inline_admin_form.prepopulated_fields)

    prepopulated_fields_json = []
    for field in prepopulated_fields:
        prepopulated_fields_json.append({
            "id": "#%s" % field["field"].auto_id,
            "name": field["field"].name,
            "dependency_ids": ["#%s" % dependency.auto_id for dependency in field["dependencies"]],
            "dependency_list": [dependency.name for dependency in field["dependencies"]],
            "maxLength": field["field"].field.max_length or 50,
            "allowUnicode": getattr(field["field"].field, "allow_unicode", False)
        })

    context.update({
        'prepopulated_fields': prepopulated_fields,
        'prepopulated_fields_json': json.dumps(prepopulated_fields_json),
    })
    return context


@register.tag(name='prepopulated_fields_js')
def prepopulated_fields_js_tag(parser, token):
    return InclusionAdminNode(parser, token, func=prepopulated_fields_js, template_name="prepopulated_fields_js.html")


def submit_row(context):
    """
    Display the row of buttons for delete and save.
    """
    add = context['add']
    change = context['change']
    is_popup = context['is_popup']
    save_as = context['save_as']
    show_save = context.get('show_save', True)
    show_save_and_add_another = context.get('show_save_and_add_another', True)
    show_save_and_continue = context.get('show_save_and_continue', True)
    has_add_permission = context['has_add_permission']
    has_change_permission = context['has_change_permission']
    has_view_permission = context['has_view_permission']
    has_editable_inline_admin_formsets = context['has_editable_inline_admin_formsets']
    can_save = (has_change_permission and change) or (has_add_permission and add) or has_editable_inline_admin_formsets
    can_save_and_add_another = (
        has_add_permission and
        not is_popup and
        (not save_as or add) and
        can_save and
        show_save_and_add_another
    )
    can_save_and_continue = not is_popup and can_save and has_view_permission and show_save_and_continue
    can_change = has_change_permission or has_editable_inline_admin_formsets
    ctx = Context(context)
    ctx.update({
        'can_change': can_change,
        'show_delete_link': (
            not is_popup and context['has_delete_permission'] and
            change and context.get('show_delete', True)
        ),
        'show_save_as_new': not is_popup and has_change_permission and change and save_as,
        'show_save_and_add_another': can_save_and_add_another,
        'show_save_and_continue': can_save_and_continue,
        'show_save': show_save and can_save,
        'show_close': not (show_save and can_save),
    })
    return ctx


@register.tag(name='submit_row')
def submit_row_tag(parser, token):
    return InclusionAdminNode(parser, token, func=submit_row, template_name='submit_line.html')


@register.tag(name='change_form_object_tools')
def change_form_object_tools_tag(parser, token):
    """Display the row of change form object tools."""
    return InclusionAdminNode(
        parser, token,
        func=lambda context: context,
        template_name='change_form_object_tools.html',
    )


@register.filter
def cell_count(inline_admin_form):
    """Return the number of cells used in a tabular inline."""
    count = 1  # Hidden cell with hidden 'id' field
    for fieldset in inline_admin_form:
        # Loop through all the fields (one per cell)
        for line in fieldset:
            for field in line:
                count += 1
    if inline_admin_form.formset.can_delete:
        # Delete checkbox
        count += 1
    return count
4fb4503ca0294069f51e3c7a9696b9f89eabcf4c8beba80182b5d0fd53796dc4
from django.conf import settings
from django.contrib.staticfiles.handlers import StaticFilesHandler
from django.core.management.commands.runserver import (
    Command as RunserverCommand,
)


class Command(RunserverCommand):
    help = "Starts a lightweight web server for development and also serves static files."

    def add_arguments(self, parser):
        super().add_arguments(parser)
        parser.add_argument(
            '--nostatic', action="store_false", dest='use_static_handler',
            help='Tells Django to NOT automatically serve static files at STATIC_URL.',
        )
        parser.add_argument(
            '--insecure', action="store_true", dest='insecure_serving',
            help='Allows serving static files even if DEBUG is False.',
        )

    def get_handler(self, *args, **options):
        """
        Return the static files serving handler wrapping the default handler,
        if static files should be served. Otherwise return the default handler.
        """
        handler = super().get_handler(*args, **options)
        use_static_handler = options['use_static_handler']
        insecure_serving = options['insecure_serving']
        if use_static_handler and (settings.DEBUG or insecure_serving):
            return StaticFilesHandler(handler)
        return handler
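# Usage sketch: with 'django.contrib.staticfiles' in INSTALLED_APPS this
# command overrides the stock runserver. Typical invocations:
#
#   python manage.py runserver                      # serve static files while DEBUG=True
#   python manage.py runserver --nostatic           # opt out of static file serving
#   python manage.py runserver --insecure 0:8000    # serve static files even with DEBUG=False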
7e28ebc00ac0366cd7ed8569a37259bdd87fc20bc91f5063b9a92dffc6ed9a17
import datetime
import logging
import os
import shutil
import tempfile

from django.conf import settings
from django.contrib.sessions.backends.base import (
    VALID_KEY_CHARS, CreateError, SessionBase, UpdateError,
)
from django.contrib.sessions.exceptions import InvalidSessionKey
from django.core.exceptions import ImproperlyConfigured, SuspiciousOperation
from django.utils import timezone


class SessionStore(SessionBase):
    """
    Implement a file based session store.
    """
    def __init__(self, session_key=None):
        self.storage_path = self._get_storage_path()
        self.file_prefix = settings.SESSION_COOKIE_NAME
        super().__init__(session_key)

    @classmethod
    def _get_storage_path(cls):
        try:
            return cls._storage_path
        except AttributeError:
            storage_path = getattr(settings, 'SESSION_FILE_PATH', None) or tempfile.gettempdir()
            # Make sure the storage path is valid.
            if not os.path.isdir(storage_path):
                raise ImproperlyConfigured(
                    "The session storage path %r doesn't exist. Please set your"
                    " SESSION_FILE_PATH setting to an existing directory in which"
                    " Django can store session data." % storage_path)

            cls._storage_path = storage_path
            return storage_path

    def _key_to_file(self, session_key=None):
        """
        Get the file associated with this session key.
        """
        if session_key is None:
            session_key = self._get_or_create_session_key()

        # Make sure we're not vulnerable to directory traversal. Session keys
        # should always be md5s, so they should never contain directory
        # components.
        if not set(session_key).issubset(VALID_KEY_CHARS):
            raise InvalidSessionKey(
                "Invalid characters in session key")

        return os.path.join(self.storage_path, self.file_prefix + session_key)

    def _last_modification(self):
        """
        Return the modification time of the file storing the session's content.
        """
        modification = os.stat(self._key_to_file()).st_mtime
        tz = timezone.utc if settings.USE_TZ else None
        return datetime.datetime.fromtimestamp(modification, tz=tz)

    def _expiry_date(self, session_data):
        """
        Return the expiry time of the file storing the session's content.
        """
        return session_data.get('_session_expiry') or (
            self._last_modification() + datetime.timedelta(seconds=self.get_session_cookie_age())
        )

    def load(self):
        session_data = {}
        try:
            with open(self._key_to_file(), encoding='ascii') as session_file:
                file_data = session_file.read()
            # Don't fail if there is no data in the session file.
            # We may have opened the empty placeholder file.
            if file_data:
                try:
                    session_data = self.decode(file_data)
                except (EOFError, SuspiciousOperation) as e:
                    if isinstance(e, SuspiciousOperation):
                        logger = logging.getLogger('django.security.%s' % e.__class__.__name__)
                        logger.warning(str(e))
                    self.create()

                # Remove expired sessions.
                expiry_age = self.get_expiry_age(expiry=self._expiry_date(session_data))
                if expiry_age <= 0:
                    session_data = {}
                    self.delete()
                    self.create()
        except (OSError, SuspiciousOperation):
            self._session_key = None
        return session_data

    def create(self):
        while True:
            self._session_key = self._get_new_session_key()
            try:
                self.save(must_create=True)
            except CreateError:
                continue
            self.modified = True
            return

    def save(self, must_create=False):
        if self.session_key is None:
            return self.create()
        # Get the session data now, before we start messing
        # with the file it is stored within.
        session_data = self._get_session(no_load=must_create)

        session_file_name = self._key_to_file()

        try:
            # Make sure the file exists. If it does not already exist, an
            # empty placeholder file is created.
            flags = os.O_WRONLY | getattr(os, 'O_BINARY', 0)
            if must_create:
                flags |= os.O_EXCL | os.O_CREAT
            fd = os.open(session_file_name, flags)
            os.close(fd)
        except FileNotFoundError:
            if not must_create:
                raise UpdateError
        except FileExistsError:
            if must_create:
                raise CreateError

        # Write the session file without interfering with other threads
        # or processes. By writing to an atomically generated temporary
        # file and then using the atomic os.rename() to make the complete
        # file visible, we avoid having to lock the session file, while
        # still maintaining its integrity.
        #
        # Note: Locking the session file was explored, but rejected in part
        # because in order to be atomic and cross-platform, it required a
        # long-lived lock file for each session, doubling the number of
        # files in the session storage directory at any given time. This
        # rename solution is cleaner and avoids any additional overhead
        # when reading the session data, which is the more common case
        # unless SESSION_SAVE_EVERY_REQUEST = True.
        #
        # See ticket #8616.
        dir, prefix = os.path.split(session_file_name)

        try:
            output_file_fd, output_file_name = tempfile.mkstemp(dir=dir, prefix=prefix + '_out_')
            renamed = False
            try:
                try:
                    os.write(output_file_fd, self.encode(session_data).encode())
                finally:
                    os.close(output_file_fd)

                # This will atomically rename the file (os.rename) if the OS
                # supports it. Otherwise this will result in a shutil.copy2
                # and os.unlink (for example on Windows). See #9084.
                shutil.move(output_file_name, session_file_name)
                renamed = True
            finally:
                if not renamed:
                    os.unlink(output_file_name)
        except (EOFError, OSError):
            pass

    def exists(self, session_key):
        return os.path.exists(self._key_to_file(session_key))

    def delete(self, session_key=None):
        if session_key is None:
            if self.session_key is None:
                return
            session_key = self.session_key
        try:
            os.unlink(self._key_to_file(session_key))
        except OSError:
            pass

    def clean(self):
        pass

    @classmethod
    def clear_expired(cls):
        storage_path = cls._get_storage_path()
        file_prefix = settings.SESSION_COOKIE_NAME

        for session_file in os.listdir(storage_path):
            if not session_file.startswith(file_prefix):
                continue
            session_key = session_file[len(file_prefix):]
            session = cls(session_key)
            # When an expired session is loaded, its file is removed, and a
            # new file is immediately created. Prevent this by disabling
            # the create() method.
            session.create = lambda: None
            session.load()
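# Standalone sketch (not part of the module above) of the write-to-temp-then-
# rename pattern save() relies on: readers always see either the old or the
# new file contents, never a partial write, without any file locking.
import os
import shutil
import tempfile


def atomic_write(path, data):
    dirname, prefix = os.path.split(path)
    fd, tmp_name = tempfile.mkstemp(dir=dirname, prefix=prefix + '_out_')
    try:
        try:
            os.write(fd, data)
        finally:
            os.close(fd)
        # Atomic rename when source and destination share a filesystem;
        # falls back to copy-and-unlink semantics elsewhere.
        shutil.move(tmp_name, path)
    except BaseException:
        os.unlink(tmp_name)  # don't leave the temporary file behind
        raise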
a4088ee68ef3801bad56560057dddb3e79a2c3358fed5e4defec4fb714a48c98
from importlib import import_module

from django.conf import settings
from django.core.management.base import BaseCommand, CommandError


class Command(BaseCommand):
    help = (
        "Can be run as a cronjob or directly to clean out expired sessions "
        "when the backend supports it."
    )

    def handle(self, **options):
        engine = import_module(settings.SESSION_ENGINE)
        try:
            engine.SessionStore.clear_expired()
        except NotImplementedError:
            raise CommandError(
                "Session engine '%s' doesn't support clearing expired "
                "sessions." % settings.SESSION_ENGINE
            )
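# Usage sketch: run periodically (e.g. from cron) to purge expired sessions.
#
#   0 4 * * * /path/to/venv/bin/python /path/to/manage.py clearsessions
#
# A session engine whose SessionStore doesn't override the base
# clear_expired() raises NotImplementedError, which this command reports
# as a CommandError.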
049cabf3d9d4735e91a37ddfd973d9defbb20f4a2d928ebe447090a2fe7a2edd
import logging
import os
import re
from ctypes import CDLL, CFUNCTYPE, c_char_p, c_int
from ctypes.util import find_library

from django.contrib.gis.gdal.error import GDALException
from django.core.exceptions import ImproperlyConfigured

logger = logging.getLogger('django.contrib.gis')

# Custom library path set?
try:
    from django.conf import settings
    lib_path = settings.GDAL_LIBRARY_PATH
except (AttributeError, ImportError, ImproperlyConfigured, OSError):
    lib_path = None

if lib_path:
    lib_names = None
elif os.name == 'nt':
    # Windows NT shared libraries
    lib_names = [
        'gdal303', 'gdal302', 'gdal301', 'gdal300',
        'gdal204', 'gdal203', 'gdal202', 'gdal201', 'gdal20',
    ]
elif os.name == 'posix':
    # *NIX library names.
    lib_names = [
        'gdal', 'GDAL',
        'gdal3.3.0', 'gdal3.2.0', 'gdal3.1.0', 'gdal3.0.0',
        'gdal2.4.0', 'gdal2.3.0', 'gdal2.2.0', 'gdal2.1.0', 'gdal2.0.0',
    ]
else:
    raise ImproperlyConfigured('GDAL is unsupported on OS "%s".' % os.name)

# Using the ctypes `find_library` utility to find the
# path to the GDAL library from the list of library names.
if lib_names:
    for lib_name in lib_names:
        lib_path = find_library(lib_name)
        if lib_path is not None:
            break

if lib_path is None:
    raise ImproperlyConfigured(
        'Could not find the GDAL library (tried "%s"). Is GDAL installed? '
        'If it is, try setting GDAL_LIBRARY_PATH in your settings.'
        % '", "'.join(lib_names)
    )

# This loads the GDAL/OGR C library
lgdal = CDLL(lib_path)

# On Windows, the GDAL binaries have some OSR routines exported with
# STDCALL, while others are not. Thus, the library will also need to
# be loaded up as WinDLL for said OSR functions that require the
# different calling convention.
if os.name == 'nt':
    from ctypes import WinDLL
    lwingdal = WinDLL(lib_path)


def std_call(func):
    """
    Return the correct STDCALL function for certain OSR routines on Win32
    platforms.
    """
    if os.name == 'nt':
        return lwingdal[func]
    else:
        return lgdal[func]


# #### Version-information functions. ####

# Return GDAL library version information with the given key.
_version_info = std_call('GDALVersionInfo')
_version_info.argtypes = [c_char_p]
_version_info.restype = c_char_p


def gdal_version():
    "Return only the GDAL version number information."
    return _version_info(b'RELEASE_NAME')


def gdal_full_version():
    "Return the full GDAL version information."
    return _version_info(b'')


def gdal_version_info():
    ver = gdal_version()
    m = re.match(br'^(?P<major>\d+)\.(?P<minor>\d+)(?:\.(?P<subminor>\d+))?', ver)
    if not m:
        raise GDALException('Could not parse GDAL version string "%s"' % ver)
    major, minor, subminor = m.groups()
    return (int(major), int(minor), subminor and int(subminor))


GDAL_VERSION = gdal_version_info()

# Set library error handling so that errors are logged.
CPLErrorHandler = CFUNCTYPE(None, c_int, c_int, c_char_p)


def err_handler(error_class, error_number, message):
    logger.error('GDAL_ERROR %d: %s', error_number, message)


err_handler = CPLErrorHandler(err_handler)


def function(name, args, restype):
    func = std_call(name)
    func.argtypes = args
    func.restype = restype
    return func


set_error_handler = function('CPLSetErrorHandler', [CPLErrorHandler], CPLErrorHandler)
set_error_handler(err_handler)
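# Illustrative check: the regex in gdal_version_info() above applied to a
# sample release string, yielding the (major, minor, subminor) tuple that
# GDAL_VERSION holds.
import re

_sample = b'3.2.1'
_m = re.match(br'^(?P<major>\d+)\.(?P<minor>\d+)(?:\.(?P<subminor>\d+))?', _sample)
assert (int(_m['major']), int(_m['minor']), _m['subminor'] and int(_m['subminor'])) == (3, 2, 1)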
24b08c486b38616503ef05bc54aae7967d51c9762057d518729b597ea4b383fa
""" The Spatial Reference class, represents OGR Spatial Reference objects. Example: >>> from django.contrib.gis.gdal import SpatialReference >>> srs = SpatialReference('WGS84') >>> print(srs) GEOGCS["WGS 84", DATUM["WGS_1984", SPHEROID["WGS 84",6378137,298.257223563, AUTHORITY["EPSG","7030"]], TOWGS84[0,0,0,0,0,0,0], AUTHORITY["EPSG","6326"]], PRIMEM["Greenwich",0, AUTHORITY["EPSG","8901"]], UNIT["degree",0.01745329251994328, AUTHORITY["EPSG","9122"]], AUTHORITY["EPSG","4326"]] >>> print(srs.proj) +proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs >>> print(srs.ellipsoid) (6378137.0, 6356752.3142451793, 298.25722356300003) >>> print(srs.projected, srs.geographic) False True >>> srs.import_epsg(32140) >>> print(srs.name) NAD83 / Texas South Central """ from ctypes import byref, c_char_p, c_int from enum import IntEnum from django.contrib.gis.gdal.base import GDALBase from django.contrib.gis.gdal.error import SRSException from django.contrib.gis.gdal.libgdal import GDAL_VERSION from django.contrib.gis.gdal.prototypes import srs as capi from django.utils.encoding import force_bytes, force_str class AxisOrder(IntEnum): TRADITIONAL = 0 AUTHORITY = 1 class SpatialReference(GDALBase): """ A wrapper for the OGRSpatialReference object. According to the GDAL web site, the SpatialReference object "provide[s] services to represent coordinate systems (projections and datums) and to transform between them." """ destructor = capi.release_srs def __init__(self, srs_input='', srs_type='user', axis_order=None): """ Create a GDAL OSR Spatial Reference object from the given input. The input may be string of OGC Well Known Text (WKT), an integer EPSG code, a PROJ string, and/or a projection "well known" shorthand string (one of 'WGS84', 'WGS72', 'NAD27', 'NAD83'). """ if not isinstance(axis_order, (type(None), AxisOrder)): raise ValueError( 'SpatialReference.axis_order must be an AxisOrder instance.' ) self.axis_order = axis_order or AxisOrder.TRADITIONAL if srs_type == 'wkt': self.ptr = capi.new_srs(c_char_p(b'')) self.import_wkt(srs_input) if self.axis_order == AxisOrder.TRADITIONAL and GDAL_VERSION >= (3, 0): capi.set_axis_strategy(self.ptr, self.axis_order) elif self.axis_order != AxisOrder.TRADITIONAL and GDAL_VERSION < (3, 0): raise ValueError('%s is not supported in GDAL < 3.0.' % self.axis_order) return elif isinstance(srs_input, str): try: # If SRID is a string, e.g., '4326', then make acceptable # as user input. srid = int(srs_input) srs_input = 'EPSG:%d' % srid except ValueError: pass elif isinstance(srs_input, int): # EPSG integer code was input. srs_type = 'epsg' elif isinstance(srs_input, self.ptr_type): srs = srs_input srs_type = 'ogr' else: raise TypeError('Invalid SRS type "%s"' % srs_type) if srs_type == 'ogr': # Input is already an SRS pointer. srs = srs_input else: # Creating a new SRS pointer, using the string buffer. buf = c_char_p(b'') srs = capi.new_srs(buf) # If the pointer is NULL, throw an exception. if not srs: raise SRSException('Could not create spatial reference from: %s' % srs_input) else: self.ptr = srs if self.axis_order == AxisOrder.TRADITIONAL and GDAL_VERSION >= (3, 0): capi.set_axis_strategy(self.ptr, self.axis_order) elif self.axis_order != AxisOrder.TRADITIONAL and GDAL_VERSION < (3, 0): raise ValueError('%s is not supported in GDAL < 3.0.' % self.axis_order) # Importing from either the user input string or an integer SRID. 
if srs_type == 'user': self.import_user_input(srs_input) elif srs_type == 'epsg': self.import_epsg(srs_input) def __getitem__(self, target): """ Return the value of the given string attribute node, None if the node doesn't exist. Can also take a tuple as a parameter, (target, child), where child is the index of the attribute in the WKT. For example: >>> wkt = 'GEOGCS["WGS 84", DATUM["WGS_1984, ... AUTHORITY["EPSG","4326"]]' >>> srs = SpatialReference(wkt) # could also use 'WGS84', or 4326 >>> print(srs['GEOGCS']) WGS 84 >>> print(srs['DATUM']) WGS_1984 >>> print(srs['AUTHORITY']) EPSG >>> print(srs['AUTHORITY', 1]) # The authority value 4326 >>> print(srs['TOWGS84', 4]) # the fourth value in this wkt 0 >>> print(srs['UNIT|AUTHORITY']) # For the units authority, have to use the pipe symbole. EPSG >>> print(srs['UNIT|AUTHORITY', 1]) # The authority value for the units 9122 """ if isinstance(target, tuple): return self.attr_value(*target) else: return self.attr_value(target) def __str__(self): "Use 'pretty' WKT." return self.pretty_wkt # #### SpatialReference Methods #### def attr_value(self, target, index=0): """ The attribute value for the given target node (e.g. 'PROJCS'). The index keyword specifies an index of the child node to return. """ if not isinstance(target, str) or not isinstance(index, int): raise TypeError return capi.get_attr_value(self.ptr, force_bytes(target), index) def auth_name(self, target): "Return the authority name for the given string target node." return capi.get_auth_name(self.ptr, force_bytes(target)) def auth_code(self, target): "Return the authority code for the given string target node." return capi.get_auth_code(self.ptr, force_bytes(target)) def clone(self): "Return a clone of this SpatialReference object." return SpatialReference(capi.clone_srs(self.ptr), axis_order=self.axis_order) def from_esri(self): "Morph this SpatialReference from ESRI's format to EPSG." capi.morph_from_esri(self.ptr) def identify_epsg(self): """ This method inspects the WKT of this SpatialReference, and will add EPSG authority nodes where an EPSG identifier is applicable. """ capi.identify_epsg(self.ptr) def to_esri(self): "Morph this SpatialReference to ESRI's format." capi.morph_to_esri(self.ptr) def validate(self): "Check to see if the given spatial reference is valid." capi.srs_validate(self.ptr) # #### Name & SRID properties #### @property def name(self): "Return the name of this Spatial Reference." if self.projected: return self.attr_value('PROJCS') elif self.geographic: return self.attr_value('GEOGCS') elif self.local: return self.attr_value('LOCAL_CS') else: return None @property def srid(self): "Return the SRID of top-level authority, or None if undefined." try: return int(self.attr_value('AUTHORITY', 1)) except (TypeError, ValueError): return None # #### Unit Properties #### @property def linear_name(self): "Return the name of the linear units." units, name = capi.linear_units(self.ptr, byref(c_char_p())) return name @property def linear_units(self): "Return the value of the linear units." units, name = capi.linear_units(self.ptr, byref(c_char_p())) return units @property def angular_name(self): "Return the name of the angular units." units, name = capi.angular_units(self.ptr, byref(c_char_p())) return name @property def angular_units(self): "Return the value of the angular units." units, name = capi.angular_units(self.ptr, byref(c_char_p())) return units @property def units(self): """ Return a 2-tuple of the units value and the units name. 
Automatically determine whether to return the linear or angular units. """ units, name = None, None if self.projected or self.local: units, name = capi.linear_units(self.ptr, byref(c_char_p())) elif self.geographic: units, name = capi.angular_units(self.ptr, byref(c_char_p())) if name is not None: name = force_str(name) return (units, name) # #### Spheroid/Ellipsoid Properties #### @property def ellipsoid(self): """ Return a tuple of the ellipsoid parameters: (semimajor axis, semiminor axis, and inverse flattening) """ return (self.semi_major, self.semi_minor, self.inverse_flattening) @property def semi_major(self): "Return the Semi Major Axis for this Spatial Reference." return capi.semi_major(self.ptr, byref(c_int())) @property def semi_minor(self): "Return the Semi Minor Axis for this Spatial Reference." return capi.semi_minor(self.ptr, byref(c_int())) @property def inverse_flattening(self): "Return the Inverse Flattening for this Spatial Reference." return capi.invflattening(self.ptr, byref(c_int())) # #### Boolean Properties #### @property def geographic(self): """ Return True if this SpatialReference is geographic (root node is GEOGCS). """ return bool(capi.isgeographic(self.ptr)) @property def local(self): "Return True if this SpatialReference is local (root node is LOCAL_CS)." return bool(capi.islocal(self.ptr)) @property def projected(self): """ Return True if this SpatialReference is a projected coordinate system (root node is PROJCS). """ return bool(capi.isprojected(self.ptr)) # #### Import Routines ##### def import_epsg(self, epsg): "Import the Spatial Reference from the EPSG code (an integer)." capi.from_epsg(self.ptr, epsg) def import_proj(self, proj): """Import the Spatial Reference from a PROJ string.""" capi.from_proj(self.ptr, proj) def import_user_input(self, user_input): "Import the Spatial Reference from the given user input string." capi.from_user_input(self.ptr, force_bytes(user_input)) def import_wkt(self, wkt): "Import the Spatial Reference from OGC WKT (string)" capi.from_wkt(self.ptr, byref(c_char_p(force_bytes(wkt)))) def import_xml(self, xml): "Import the Spatial Reference from an XML string." capi.from_xml(self.ptr, xml) # #### Export Properties #### @property def wkt(self): "Return the WKT representation of this Spatial Reference." return capi.to_wkt(self.ptr, byref(c_char_p())) @property def pretty_wkt(self, simplify=0): "Return the 'pretty' representation of the WKT." return capi.to_pretty_wkt(self.ptr, byref(c_char_p()), simplify) @property def proj(self): """Return the PROJ representation for this Spatial Reference.""" return capi.to_proj(self.ptr, byref(c_char_p())) @property def proj4(self): "Alias for proj()." return self.proj @property def xml(self, dialect=''): "Return the XML representation of this Spatial Reference." return capi.to_xml(self.ptr, byref(c_char_p()), force_bytes(dialect)) class CoordTransform(GDALBase): "The coordinate system transformation object." destructor = capi.destroy_ct def __init__(self, source, target): "Initialize on a source and target SpatialReference objects." if not isinstance(source, SpatialReference) or not isinstance(target, SpatialReference): raise TypeError('source and target must be of type SpatialReference') self.ptr = capi.new_ct(source._ptr, target._ptr) self._srs1_name = source.name self._srs2_name = target.name def __str__(self): return 'Transform from "%s" to "%s"' % (self._srs1_name, self._srs2_name)
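# --- Editor's usage sketch (not part of the module) ---
# A minimal, hedged example of the round trip the two classes above enable:
# build SpatialReference objects from an EPSG integer and a user-input string,
# then reproject a GEOS geometry through a CoordTransform. It assumes GDAL and
# GEOS are installed; the coordinates are illustrative only.
#
#   from django.contrib.gis.gdal import CoordTransform, SpatialReference
#   from django.contrib.gis.geos import Point
#
#   wgs84 = SpatialReference(4326)                # integer EPSG input
#   web_mercator = SpatialReference('EPSG:3857')  # user-input string
#   ct = CoordTransform(wgs84, web_mercator)
#
#   pt = Point(-95.36, 29.76, srid=4326)
#   pt.transform(ct)  # reprojects the point in place
#   pt.srid           # now 3857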
003599a64942751ce40fc6bdeba7f980b0b3925022ee8db3eb5ed2c7c95e316c
""" The SpatialProxy object allows for lazy-geometries and lazy-rasters. The proxy uses Python descriptors for instantiating and setting Geometry or Raster objects corresponding to geographic model fields. Thanks to Robert Coup for providing this functionality (see #4322). """ from django.db.models.query_utils import DeferredAttribute class SpatialProxy(DeferredAttribute): def __init__(self, klass, field, load_func=None): """ Initialize on the given Geometry or Raster class (not an instance) and the corresponding field. """ self._klass = klass self._load_func = load_func or klass super().__init__(field) def __get__(self, instance, cls=None): """ Retrieve the geometry or raster, initializing it using the corresponding class specified during initialization and the value of the field. Currently, GEOS or OGR geometries as well as GDALRasters are supported. """ if instance is None: # Accessed on a class, not an instance return self # Getting the value of the field. try: geo_value = instance.__dict__[self.field.attname] except KeyError: geo_value = super().__get__(instance, cls) if isinstance(geo_value, self._klass): geo_obj = geo_value elif (geo_value is None) or (geo_value == ''): geo_obj = None else: # Otherwise, a geometry or raster object is built using the field's # contents, and the model's corresponding attribute is set. geo_obj = self._load_func(geo_value) setattr(instance, self.field.attname, geo_obj) return geo_obj def __set__(self, instance, value): """ Retrieve the proxied geometry or raster with the corresponding class specified during initialization. To set geometries, use values of None, HEXEWKB, or WKT. To set rasters, use JSON or dict values. """ # The geographic type of the field. gtype = self.field.geom_type if gtype == 'RASTER' and (value is None or isinstance(value, (str, dict, self._klass))): # For raster fields, ensure input is None or a string, dict, or # raster instance. pass elif isinstance(value, self._klass): # The geometry type must match that of the field -- unless the # general GeometryField is used. if value.srid is None: # Assigning the field SRID if the geometry has no SRID. value.srid = self.field.srid elif value is None or isinstance(value, (str, memoryview)): # Set geometries with None, WKT, HEX, or WKB pass else: raise TypeError('Cannot set %s SpatialProxy (%s) with value of type: %s' % ( instance.__class__.__name__, gtype, type(value))) # Setting the objects dictionary with the value, and returning. instance.__dict__[self.field.attname] = value return value
7dabbe658c6e47a7cf7e29bc7d7a731186ac19ba7f49b1cf9ae41c78c8cdebeb
from django.db.backends.postgresql.schema import DatabaseSchemaEditor
from django.db.models.expressions import Col, Func


class PostGISSchemaEditor(DatabaseSchemaEditor):
    geom_index_type = 'GIST'
    geom_index_ops_nd = 'GIST_GEOMETRY_OPS_ND'
    rast_index_template = 'ST_ConvexHull(%(expressions)s)'

    sql_alter_column_to_3d = "ALTER COLUMN %(column)s TYPE %(type)s USING ST_Force3D(%(column)s)::%(type)s"
    sql_alter_column_to_2d = "ALTER COLUMN %(column)s TYPE %(type)s USING ST_Force2D(%(column)s)::%(type)s"

    def geo_quote_name(self, name):
        return self.connection.ops.geo_quote_name(name)

    def _field_should_be_indexed(self, model, field):
        if getattr(field, 'spatial_index', False):
            return True
        return super()._field_should_be_indexed(model, field)

    def _create_index_sql(self, model, *, fields=None, **kwargs):
        if fields is None or len(fields) != 1 or not hasattr(fields[0], 'geodetic'):
            return super()._create_index_sql(model, fields=fields, **kwargs)

        field = fields[0]
        expressions = None
        opclasses = None
        if field.geom_type == 'RASTER':
            # For raster fields, wrap the index creation SQL statement with
            # ST_ConvexHull. Indexes on raster columns are based on the convex
            # hull of the raster.
            expressions = Func(Col(None, field), template=self.rast_index_template)
            fields = None
        elif field.dim > 2 and not field.geography:
            # Use the "nd" ops, which are fast on multidimensional cases.
            opclasses = [self.geom_index_ops_nd]
        name = kwargs.get('name')
        if not name:
            name = self._create_index_name(model._meta.db_table, [field.column], '_id')

        return super()._create_index_sql(
            model,
            fields=fields,
            name=name,
            using=' USING %s' % self.geom_index_type,
            opclasses=opclasses,
            expressions=expressions,
        )

    def _alter_column_type_sql(self, table, old_field, new_field, new_type):
        """
        Special-case the type alteration SQL when the geometry dimension has
        changed.
        """
        if not hasattr(old_field, 'dim') or not hasattr(new_field, 'dim'):
            return super()._alter_column_type_sql(table, old_field, new_field, new_type)

        if old_field.dim == 2 and new_field.dim == 3:
            sql_alter = self.sql_alter_column_to_3d
        elif old_field.dim == 3 and new_field.dim == 2:
            sql_alter = self.sql_alter_column_to_2d
        else:
            sql_alter = self.sql_alter_column_type
        return (
            (
                sql_alter % {
                    "column": self.quote_name(new_field.column),
                    "type": new_type,
                },
                [],
            ),
            [],
        )
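# --- Editor's sketch: how the ALTER templates above are filled in ---
# Illustrative values only; _alter_column_type_sql() performs this
# interpolation internally when a geometry field goes from 2D to 3D.
#
#   sql = PostGISSchemaEditor.sql_alter_column_to_3d % {
#       'column': '"geom"',
#       'type': 'geometry(PointZ,4326)',
#   }
#   # -> 'ALTER COLUMN "geom" TYPE geometry(PointZ,4326) '
#   #    'USING ST_Force3D("geom")::geometry(PointZ,4326)'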
73ff570eb4ae5ccda44ffbe0562db4cb0eb93127fe24284e8efdd73e6e72cd45
import json import os import sys import uuid from ctypes import ( addressof, byref, c_buffer, c_char_p, c_double, c_int, c_void_p, string_at, ) from django.contrib.gis.gdal.driver import Driver from django.contrib.gis.gdal.error import GDALException from django.contrib.gis.gdal.prototypes import raster as capi from django.contrib.gis.gdal.raster.band import BandList from django.contrib.gis.gdal.raster.base import GDALRasterBase from django.contrib.gis.gdal.raster.const import ( GDAL_RESAMPLE_ALGORITHMS, VSI_DELETE_BUFFER_ON_READ, VSI_FILESYSTEM_PREFIX, VSI_MEM_FILESYSTEM_BASE_PATH, VSI_TAKE_BUFFER_OWNERSHIP, ) from django.contrib.gis.gdal.srs import SpatialReference, SRSException from django.contrib.gis.geometry import json_regex from django.utils.encoding import force_bytes, force_str from django.utils.functional import cached_property class TransformPoint(list): indices = { 'origin': (0, 3), 'scale': (1, 5), 'skew': (2, 4), } def __init__(self, raster, prop): x = raster.geotransform[self.indices[prop][0]] y = raster.geotransform[self.indices[prop][1]] super().__init__([x, y]) self._raster = raster self._prop = prop @property def x(self): return self[0] @x.setter def x(self, value): gtf = self._raster.geotransform gtf[self.indices[self._prop][0]] = value self._raster.geotransform = gtf @property def y(self): return self[1] @y.setter def y(self, value): gtf = self._raster.geotransform gtf[self.indices[self._prop][1]] = value self._raster.geotransform = gtf class GDALRaster(GDALRasterBase): """ Wrap a raster GDAL Data Source object. """ destructor = capi.close_ds def __init__(self, ds_input, write=False): self._write = 1 if write else 0 Driver.ensure_registered() # Preprocess json inputs. This converts json strings to dictionaries, # which are parsed below the same way as direct dictionary inputs. if isinstance(ds_input, str) and json_regex.match(ds_input): ds_input = json.loads(ds_input) # If input is a valid file path, try setting file as source. if isinstance(ds_input, str): if ( not ds_input.startswith(VSI_FILESYSTEM_PREFIX) and not os.path.exists(ds_input) ): raise GDALException( 'Unable to read raster source input "%s".' % ds_input ) try: # GDALOpen will auto-detect the data source type. self._ptr = capi.open_ds(force_bytes(ds_input), self._write) except GDALException as err: raise GDALException('Could not open the datasource at "{}" ({}).'.format(ds_input, err)) elif isinstance(ds_input, bytes): # Create a new raster in write mode. self._write = 1 # Get size of buffer. size = sys.getsizeof(ds_input) # Pass data to ctypes, keeping a reference to the ctypes object so # that the vsimem file remains available until the GDALRaster is # deleted. self._ds_input = c_buffer(ds_input) # Create random name to reference in vsimem filesystem. vsi_path = os.path.join(VSI_MEM_FILESYSTEM_BASE_PATH, str(uuid.uuid4())) # Create vsimem file from buffer. capi.create_vsi_file_from_mem_buffer( force_bytes(vsi_path), byref(self._ds_input), size, VSI_TAKE_BUFFER_OWNERSHIP, ) # Open the new vsimem file as a GDALRaster. try: self._ptr = capi.open_ds(force_bytes(vsi_path), self._write) except GDALException: # Remove the broken file from the VSI filesystem. 
                capi.unlink_vsi_file(force_bytes(vsi_path))
                raise GDALException('Failed creating VSI raster from the input buffer.')
        elif isinstance(ds_input, dict):
            # A new raster needs to be created in write mode
            self._write = 1

            # Create driver (in memory by default)
            driver = Driver(ds_input.get('driver', 'MEM'))

            # For drivers other than the in-memory driver, a filename
            # argument is required.
            if driver.name != 'MEM' and 'name' not in ds_input:
                raise GDALException('Specify name for creation of raster with driver "{}".'.format(driver.name))

            # Check if width and height were specified
            if 'width' not in ds_input or 'height' not in ds_input:
                raise GDALException('Specify width and height attributes for JSON or dict input.')

            # Check if srid was specified
            if 'srid' not in ds_input:
                raise GDALException('Specify srid for JSON or dict input.')

            # Create null-terminated GDAL options array.
            papsz_options = []
            for key, val in ds_input.get('papsz_options', {}).items():
                option = '{}={}'.format(key, val)
                papsz_options.append(option.upper().encode())
            papsz_options.append(None)

            # Convert papszlist to ctypes array.
            papsz_options = (c_char_p * len(papsz_options))(*papsz_options)

            # Create GDAL Raster
            self._ptr = capi.create_ds(
                driver._ptr,
                force_bytes(ds_input.get('name', '')),
                ds_input['width'],
                ds_input['height'],
                ds_input.get('nr_of_bands', len(ds_input.get('bands', []))),
                ds_input.get('datatype', 6),
                byref(papsz_options),
            )

            # Set band data if provided
            for i, band_input in enumerate(ds_input.get('bands', [])):
                band = self.bands[i]

                if 'nodata_value' in band_input:
                    band.nodata_value = band_input['nodata_value']
                    # Instantiate band filled with nodata values if only
                    # partial input data has been provided.
                    if band.nodata_value is not None and (
                            'data' not in band_input or
                            'size' in band_input or
                            'shape' in band_input):
                        band.data(data=(band.nodata_value,), shape=(1, 1))

                # Set band data values from input.
                band.data(
                    data=band_input.get('data'),
                    size=band_input.get('size'),
                    shape=band_input.get('shape'),
                    offset=band_input.get('offset'),
                )

            # Set SRID
            self.srs = ds_input.get('srid')

            # Set additional properties if provided
            if 'origin' in ds_input:
                self.origin.x, self.origin.y = ds_input['origin']

            if 'scale' in ds_input:
                self.scale.x, self.scale.y = ds_input['scale']

            if 'skew' in ds_input:
                self.skew.x, self.skew.y = ds_input['skew']
        elif isinstance(ds_input, c_void_p):
            # Instantiate the object using an existing pointer to a gdal raster.
            self._ptr = ds_input
        else:
            raise GDALException('Invalid data source input type: "{}".'.format(type(ds_input)))

    def __del__(self):
        if self.is_vsi_based:
            # Remove the temporary file from the VSI in-memory filesystem.
            capi.unlink_vsi_file(force_bytes(self.name))
        super().__del__()

    def __str__(self):
        return self.name

    def __repr__(self):
        """
        Short-hand representation because WKB may be very large.
        """
        return '<Raster object at %s>' % hex(addressof(self._ptr))

    def _flush(self):
        """
        Flush all data from memory into the source file if it exists.
        The data that need flushing are geotransforms, coordinate systems,
        nodata_values, and pixel values. This function will be called
        automatically wherever it is needed.
        """
        # Raise an Exception if the value is being changed in read mode.
        if not self._write:
            raise GDALException('Raster needs to be opened in write mode to change values.')
        capi.flush_ds(self._ptr)

    @property
    def vsi_buffer(self):
        if not (
            self.is_vsi_based and
            self.name.startswith(VSI_MEM_FILESYSTEM_BASE_PATH)
        ):
            return None
        # Prepare an integer that will contain the buffer length.
out_length = c_int() # Get the data using the vsi file name. dat = capi.get_mem_buffer_from_vsi_file( force_bytes(self.name), byref(out_length), VSI_DELETE_BUFFER_ON_READ, ) # Read the full buffer pointer. return string_at(dat, out_length.value) @cached_property def is_vsi_based(self): return self._ptr and self.name.startswith(VSI_FILESYSTEM_PREFIX) @property def name(self): """ Return the name of this raster. Corresponds to filename for file-based rasters. """ return force_str(capi.get_ds_description(self._ptr)) @cached_property def driver(self): """ Return the GDAL Driver used for this raster. """ ds_driver = capi.get_ds_driver(self._ptr) return Driver(ds_driver) @property def width(self): """ Width (X axis) in pixels. """ return capi.get_ds_xsize(self._ptr) @property def height(self): """ Height (Y axis) in pixels. """ return capi.get_ds_ysize(self._ptr) @property def srs(self): """ Return the SpatialReference used in this GDALRaster. """ try: wkt = capi.get_ds_projection_ref(self._ptr) if not wkt: return None return SpatialReference(wkt, srs_type='wkt') except SRSException: return None @srs.setter def srs(self, value): """ Set the spatial reference used in this GDALRaster. The input can be a SpatialReference or any parameter accepted by the SpatialReference constructor. """ if isinstance(value, SpatialReference): srs = value elif isinstance(value, (int, str)): srs = SpatialReference(value) else: raise ValueError('Could not create a SpatialReference from input.') capi.set_ds_projection_ref(self._ptr, srs.wkt.encode()) self._flush() @property def srid(self): """ Shortcut to access the srid of this GDALRaster. """ return self.srs.srid @srid.setter def srid(self, value): """ Shortcut to set this GDALRaster's srs from an srid. """ self.srs = value @property def geotransform(self): """ Return the geotransform of the data source. Return the default geotransform if it does not exist or has not been set previously. The default is [0.0, 1.0, 0.0, 0.0, 0.0, -1.0]. """ # Create empty ctypes double array for data gtf = (c_double * 6)() capi.get_ds_geotransform(self._ptr, byref(gtf)) return list(gtf) @geotransform.setter def geotransform(self, values): "Set the geotransform for the data source." if len(values) != 6 or not all(isinstance(x, (int, float)) for x in values): raise ValueError('Geotransform must consist of 6 numeric values.') # Create ctypes double array with input and write data values = (c_double * 6)(*values) capi.set_ds_geotransform(self._ptr, byref(values)) self._flush() @property def origin(self): """ Coordinates of the raster origin. """ return TransformPoint(self, 'origin') @property def scale(self): """ Pixel scale in units of the raster projection. """ return TransformPoint(self, 'scale') @property def skew(self): """ Skew of pixels (rotation parameters). """ return TransformPoint(self, 'skew') @property def extent(self): """ Return the extent as a 4-tuple (xmin, ymin, xmax, ymax). """ # Calculate boundary values based on scale and size xval = self.origin.x + self.scale.x * self.width yval = self.origin.y + self.scale.y * self.height # Calculate min and max values xmin = min(xval, self.origin.x) xmax = max(xval, self.origin.x) ymin = min(yval, self.origin.y) ymax = max(yval, self.origin.y) return xmin, ymin, xmax, ymax @property def bands(self): return BandList(self) def warp(self, ds_input, resampling='NearestNeighbour', max_error=0.0): """ Return a warped GDALRaster with the given input characteristics. 
        The input is expected to be a dictionary containing the parameters
        of the target raster. Allowed values are width, height, SRID, origin,
        scale, skew, datatype, driver, and name (filename).

        By default, the warp function keeps all parameters equal to the values
        of the original source raster. For the name of the target raster, the
        name of the source raster is used, with '_copy.' plus the source
        driver name appended.

        In addition, the resampling algorithm can be specified with the
        "resampling" input parameter. The default is NearestNeighbour. For a
        list of all options consult the GDAL_RESAMPLE_ALGORITHMS constant.
        """
        # Get the parameters defining the geotransform, srid, and size of the raster
        ds_input.setdefault('width', self.width)
        ds_input.setdefault('height', self.height)
        ds_input.setdefault('srid', self.srs.srid)
        ds_input.setdefault('origin', self.origin)
        ds_input.setdefault('scale', self.scale)
        ds_input.setdefault('skew', self.skew)
        # Get the driver, name, and datatype of the target raster
        ds_input.setdefault('driver', self.driver.name)

        if 'name' not in ds_input:
            ds_input['name'] = self.name + '_copy.' + self.driver.name

        if 'datatype' not in ds_input:
            ds_input['datatype'] = self.bands[0].datatype()

        # Instantiate raster bands filled with nodata values.
        ds_input['bands'] = [{'nodata_value': bnd.nodata_value} for bnd in self.bands]

        # Create target raster
        target = GDALRaster(ds_input, write=True)

        # Select resampling algorithm
        algorithm = GDAL_RESAMPLE_ALGORITHMS[resampling]

        # Reproject image
        capi.reproject_image(
            self._ptr, self.srs.wkt.encode(),
            target._ptr, target.srs.wkt.encode(),
            algorithm, 0.0, max_error,
            c_void_p(), c_void_p(), c_void_p()
        )

        # Make sure all data is written to file
        target._flush()

        return target

    def clone(self, name=None):
        """Return a clone of this GDALRaster."""
        if name:
            clone_name = name
        elif self.driver.name != 'MEM':
            clone_name = self.name + '_copy.' + self.driver.name
        else:
            clone_name = os.path.join(VSI_MEM_FILESYSTEM_BASE_PATH, str(uuid.uuid4()))
        return GDALRaster(
            capi.copy_ds(
                self.driver._ptr,
                force_bytes(clone_name),
                self._ptr,
                c_int(),
                c_char_p(),
                c_void_p(),
                c_void_p(),
            ),
            write=self._write,
        )

    def transform(self, srs, driver=None, name=None, resampling='NearestNeighbour',
                  max_error=0.0):
        """
        Return a copy of this raster reprojected into the given spatial
        reference system.
        """
        # Convert the resampling algorithm name into an algorithm id
        algorithm = GDAL_RESAMPLE_ALGORITHMS[resampling]

        if isinstance(srs, SpatialReference):
            target_srs = srs
        elif isinstance(srs, (int, str)):
            target_srs = SpatialReference(srs)
        else:
            raise TypeError(
                'Transform only accepts SpatialReference, string, and integer '
                'objects.'
) if target_srs.srid == self.srid and (not driver or driver == self.driver.name): return self.clone(name) # Create warped virtual dataset in the target reference system target = capi.auto_create_warped_vrt( self._ptr, self.srs.wkt.encode(), target_srs.wkt.encode(), algorithm, max_error, c_void_p() ) target = GDALRaster(target) # Construct the target warp dictionary from the virtual raster data = { 'srid': target_srs.srid, 'width': target.width, 'height': target.height, 'origin': [target.origin.x, target.origin.y], 'scale': [target.scale.x, target.scale.y], 'skew': [target.skew.x, target.skew.y], } # Set the driver and filepath if provided if driver: data['driver'] = driver if name: data['name'] = name # Warp the raster into new srid return self.warp(data, resampling=resampling, max_error=max_error) @property def info(self): """ Return information about this raster in a string format equivalent to the output of the gdalinfo command line utility. """ return capi.get_ds_info(self.ptr, None).decode()
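# --- Editor's usage sketch for the dict-based constructor above ---
# Builds a tiny in-memory raster, then reprojects it; assumes GDAL is
# available. Band values, SRIDs, and geotransform numbers are illustrative.
#
#   rst = GDALRaster({
#       'srid': 4326,
#       'width': 2, 'height': 2,
#       'datatype': 1,  # GDT_Byte
#       'origin': [-95.0, 29.0],
#       'scale': [1.0, -1.0],
#       'bands': [{'data': [0, 1, 2, 3], 'nodata_value': 255}],
#   })
#   rst.bands[0].data()          # pixel values of the first band
#   warped = rst.transform(3857) # reprojected copy, produced via warp()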
5aac7806849ccd4472bf5b1af1aebab3936a5e388f3c9e1043ed58d0a4d34a3e
""" GDAL - Constant definitions """ from ctypes import ( c_double, c_float, c_int16, c_int32, c_ubyte, c_uint16, c_uint32, ) # See https://www.gdal.org/gdal_8h.html#a22e22ce0a55036a96f652765793fb7a4 GDAL_PIXEL_TYPES = { 0: 'GDT_Unknown', # Unknown or unspecified type 1: 'GDT_Byte', # Eight bit unsigned integer 2: 'GDT_UInt16', # Sixteen bit unsigned integer 3: 'GDT_Int16', # Sixteen bit signed integer 4: 'GDT_UInt32', # Thirty-two bit unsigned integer 5: 'GDT_Int32', # Thirty-two bit signed integer 6: 'GDT_Float32', # Thirty-two bit floating point 7: 'GDT_Float64', # Sixty-four bit floating point 8: 'GDT_CInt16', # Complex Int16 9: 'GDT_CInt32', # Complex Int32 10: 'GDT_CFloat32', # Complex Float32 11: 'GDT_CFloat64', # Complex Float64 } # A list of gdal datatypes that are integers. GDAL_INTEGER_TYPES = [1, 2, 3, 4, 5] # Lookup values to convert GDAL pixel type indices into ctypes objects. # The GDAL band-io works with ctypes arrays to hold data to be written # or to hold the space for data to be read into. The lookup below helps # selecting the right ctypes object for a given gdal pixel type. GDAL_TO_CTYPES = [ None, c_ubyte, c_uint16, c_int16, c_uint32, c_int32, c_float, c_double, None, None, None, None ] # List of resampling algorithms that can be used to warp a GDALRaster. GDAL_RESAMPLE_ALGORITHMS = { 'NearestNeighbour': 0, 'Bilinear': 1, 'Cubic': 2, 'CubicSpline': 3, 'Lanczos': 4, 'Average': 5, 'Mode': 6, } # See https://www.gdal.org/gdal_8h.html#ace76452d94514561fffa8ea1d2a5968c GDAL_COLOR_TYPES = { 0: 'GCI_Undefined', # Undefined, default value, i.e. not known 1: 'GCI_GrayIndex', # Grayscale 2: 'GCI_PaletteIndex', # Paletted 3: 'GCI_RedBand', # Red band of RGBA image 4: 'GCI_GreenBand', # Green band of RGBA image 5: 'GCI_BlueBand', # Blue band of RGBA image 6: 'GCI_AlphaBand', # Alpha (0=transparent, 255=opaque) 7: 'GCI_HueBand', # Hue band of HLS image 8: 'GCI_SaturationBand', # Saturation band of HLS image 9: 'GCI_LightnessBand', # Lightness band of HLS image 10: 'GCI_CyanBand', # Cyan band of CMYK image 11: 'GCI_MagentaBand', # Magenta band of CMYK image 12: 'GCI_YellowBand', # Yellow band of CMYK image 13: 'GCI_BlackBand', # Black band of CMLY image 14: 'GCI_YCbCr_YBand', # Y Luminance 15: 'GCI_YCbCr_CbBand', # Cb Chroma 16: 'GCI_YCbCr_CrBand', # Cr Chroma, also GCI_Max } # GDAL virtual filesystems prefix. VSI_FILESYSTEM_PREFIX = '/vsi' # Fixed base path for buffer-based GDAL in-memory files. VSI_MEM_FILESYSTEM_BASE_PATH = '/vsimem/' # Should the memory file system take ownership of the buffer, freeing it when # the file is deleted? (No, GDALRaster.__del__() will delete the buffer.) VSI_TAKE_BUFFER_OWNERSHIP = False # Should a VSI file be removed when retrieving its buffer? VSI_DELETE_BUFFER_ON_READ = False
ad7f4899b54f8800cb5d8bc10a7ee4228461a9491270a763c00c5ee8e652e059
""" Tests for django.core.servers. """ import errno import os import socket import threading from http.client import HTTPConnection from urllib.error import HTTPError from urllib.parse import urlencode from urllib.request import urlopen from django.conf import settings from django.core.servers.basehttp import ThreadedWSGIServer, WSGIServer from django.db import DEFAULT_DB_ALIAS, connections from django.test import LiveServerTestCase, override_settings from django.test.testcases import LiveServerThread, QuietWSGIRequestHandler from .models import Person TEST_ROOT = os.path.dirname(__file__) TEST_SETTINGS = { 'MEDIA_URL': 'media/', 'MEDIA_ROOT': os.path.join(TEST_ROOT, 'media'), 'STATIC_URL': 'static/', 'STATIC_ROOT': os.path.join(TEST_ROOT, 'static'), } @override_settings(ROOT_URLCONF='servers.urls', **TEST_SETTINGS) class LiveServerBase(LiveServerTestCase): available_apps = [ 'servers', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', ] fixtures = ['testdata.json'] def urlopen(self, url): return urlopen(self.live_server_url + url) class CloseConnectionTestServer(ThreadedWSGIServer): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) # This event is set right after the first time a request closes its # database connections. self._connections_closed = threading.Event() def _close_connections(self): super()._close_connections() self._connections_closed.set() class CloseConnectionTestLiveServerThread(LiveServerThread): server_class = CloseConnectionTestServer def _create_server(self, connections_override=None): return super()._create_server(connections_override=self.connections_override) class LiveServerTestCloseConnectionTest(LiveServerBase): server_thread_class = CloseConnectionTestLiveServerThread @classmethod def _make_connections_override(cls): conn = connections[DEFAULT_DB_ALIAS] cls.conn = conn cls.old_conn_max_age = conn.settings_dict['CONN_MAX_AGE'] # Set the connection's CONN_MAX_AGE to None to simulate the # CONN_MAX_AGE setting being set to None on the server. This prevents # Django from closing the connection and allows testing that # ThreadedWSGIServer closes connections. conn.settings_dict['CONN_MAX_AGE'] = None # Pass a database connection through to the server to check it is being # closed by ThreadedWSGIServer. return {DEFAULT_DB_ALIAS: conn} @classmethod def tearDownConnectionTest(cls): cls.conn.settings_dict['CONN_MAX_AGE'] = cls.old_conn_max_age @classmethod def tearDownClass(cls): cls.tearDownConnectionTest() super().tearDownClass() def test_closes_connections(self): # The server's request thread sets this event after closing # its database connections. closed_event = self.server_thread.httpd._connections_closed conn = self.conn # Open a connection to the database. conn.connect() self.assertIsNotNone(conn.connection) with self.urlopen('/model_view/') as f: # The server can access the database. self.assertEqual(f.read().splitlines(), [b'jane', b'robert']) # Wait for the server's request thread to close the connection. # A timeout of 0.1 seconds should be more than enough. If the wait # times out, the assertion after should fail. 
        closed_event.wait(timeout=0.1)
        self.assertIsNone(conn.connection)


class FailingLiveServerThread(LiveServerThread):
    def _create_server(self):
        raise RuntimeError('Error creating server.')


class LiveServerTestCaseSetupTest(LiveServerBase):
    server_thread_class = FailingLiveServerThread

    @classmethod
    def check_allowed_hosts(cls, expected):
        if settings.ALLOWED_HOSTS != expected:
            raise RuntimeError(f'{settings.ALLOWED_HOSTS} != {expected}')

    @classmethod
    def setUpClass(cls):
        cls.check_allowed_hosts(['testserver'])
        try:
            super().setUpClass()
        except RuntimeError:
            # LiveServerTestCase's change to ALLOWED_HOSTS should be reverted.
            cls.check_allowed_hosts(['testserver'])
        else:
            raise RuntimeError('Server did not fail.')
        cls.set_up_called = True

    @classmethod
    def tearDownClass(cls):
        # Make tearDownClass() a no-op because setUpClass() was already cleaned
        # up, and because the error inside setUpClass() was handled, which will
        # cause tearDownClass() to be called when it normally wouldn't.
        pass

    def test_set_up_class(self):
        self.assertIs(self.set_up_called, True)


class LiveServerAddress(LiveServerBase):

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        # put it in a list to prevent descriptor lookups in test
        cls.live_server_url_test = [cls.live_server_url]

    def test_live_server_url_is_class_property(self):
        self.assertIsInstance(self.live_server_url_test[0], str)
        self.assertEqual(self.live_server_url_test[0], self.live_server_url)


class LiveServerSingleThread(LiveServerThread):
    def _create_server(self):
        return WSGIServer((self.host, self.port), QuietWSGIRequestHandler, allow_reuse_address=False)


class SingleThreadLiveServerTestCase(LiveServerTestCase):
    server_thread_class = LiveServerSingleThread


class LiveServerViews(LiveServerBase):
    def test_protocol(self):
        """Launched server serves with HTTP 1.1."""
        with self.urlopen('/example_view/') as f:
            self.assertEqual(f.version, 11)

    def test_closes_connection_without_content_length(self):
        """
        An HTTP/1.1 server is supposed to support keep-alive. Since our
        development server is rather simple, we support it only in cases where
        we can detect a content length from the response. This should be
        doable for all simple views and streaming responses where an iterable
        with length of one is passed. The latter follows as a result of
        `set_content_length` from
        https://github.com/python/cpython/blob/master/Lib/wsgiref/handlers.py.

        If we cannot detect a content length, we explicitly set the
        `Connection` header to `close` to notify the client that we do not
        actually support it.
        """
        conn = HTTPConnection(LiveServerViews.server_thread.host, LiveServerViews.server_thread.port, timeout=1)
        try:
            conn.request('GET', '/streaming_example_view/', headers={'Connection': 'keep-alive'})
            response = conn.getresponse()
            self.assertTrue(response.will_close)
            self.assertEqual(response.read(), b'Iamastream')
            self.assertEqual(response.status, 200)
            self.assertEqual(response.getheader('Connection'), 'close')

            conn.request('GET', '/streaming_example_view/', headers={'Connection': 'close'})
            response = conn.getresponse()
            self.assertTrue(response.will_close)
            self.assertEqual(response.read(), b'Iamastream')
            self.assertEqual(response.status, 200)
            self.assertEqual(response.getheader('Connection'), 'close')
        finally:
            conn.close()

    def test_keep_alive_on_connection_with_content_length(self):
        """
        See `test_closes_connection_without_content_length` for details. This
        is a follow-up test, which ensures that we do not close the connection
        if not needed, hence allowing us to take advantage of keep-alive.
""" conn = HTTPConnection(LiveServerViews.server_thread.host, LiveServerViews.server_thread.port) try: conn.request('GET', '/example_view/', headers={"Connection": "keep-alive"}) response = conn.getresponse() self.assertFalse(response.will_close) self.assertEqual(response.read(), b'example view') self.assertEqual(response.status, 200) self.assertIsNone(response.getheader('Connection')) conn.request('GET', '/example_view/', headers={"Connection": "close"}) response = conn.getresponse() self.assertFalse(response.will_close) self.assertEqual(response.read(), b'example view') self.assertEqual(response.status, 200) self.assertIsNone(response.getheader('Connection')) finally: conn.close() def test_keep_alive_connection_clears_previous_request_data(self): conn = HTTPConnection(LiveServerViews.server_thread.host, LiveServerViews.server_thread.port) try: conn.request('POST', '/method_view/', b'{}', headers={"Connection": "keep-alive"}) response = conn.getresponse() self.assertFalse(response.will_close) self.assertEqual(response.status, 200) self.assertEqual(response.read(), b'POST') conn.request('POST', '/method_view/', b'{}', headers={"Connection": "close"}) response = conn.getresponse() self.assertFalse(response.will_close) self.assertEqual(response.status, 200) self.assertEqual(response.read(), b'POST') finally: conn.close() def test_404(self): with self.assertRaises(HTTPError) as err: self.urlopen('/') err.exception.close() self.assertEqual(err.exception.code, 404, 'Expected 404 response') def test_view(self): with self.urlopen('/example_view/') as f: self.assertEqual(f.read(), b'example view') def test_static_files(self): with self.urlopen('/static/example_static_file.txt') as f: self.assertEqual(f.read().rstrip(b'\r\n'), b'example static file') def test_no_collectstatic_emulation(self): """ LiveServerTestCase reports a 404 status code when HTTP client tries to access a static file that isn't explicitly put under STATIC_ROOT. """ with self.assertRaises(HTTPError) as err: self.urlopen('/static/another_app/another_app_static_file.txt') err.exception.close() self.assertEqual(err.exception.code, 404, 'Expected 404 response') def test_media_files(self): with self.urlopen('/media/example_media_file.txt') as f: self.assertEqual(f.read().rstrip(b'\r\n'), b'example media file') def test_environ(self): with self.urlopen('/environ_view/?%s' % urlencode({'q': 'тест'})) as f: self.assertIn(b"QUERY_STRING: 'q=%D1%82%D0%B5%D1%81%D1%82'", f.read()) @override_settings(ROOT_URLCONF='servers.urls') class SingleTreadLiveServerViews(SingleThreadLiveServerTestCase): available_apps = ['servers'] def test_closes_connection_with_content_length(self): """ Contrast to LiveServerViews.test_keep_alive_on_connection_with_content_length(). Persistent connections require threading server. """ conn = HTTPConnection( SingleTreadLiveServerViews.server_thread.host, SingleTreadLiveServerViews.server_thread.port, timeout=1, ) try: conn.request('GET', '/example_view/', headers={'Connection': 'keep-alive'}) response = conn.getresponse() self.assertTrue(response.will_close) self.assertEqual(response.read(), b'example view') self.assertEqual(response.status, 200) self.assertEqual(response.getheader('Connection'), 'close') finally: conn.close() class LiveServerDatabase(LiveServerBase): def test_fixtures_loaded(self): """ Fixtures are properly loaded and visible to the live server thread. 
""" with self.urlopen('/model_view/') as f: self.assertEqual(f.read().splitlines(), [b'jane', b'robert']) def test_database_writes(self): """ Data written to the database by a view can be read. """ with self.urlopen('/create_model_instance/'): pass self.assertQuerysetEqual( Person.objects.all().order_by('pk'), ['jane', 'robert', 'emily'], lambda b: b.name ) class LiveServerPort(LiveServerBase): def test_port_bind(self): """ Each LiveServerTestCase binds to a unique port or fails to start a server thread when run concurrently (#26011). """ TestCase = type("TestCase", (LiveServerBase,), {}) try: TestCase.setUpClass() except OSError as e: if e.errno == errno.EADDRINUSE: # We're out of ports, LiveServerTestCase correctly fails with # an OSError. return # Unexpected error. raise try: # We've acquired a port, ensure our server threads acquired # different addresses. self.assertNotEqual( self.live_server_url, TestCase.live_server_url, "Acquired duplicate server addresses for server threads: %s" % self.live_server_url ) finally: TestCase.tearDownClass() def test_specified_port_bind(self): """LiveServerTestCase.port customizes the server's port.""" TestCase = type('TestCase', (LiveServerBase,), {}) # Find an open port and tell TestCase to use it. s = socket.socket() s.bind(('', 0)) TestCase.port = s.getsockname()[1] s.close() TestCase.setUpClass() try: self.assertEqual( TestCase.port, TestCase.server_thread.port, 'Did not use specified port for LiveServerTestCase thread: %s' % TestCase.port ) finally: TestCase.tearDownClass() class LiveServerThreadedTests(LiveServerBase): """If LiveServerTestCase isn't threaded, these tests will hang.""" def test_view_calls_subview(self): url = '/subview_calling_view/?%s' % urlencode({'url': self.live_server_url}) with self.urlopen(url) as f: self.assertEqual(f.read(), b'subview calling view: subview') def test_check_model_instance_from_subview(self): url = '/check_model_instance_from_subview/?%s' % urlencode({ 'url': self.live_server_url, }) with self.urlopen(url) as f: self.assertIn(b'emily', f.read())
479a2eb6866e9bd70677f3a5cdba324f9d59c23b07414f5c78bda93f73d20c23
from unittest import mock from django.apps.registry import apps as global_apps from django.db import DatabaseError, connection, migrations, models from django.db.migrations.exceptions import InvalidMigrationPlan from django.db.migrations.executor import MigrationExecutor from django.db.migrations.graph import MigrationGraph from django.db.migrations.recorder import MigrationRecorder from django.db.migrations.state import ProjectState from django.test import ( SimpleTestCase, modify_settings, override_settings, skipUnlessDBFeature, ) from .test_base import MigrationTestBase @modify_settings(INSTALLED_APPS={'append': 'migrations2'}) class ExecutorTests(MigrationTestBase): """ Tests the migration executor (full end-to-end running). Bear in mind that if these are failing you should fix the other test failures first, as they may be propagating into here. """ available_apps = ["migrations", "migrations2", "django.contrib.auth", "django.contrib.contenttypes"] @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_run(self): """ Tests running a simple set of migrations. """ executor = MigrationExecutor(connection) # Let's look at the plan first and make sure it's up to scratch plan = executor.migration_plan([("migrations", "0002_second")]) self.assertEqual( plan, [ (executor.loader.graph.nodes["migrations", "0001_initial"], False), (executor.loader.graph.nodes["migrations", "0002_second"], False), ], ) # Were the tables there before? self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_book") # Alright, let's try running it executor.migrate([("migrations", "0002_second")]) # Are the tables there now? self.assertTableExists("migrations_author") self.assertTableExists("migrations_book") # Rebuild the graph to reflect the new DB state executor.loader.build_graph() # Alright, let's undo what we did plan = executor.migration_plan([("migrations", None)]) self.assertEqual( plan, [ (executor.loader.graph.nodes["migrations", "0002_second"], True), (executor.loader.graph.nodes["migrations", "0001_initial"], True), ], ) executor.migrate([("migrations", None)]) # Are the tables gone? self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_book") @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}) def test_run_with_squashed(self): """ Tests running a squashed migration from zero (should ignore what it replaces) """ executor = MigrationExecutor(connection) # Check our leaf node is the squashed one leaves = [key for key in executor.loader.graph.leaf_nodes() if key[0] == "migrations"] self.assertEqual(leaves, [("migrations", "0001_squashed_0002")]) # Check the plan plan = executor.migration_plan([("migrations", "0001_squashed_0002")]) self.assertEqual( plan, [ (executor.loader.graph.nodes["migrations", "0001_squashed_0002"], False), ], ) # Were the tables there before? self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_book") # Alright, let's try running it executor.migrate([("migrations", "0001_squashed_0002")]) # Are the tables there now? self.assertTableExists("migrations_author") self.assertTableExists("migrations_book") # Rebuild the graph to reflect the new DB state executor.loader.build_graph() # Alright, let's undo what we did. Should also just use squashed. 
plan = executor.migration_plan([("migrations", None)]) self.assertEqual( plan, [ (executor.loader.graph.nodes["migrations", "0001_squashed_0002"], True), ], ) executor.migrate([("migrations", None)]) # Are the tables gone? self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_book") @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_non_atomic"}) def test_non_atomic_migration(self): """ Applying a non-atomic migration works as expected. """ executor = MigrationExecutor(connection) with self.assertRaisesMessage(RuntimeError, "Abort migration"): executor.migrate([("migrations", "0001_initial")]) self.assertTableExists("migrations_publisher") migrations_apps = executor.loader.project_state(("migrations", "0001_initial")).apps Publisher = migrations_apps.get_model("migrations", "Publisher") self.assertTrue(Publisher.objects.exists()) self.assertTableNotExists("migrations_book") @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_atomic_operation"}) def test_atomic_operation_in_non_atomic_migration(self): """ An atomic operation is properly rolled back inside a non-atomic migration. """ executor = MigrationExecutor(connection) with self.assertRaisesMessage(RuntimeError, "Abort migration"): executor.migrate([("migrations", "0001_initial")]) migrations_apps = executor.loader.project_state(("migrations", "0001_initial")).apps Editor = migrations_apps.get_model("migrations", "Editor") self.assertFalse(Editor.objects.exists()) # Record previous migration as successful. executor.migrate([("migrations", "0001_initial")], fake=True) # Rebuild the graph to reflect the new DB state. executor.loader.build_graph() # Migrating backwards is also atomic. with self.assertRaisesMessage(RuntimeError, "Abort migration"): executor.migrate([("migrations", None)]) self.assertFalse(Editor.objects.exists()) @override_settings(MIGRATION_MODULES={ "migrations": "migrations.test_migrations", "migrations2": "migrations2.test_migrations_2", }) def test_empty_plan(self): """ Re-planning a full migration of a fully-migrated set doesn't perform spurious unmigrations and remigrations. There was previously a bug where the executor just always performed the backwards plan for applied migrations - which even for the most recent migration in an app, might include other, dependent apps, and these were being unmigrated. """ # Make the initial plan, check it executor = MigrationExecutor(connection) plan = executor.migration_plan([ ("migrations", "0002_second"), ("migrations2", "0001_initial"), ]) self.assertEqual( plan, [ (executor.loader.graph.nodes["migrations", "0001_initial"], False), (executor.loader.graph.nodes["migrations", "0002_second"], False), (executor.loader.graph.nodes["migrations2", "0001_initial"], False), ], ) # Fake-apply all migrations executor.migrate([ ("migrations", "0002_second"), ("migrations2", "0001_initial") ], fake=True) # Rebuild the graph to reflect the new DB state executor.loader.build_graph() # Now plan a second time and make sure it's empty plan = executor.migration_plan([ ("migrations", "0002_second"), ("migrations2", "0001_initial"), ]) self.assertEqual(plan, []) # The resulting state should include applied migrations. 
state = executor.migrate([ ("migrations", "0002_second"), ("migrations2", "0001_initial"), ]) self.assertIn(('migrations', 'book'), state.models) self.assertIn(('migrations', 'author'), state.models) self.assertIn(('migrations2', 'otherauthor'), state.models) # Erase all the fake records executor.recorder.record_unapplied("migrations2", "0001_initial") executor.recorder.record_unapplied("migrations", "0002_second") executor.recorder.record_unapplied("migrations", "0001_initial") @override_settings(MIGRATION_MODULES={ "migrations": "migrations.test_migrations", "migrations2": "migrations2.test_migrations_2_no_deps", }) def test_mixed_plan_not_supported(self): """ Although the MigrationExecutor interfaces allows for mixed migration plans (combined forwards and backwards migrations) this is not supported. """ # Prepare for mixed plan executor = MigrationExecutor(connection) plan = executor.migration_plan([("migrations", "0002_second")]) self.assertEqual( plan, [ (executor.loader.graph.nodes["migrations", "0001_initial"], False), (executor.loader.graph.nodes["migrations", "0002_second"], False), ], ) executor.migrate(None, plan) # Rebuild the graph to reflect the new DB state executor.loader.build_graph() self.assertIn(('migrations', '0001_initial'), executor.loader.applied_migrations) self.assertIn(('migrations', '0002_second'), executor.loader.applied_migrations) self.assertNotIn(('migrations2', '0001_initial'), executor.loader.applied_migrations) # Generate mixed plan plan = executor.migration_plan([ ("migrations", None), ("migrations2", "0001_initial"), ]) msg = ( 'Migration plans with both forwards and backwards migrations are ' 'not supported. Please split your migration process into separate ' 'plans of only forwards OR backwards migrations.' ) with self.assertRaisesMessage(InvalidMigrationPlan, msg) as cm: executor.migrate(None, plan) self.assertEqual( cm.exception.args[1], [ (executor.loader.graph.nodes["migrations", "0002_second"], True), (executor.loader.graph.nodes["migrations", "0001_initial"], True), (executor.loader.graph.nodes["migrations2", "0001_initial"], False), ], ) # Rebuild the graph to reflect the new DB state executor.loader.build_graph() executor.migrate([ ("migrations", None), ("migrations2", None), ]) # Are the tables gone? self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_book") self.assertTableNotExists("migrations2_otherauthor") @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_soft_apply(self): """ Tests detection of initial migrations already having been applied. """ state = {"faked": None} def fake_storer(phase, migration=None, fake=None): state["faked"] = fake executor = MigrationExecutor(connection, progress_callback=fake_storer) # Were the tables there before? self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_tribble") # Run it normally self.assertEqual( executor.migration_plan([("migrations", "0001_initial")]), [ (executor.loader.graph.nodes["migrations", "0001_initial"], False), ], ) executor.migrate([("migrations", "0001_initial")]) # Are the tables there now? self.assertTableExists("migrations_author") self.assertTableExists("migrations_tribble") # We shouldn't have faked that one self.assertIs(state["faked"], False) # Rebuild the graph to reflect the new DB state executor.loader.build_graph() # Fake-reverse that executor.migrate([("migrations", None)], fake=True) # Are the tables still there? 
self.assertTableExists("migrations_author") self.assertTableExists("migrations_tribble") # Make sure that was faked self.assertIs(state["faked"], True) # Finally, migrate forwards; this should fake-apply our initial migration executor.loader.build_graph() self.assertEqual( executor.migration_plan([("migrations", "0001_initial")]), [ (executor.loader.graph.nodes["migrations", "0001_initial"], False), ], ) # Applying the migration should raise a database level error # because we haven't given the --fake-initial option with self.assertRaises(DatabaseError): executor.migrate([("migrations", "0001_initial")]) # Reset the faked state state = {"faked": None} # Allow faking of initial CreateModel operations executor.migrate([("migrations", "0001_initial")], fake_initial=True) self.assertIs(state["faked"], True) # And migrate back to clean up the database executor.loader.build_graph() executor.migrate([("migrations", None)]) self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_tribble") @override_settings( MIGRATION_MODULES={ "migrations": "migrations.test_migrations_custom_user", "django.contrib.auth": "django.contrib.auth.migrations", }, AUTH_USER_MODEL="migrations.Author", ) def test_custom_user(self): """ Regression test for #22325 - references to a custom user model defined in the same app are not resolved correctly. """ executor = MigrationExecutor(connection) self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_tribble") # Migrate forwards executor.migrate([("migrations", "0001_initial")]) self.assertTableExists("migrations_author") self.assertTableExists("migrations_tribble") # Make sure the soft-application detection works (#23093) # Change table_names to not return auth_user during this as # it wouldn't be there in a normal run, and ensure migrations.Author # exists in the global app registry temporarily. old_table_names = connection.introspection.table_names connection.introspection.table_names = lambda c: [x for x in old_table_names(c) if x != "auth_user"] migrations_apps = executor.loader.project_state(("migrations", "0001_initial")).apps global_apps.get_app_config("migrations").models["author"] = migrations_apps.get_model("migrations", "author") try: migration = executor.loader.get_migration("auth", "0001_initial") self.assertIs(executor.detect_soft_applied(None, migration)[0], True) finally: connection.introspection.table_names = old_table_names del global_apps.get_app_config("migrations").models["author"] # And migrate back to clean up the database executor.loader.build_graph() executor.migrate([("migrations", None)]) self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_tribble") @override_settings( MIGRATION_MODULES={ "migrations": "migrations.test_add_many_to_many_field_initial", }, ) def test_detect_soft_applied_add_field_manytomanyfield(self): """ executor.detect_soft_applied() detects ManyToManyField tables from an AddField operation. This checks the case of AddField in a migration with other operations (0001) and the case of AddField in its own migration (0002). """ tables = [ # from 0001 "migrations_project", "migrations_task", "migrations_project_tasks", # from 0002 "migrations_task_projects", ] executor = MigrationExecutor(connection) # Create the tables for 0001 but make it look like the migration hasn't # been applied. 
executor.migrate([("migrations", "0001_initial")]) executor.migrate([("migrations", None)], fake=True) for table in tables[:3]: self.assertTableExists(table) # Table detection sees 0001 is applied but not 0002. migration = executor.loader.get_migration("migrations", "0001_initial") self.assertIs(executor.detect_soft_applied(None, migration)[0], True) migration = executor.loader.get_migration("migrations", "0002_initial") self.assertIs(executor.detect_soft_applied(None, migration)[0], False) # Create the tables for both migrations but make it look like neither # has been applied. executor.loader.build_graph() executor.migrate([("migrations", "0001_initial")], fake=True) executor.migrate([("migrations", "0002_initial")]) executor.loader.build_graph() executor.migrate([("migrations", None)], fake=True) # Table detection sees 0002 is applied. migration = executor.loader.get_migration("migrations", "0002_initial") self.assertIs(executor.detect_soft_applied(None, migration)[0], True) # Leave the tables for 0001 except the many-to-many table. That missing # table should cause detect_soft_applied() to return False. with connection.schema_editor() as editor: for table in tables[2:]: editor.execute(editor.sql_delete_table % {"table": table}) migration = executor.loader.get_migration("migrations", "0001_initial") self.assertIs(executor.detect_soft_applied(None, migration)[0], False) # Cleanup by removing the remaining tables. with connection.schema_editor() as editor: for table in tables[:2]: editor.execute(editor.sql_delete_table % {"table": table}) for table in tables: self.assertTableNotExists(table) @override_settings( INSTALLED_APPS=[ "migrations.migrations_test_apps.lookuperror_a", "migrations.migrations_test_apps.lookuperror_b", "migrations.migrations_test_apps.lookuperror_c" ] ) def test_unrelated_model_lookups_forwards(self): """ #24123 - All models of apps already applied which are unrelated to the first app being applied are part of the initial model state. """ try: executor = MigrationExecutor(connection) self.assertTableNotExists("lookuperror_a_a1") self.assertTableNotExists("lookuperror_b_b1") self.assertTableNotExists("lookuperror_c_c1") executor.migrate([("lookuperror_b", "0003_b3")]) self.assertTableExists("lookuperror_b_b3") # Rebuild the graph to reflect the new DB state executor.loader.build_graph() # Migrate forwards -- This led to a lookup LookupErrors because # lookuperror_b.B2 is already applied executor.migrate([ ("lookuperror_a", "0004_a4"), ("lookuperror_c", "0003_c3"), ]) self.assertTableExists("lookuperror_a_a4") self.assertTableExists("lookuperror_c_c3") # Rebuild the graph to reflect the new DB state executor.loader.build_graph() finally: # Cleanup executor.migrate([ ("lookuperror_a", None), ("lookuperror_b", None), ("lookuperror_c", None), ]) self.assertTableNotExists("lookuperror_a_a1") self.assertTableNotExists("lookuperror_b_b1") self.assertTableNotExists("lookuperror_c_c1") @override_settings( INSTALLED_APPS=[ "migrations.migrations_test_apps.lookuperror_a", "migrations.migrations_test_apps.lookuperror_b", "migrations.migrations_test_apps.lookuperror_c" ] ) def test_unrelated_model_lookups_backwards(self): """ #24123 - All models of apps being unapplied which are unrelated to the first app being unapplied are part of the initial model state. 
""" try: executor = MigrationExecutor(connection) self.assertTableNotExists("lookuperror_a_a1") self.assertTableNotExists("lookuperror_b_b1") self.assertTableNotExists("lookuperror_c_c1") executor.migrate([ ("lookuperror_a", "0004_a4"), ("lookuperror_b", "0003_b3"), ("lookuperror_c", "0003_c3"), ]) self.assertTableExists("lookuperror_b_b3") self.assertTableExists("lookuperror_a_a4") self.assertTableExists("lookuperror_c_c3") # Rebuild the graph to reflect the new DB state executor.loader.build_graph() # Migrate backwards -- This led to a lookup LookupErrors because # lookuperror_b.B2 is not in the initial state (unrelated to app c) executor.migrate([("lookuperror_a", None)]) # Rebuild the graph to reflect the new DB state executor.loader.build_graph() finally: # Cleanup executor.migrate([ ("lookuperror_b", None), ("lookuperror_c", None) ]) self.assertTableNotExists("lookuperror_a_a1") self.assertTableNotExists("lookuperror_b_b1") self.assertTableNotExists("lookuperror_c_c1") @override_settings( INSTALLED_APPS=[ 'migrations.migrations_test_apps.mutate_state_a', 'migrations.migrations_test_apps.mutate_state_b', ] ) def test_unrelated_applied_migrations_mutate_state(self): """ #26647 - Unrelated applied migrations should be part of the final state in both directions. """ executor = MigrationExecutor(connection) executor.migrate([ ('mutate_state_b', '0002_add_field'), ]) # Migrate forward. executor.loader.build_graph() state = executor.migrate([ ('mutate_state_a', '0001_initial'), ]) self.assertIn('added', state.models['mutate_state_b', 'b'].fields) executor.loader.build_graph() # Migrate backward. state = executor.migrate([ ('mutate_state_a', None), ]) self.assertIn('added', state.models['mutate_state_b', 'b'].fields) executor.migrate([ ('mutate_state_b', None), ]) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_process_callback(self): """ #24129 - Tests callback process """ call_args_list = [] def callback(*args): call_args_list.append(args) executor = MigrationExecutor(connection, progress_callback=callback) # Were the tables there before? 
self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_tribble") executor.migrate([ ("migrations", "0001_initial"), ("migrations", "0002_second"), ]) # Rebuild the graph to reflect the new DB state executor.loader.build_graph() executor.migrate([ ("migrations", None), ("migrations", None), ]) self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_tribble") migrations = executor.loader.graph.nodes expected = [ ("render_start",), ("render_success",), ("apply_start", migrations['migrations', '0001_initial'], False), ("apply_success", migrations['migrations', '0001_initial'], False), ("apply_start", migrations['migrations', '0002_second'], False), ("apply_success", migrations['migrations', '0002_second'], False), ("render_start",), ("render_success",), ("unapply_start", migrations['migrations', '0002_second'], False), ("unapply_success", migrations['migrations', '0002_second'], False), ("unapply_start", migrations['migrations', '0001_initial'], False), ("unapply_success", migrations['migrations', '0001_initial'], False), ] self.assertEqual(call_args_list, expected) @override_settings( INSTALLED_APPS=[ "migrations.migrations_test_apps.alter_fk.author_app", "migrations.migrations_test_apps.alter_fk.book_app", ] ) def test_alter_id_type_with_fk(self): try: executor = MigrationExecutor(connection) self.assertTableNotExists("author_app_author") self.assertTableNotExists("book_app_book") # Apply initial migrations executor.migrate([ ("author_app", "0001_initial"), ("book_app", "0001_initial"), ]) self.assertTableExists("author_app_author") self.assertTableExists("book_app_book") # Rebuild the graph to reflect the new DB state executor.loader.build_graph() # Apply PK type alteration executor.migrate([("author_app", "0002_alter_id")]) # Rebuild the graph to reflect the new DB state executor.loader.build_graph() finally: # We can't simply unapply the migrations here because there is no # implicit cast from VARCHAR to INT on the database level. with connection.schema_editor() as editor: editor.execute(editor.sql_delete_table % {"table": "book_app_book"}) editor.execute(editor.sql_delete_table % {"table": "author_app_author"}) self.assertTableNotExists("author_app_author") self.assertTableNotExists("book_app_book") executor.migrate([("author_app", None)], fake=True) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}) def test_apply_all_replaced_marks_replacement_as_applied(self): """ Applying all replaced migrations marks replacement as applied (#24628). """ recorder = MigrationRecorder(connection) # Place the database in a state where the replaced migrations are # partially applied: 0001 is applied, 0002 is not. recorder.record_applied("migrations", "0001_initial") executor = MigrationExecutor(connection) # Use fake because we don't actually have the first migration # applied, so the second will fail. And there's no need to actually # create/modify tables here, we're just testing the # MigrationRecord, which works the same with or without fake. executor.migrate([("migrations", "0002_second")], fake=True) # Because we've now applied 0001 and 0002 both, their squashed # replacement should be marked as applied. 
self.assertIn( ("migrations", "0001_squashed_0002"), recorder.applied_migrations(), ) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}) def test_migrate_marks_replacement_applied_even_if_it_did_nothing(self): """ A new squash migration will be marked as applied even if all its replaced migrations were previously already applied (#24628). """ recorder = MigrationRecorder(connection) # Record all replaced migrations as applied recorder.record_applied("migrations", "0001_initial") recorder.record_applied("migrations", "0002_second") executor = MigrationExecutor(connection) executor.migrate([("migrations", "0001_squashed_0002")]) # Because 0001 and 0002 are both applied, even though this migrate run # didn't apply anything new, their squashed replacement should be # marked as applied. self.assertIn( ("migrations", "0001_squashed_0002"), recorder.applied_migrations(), ) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_squashed'}) def test_migrate_marks_replacement_unapplied(self): executor = MigrationExecutor(connection) executor.migrate([('migrations', '0001_squashed_0002')]) try: self.assertIn( ('migrations', '0001_squashed_0002'), executor.recorder.applied_migrations(), ) finally: executor.loader.build_graph() executor.migrate([('migrations', None)]) self.assertNotIn( ('migrations', '0001_squashed_0002'), executor.recorder.applied_migrations(), ) # When the feature is False, the operation and the record won't be # performed in a transaction and the test will systematically pass. @skipUnlessDBFeature('can_rollback_ddl') def test_migrations_applied_and_recorded_atomically(self): """Migrations are applied and recorded atomically.""" class Migration(migrations.Migration): operations = [ migrations.CreateModel('model', [ ('id', models.AutoField(primary_key=True)), ]), ] executor = MigrationExecutor(connection) with mock.patch('django.db.migrations.executor.MigrationExecutor.record_migration') as record_migration: record_migration.side_effect = RuntimeError('Recording migration failed.') with self.assertRaisesMessage(RuntimeError, 'Recording migration failed.'): executor.apply_migration( ProjectState(), Migration('0001_initial', 'record_migration'), ) executor.migrate([('migrations', '0001_initial')]) # The migration isn't recorded as applied since it failed. migration_recorder = MigrationRecorder(connection) self.assertIs( migration_recorder.migration_qs.filter( app='record_migration', name='0001_initial', ).exists(), False, ) self.assertTableNotExists('record_migration_model') def test_migrations_not_applied_on_deferred_sql_failure(self): """Migrations are not recorded if deferred SQL application fails.""" class DeferredSQL: def __str__(self): raise DatabaseError('Failed to apply deferred SQL') class Migration(migrations.Migration): atomic = False def apply(self, project_state, schema_editor, collect_sql=False): schema_editor.deferred_sql.append(DeferredSQL()) executor = MigrationExecutor(connection) with self.assertRaisesMessage(DatabaseError, 'Failed to apply deferred SQL'): executor.apply_migration( ProjectState(), Migration('0001_initial', 'deferred_sql'), ) # The migration isn't recorded as applied since it failed. 
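        # Background for the assertion below (informal sketch): deferred SQL
        # collected on a schema editor runs when the editor exits, e.g.
        #
        #   with connection.schema_editor() as editor:
        #       editor.deferred_sql.append('CREATE INDEX ...')  # on __exit__
        #
        # so a DeferredSQL whose __str__() raises propagates the error out of
        # apply_migration() before the migration can be recorded.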
migration_recorder = MigrationRecorder(connection) self.assertIs( migration_recorder.migration_qs.filter( app='deferred_sql', name='0001_initial', ).exists(), False, ) class FakeLoader: def __init__(self, graph, applied): self.graph = graph self.applied_migrations = applied class FakeMigration: """Really all we need is any object with a debug-useful repr.""" def __init__(self, name): self.name = name def __repr__(self): return 'M<%s>' % self.name class ExecutorUnitTests(SimpleTestCase): """(More) isolated unit tests for executor methods.""" def test_minimize_rollbacks(self): """ Minimize unnecessary rollbacks in connected apps. When you say "./manage.py migrate appA 0001", rather than migrating to just after appA-0001 in the linearized migration plan (which could roll back migrations in other apps that depend on appA 0001, but don't need to be rolled back since we're not rolling back appA 0001), we migrate to just before appA-0002. """ a1_impl = FakeMigration('a1') a1 = ('a', '1') a2_impl = FakeMigration('a2') a2 = ('a', '2') b1_impl = FakeMigration('b1') b1 = ('b', '1') graph = MigrationGraph() graph.add_node(a1, a1_impl) graph.add_node(a2, a2_impl) graph.add_node(b1, b1_impl) graph.add_dependency(None, b1, a1) graph.add_dependency(None, a2, a1) executor = MigrationExecutor(None) executor.loader = FakeLoader(graph, { a1: a1_impl, b1: b1_impl, a2: a2_impl, }) plan = executor.migration_plan({a1}) self.assertEqual(plan, [(a2_impl, True)]) def test_minimize_rollbacks_branchy(self): r""" Minimize rollbacks when target has multiple in-app children. a: 1 <---- 3 <--\ \ \- 2 <--- 4 \ \ b: \- 1 <--- 2 """ a1_impl = FakeMigration('a1') a1 = ('a', '1') a2_impl = FakeMigration('a2') a2 = ('a', '2') a3_impl = FakeMigration('a3') a3 = ('a', '3') a4_impl = FakeMigration('a4') a4 = ('a', '4') b1_impl = FakeMigration('b1') b1 = ('b', '1') b2_impl = FakeMigration('b2') b2 = ('b', '2') graph = MigrationGraph() graph.add_node(a1, a1_impl) graph.add_node(a2, a2_impl) graph.add_node(a3, a3_impl) graph.add_node(a4, a4_impl) graph.add_node(b1, b1_impl) graph.add_node(b2, b2_impl) graph.add_dependency(None, a2, a1) graph.add_dependency(None, a3, a1) graph.add_dependency(None, a4, a2) graph.add_dependency(None, a4, a3) graph.add_dependency(None, b2, b1) graph.add_dependency(None, b1, a1) graph.add_dependency(None, b2, a2) executor = MigrationExecutor(None) executor.loader = FakeLoader(graph, { a1: a1_impl, b1: b1_impl, a2: a2_impl, b2: b2_impl, a3: a3_impl, a4: a4_impl, }) plan = executor.migration_plan({a1}) should_be_rolled_back = [b2_impl, a4_impl, a2_impl, a3_impl] exp = [(m, True) for m in should_be_rolled_back] self.assertEqual(plan, exp) def test_backwards_nothing_to_do(self): r""" If the current state satisfies the given target, do nothing. a: 1 <--- 2 b: \- 1 c: \- 1 If a1 is applied already and a2 is not, and we're asked to migrate to a1, don't apply or unapply b1 or c1, regardless of their current state. """ a1_impl = FakeMigration('a1') a1 = ('a', '1') a2_impl = FakeMigration('a2') a2 = ('a', '2') b1_impl = FakeMigration('b1') b1 = ('b', '1') c1_impl = FakeMigration('c1') c1 = ('c', '1') graph = MigrationGraph() graph.add_node(a1, a1_impl) graph.add_node(a2, a2_impl) graph.add_node(b1, b1_impl) graph.add_node(c1, c1_impl) graph.add_dependency(None, a2, a1) graph.add_dependency(None, b1, a1) graph.add_dependency(None, c1, a1) executor = MigrationExecutor(None) executor.loader = FakeLoader(graph, { a1: a1_impl, b1: b1_impl, }) plan = executor.migration_plan({a1}) self.assertEqual(plan, [])
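# A condensed recap of the planner behaviour pinned down above (informal
# sketch reusing this file's fixtures):
#
#   graph = MigrationGraph()
#   graph.add_node(('a', '1'), a1_impl)
#   graph.add_node(('a', '2'), a2_impl)
#   graph.add_dependency(None, ('a', '2'), ('a', '1'))
#   executor = MigrationExecutor(None)
#   executor.loader = FakeLoader(graph, {('a', '1'): a1_impl, ('a', '2'): a2_impl})
#   executor.migration_plan({('a', '1')})  # -> [(a2_impl, True)]
#
# The True flag marks a backwards step: only migrations after the target are
# unapplied, never the target itself, and the plan is empty when the target
# is already satisfied.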
import datetime import importlib import io import os import sys from unittest import mock from django.apps import apps from django.core.management import CommandError, call_command from django.db import ( ConnectionHandler, DatabaseError, OperationalError, connection, connections, models, ) from django.db.backends.base.schema import BaseDatabaseSchemaEditor from django.db.backends.utils import truncate_name from django.db.migrations.exceptions import InconsistentMigrationHistory from django.db.migrations.recorder import MigrationRecorder from django.test import TestCase, override_settings, skipUnlessDBFeature from .models import UnicodeModel, UnserializableModel from .routers import TestRouter from .test_base import MigrationTestBase class MigrateTests(MigrationTestBase): """ Tests running the migrate command. """ databases = {'default', 'other'} @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_migrate(self): """ Tests basic usage of the migrate command. """ # No tables are created self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_tribble") self.assertTableNotExists("migrations_book") # Run the migrations to 0001 only stdout = io.StringIO() call_command('migrate', 'migrations', '0001', verbosity=2, stdout=stdout, no_color=True) stdout = stdout.getvalue() self.assertIn('Target specific migration: 0001_initial, from migrations', stdout) self.assertIn('Applying migrations.0001_initial... OK', stdout) self.assertIn('Running pre-migrate handlers for application migrations', stdout) self.assertIn('Running post-migrate handlers for application migrations', stdout) # The correct tables exist self.assertTableExists("migrations_author") self.assertTableExists("migrations_tribble") self.assertTableNotExists("migrations_book") # Run migrations all the way call_command("migrate", verbosity=0) # The correct tables exist self.assertTableExists("migrations_author") self.assertTableNotExists("migrations_tribble") self.assertTableExists("migrations_book") # Unmigrate everything stdout = io.StringIO() call_command('migrate', 'migrations', 'zero', verbosity=2, stdout=stdout, no_color=True) stdout = stdout.getvalue() self.assertIn('Unapply all migrations: migrations', stdout) self.assertIn('Unapplying migrations.0002_second... OK', stdout) self.assertIn('Running pre-migrate handlers for application migrations', stdout) self.assertIn('Running post-migrate handlers for application migrations', stdout) # Tables are gone self.assertTableNotExists("migrations_author") self.assertTableNotExists("migrations_tribble") self.assertTableNotExists("migrations_book") @override_settings(INSTALLED_APPS=[ 'django.contrib.auth', 'django.contrib.contenttypes', 'migrations.migrations_test_apps.migrated_app', ]) def test_migrate_with_system_checks(self): out = io.StringIO() call_command('migrate', skip_checks=False, no_color=True, stdout=out) self.assertIn('Apply all migrations: migrated_app', out.getvalue()) @override_settings(INSTALLED_APPS=['migrations', 'migrations.migrations_test_apps.unmigrated_app_syncdb']) def test_app_without_migrations(self): msg = "App 'unmigrated_app_syncdb' does not have migrations." with self.assertRaisesMessage(CommandError, msg): call_command('migrate', app_label='unmigrated_app_syncdb') @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_clashing_prefix'}) def test_ambiguous_prefix(self): msg = ( "More than one migration matches 'a' in app 'migrations'. Please " "be more specific." 
        )
        with self.assertRaisesMessage(CommandError, msg):
            call_command('migrate', app_label='migrations', migration_name='a')

    @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations'})
    def test_unknown_prefix(self):
        msg = "Cannot find a migration matching 'nonexistent' from app 'migrations'."
        with self.assertRaisesMessage(CommandError, msg):
            call_command('migrate', app_label='migrations', migration_name='nonexistent')

    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_initial_false"})
    def test_migrate_initial_false(self):
        """
        `Migration.initial = False` skips fake-initial detection.
        """
        # Make sure no tables are created
        self.assertTableNotExists("migrations_author")
        self.assertTableNotExists("migrations_tribble")
        # Run the migrations to 0001 only
        call_command("migrate", "migrations", "0001", verbosity=0)
        # Fake rollback
        call_command("migrate", "migrations", "zero", fake=True, verbosity=0)
        # Make sure fake-initial detection does not run
        with self.assertRaises(DatabaseError):
            call_command("migrate", "migrations", "0001", fake_initial=True, verbosity=0)
        call_command("migrate", "migrations", "0001", fake=True, verbosity=0)
        # Real rollback
        call_command("migrate", "migrations", "zero", verbosity=0)
        # Make sure it's all gone
        self.assertTableNotExists("migrations_author")
        self.assertTableNotExists("migrations_tribble")
        self.assertTableNotExists("migrations_book")

    @override_settings(
        MIGRATION_MODULES={"migrations": "migrations.test_migrations"},
        DATABASE_ROUTERS=['migrations.routers.TestRouter'],
    )
    def test_migrate_fake_initial(self):
        """
        --fake-initial only works if all tables created in the initial
        migration of an app exist. Database routers must be obeyed when
        doing that check.
        """
        # Make sure no tables are created
        for db in self.databases:
            self.assertTableNotExists("migrations_author", using=db)
            self.assertTableNotExists("migrations_tribble", using=db)
        # Run the migrations to 0001 only
        call_command("migrate", "migrations", "0001", verbosity=0)
        call_command("migrate", "migrations", "0001", verbosity=0, database="other")
        # Make sure the right tables exist
        self.assertTableExists("migrations_author")
        self.assertTableNotExists("migrations_tribble")
        # Also check the "other" database
        self.assertTableNotExists("migrations_author", using="other")
        self.assertTableExists("migrations_tribble", using="other")
        # Fake a roll-back
        call_command("migrate", "migrations", "zero", fake=True, verbosity=0)
        call_command("migrate", "migrations", "zero", fake=True, verbosity=0, database="other")
        # Make sure the tables still exist
        self.assertTableExists("migrations_author")
        self.assertTableExists("migrations_tribble", using="other")
        # Try to run initial migration
        with self.assertRaises(DatabaseError):
            call_command("migrate", "migrations", "0001", verbosity=0)
        # Run initial migration with an explicit --fake-initial
        out = io.StringIO()
        with mock.patch('django.core.management.color.supports_color', lambda *args: False):
            call_command("migrate", "migrations", "0001", fake_initial=True, stdout=out, verbosity=1)
            call_command("migrate", "migrations", "0001", fake_initial=True, verbosity=0, database="other")
        self.assertIn(
            "migrations.0001_initial... faked",
            out.getvalue().lower()
        )
        try:
            # Run migrations all the way.
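            # How --fake-initial decides, roughly (hedged sketch: the real
            # check lives in the executor's detect_soft_applied();
            # tables_created_by() and record_as_applied() are hypothetical
            # helpers used only for illustration):
            #
            #   if all(t in existing_tables for t in tables_created_by(migration)):
            #       record_as_applied(migration)  # reported as "... faked"
            #
            # Every table the initial migration would create must already
            # exist, which is why the missing "migrations_tribble" table makes
            # the --fake-initial run below fail on the default database.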
call_command('migrate', verbosity=0) call_command('migrate', verbosity=0, database="other") self.assertTableExists('migrations_author') self.assertTableNotExists('migrations_tribble') self.assertTableExists('migrations_book') self.assertTableNotExists('migrations_author', using='other') self.assertTableNotExists('migrations_tribble', using='other') self.assertTableNotExists('migrations_book', using='other') # Fake a roll-back. call_command('migrate', 'migrations', 'zero', fake=True, verbosity=0) call_command('migrate', 'migrations', 'zero', fake=True, verbosity=0, database='other') self.assertTableExists('migrations_author') self.assertTableNotExists('migrations_tribble') self.assertTableExists('migrations_book') # Run initial migration. with self.assertRaises(DatabaseError): call_command('migrate', 'migrations', verbosity=0) # Run initial migration with an explicit --fake-initial. with self.assertRaises(DatabaseError): # Fails because "migrations_tribble" does not exist but needs # to in order to make --fake-initial work. call_command('migrate', 'migrations', fake_initial=True, verbosity=0) # Fake an apply. call_command('migrate', 'migrations', fake=True, verbosity=0) call_command('migrate', 'migrations', fake=True, verbosity=0, database='other') finally: # Unmigrate everything. call_command('migrate', 'migrations', 'zero', verbosity=0) call_command('migrate', 'migrations', 'zero', verbosity=0, database='other') # Make sure it's all gone for db in self.databases: self.assertTableNotExists("migrations_author", using=db) self.assertTableNotExists("migrations_tribble", using=db) self.assertTableNotExists("migrations_book", using=db) @skipUnlessDBFeature('ignores_table_name_case') def test_migrate_fake_initial_case_insensitive(self): with override_settings(MIGRATION_MODULES={ 'migrations': 'migrations.test_fake_initial_case_insensitive.initial', }): call_command('migrate', 'migrations', '0001', verbosity=0) call_command('migrate', 'migrations', 'zero', fake=True, verbosity=0) with override_settings(MIGRATION_MODULES={ 'migrations': 'migrations.test_fake_initial_case_insensitive.fake_initial', }): out = io.StringIO() call_command( 'migrate', 'migrations', '0001', fake_initial=True, stdout=out, verbosity=1, no_color=True, ) self.assertIn( 'migrations.0001_initial... faked', out.getvalue().lower(), ) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_fake_split_initial"}) def test_migrate_fake_split_initial(self): """ Split initial migrations can be faked with --fake-initial. """ try: call_command('migrate', 'migrations', '0002', verbosity=0) call_command('migrate', 'migrations', 'zero', fake=True, verbosity=0) out = io.StringIO() with mock.patch('django.core.management.color.supports_color', lambda *args: False): call_command('migrate', 'migrations', '0002', fake_initial=True, stdout=out, verbosity=1) value = out.getvalue().lower() self.assertIn('migrations.0001_initial... faked', value) self.assertIn('migrations.0002_second... faked', value) finally: # Fake an apply. call_command('migrate', 'migrations', fake=True, verbosity=0) # Unmigrate everything. call_command('migrate', 'migrations', 'zero', verbosity=0) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_conflict"}) def test_migrate_conflict_exit(self): """ migrate exits if it detects a conflict. 
""" msg = ( "Conflicting migrations detected; multiple leaf nodes in the " "migration graph: (0002_conflicting_second, 0002_second in " "migrations).\n" "To fix them run 'python manage.py makemigrations --merge'" ) with self.assertRaisesMessage(CommandError, msg): call_command("migrate", "migrations") @override_settings(MIGRATION_MODULES={ 'migrations': 'migrations.test_migrations', }) def test_migrate_check(self): with self.assertRaises(SystemExit): call_command('migrate', 'migrations', '0001', check_unapplied=True) self.assertTableNotExists('migrations_author') self.assertTableNotExists('migrations_tribble') self.assertTableNotExists('migrations_book') @override_settings(MIGRATION_MODULES={ 'migrations': 'migrations.test_migrations_plan', }) def test_migrate_check_plan(self): out = io.StringIO() with self.assertRaises(SystemExit): call_command( 'migrate', 'migrations', '0001', check_unapplied=True, plan=True, stdout=out, no_color=True, ) self.assertEqual( 'Planned operations:\n' 'migrations.0001_initial\n' ' Create model Salamander\n' ' Raw Python operation -> Grow salamander tail.\n', out.getvalue(), ) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_showmigrations_list(self): """ showmigrations --list displays migrations and whether or not they're applied. """ out = io.StringIO() with mock.patch('django.core.management.color.supports_color', lambda *args: True): call_command("showmigrations", format='list', stdout=out, verbosity=0, no_color=False) self.assertEqual( '\x1b[1mmigrations\n\x1b[0m' ' [ ] 0001_initial\n' ' [ ] 0002_second\n', out.getvalue().lower() ) call_command("migrate", "migrations", "0001", verbosity=0) out = io.StringIO() # Giving the explicit app_label tests for selective `show_list` in the command call_command("showmigrations", "migrations", format='list', stdout=out, verbosity=0, no_color=True) self.assertEqual( 'migrations\n' ' [x] 0001_initial\n' ' [ ] 0002_second\n', out.getvalue().lower() ) out = io.StringIO() # Applied datetimes are displayed at verbosity 2+. call_command('showmigrations', 'migrations', stdout=out, verbosity=2, no_color=True) migration1 = MigrationRecorder(connection).migration_qs.get(app='migrations', name='0001_initial') self.assertEqual( 'migrations\n' ' [x] 0001_initial (applied at %s)\n' ' [ ] 0002_second\n' % migration1.applied.strftime('%Y-%m-%d %H:%M:%S'), out.getvalue().lower() ) # Cleanup by unmigrating everything call_command("migrate", "migrations", "zero", verbosity=0) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_squashed'}) def test_showmigrations_list_squashed(self): out = io.StringIO() call_command('showmigrations', format='list', stdout=out, verbosity=2, no_color=True) self.assertEqual( 'migrations\n' ' [ ] 0001_squashed_0002 (2 squashed migrations)\n', out.getvalue().lower(), ) out = io.StringIO() call_command( 'migrate', 'migrations', '0001_squashed_0002', stdout=out, verbosity=2, no_color=True, ) try: self.assertIn( 'operations to perform:\n' ' target specific migration: 0001_squashed_0002, from migrations\n' 'running pre-migrate handlers for application migrations\n' 'running migrations:\n' ' applying migrations.0001_squashed_0002... ok (', out.getvalue().lower(), ) out = io.StringIO() call_command('showmigrations', format='list', stdout=out, verbosity=2, no_color=True) self.assertEqual( 'migrations\n' ' [x] 0001_squashed_0002 (2 squashed migrations)\n', out.getvalue().lower(), ) finally: # Unmigrate everything. 
call_command('migrate', 'migrations', 'zero', verbosity=0) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_run_before"}) def test_showmigrations_plan(self): """ Tests --plan output of showmigrations command """ out = io.StringIO() call_command("showmigrations", format='plan', stdout=out) self.assertEqual( "[ ] migrations.0001_initial\n" "[ ] migrations.0003_third\n" "[ ] migrations.0002_second\n", out.getvalue().lower() ) out = io.StringIO() call_command("showmigrations", format='plan', stdout=out, verbosity=2) self.assertEqual( "[ ] migrations.0001_initial\n" "[ ] migrations.0003_third ... (migrations.0001_initial)\n" "[ ] migrations.0002_second ... (migrations.0001_initial, migrations.0003_third)\n", out.getvalue().lower() ) call_command("migrate", "migrations", "0003", verbosity=0) out = io.StringIO() call_command("showmigrations", format='plan', stdout=out) self.assertEqual( "[x] migrations.0001_initial\n" "[x] migrations.0003_third\n" "[ ] migrations.0002_second\n", out.getvalue().lower() ) out = io.StringIO() call_command("showmigrations", format='plan', stdout=out, verbosity=2) self.assertEqual( "[x] migrations.0001_initial\n" "[x] migrations.0003_third ... (migrations.0001_initial)\n" "[ ] migrations.0002_second ... (migrations.0001_initial, migrations.0003_third)\n", out.getvalue().lower() ) # Cleanup by unmigrating everything call_command("migrate", "migrations", "zero", verbosity=0) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_plan'}) def test_migrate_plan(self): """Tests migrate --plan output.""" out = io.StringIO() # Show the plan up to the third migration. call_command('migrate', 'migrations', '0003', plan=True, stdout=out, no_color=True) self.assertEqual( 'Planned operations:\n' 'migrations.0001_initial\n' ' Create model Salamander\n' ' Raw Python operation -> Grow salamander tail.\n' 'migrations.0002_second\n' ' Create model Book\n' " Raw SQL operation -> ['SELECT * FROM migrations_book']\n" 'migrations.0003_third\n' ' Create model Author\n' " Raw SQL operation -> ['SELECT * FROM migrations_author']\n", out.getvalue() ) try: # Migrate to the third migration. call_command('migrate', 'migrations', '0003', verbosity=0) out = io.StringIO() # Show the plan for when there is nothing to apply. call_command('migrate', 'migrations', '0003', plan=True, stdout=out, no_color=True) self.assertEqual( 'Planned operations:\n' ' No planned migration operations.\n', out.getvalue() ) out = io.StringIO() # Show the plan for reverse migration back to 0001. call_command('migrate', 'migrations', '0001', plan=True, stdout=out, no_color=True) self.assertEqual( 'Planned operations:\n' 'migrations.0003_third\n' ' Undo Create model Author\n' " Raw SQL operation -> ['SELECT * FROM migrations_book']\n" 'migrations.0002_second\n' ' Undo Create model Book\n' " Raw SQL operation -> ['SELECT * FROM migrations_salamand…\n", out.getvalue() ) out = io.StringIO() # Show the migration plan to fourth, with truncated details. call_command('migrate', 'migrations', '0004', plan=True, stdout=out, no_color=True) self.assertEqual( 'Planned operations:\n' 'migrations.0004_fourth\n' ' Raw SQL operation -> SELECT * FROM migrations_author WHE…\n', out.getvalue() ) # Show the plan when an operation is irreversible. # Migrate to the fourth migration. 
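            # For orientation before the irreversibility checks below
            # (illustrative shapes, not quotes from the fixtures): an
            # operation is irreversible when it defines no reverse,
            #
            #   migrations.RunSQL('...')                     # forward only
            #   migrations.RunSQL('...', reverse_sql='...')  # reversible
            #
            # and only a backwards plan renders the former as 'IRREVERSIBLE';
            # the same operation in a forward plan prints normally.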
call_command('migrate', 'migrations', '0004', verbosity=0) out = io.StringIO() call_command('migrate', 'migrations', '0003', plan=True, stdout=out, no_color=True) self.assertEqual( 'Planned operations:\n' 'migrations.0004_fourth\n' ' Raw SQL operation -> IRREVERSIBLE\n', out.getvalue() ) out = io.StringIO() call_command('migrate', 'migrations', '0005', plan=True, stdout=out, no_color=True) # Operation is marked as irreversible only in the revert plan. self.assertEqual( 'Planned operations:\n' 'migrations.0005_fifth\n' ' Raw Python operation\n' ' Raw Python operation\n' ' Raw Python operation -> Feed salamander.\n', out.getvalue() ) call_command('migrate', 'migrations', '0005', verbosity=0) out = io.StringIO() call_command('migrate', 'migrations', '0004', plan=True, stdout=out, no_color=True) self.assertEqual( 'Planned operations:\n' 'migrations.0005_fifth\n' ' Raw Python operation -> IRREVERSIBLE\n' ' Raw Python operation -> IRREVERSIBLE\n' ' Raw Python operation\n', out.getvalue() ) finally: # Cleanup by unmigrating everything: fake the irreversible, then # migrate all to zero. call_command('migrate', 'migrations', '0003', fake=True, verbosity=0) call_command('migrate', 'migrations', 'zero', verbosity=0) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_empty'}) def test_showmigrations_no_migrations(self): out = io.StringIO() call_command('showmigrations', stdout=out, no_color=True) self.assertEqual('migrations\n (no migrations)\n', out.getvalue().lower()) @override_settings(INSTALLED_APPS=['migrations.migrations_test_apps.unmigrated_app']) def test_showmigrations_unmigrated_app(self): out = io.StringIO() call_command('showmigrations', 'unmigrated_app', stdout=out, no_color=True) try: self.assertEqual('unmigrated_app\n (no migrations)\n', out.getvalue().lower()) finally: # unmigrated_app.SillyModel has a foreign key to # 'migrations.Tribble', but that model is only defined in a # migration, so the global app registry never sees it and the # reference is left dangling. Remove it to avoid problems in # subsequent tests. apps._pending_operations.pop(('migrations', 'tribble'), None) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_empty"}) def test_showmigrations_plan_no_migrations(self): """ Tests --plan output of showmigrations command without migrations """ out = io.StringIO() call_command('showmigrations', format='plan', stdout=out, no_color=True) self.assertEqual('(no migrations)\n', out.getvalue().lower()) out = io.StringIO() call_command('showmigrations', format='plan', stdout=out, verbosity=2, no_color=True) self.assertEqual('(no migrations)\n', out.getvalue().lower()) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed_complex"}) def test_showmigrations_plan_squashed(self): """ Tests --plan output of showmigrations command with squashed migrations. """ out = io.StringIO() call_command("showmigrations", format='plan', stdout=out) self.assertEqual( "[ ] migrations.1_auto\n" "[ ] migrations.2_auto\n" "[ ] migrations.3_squashed_5\n" "[ ] migrations.6_auto\n" "[ ] migrations.7_auto\n", out.getvalue().lower() ) out = io.StringIO() call_command("showmigrations", format='plan', stdout=out, verbosity=2) self.assertEqual( "[ ] migrations.1_auto\n" "[ ] migrations.2_auto ... (migrations.1_auto)\n" "[ ] migrations.3_squashed_5 ... (migrations.2_auto)\n" "[ ] migrations.6_auto ... (migrations.3_squashed_5)\n" "[ ] migrations.7_auto ... 
(migrations.6_auto)\n", out.getvalue().lower() ) call_command("migrate", "migrations", "3_squashed_5", verbosity=0) out = io.StringIO() call_command("showmigrations", format='plan', stdout=out) self.assertEqual( "[x] migrations.1_auto\n" "[x] migrations.2_auto\n" "[x] migrations.3_squashed_5\n" "[ ] migrations.6_auto\n" "[ ] migrations.7_auto\n", out.getvalue().lower() ) out = io.StringIO() call_command("showmigrations", format='plan', stdout=out, verbosity=2) self.assertEqual( "[x] migrations.1_auto\n" "[x] migrations.2_auto ... (migrations.1_auto)\n" "[x] migrations.3_squashed_5 ... (migrations.2_auto)\n" "[ ] migrations.6_auto ... (migrations.3_squashed_5)\n" "[ ] migrations.7_auto ... (migrations.6_auto)\n", out.getvalue().lower() ) @override_settings(INSTALLED_APPS=[ 'migrations.migrations_test_apps.mutate_state_b', 'migrations.migrations_test_apps.alter_fk.author_app', 'migrations.migrations_test_apps.alter_fk.book_app', ]) def test_showmigrations_plan_single_app_label(self): """ `showmigrations --plan app_label` output with a single app_label. """ # Single app with no dependencies on other apps. out = io.StringIO() call_command('showmigrations', 'mutate_state_b', format='plan', stdout=out) self.assertEqual( '[ ] mutate_state_b.0001_initial\n' '[ ] mutate_state_b.0002_add_field\n', out.getvalue() ) # Single app with dependencies. out = io.StringIO() call_command('showmigrations', 'author_app', format='plan', stdout=out) self.assertEqual( '[ ] author_app.0001_initial\n' '[ ] book_app.0001_initial\n' '[ ] author_app.0002_alter_id\n', out.getvalue() ) # Some migrations already applied. call_command('migrate', 'author_app', '0001', verbosity=0) out = io.StringIO() call_command('showmigrations', 'author_app', format='plan', stdout=out) self.assertEqual( '[X] author_app.0001_initial\n' '[ ] book_app.0001_initial\n' '[ ] author_app.0002_alter_id\n', out.getvalue() ) # Cleanup by unmigrating author_app. call_command('migrate', 'author_app', 'zero', verbosity=0) @override_settings(INSTALLED_APPS=[ 'migrations.migrations_test_apps.mutate_state_b', 'migrations.migrations_test_apps.alter_fk.author_app', 'migrations.migrations_test_apps.alter_fk.book_app', ]) def test_showmigrations_plan_multiple_app_labels(self): """ `showmigrations --plan app_label` output with multiple app_labels. """ # Multiple apps: author_app depends on book_app; mutate_state_b doesn't # depend on other apps. out = io.StringIO() call_command('showmigrations', 'mutate_state_b', 'author_app', format='plan', stdout=out) self.assertEqual( '[ ] author_app.0001_initial\n' '[ ] book_app.0001_initial\n' '[ ] author_app.0002_alter_id\n' '[ ] mutate_state_b.0001_initial\n' '[ ] mutate_state_b.0002_add_field\n', out.getvalue() ) # Multiple apps: args order shouldn't matter (the same result is # expected as above). 
out = io.StringIO() call_command('showmigrations', 'author_app', 'mutate_state_b', format='plan', stdout=out) self.assertEqual( '[ ] author_app.0001_initial\n' '[ ] book_app.0001_initial\n' '[ ] author_app.0002_alter_id\n' '[ ] mutate_state_b.0001_initial\n' '[ ] mutate_state_b.0002_add_field\n', out.getvalue() ) @override_settings(INSTALLED_APPS=['migrations.migrations_test_apps.unmigrated_app']) def test_showmigrations_plan_app_label_no_migrations(self): out = io.StringIO() call_command('showmigrations', 'unmigrated_app', format='plan', stdout=out, no_color=True) try: self.assertEqual('(no migrations)\n', out.getvalue()) finally: # unmigrated_app.SillyModel has a foreign key to # 'migrations.Tribble', but that model is only defined in a # migration, so the global app registry never sees it and the # reference is left dangling. Remove it to avoid problems in # subsequent tests. apps._pending_operations.pop(('migrations', 'tribble'), None) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_sqlmigrate_forwards(self): """ sqlmigrate outputs forward looking SQL. """ out = io.StringIO() call_command("sqlmigrate", "migrations", "0001", stdout=out) output = out.getvalue().lower() index_tx_start = output.find(connection.ops.start_transaction_sql().lower()) index_op_desc_author = output.find('-- create model author') index_create_table = output.find('create table') index_op_desc_tribble = output.find('-- create model tribble') index_op_desc_unique_together = output.find('-- alter unique_together') index_tx_end = output.find(connection.ops.end_transaction_sql().lower()) if connection.features.can_rollback_ddl: self.assertGreater(index_tx_start, -1, "Transaction start not found") self.assertGreater( index_tx_end, index_op_desc_unique_together, "Transaction end not found or found before operation description (unique_together)" ) self.assertGreater( index_op_desc_author, index_tx_start, "Operation description (author) not found or found before transaction start" ) self.assertGreater( index_create_table, index_op_desc_author, "CREATE TABLE not found or found before operation description (author)" ) self.assertGreater( index_op_desc_tribble, index_create_table, "Operation description (tribble) not found or found before CREATE TABLE (author)" ) self.assertGreater( index_op_desc_unique_together, index_op_desc_tribble, "Operation description (unique_together) not found or found before operation description (tribble)" ) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_sqlmigrate_backwards(self): """ sqlmigrate outputs reverse looking SQL. """ # Cannot generate the reverse SQL unless we've applied the migration. 
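        # Shell analogue of what this test drives through call_command()
        # (illustrative):
        #
        #   $ python manage.py migrate migrations
        #   $ python manage.py sqlmigrate migrations 0001 --backwards
        #
        # The reverse SQL can only be computed after the migration has
        # actually been applied, hence the migrate call first.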
call_command("migrate", "migrations", verbosity=0) out = io.StringIO() call_command("sqlmigrate", "migrations", "0001", stdout=out, backwards=True) output = out.getvalue().lower() index_tx_start = output.find(connection.ops.start_transaction_sql().lower()) index_op_desc_unique_together = output.find('-- alter unique_together') index_op_desc_tribble = output.find('-- create model tribble') index_op_desc_author = output.find('-- create model author') index_drop_table = output.rfind('drop table') index_tx_end = output.find(connection.ops.end_transaction_sql().lower()) if connection.features.can_rollback_ddl: self.assertGreater(index_tx_start, -1, "Transaction start not found") self.assertGreater( index_tx_end, index_op_desc_unique_together, "Transaction end not found or found before DROP TABLE" ) self.assertGreater( index_op_desc_unique_together, index_tx_start, "Operation description (unique_together) not found or found before transaction start" ) self.assertGreater( index_op_desc_tribble, index_op_desc_unique_together, "Operation description (tribble) not found or found before operation description (unique_together)" ) self.assertGreater( index_op_desc_author, index_op_desc_tribble, "Operation description (author) not found or found before operation description (tribble)" ) self.assertGreater( index_drop_table, index_op_desc_author, "DROP TABLE not found or found before operation description (author)" ) # Cleanup by unmigrating everything call_command("migrate", "migrations", "zero", verbosity=0) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_non_atomic"}) def test_sqlmigrate_for_non_atomic_migration(self): """ Transaction wrappers aren't shown for non-atomic migrations. """ out = io.StringIO() call_command("sqlmigrate", "migrations", "0001", stdout=out) output = out.getvalue().lower() queries = [q.strip() for q in output.splitlines()] if connection.ops.start_transaction_sql(): self.assertNotIn(connection.ops.start_transaction_sql().lower(), queries) self.assertNotIn(connection.ops.end_transaction_sql().lower(), queries) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations'}) def test_sqlmigrate_for_non_transactional_databases(self): """ Transaction wrappers aren't shown for databases that don't support transactional DDL. """ out = io.StringIO() with mock.patch.object(connection.features, 'can_rollback_ddl', False): call_command('sqlmigrate', 'migrations', '0001', stdout=out) output = out.getvalue().lower() queries = [q.strip() for q in output.splitlines()] start_transaction_sql = connection.ops.start_transaction_sql() if start_transaction_sql: self.assertNotIn(start_transaction_sql.lower(), queries) self.assertNotIn(connection.ops.end_transaction_sql().lower(), queries) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_squashed'}) def test_sqlmigrate_ambiguous_prefix_squashed_migrations(self): msg = ( "More than one migration matches '0001' in app 'migrations'. " "Please be more specific." 
) with self.assertRaisesMessage(CommandError, msg): call_command('sqlmigrate', 'migrations', '0001') @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_squashed'}) def test_sqlmigrate_squashed_migration(self): out = io.StringIO() call_command('sqlmigrate', 'migrations', '0001_squashed_0002', stdout=out) output = out.getvalue().lower() self.assertIn('-- create model author', output) self.assertIn('-- create model book', output) self.assertNotIn('-- create model tribble', output) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_squashed'}) def test_sqlmigrate_replaced_migration(self): out = io.StringIO() call_command('sqlmigrate', 'migrations', '0001_initial', stdout=out) output = out.getvalue().lower() self.assertIn('-- create model author', output) self.assertIn('-- create model tribble', output) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_no_operations'}) def test_migrations_no_operations(self): err = io.StringIO() call_command('sqlmigrate', 'migrations', '0001_initial', stderr=err) self.assertEqual(err.getvalue(), 'No operations found.\n') @override_settings( INSTALLED_APPS=[ "migrations.migrations_test_apps.migrated_app", "migrations.migrations_test_apps.migrated_unapplied_app", "migrations.migrations_test_apps.unmigrated_app", ], ) def test_regression_22823_unmigrated_fk_to_migrated_model(self): """ Assuming you have 3 apps, `A`, `B`, and `C`, such that: * `A` has migrations * `B` has a migration we want to apply * `C` has no migrations, but has an FK to `A` When we try to migrate "B", an exception occurs because the "B" was not included in the ProjectState that is used to detect soft-applied migrations (#22823). """ call_command('migrate', 'migrated_unapplied_app', verbosity=0) # unmigrated_app.SillyModel has a foreign key to 'migrations.Tribble', # but that model is only defined in a migration, so the global app # registry never sees it and the reference is left dangling. Remove it # to avoid problems in subsequent tests. apps._pending_operations.pop(('migrations', 'tribble'), None) @override_settings(INSTALLED_APPS=['migrations.migrations_test_apps.unmigrated_app_syncdb']) def test_migrate_syncdb_deferred_sql_executed_with_schemaeditor(self): """ For an app without migrations, editor.execute() is used for executing the syncdb deferred SQL. """ stdout = io.StringIO() with mock.patch.object(BaseDatabaseSchemaEditor, 'execute') as execute: call_command('migrate', run_syncdb=True, verbosity=1, stdout=stdout, no_color=True) create_table_count = len([call for call in execute.mock_calls if 'CREATE TABLE' in str(call)]) self.assertEqual(create_table_count, 2) # There's at least one deferred SQL for creating the foreign key # index. self.assertGreater(len(execute.mock_calls), 2) stdout = stdout.getvalue() self.assertIn('Synchronize unmigrated apps: unmigrated_app_syncdb', stdout) self.assertIn('Creating tables...', stdout) table_name = truncate_name('unmigrated_app_syncdb_classroom', connection.ops.max_name_length()) self.assertIn('Creating table %s' % table_name, stdout) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations'}) def test_migrate_syncdb_app_with_migrations(self): msg = "Can't use run_syncdb with app 'migrations' as it has migrations." 
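        # Shell analogue of the failure asserted below (illustrative):
        #
        #   $ python manage.py migrate migrations --run-syncdb
        #   CommandError: Can't use run_syncdb with app 'migrations' as it has migrations.
        #
        # --run-syncdb only creates tables for apps without migrations.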
with self.assertRaisesMessage(CommandError, msg): call_command('migrate', 'migrations', run_syncdb=True, verbosity=0) @override_settings(INSTALLED_APPS=[ 'migrations.migrations_test_apps.unmigrated_app_syncdb', 'migrations.migrations_test_apps.unmigrated_app_simple', ]) def test_migrate_syncdb_app_label(self): """ Running migrate --run-syncdb with an app_label only creates tables for the specified app. """ stdout = io.StringIO() with mock.patch.object(BaseDatabaseSchemaEditor, 'execute') as execute: call_command('migrate', 'unmigrated_app_syncdb', run_syncdb=True, stdout=stdout) create_table_count = len([call for call in execute.mock_calls if 'CREATE TABLE' in str(call)]) self.assertEqual(create_table_count, 2) self.assertGreater(len(execute.mock_calls), 2) self.assertIn('Synchronize unmigrated app: unmigrated_app_syncdb', stdout.getvalue()) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}) def test_migrate_record_replaced(self): """ Running a single squashed migration should record all of the original replaced migrations as run. """ recorder = MigrationRecorder(connection) out = io.StringIO() call_command("migrate", "migrations", verbosity=0) call_command("showmigrations", "migrations", stdout=out, no_color=True) self.assertEqual( 'migrations\n' ' [x] 0001_squashed_0002 (2 squashed migrations)\n', out.getvalue().lower() ) applied_migrations = recorder.applied_migrations() self.assertIn(("migrations", "0001_initial"), applied_migrations) self.assertIn(("migrations", "0002_second"), applied_migrations) self.assertIn(("migrations", "0001_squashed_0002"), applied_migrations) # Rollback changes call_command("migrate", "migrations", "zero", verbosity=0) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}) def test_migrate_record_squashed(self): """ Running migrate for a squashed migration should record as run if all of the replaced migrations have been run (#25231). """ recorder = MigrationRecorder(connection) recorder.record_applied("migrations", "0001_initial") recorder.record_applied("migrations", "0002_second") out = io.StringIO() call_command('showmigrations', 'migrations', stdout=out, no_color=True) self.assertEqual( "migrations\n" " [-] 0001_squashed_0002 (2 squashed migrations) " "run 'manage.py migrate' to finish recording.\n", out.getvalue().lower(), ) out = io.StringIO() call_command("migrate", "migrations", verbosity=0) call_command("showmigrations", "migrations", stdout=out, no_color=True) self.assertEqual( 'migrations\n' ' [x] 0001_squashed_0002 (2 squashed migrations)\n', out.getvalue().lower() ) self.assertIn( ("migrations", "0001_squashed_0002"), recorder.applied_migrations() ) # No changes were actually applied so there is nothing to rollback def test_migrate_partially_applied_squashed_migration(self): """ Migrating to a squashed migration specified by name should succeed even if it is partially applied. """ with self.temporary_migration_module(module='migrations.test_migrations'): recorder = MigrationRecorder(connection) try: call_command('migrate', 'migrations', '0001_initial', verbosity=0) call_command( 'squashmigrations', 'migrations', '0002', interactive=False, verbosity=0, ) call_command( 'migrate', 'migrations', '0001_squashed_0002_second', verbosity=0, ) applied_migrations = recorder.applied_migrations() self.assertIn(('migrations', '0002_second'), applied_migrations) finally: # Unmigrate everything. 
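            # Shell analogue of the scenario above (illustrative): partially
            # apply, squash, then target the squashed migration by name:
            #
            #   $ python manage.py migrate migrations 0001_initial
            #   $ python manage.py squashmigrations migrations 0002 --noinput
            #   $ python manage.py migrate migrations 0001_squashed_0002_second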
call_command('migrate', 'migrations', 'zero', verbosity=0) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations'}) def test_migrate_inconsistent_history(self): """ Running migrate with some migrations applied before their dependencies should not be allowed. """ recorder = MigrationRecorder(connection) recorder.record_applied("migrations", "0002_second") msg = "Migration migrations.0002_second is applied before its dependency migrations.0001_initial" with self.assertRaisesMessage(InconsistentMigrationHistory, msg): call_command("migrate") applied_migrations = recorder.applied_migrations() self.assertNotIn(("migrations", "0001_initial"), applied_migrations) @override_settings(INSTALLED_APPS=[ 'migrations.migrations_test_apps.migrated_unapplied_app', 'migrations.migrations_test_apps.migrated_app', ]) def test_migrate_not_reflected_changes(self): class NewModel1(models.Model): class Meta(): app_label = 'migrated_app' class NewModel2(models.Model): class Meta(): app_label = 'migrated_unapplied_app' out = io.StringIO() try: call_command('migrate', verbosity=0) call_command('migrate', stdout=out, no_color=True) self.assertEqual( "operations to perform:\n" " apply all migrations: migrated_app, migrated_unapplied_app\n" "running migrations:\n" " no migrations to apply.\n" " your models in app(s): 'migrated_app', " "'migrated_unapplied_app' have changes that are not yet " "reflected in a migration, and so won't be applied.\n" " run 'manage.py makemigrations' to make new migrations, and " "then re-run 'manage.py migrate' to apply them.\n", out.getvalue().lower(), ) finally: # Unmigrate everything. call_command('migrate', 'migrated_app', 'zero', verbosity=0) call_command('migrate', 'migrated_unapplied_app', 'zero', verbosity=0) class MakeMigrationsTests(MigrationTestBase): """ Tests running the makemigrations command. """ def setUp(self): super().setUp() self._old_models = apps.app_configs['migrations'].models.copy() def tearDown(self): apps.app_configs['migrations'].models = self._old_models apps.all_models['migrations'] = self._old_models apps.clear_cache() super().tearDown() def test_files_content(self): self.assertTableNotExists("migrations_unicodemodel") apps.register_model('migrations', UnicodeModel) with self.temporary_migration_module() as migration_dir: call_command("makemigrations", "migrations", verbosity=0) # Check for empty __init__.py file in migrations folder init_file = os.path.join(migration_dir, "__init__.py") self.assertTrue(os.path.exists(init_file)) with open(init_file) as fp: content = fp.read() self.assertEqual(content, '') # Check for existing 0001_initial.py file in migration folder initial_file = os.path.join(migration_dir, "0001_initial.py") self.assertTrue(os.path.exists(initial_file)) with open(initial_file, encoding='utf-8') as fp: content = fp.read() self.assertIn('migrations.CreateModel', content) self.assertIn('initial = True', content) self.assertIn('úñí©óðé µóðéø', content) # Meta.verbose_name self.assertIn('úñí©óðé µóðéøß', content) # Meta.verbose_name_plural self.assertIn('ÚÑÍ¢ÓÐÉ', content) # title.verbose_name self.assertIn('“Ðjáñgó”', content) # title.default def test_makemigrations_order(self): """ makemigrations should recognize number-only migrations (0001.py). """ module = 'migrations.test_migrations_order' with self.temporary_migration_module(module=module) as migration_dir: if hasattr(importlib, 'invalidate_caches'): # importlib caches os.listdir() on some platforms like macOS # (#23850). 
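                # Why the invalidation below matters (sketch): the import
                # system caches directory listings per path entry, so modules
                # written after startup can be invisible until the caches are
                # cleared, e.g.
                #
                #   with open(os.path.join(migration_dir, '0001.py'), 'w'):
                #       pass  # newly copied/written module
                #   importlib.invalidate_caches()  # otherwise import may miss it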
importlib.invalidate_caches() call_command('makemigrations', 'migrations', '--empty', '-n', 'a', '-v', '0') self.assertTrue(os.path.exists(os.path.join(migration_dir, '0002_a.py'))) def test_makemigrations_empty_connections(self): empty_connections = ConnectionHandler({'default': {}}) with mock.patch('django.core.management.commands.makemigrations.connections', new=empty_connections): # with no apps out = io.StringIO() call_command('makemigrations', stdout=out) self.assertIn('No changes detected', out.getvalue()) # with an app with self.temporary_migration_module() as migration_dir: call_command('makemigrations', 'migrations', verbosity=0) init_file = os.path.join(migration_dir, '__init__.py') self.assertTrue(os.path.exists(init_file)) @override_settings(INSTALLED_APPS=['migrations', 'migrations2']) def test_makemigrations_consistency_checks_respect_routers(self): """ The history consistency checks in makemigrations respect settings.DATABASE_ROUTERS. """ def patched_has_table(migration_recorder): if migration_recorder.connection is connections['other']: raise Exception('Other connection') else: return mock.DEFAULT self.assertTableNotExists('migrations_unicodemodel') apps.register_model('migrations', UnicodeModel) with mock.patch.object( MigrationRecorder, 'has_table', autospec=True, side_effect=patched_has_table) as has_table: with self.temporary_migration_module() as migration_dir: call_command("makemigrations", "migrations", verbosity=0) initial_file = os.path.join(migration_dir, "0001_initial.py") self.assertTrue(os.path.exists(initial_file)) self.assertEqual(has_table.call_count, 1) # 'default' is checked # Router says not to migrate 'other' so consistency shouldn't # be checked. with self.settings(DATABASE_ROUTERS=['migrations.routers.TestRouter']): call_command('makemigrations', 'migrations', verbosity=0) self.assertEqual(has_table.call_count, 2) # 'default' again # With a router that doesn't prohibit migrating 'other', # consistency is checked. with self.settings(DATABASE_ROUTERS=['migrations.routers.DefaultOtherRouter']): with self.assertRaisesMessage(Exception, 'Other connection'): call_command('makemigrations', 'migrations', verbosity=0) self.assertEqual(has_table.call_count, 4) # 'default' and 'other' # With a router that doesn't allow migrating on any database, # no consistency checks are made. with self.settings(DATABASE_ROUTERS=['migrations.routers.TestRouter']): with mock.patch.object(TestRouter, 'allow_migrate', return_value=False) as allow_migrate: call_command('makemigrations', 'migrations', verbosity=0) allow_migrate.assert_any_call('other', 'migrations', model_name='UnicodeModel') # allow_migrate() is called with the correct arguments. self.assertGreater(len(allow_migrate.mock_calls), 0) called_aliases = set() for mock_call in allow_migrate.mock_calls: _, call_args, call_kwargs = mock_call connection_alias, app_name = call_args called_aliases.add(connection_alias) # Raises an error if invalid app_name/model_name occurs. apps.get_app_config(app_name).get_model(call_kwargs['model_name']) self.assertEqual(called_aliases, set(connections)) self.assertEqual(has_table.call_count, 4) def test_failing_migration(self): # If a migration fails to serialize, it shouldn't generate an empty file. 
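        # The failure mode exercised here (hedged sketch; not necessarily how
        # UnserializableModel is actually defined): a field default that the
        # migration writer cannot serialize, such as a lambda,
        #
        #   class BadDefault(models.Model):
        #       name = models.CharField(max_length=20, default=lambda: 'x')
        #
        # makes serialization raise ValueError('Cannot serialize ...'), and
        # the command must not leave a half-written 0001_initial.py behind.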
#21280 apps.register_model('migrations', UnserializableModel) with self.temporary_migration_module() as migration_dir: with self.assertRaisesMessage(ValueError, 'Cannot serialize'): call_command("makemigrations", "migrations", verbosity=0) initial_file = os.path.join(migration_dir, "0001_initial.py") self.assertFalse(os.path.exists(initial_file)) def test_makemigrations_conflict_exit(self): """ makemigrations exits if it detects a conflict. """ with self.temporary_migration_module(module="migrations.test_migrations_conflict"): with self.assertRaises(CommandError) as context: call_command("makemigrations") self.assertEqual( str(context.exception), "Conflicting migrations detected; multiple leaf nodes in the " "migration graph: (0002_conflicting_second, 0002_second in " "migrations).\n" "To fix them run 'python manage.py makemigrations --merge'" ) def test_makemigrations_merge_no_conflict(self): """ makemigrations exits if in merge mode with no conflicts. """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations"): call_command("makemigrations", merge=True, stdout=out) self.assertIn("No conflicts detected to merge.", out.getvalue()) def test_makemigrations_empty_no_app_specified(self): """ makemigrations exits if no app is specified with 'empty' mode. """ msg = 'You must supply at least one app label when using --empty.' with self.assertRaisesMessage(CommandError, msg): call_command("makemigrations", empty=True) def test_makemigrations_empty_migration(self): """ makemigrations properly constructs an empty migration. """ with self.temporary_migration_module() as migration_dir: call_command("makemigrations", "migrations", empty=True, verbosity=0) # Check for existing 0001_initial.py file in migration folder initial_file = os.path.join(migration_dir, "0001_initial.py") self.assertTrue(os.path.exists(initial_file)) with open(initial_file, encoding='utf-8') as fp: content = fp.read() # Remove all whitespace to check for empty dependencies and operations content = content.replace(' ', '') self.assertIn('dependencies=[\n]', content) self.assertIn('operations=[\n]', content) @override_settings(MIGRATION_MODULES={"migrations": None}) def test_makemigrations_disabled_migrations_for_app(self): """ makemigrations raises a nice error when migrations are disabled for an app. """ msg = ( "Django can't create migrations for app 'migrations' because migrations " "have been disabled via the MIGRATION_MODULES setting." ) with self.assertRaisesMessage(ValueError, msg): call_command("makemigrations", "migrations", empty=True, verbosity=0) def test_makemigrations_no_changes_no_apps(self): """ makemigrations exits when there are no changes and no apps are specified. """ out = io.StringIO() call_command("makemigrations", stdout=out) self.assertIn("No changes detected", out.getvalue()) def test_makemigrations_no_changes(self): """ makemigrations exits when there are no changes to an app. """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_changes"): call_command("makemigrations", "migrations", stdout=out) self.assertIn("No changes detected in app 'migrations'", out.getvalue()) def test_makemigrations_no_apps_initial(self): """ makemigrations should detect initial is needed on empty migration modules if no app provided. 
""" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_empty"): call_command("makemigrations", stdout=out) self.assertIn("0001_initial.py", out.getvalue()) def test_makemigrations_no_init(self): """Migration directories without an __init__.py file are allowed.""" out = io.StringIO() with self.temporary_migration_module(module='migrations.test_migrations_no_init'): call_command('makemigrations', stdout=out) self.assertIn('0001_initial.py', out.getvalue()) def test_makemigrations_migrations_announce(self): """ makemigrations announces the migration at the default verbosity level. """ out = io.StringIO() with self.temporary_migration_module(): call_command("makemigrations", "migrations", stdout=out) self.assertIn("Migrations for 'migrations'", out.getvalue()) def test_makemigrations_no_common_ancestor(self): """ makemigrations fails to merge migrations with no common ancestor. """ with self.assertRaises(ValueError) as context: with self.temporary_migration_module(module="migrations.test_migrations_no_ancestor"): call_command("makemigrations", "migrations", merge=True) exception_message = str(context.exception) self.assertIn("Could not find common ancestor of", exception_message) self.assertIn("0002_second", exception_message) self.assertIn("0002_conflicting_second", exception_message) def test_makemigrations_interactive_reject(self): """ makemigrations enters and exits interactive mode properly. """ # Monkeypatch interactive questioner to auto reject with mock.patch('builtins.input', mock.Mock(return_value='N')): with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command("makemigrations", "migrations", name="merge", merge=True, interactive=True, verbosity=0) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertFalse(os.path.exists(merge_file)) def test_makemigrations_interactive_accept(self): """ makemigrations enters interactive mode and merges properly. 
""" # Monkeypatch interactive questioner to auto accept with mock.patch('builtins.input', mock.Mock(return_value='y')): out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command("makemigrations", "migrations", name="merge", merge=True, interactive=True, stdout=out) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertTrue(os.path.exists(merge_file)) self.assertIn("Created new merge migration", out.getvalue()) def test_makemigrations_default_merge_name(self): out = io.StringIO() with self.temporary_migration_module( module='migrations.test_migrations_conflict' ) as migration_dir: call_command('makemigrations', 'migrations', merge=True, interactive=False, stdout=out) merge_file = os.path.join( migration_dir, '0003_merge_0002_conflicting_second_0002_second.py', ) self.assertIs(os.path.exists(merge_file), True) self.assertIn('Created new merge migration %s' % merge_file, out.getvalue()) @mock.patch('django.db.migrations.utils.datetime') def test_makemigrations_auto_merge_name(self, mock_datetime): mock_datetime.datetime.now.return_value = datetime.datetime(2016, 1, 2, 3, 4) with mock.patch('builtins.input', mock.Mock(return_value='y')): out = io.StringIO() with self.temporary_migration_module( module='migrations.test_migrations_conflict_long_name' ) as migration_dir: call_command("makemigrations", "migrations", merge=True, interactive=True, stdout=out) merge_file = os.path.join(migration_dir, '0003_merge_20160102_0304.py') self.assertTrue(os.path.exists(merge_file)) self.assertIn("Created new merge migration", out.getvalue()) def test_makemigrations_non_interactive_not_null_addition(self): """ Non-interactive makemigrations fails when a default is missing on a new not-null field. """ class SillyModel(models.Model): silly_field = models.BooleanField(default=False) silly_int = models.IntegerField() class Meta: app_label = "migrations" with self.assertRaises(SystemExit): with self.temporary_migration_module(module="migrations.test_migrations_no_default"): call_command("makemigrations", "migrations", interactive=False) def test_makemigrations_non_interactive_not_null_alteration(self): """ Non-interactive makemigrations fails when a default is missing on a field changed to not-null. """ class Author(models.Model): name = models.CharField(max_length=255) slug = models.SlugField() age = models.IntegerField(default=0) class Meta: app_label = "migrations" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations"): call_command("makemigrations", "migrations", interactive=False, stdout=out) self.assertIn("Alter field slug on author", out.getvalue()) def test_makemigrations_non_interactive_no_model_rename(self): """ makemigrations adds and removes a possible model rename in non-interactive mode. """ class RenamedModel(models.Model): silly_field = models.BooleanField(default=False) class Meta: app_label = "migrations" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_default"): call_command("makemigrations", "migrations", interactive=False, stdout=out) self.assertIn("Delete model SillyModel", out.getvalue()) self.assertIn("Create model RenamedModel", out.getvalue()) def test_makemigrations_non_interactive_no_field_rename(self): """ makemigrations adds and removes a possible field rename in non-interactive mode. 
""" class SillyModel(models.Model): silly_rename = models.BooleanField(default=False) class Meta: app_label = "migrations" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_default"): call_command("makemigrations", "migrations", interactive=False, stdout=out) self.assertIn("Remove field silly_field from sillymodel", out.getvalue()) self.assertIn("Add field silly_rename to sillymodel", out.getvalue()) def test_makemigrations_handle_merge(self): """ makemigrations properly merges the conflicting migrations with --noinput. """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command("makemigrations", "migrations", name="merge", merge=True, interactive=False, stdout=out) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertTrue(os.path.exists(merge_file)) output = out.getvalue() self.assertIn("Merging migrations", output) self.assertIn("Branch 0002_second", output) self.assertIn("Branch 0002_conflicting_second", output) self.assertIn("Created new merge migration", output) def test_makemigration_merge_dry_run(self): """ makemigrations respects --dry-run option when fixing migration conflicts (#24427). """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command( "makemigrations", "migrations", name="merge", dry_run=True, merge=True, interactive=False, stdout=out, ) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertFalse(os.path.exists(merge_file)) output = out.getvalue() self.assertIn("Merging migrations", output) self.assertIn("Branch 0002_second", output) self.assertIn("Branch 0002_conflicting_second", output) self.assertNotIn("Created new merge migration", output) def test_makemigration_merge_dry_run_verbosity_3(self): """ `makemigrations --merge --dry-run` writes the merge migration file to stdout with `verbosity == 3` (#24427). """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command( "makemigrations", "migrations", name="merge", dry_run=True, merge=True, interactive=False, stdout=out, verbosity=3, ) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertFalse(os.path.exists(merge_file)) output = out.getvalue() self.assertIn("Merging migrations", output) self.assertIn("Branch 0002_second", output) self.assertIn("Branch 0002_conflicting_second", output) self.assertNotIn("Created new merge migration", output) # Additional output caused by verbosity 3 # The complete merge migration file that would be written self.assertIn("class Migration(migrations.Migration):", output) self.assertIn("dependencies = [", output) self.assertIn("('migrations', '0002_second')", output) self.assertIn("('migrations', '0002_conflicting_second')", output) self.assertIn("operations = [", output) self.assertIn("]", output) def test_makemigrations_dry_run(self): """ `makemigrations --dry-run` should not ask for defaults. 
""" class SillyModel(models.Model): silly_field = models.BooleanField(default=False) silly_date = models.DateField() # Added field without a default class Meta: app_label = "migrations" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_default"): call_command("makemigrations", "migrations", dry_run=True, stdout=out) # Output the expected changes directly, without asking for defaults self.assertIn("Add field silly_date to sillymodel", out.getvalue()) def test_makemigrations_dry_run_verbosity_3(self): """ Allow `makemigrations --dry-run` to output the migrations file to stdout (with verbosity == 3). """ class SillyModel(models.Model): silly_field = models.BooleanField(default=False) silly_char = models.CharField(default="") class Meta: app_label = "migrations" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_default"): call_command("makemigrations", "migrations", dry_run=True, stdout=out, verbosity=3) # Normal --dry-run output self.assertIn("- Add field silly_char to sillymodel", out.getvalue()) # Additional output caused by verbosity 3 # The complete migrations file that would be written self.assertIn("class Migration(migrations.Migration):", out.getvalue()) self.assertIn("dependencies = [", out.getvalue()) self.assertIn("('migrations', '0001_initial'),", out.getvalue()) self.assertIn("migrations.AddField(", out.getvalue()) self.assertIn("model_name='sillymodel',", out.getvalue()) self.assertIn("name='silly_char',", out.getvalue()) def test_makemigrations_migrations_modules_path_not_exist(self): """ makemigrations creates migrations when specifying a custom location for migration files using MIGRATION_MODULES if the custom path doesn't already exist. """ class SillyModel(models.Model): silly_field = models.BooleanField(default=False) class Meta: app_label = "migrations" out = io.StringIO() migration_module = "migrations.test_migrations_path_doesnt_exist.foo.bar" with self.temporary_migration_module(module=migration_module) as migration_dir: call_command("makemigrations", "migrations", stdout=out) # Migrations file is actually created in the expected path. initial_file = os.path.join(migration_dir, "0001_initial.py") self.assertTrue(os.path.exists(initial_file)) # Command output indicates the migration is created. self.assertIn(" - Create model SillyModel", out.getvalue()) @override_settings(MIGRATION_MODULES={'migrations': 'some.nonexistent.path'}) def test_makemigrations_migrations_modules_nonexistent_toplevel_package(self): msg = ( 'Could not locate an appropriate location to create migrations ' 'package some.nonexistent.path. Make sure the toplevel package ' 'exists and can be imported.' ) with self.assertRaisesMessage(ValueError, msg): call_command('makemigrations', 'migrations', empty=True, verbosity=0) def test_makemigrations_interactive_by_default(self): """ The user is prompted to merge by default if there are conflicts and merge is True. Answer negative to differentiate it from behavior when --noinput is specified. 
""" # Monkeypatch interactive questioner to auto reject out = io.StringIO() with mock.patch('builtins.input', mock.Mock(return_value='N')): with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir: call_command("makemigrations", "migrations", name="merge", merge=True, stdout=out) merge_file = os.path.join(migration_dir, '0003_merge.py') # This will fail if interactive is False by default self.assertFalse(os.path.exists(merge_file)) self.assertNotIn("Created new merge migration", out.getvalue()) @override_settings( INSTALLED_APPS=[ "migrations", "migrations.migrations_test_apps.unspecified_app_with_conflict"]) def test_makemigrations_unspecified_app_with_conflict_no_merge(self): """ makemigrations does not raise a CommandError when an unspecified app has conflicting migrations. """ with self.temporary_migration_module(module="migrations.test_migrations_no_changes"): call_command("makemigrations", "migrations", merge=False, verbosity=0) @override_settings( INSTALLED_APPS=[ "migrations.migrations_test_apps.migrated_app", "migrations.migrations_test_apps.unspecified_app_with_conflict"]) def test_makemigrations_unspecified_app_with_conflict_merge(self): """ makemigrations does not create a merge for an unspecified app even if it has conflicting migrations. """ # Monkeypatch interactive questioner to auto accept with mock.patch('builtins.input', mock.Mock(return_value='y')): out = io.StringIO() with self.temporary_migration_module(app_label="migrated_app") as migration_dir: call_command("makemigrations", "migrated_app", name="merge", merge=True, interactive=True, stdout=out) merge_file = os.path.join(migration_dir, '0003_merge.py') self.assertFalse(os.path.exists(merge_file)) self.assertIn("No conflicts detected to merge.", out.getvalue()) @override_settings( INSTALLED_APPS=[ "migrations.migrations_test_apps.migrated_app", "migrations.migrations_test_apps.conflicting_app_with_dependencies"]) def test_makemigrations_merge_dont_output_dependency_operations(self): """ makemigrations --merge does not output any operations from apps that don't belong to a given app. """ # Monkeypatch interactive questioner to auto accept with mock.patch('builtins.input', mock.Mock(return_value='N')): out = io.StringIO() with mock.patch('django.core.management.color.supports_color', lambda *args: False): call_command( "makemigrations", "conflicting_app_with_dependencies", merge=True, interactive=True, stdout=out ) self.assertEqual( out.getvalue().lower(), 'merging conflicting_app_with_dependencies\n' ' branch 0002_conflicting_second\n' ' - create model something\n' ' branch 0002_second\n' ' - delete model tribble\n' ' - remove field silly_field from author\n' ' - add field rating to author\n' ' - create model book\n' ) def test_makemigrations_with_custom_name(self): """ makemigrations --name generate a custom migration name. 
""" with self.temporary_migration_module() as migration_dir: def cmd(migration_count, migration_name, *args): call_command("makemigrations", "migrations", "--verbosity", "0", "--name", migration_name, *args) migration_file = os.path.join(migration_dir, "%s_%s.py" % (migration_count, migration_name)) # Check for existing migration file in migration folder self.assertTrue(os.path.exists(migration_file)) with open(migration_file, encoding='utf-8') as fp: content = fp.read() content = content.replace(" ", "") return content # generate an initial migration migration_name_0001 = "my_initial_migration" content = cmd("0001", migration_name_0001) self.assertIn("dependencies=[\n]", content) # importlib caches os.listdir() on some platforms like macOS # (#23850). if hasattr(importlib, 'invalidate_caches'): importlib.invalidate_caches() # generate an empty migration migration_name_0002 = "my_custom_migration" content = cmd("0002", migration_name_0002, "--empty") self.assertIn("dependencies=[\n('migrations','0001_%s'),\n]" % migration_name_0001, content) self.assertIn("operations=[\n]", content) def test_makemigrations_with_invalid_custom_name(self): msg = 'The migration name must be a valid Python identifier.' with self.assertRaisesMessage(CommandError, msg): call_command('makemigrations', 'migrations', '--name', 'invalid name', '--empty') def test_makemigrations_check(self): """ makemigrations --check should exit with a non-zero status when there are changes to an app requiring migrations. """ with self.temporary_migration_module(): with self.assertRaises(SystemExit): call_command("makemigrations", "--check", "migrations", verbosity=0) with self.temporary_migration_module(module="migrations.test_migrations_no_changes"): call_command("makemigrations", "--check", "migrations", verbosity=0) def test_makemigrations_migration_path_output(self): """ makemigrations should print the relative paths to the migrations unless they are outside of the current tree, in which case the absolute path should be shown. """ out = io.StringIO() apps.register_model('migrations', UnicodeModel) with self.temporary_migration_module() as migration_dir: call_command("makemigrations", "migrations", stdout=out) self.assertIn(os.path.join(migration_dir, '0001_initial.py'), out.getvalue()) def test_makemigrations_migration_path_output_valueerror(self): """ makemigrations prints the absolute path if os.path.relpath() raises a ValueError when it's impossible to obtain a relative path, e.g. on Windows if Django is installed on a different drive than where the migration files are created. """ out = io.StringIO() with self.temporary_migration_module() as migration_dir: with mock.patch('os.path.relpath', side_effect=ValueError): call_command('makemigrations', 'migrations', stdout=out) self.assertIn(os.path.join(migration_dir, '0001_initial.py'), out.getvalue()) def test_makemigrations_inconsistent_history(self): """ makemigrations should raise InconsistentMigrationHistory exception if there are some migrations applied before their dependencies. 
""" recorder = MigrationRecorder(connection) recorder.record_applied('migrations', '0002_second') msg = "Migration migrations.0002_second is applied before its dependency migrations.0001_initial" with self.temporary_migration_module(module="migrations.test_migrations"): with self.assertRaisesMessage(InconsistentMigrationHistory, msg): call_command("makemigrations") def test_makemigrations_inconsistent_history_db_failure(self): msg = ( "Got an error checking a consistent migration history performed " "for database connection 'default': could not connect to server" ) with mock.patch( 'django.db.migrations.loader.MigrationLoader.check_consistent_history', side_effect=OperationalError('could not connect to server'), ): with self.temporary_migration_module(): with self.assertWarns(RuntimeWarning) as cm: call_command('makemigrations', verbosity=0) self.assertEqual(str(cm.warning), msg) @mock.patch('builtins.input', return_value='1') @mock.patch('django.db.migrations.questioner.sys.stdin', mock.MagicMock(encoding=sys.getdefaultencoding())) def test_makemigrations_auto_now_add_interactive(self, *args): """ makemigrations prompts the user when adding auto_now_add to an existing model. """ class Entry(models.Model): title = models.CharField(max_length=255) creation_date = models.DateTimeField(auto_now_add=True) class Meta: app_label = 'migrations' # Monkeypatch interactive questioner to auto accept with mock.patch('django.db.migrations.questioner.sys.stdout', new_callable=io.StringIO) as prompt_stdout: out = io.StringIO() with self.temporary_migration_module(module='migrations.test_auto_now_add'): call_command('makemigrations', 'migrations', interactive=True, stdout=out) output = out.getvalue() prompt_output = prompt_stdout.getvalue() self.assertIn("You can accept the default 'timezone.now' by pressing 'Enter'", prompt_output) self.assertIn("Add field creation_date to entry", output) class SquashMigrationsTests(MigrationTestBase): """ Tests running the squashmigrations command. """ def test_squashmigrations_squashes(self): """ squashmigrations squashes migrations. """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations") as migration_dir: call_command('squashmigrations', 'migrations', '0002', interactive=False, stdout=out, no_color=True) squashed_migration_file = os.path.join(migration_dir, "0001_squashed_0002_second.py") self.assertTrue(os.path.exists(squashed_migration_file)) self.assertEqual( out.getvalue(), 'Will squash the following migrations:\n' ' - 0001_initial\n' ' - 0002_second\n' 'Optimizing...\n' ' Optimized from 8 operations to 2 operations.\n' 'Created new squashed migration %s\n' ' You should commit this migration but leave the old ones in place;\n' ' the new migration will be used for new installs. Once you are sure\n' ' all instances of the codebase have applied the migrations you squashed,\n' ' you can delete them.\n' % squashed_migration_file ) def test_squashmigrations_initial_attribute(self): with self.temporary_migration_module(module="migrations.test_migrations") as migration_dir: call_command("squashmigrations", "migrations", "0002", interactive=False, verbosity=0) squashed_migration_file = os.path.join(migration_dir, "0001_squashed_0002_second.py") with open(squashed_migration_file, encoding='utf-8') as fp: content = fp.read() self.assertIn("initial = True", content) def test_squashmigrations_optimizes(self): """ squashmigrations optimizes operations. 
""" out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations"): call_command("squashmigrations", "migrations", "0002", interactive=False, verbosity=1, stdout=out) self.assertIn("Optimized from 8 operations to 2 operations.", out.getvalue()) def test_ticket_23799_squashmigrations_no_optimize(self): """ squashmigrations --no-optimize doesn't optimize operations. """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations"): call_command("squashmigrations", "migrations", "0002", interactive=False, verbosity=1, no_optimize=True, stdout=out) self.assertIn("Skipping optimization", out.getvalue()) def test_squashmigrations_valid_start(self): """ squashmigrations accepts a starting migration. """ out = io.StringIO() with self.temporary_migration_module(module="migrations.test_migrations_no_changes") as migration_dir: call_command("squashmigrations", "migrations", "0002", "0003", interactive=False, verbosity=1, stdout=out) squashed_migration_file = os.path.join(migration_dir, "0002_second_squashed_0003_third.py") with open(squashed_migration_file, encoding='utf-8') as fp: content = fp.read() self.assertIn(" ('migrations', '0001_initial')", content) self.assertNotIn("initial = True", content) out = out.getvalue() self.assertNotIn(" - 0001_initial", out) self.assertIn(" - 0002_second", out) self.assertIn(" - 0003_third", out) def test_squashmigrations_invalid_start(self): """ squashmigrations doesn't accept a starting migration after the ending migration. """ with self.temporary_migration_module(module="migrations.test_migrations_no_changes"): msg = ( "The migration 'migrations.0003_third' cannot be found. Maybe " "it comes after the migration 'migrations.0002_second'" ) with self.assertRaisesMessage(CommandError, msg): call_command("squashmigrations", "migrations", "0003", "0002", interactive=False, verbosity=0) def test_squashed_name_with_start_migration_name(self): """--squashed-name specifies the new migration's name.""" squashed_name = 'squashed_name' with self.temporary_migration_module(module='migrations.test_migrations') as migration_dir: call_command( 'squashmigrations', 'migrations', '0001', '0002', squashed_name=squashed_name, interactive=False, verbosity=0, ) squashed_migration_file = os.path.join(migration_dir, '0001_%s.py' % squashed_name) self.assertTrue(os.path.exists(squashed_migration_file)) def test_squashed_name_without_start_migration_name(self): """--squashed-name also works if a start migration is omitted.""" squashed_name = 'squashed_name' with self.temporary_migration_module(module="migrations.test_migrations") as migration_dir: call_command( 'squashmigrations', 'migrations', '0001', squashed_name=squashed_name, interactive=False, verbosity=0, ) squashed_migration_file = os.path.join(migration_dir, '0001_%s.py' % squashed_name) self.assertTrue(os.path.exists(squashed_migration_file)) class AppLabelErrorTests(TestCase): """ This class inherits TestCase because MigrationTestBase uses `available_apps = ['migrations']` which means that it's the only installed app. 'django.contrib.auth' must be in INSTALLED_APPS for some of these tests. """ nonexistent_app_error = "No installed app with label 'nonexistent_app'." did_you_mean_auth_error = ( "No installed app with label 'django.contrib.auth'. Did you mean " "'auth'?" 
) def test_makemigrations_nonexistent_app_label(self): err = io.StringIO() with self.assertRaises(SystemExit): call_command('makemigrations', 'nonexistent_app', stderr=err) self.assertIn(self.nonexistent_app_error, err.getvalue()) def test_makemigrations_app_name_specified_as_label(self): err = io.StringIO() with self.assertRaises(SystemExit): call_command('makemigrations', 'django.contrib.auth', stderr=err) self.assertIn(self.did_you_mean_auth_error, err.getvalue()) def test_migrate_nonexistent_app_label(self): with self.assertRaisesMessage(CommandError, self.nonexistent_app_error): call_command('migrate', 'nonexistent_app') def test_migrate_app_name_specified_as_label(self): with self.assertRaisesMessage(CommandError, self.did_you_mean_auth_error): call_command('migrate', 'django.contrib.auth') def test_showmigrations_nonexistent_app_label(self): err = io.StringIO() with self.assertRaises(SystemExit): call_command('showmigrations', 'nonexistent_app', stderr=err) self.assertIn(self.nonexistent_app_error, err.getvalue()) def test_showmigrations_app_name_specified_as_label(self): err = io.StringIO() with self.assertRaises(SystemExit): call_command('showmigrations', 'django.contrib.auth', stderr=err) self.assertIn(self.did_you_mean_auth_error, err.getvalue()) def test_sqlmigrate_nonexistent_app_label(self): with self.assertRaisesMessage(CommandError, self.nonexistent_app_error): call_command('sqlmigrate', 'nonexistent_app', '0002') def test_sqlmigrate_app_name_specified_as_label(self): with self.assertRaisesMessage(CommandError, self.did_you_mean_auth_error): call_command('sqlmigrate', 'django.contrib.auth', '0002') def test_squashmigrations_nonexistent_app_label(self): with self.assertRaisesMessage(CommandError, self.nonexistent_app_error): call_command('squashmigrations', 'nonexistent_app', '0002') def test_squashmigrations_app_name_specified_as_label(self): with self.assertRaisesMessage(CommandError, self.did_you_mean_auth_error): call_command('squashmigrations', 'django.contrib.auth', '0002')
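

# A minimal sketch, not part of Django's test suite, of the pattern the tests
# above rely on: call_command() writes to any file-like object passed as
# stdout, so output is captured with io.StringIO() instead of a real console.
# The 'migrations' app label is an assumption carried over from these tests.
def _example_capture_makemigrations_output():
    out = io.StringIO()
    call_command('makemigrations', 'migrations', dry_run=True, stdout=out)
    return out.getvalue()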
1af5d79e48ea6cd39113885b6b06db13299e77f3ec8608b5cf4886e7d2999e84
import datetime import decimal import enum import functools import math import os import pathlib import re import sys import uuid from unittest import mock import custom_migration_operations.more_operations import custom_migration_operations.operations from django import get_version from django.conf import SettingsReference, settings from django.core.validators import EmailValidator, RegexValidator from django.db import migrations, models from django.db.migrations.serializer import BaseSerializer from django.db.migrations.writer import MigrationWriter, OperationWriter from django.test import SimpleTestCase from django.utils.deconstruct import deconstructible from django.utils.functional import SimpleLazyObject from django.utils.timezone import get_default_timezone, get_fixed_timezone, utc from django.utils.translation import gettext_lazy as _ from .models import FoodManager, FoodQuerySet class DeconstructibleInstances: def deconstruct(self): return ('DeconstructibleInstances', [], {}) class Money(decimal.Decimal): def deconstruct(self): return ( '%s.%s' % (self.__class__.__module__, self.__class__.__name__), [str(self)], {} ) class TestModel1: def upload_to(self): return '/somewhere/dynamic/' thing = models.FileField(upload_to=upload_to) class TextEnum(enum.Enum): A = 'a-value' B = 'value-b' class TextTranslatedEnum(enum.Enum): A = _('a-value') B = _('value-b') class BinaryEnum(enum.Enum): A = b'a-value' B = b'value-b' class IntEnum(enum.IntEnum): A = 1 B = 2 class OperationWriterTests(SimpleTestCase): def test_empty_signature(self): operation = custom_migration_operations.operations.TestOperation() buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.TestOperation(\n' '),' ) def test_args_signature(self): operation = custom_migration_operations.operations.ArgsOperation(1, 2) buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.ArgsOperation(\n' ' arg1=1,\n' ' arg2=2,\n' '),' ) def test_kwargs_signature(self): operation = custom_migration_operations.operations.KwargsOperation(kwarg1=1) buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.KwargsOperation(\n' ' kwarg1=1,\n' '),' ) def test_args_kwargs_signature(self): operation = custom_migration_operations.operations.ArgsKwargsOperation(1, 2, kwarg2=4) buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.ArgsKwargsOperation(\n' ' arg1=1,\n' ' arg2=2,\n' ' kwarg2=4,\n' '),' ) def test_nested_args_signature(self): operation = custom_migration_operations.operations.ArgsOperation( custom_migration_operations.operations.ArgsOperation(1, 2), custom_migration_operations.operations.KwargsOperation(kwarg1=3, kwarg2=4) ) buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.ArgsOperation(\n' ' arg1=custom_migration_operations.operations.ArgsOperation(\n' ' arg1=1,\n' ' 
arg2=2,\n' ' ),\n' ' arg2=custom_migration_operations.operations.KwargsOperation(\n' ' kwarg1=3,\n' ' kwarg2=4,\n' ' ),\n' '),' ) def test_multiline_args_signature(self): operation = custom_migration_operations.operations.ArgsOperation("test\n arg1", "test\narg2") buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, "custom_migration_operations.operations.ArgsOperation(\n" " arg1='test\\n arg1',\n" " arg2='test\\narg2',\n" ")," ) def test_expand_args_signature(self): operation = custom_migration_operations.operations.ExpandArgsOperation([1, 2]) buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.ExpandArgsOperation(\n' ' arg=[\n' ' 1,\n' ' 2,\n' ' ],\n' '),' ) def test_nested_operation_expand_args_signature(self): operation = custom_migration_operations.operations.ExpandArgsOperation( arg=[ custom_migration_operations.operations.KwargsOperation( kwarg1=1, kwarg2=2, ), ] ) buff, imports = OperationWriter(operation, indentation=0).serialize() self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual( buff, 'custom_migration_operations.operations.ExpandArgsOperation(\n' ' arg=[\n' ' custom_migration_operations.operations.KwargsOperation(\n' ' kwarg1=1,\n' ' kwarg2=2,\n' ' ),\n' ' ],\n' '),' ) class WriterTests(SimpleTestCase): """ Tests the migration writer (makes migration files from Migration instances) """ class NestedEnum(enum.IntEnum): A = 1 B = 2 class NestedChoices(models.TextChoices): X = 'X', 'X value' Y = 'Y', 'Y value' def safe_exec(self, string, value=None): d = {} try: exec(string, globals(), d) except Exception as e: if value: self.fail("Could not exec %r (from value %r): %s" % (string.strip(), value, e)) else: self.fail("Could not exec %r: %s" % (string.strip(), e)) return d def serialize_round_trip(self, value): string, imports = MigrationWriter.serialize(value) return self.safe_exec("%s\ntest_value_result = %s" % ("\n".join(imports), string), value)['test_value_result'] def assertSerializedEqual(self, value): self.assertEqual(self.serialize_round_trip(value), value) def assertSerializedResultEqual(self, value, target): self.assertEqual(MigrationWriter.serialize(value), target) def assertSerializedFieldEqual(self, value): new_value = self.serialize_round_trip(value) self.assertEqual(value.__class__, new_value.__class__) self.assertEqual(value.max_length, new_value.max_length) self.assertEqual(value.null, new_value.null) self.assertEqual(value.unique, new_value.unique) def test_serialize_numbers(self): self.assertSerializedEqual(1) self.assertSerializedEqual(1.2) self.assertTrue(math.isinf(self.serialize_round_trip(float("inf")))) self.assertTrue(math.isinf(self.serialize_round_trip(float("-inf")))) self.assertTrue(math.isnan(self.serialize_round_trip(float("nan")))) self.assertSerializedEqual(decimal.Decimal('1.3')) self.assertSerializedResultEqual( decimal.Decimal('1.3'), ("Decimal('1.3')", {'from decimal import Decimal'}) ) self.assertSerializedEqual(Money('1.3')) self.assertSerializedResultEqual( Money('1.3'), ("migrations.test_writer.Money('1.3')", {'import migrations.test_writer'}) ) def test_serialize_constants(self): self.assertSerializedEqual(None) self.assertSerializedEqual(True) self.assertSerializedEqual(False) def test_serialize_strings(self): 
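        # Bytes values serialize to b'...' literals and text to '...' literals;
        # the explicit serialize() calls pin down the exact literal forms.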
self.assertSerializedEqual(b"foobar") string, imports = MigrationWriter.serialize(b"foobar") self.assertEqual(string, "b'foobar'") self.assertSerializedEqual("föobár") string, imports = MigrationWriter.serialize("foobar") self.assertEqual(string, "'foobar'") def test_serialize_multiline_strings(self): self.assertSerializedEqual(b"foo\nbar") string, imports = MigrationWriter.serialize(b"foo\nbar") self.assertEqual(string, "b'foo\\nbar'") self.assertSerializedEqual("föo\nbár") string, imports = MigrationWriter.serialize("foo\nbar") self.assertEqual(string, "'foo\\nbar'") def test_serialize_collections(self): self.assertSerializedEqual({1: 2}) self.assertSerializedEqual(["a", 2, True, None]) self.assertSerializedEqual({2, 3, "eighty"}) self.assertSerializedEqual({"lalalala": ["yeah", "no", "maybe"]}) self.assertSerializedEqual(_('Hello')) def test_serialize_builtin_types(self): self.assertSerializedEqual([list, tuple, dict, set, frozenset]) self.assertSerializedResultEqual( [list, tuple, dict, set, frozenset], ("[list, tuple, dict, set, frozenset]", set()) ) def test_serialize_lazy_objects(self): pattern = re.compile(r'^foo$') lazy_pattern = SimpleLazyObject(lambda: pattern) self.assertEqual(self.serialize_round_trip(lazy_pattern), pattern) def test_serialize_enums(self): self.assertSerializedResultEqual( TextEnum.A, ("migrations.test_writer.TextEnum['A']", {'import migrations.test_writer'}) ) self.assertSerializedResultEqual( TextTranslatedEnum.A, ("migrations.test_writer.TextTranslatedEnum['A']", {'import migrations.test_writer'}) ) self.assertSerializedResultEqual( BinaryEnum.A, ("migrations.test_writer.BinaryEnum['A']", {'import migrations.test_writer'}) ) self.assertSerializedResultEqual( IntEnum.B, ("migrations.test_writer.IntEnum['B']", {'import migrations.test_writer'}) ) self.assertSerializedResultEqual( self.NestedEnum.A, ( "migrations.test_writer.WriterTests.NestedEnum['A']", {'import migrations.test_writer'}, ), ) self.assertSerializedEqual(self.NestedEnum.A) field = models.CharField(default=TextEnum.B, choices=[(m.value, m) for m in TextEnum]) string = MigrationWriter.serialize(field)[0] self.assertEqual( string, "models.CharField(choices=[" "('a-value', migrations.test_writer.TextEnum['A']), " "('value-b', migrations.test_writer.TextEnum['B'])], " "default=migrations.test_writer.TextEnum['B'])" ) field = models.CharField( default=TextTranslatedEnum.A, choices=[(m.value, m) for m in TextTranslatedEnum], ) string = MigrationWriter.serialize(field)[0] self.assertEqual( string, "models.CharField(choices=[" "('a-value', migrations.test_writer.TextTranslatedEnum['A']), " "('value-b', migrations.test_writer.TextTranslatedEnum['B'])], " "default=migrations.test_writer.TextTranslatedEnum['A'])" ) field = models.CharField(default=BinaryEnum.B, choices=[(m.value, m) for m in BinaryEnum]) string = MigrationWriter.serialize(field)[0] self.assertEqual( string, "models.CharField(choices=[" "(b'a-value', migrations.test_writer.BinaryEnum['A']), " "(b'value-b', migrations.test_writer.BinaryEnum['B'])], " "default=migrations.test_writer.BinaryEnum['B'])" ) field = models.IntegerField(default=IntEnum.A, choices=[(m.value, m) for m in IntEnum]) string = MigrationWriter.serialize(field)[0] self.assertEqual( string, "models.IntegerField(choices=[" "(1, migrations.test_writer.IntEnum['A']), " "(2, migrations.test_writer.IntEnum['B'])], " "default=migrations.test_writer.IntEnum['A'])" ) def test_serialize_choices(self): class TextChoices(models.TextChoices): A = 'A', 'A value' B = 'B', 'B value' class 
IntegerChoices(models.IntegerChoices): A = 1, 'One' B = 2, 'Two' class DateChoices(datetime.date, models.Choices): DATE_1 = 1969, 7, 20, 'First date' DATE_2 = 1969, 11, 19, 'Second date' self.assertSerializedResultEqual(TextChoices.A, ("'A'", set())) self.assertSerializedResultEqual(IntegerChoices.A, ('1', set())) self.assertSerializedResultEqual( DateChoices.DATE_1, ('datetime.date(1969, 7, 20)', {'import datetime'}), ) field = models.CharField(default=TextChoices.B, choices=TextChoices.choices) string = MigrationWriter.serialize(field)[0] self.assertEqual( string, "models.CharField(choices=[('A', 'A value'), ('B', 'B value')], " "default='B')", ) field = models.IntegerField(default=IntegerChoices.B, choices=IntegerChoices.choices) string = MigrationWriter.serialize(field)[0] self.assertEqual( string, "models.IntegerField(choices=[(1, 'One'), (2, 'Two')], default=2)", ) field = models.DateField(default=DateChoices.DATE_2, choices=DateChoices.choices) string = MigrationWriter.serialize(field)[0] self.assertEqual( string, "models.DateField(choices=[" "(datetime.date(1969, 7, 20), 'First date'), " "(datetime.date(1969, 11, 19), 'Second date')], " "default=datetime.date(1969, 11, 19))" ) def test_serialize_nested_class(self): for nested_cls in [self.NestedEnum, self.NestedChoices]: cls_name = nested_cls.__name__ with self.subTest(cls_name): self.assertSerializedResultEqual( nested_cls, ( "migrations.test_writer.WriterTests.%s" % cls_name, {'import migrations.test_writer'}, ), ) def test_serialize_uuid(self): self.assertSerializedEqual(uuid.uuid1()) self.assertSerializedEqual(uuid.uuid4()) uuid_a = uuid.UUID('5c859437-d061-4847-b3f7-e6b78852f8c8') uuid_b = uuid.UUID('c7853ec1-2ea3-4359-b02d-b54e8f1bcee2') self.assertSerializedResultEqual( uuid_a, ("uuid.UUID('5c859437-d061-4847-b3f7-e6b78852f8c8')", {'import uuid'}) ) self.assertSerializedResultEqual( uuid_b, ("uuid.UUID('c7853ec1-2ea3-4359-b02d-b54e8f1bcee2')", {'import uuid'}) ) field = models.UUIDField(choices=((uuid_a, 'UUID A'), (uuid_b, 'UUID B')), default=uuid_a) string = MigrationWriter.serialize(field)[0] self.assertEqual( string, "models.UUIDField(choices=[" "(uuid.UUID('5c859437-d061-4847-b3f7-e6b78852f8c8'), 'UUID A'), " "(uuid.UUID('c7853ec1-2ea3-4359-b02d-b54e8f1bcee2'), 'UUID B')], " "default=uuid.UUID('5c859437-d061-4847-b3f7-e6b78852f8c8'))" ) def test_serialize_pathlib(self): # Pure path objects work in all platforms. self.assertSerializedEqual(pathlib.PurePosixPath()) self.assertSerializedEqual(pathlib.PureWindowsPath()) path = pathlib.PurePosixPath('/path/file.txt') expected = ("pathlib.PurePosixPath('/path/file.txt')", {'import pathlib'}) self.assertSerializedResultEqual(path, expected) path = pathlib.PureWindowsPath('A:\\File.txt') expected = ("pathlib.PureWindowsPath('A:/File.txt')", {'import pathlib'}) self.assertSerializedResultEqual(path, expected) # Concrete path objects work on supported platforms. 
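        # (WindowsPath can only be instantiated on Windows and PosixPath only
        # on POSIX; pathlib raises NotImplementedError otherwise, hence the
        # platform check below.)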
if sys.platform == 'win32': self.assertSerializedEqual(pathlib.WindowsPath.cwd()) path = pathlib.WindowsPath('A:\\File.txt') expected = ("pathlib.PureWindowsPath('A:/File.txt')", {'import pathlib'}) self.assertSerializedResultEqual(path, expected) else: self.assertSerializedEqual(pathlib.PosixPath.cwd()) path = pathlib.PosixPath('/path/file.txt') expected = ("pathlib.PurePosixPath('/path/file.txt')", {'import pathlib'}) self.assertSerializedResultEqual(path, expected) field = models.FilePathField(path=pathlib.PurePosixPath('/home/user')) string, imports = MigrationWriter.serialize(field) self.assertEqual( string, "models.FilePathField(path=pathlib.PurePosixPath('/home/user'))", ) self.assertIn('import pathlib', imports) def test_serialize_path_like(self): with os.scandir(os.path.dirname(__file__)) as entries: path_like = list(entries)[0] expected = (repr(path_like.path), {}) self.assertSerializedResultEqual(path_like, expected) field = models.FilePathField(path=path_like) string = MigrationWriter.serialize(field)[0] self.assertEqual(string, 'models.FilePathField(path=%r)' % path_like.path) def test_serialize_functions(self): with self.assertRaisesMessage(ValueError, 'Cannot serialize function: lambda'): self.assertSerializedEqual(lambda x: 42) self.assertSerializedEqual(models.SET_NULL) string, imports = MigrationWriter.serialize(models.SET(42)) self.assertEqual(string, 'models.SET(42)') self.serialize_round_trip(models.SET(42)) def test_serialize_datetime(self): self.assertSerializedEqual(datetime.datetime.now()) self.assertSerializedEqual(datetime.datetime.now) self.assertSerializedEqual(datetime.datetime.today()) self.assertSerializedEqual(datetime.datetime.today) self.assertSerializedEqual(datetime.date.today()) self.assertSerializedEqual(datetime.date.today) self.assertSerializedEqual(datetime.datetime.now().time()) self.assertSerializedEqual(datetime.datetime(2014, 1, 1, 1, 1, tzinfo=get_default_timezone())) self.assertSerializedEqual(datetime.datetime(2013, 12, 31, 22, 1, tzinfo=get_fixed_timezone(180))) self.assertSerializedResultEqual( datetime.datetime(2014, 1, 1, 1, 1), ("datetime.datetime(2014, 1, 1, 1, 1)", {'import datetime'}) ) for tzinfo in (utc, datetime.timezone.utc): with self.subTest(tzinfo=tzinfo): self.assertSerializedResultEqual( datetime.datetime(2012, 1, 1, 1, 1, tzinfo=tzinfo), ( "datetime.datetime(2012, 1, 1, 1, 1, tzinfo=utc)", {'import datetime', 'from django.utils.timezone import utc'}, ) ) def test_serialize_fields(self): self.assertSerializedFieldEqual(models.CharField(max_length=255)) self.assertSerializedResultEqual( models.CharField(max_length=255), ("models.CharField(max_length=255)", {"from django.db import models"}) ) self.assertSerializedFieldEqual(models.TextField(null=True, blank=True)) self.assertSerializedResultEqual( models.TextField(null=True, blank=True), ("models.TextField(blank=True, null=True)", {'from django.db import models'}) ) def test_serialize_settings(self): self.assertSerializedEqual(SettingsReference(settings.AUTH_USER_MODEL, "AUTH_USER_MODEL")) self.assertSerializedResultEqual( SettingsReference("someapp.model", "AUTH_USER_MODEL"), ("settings.AUTH_USER_MODEL", {"from django.conf import settings"}) ) def test_serialize_iterators(self): self.assertSerializedResultEqual( ((x, x * x) for x in range(3)), ("((0, 0), (1, 1), (2, 4))", set()) ) def test_serialize_compiled_regex(self): """ Make sure compiled regex can be serialized. 
""" regex = re.compile(r'^\w+$') self.assertSerializedEqual(regex) def test_serialize_class_based_validators(self): """ Ticket #22943: Test serialization of class-based validators, including compiled regexes. """ validator = RegexValidator(message="hello") string = MigrationWriter.serialize(validator)[0] self.assertEqual(string, "django.core.validators.RegexValidator(message='hello')") self.serialize_round_trip(validator) # Test with a compiled regex. validator = RegexValidator(regex=re.compile(r'^\w+$')) string = MigrationWriter.serialize(validator)[0] self.assertEqual(string, "django.core.validators.RegexValidator(regex=re.compile('^\\\\w+$'))") self.serialize_round_trip(validator) # Test a string regex with flag validator = RegexValidator(r'^[0-9]+$', flags=re.S) string = MigrationWriter.serialize(validator)[0] self.assertEqual(string, "django.core.validators.RegexValidator('^[0-9]+$', flags=re.RegexFlag['DOTALL'])") self.serialize_round_trip(validator) # Test message and code validator = RegexValidator('^[-a-zA-Z0-9_]+$', 'Invalid', 'invalid') string = MigrationWriter.serialize(validator)[0] self.assertEqual(string, "django.core.validators.RegexValidator('^[-a-zA-Z0-9_]+$', 'Invalid', 'invalid')") self.serialize_round_trip(validator) # Test with a subclass. validator = EmailValidator(message="hello") string = MigrationWriter.serialize(validator)[0] self.assertEqual(string, "django.core.validators.EmailValidator(message='hello')") self.serialize_round_trip(validator) validator = deconstructible(path="migrations.test_writer.EmailValidator")(EmailValidator)(message="hello") string = MigrationWriter.serialize(validator)[0] self.assertEqual(string, "migrations.test_writer.EmailValidator(message='hello')") validator = deconstructible(path="custom.EmailValidator")(EmailValidator)(message="hello") with self.assertRaisesMessage(ImportError, "No module named 'custom'"): MigrationWriter.serialize(validator) validator = deconstructible(path="django.core.validators.EmailValidator2")(EmailValidator)(message="hello") with self.assertRaisesMessage(ValueError, "Could not find object EmailValidator2 in django.core.validators."): MigrationWriter.serialize(validator) def test_serialize_empty_nonempty_tuple(self): """ Ticket #22679: makemigrations generates invalid code for (an empty tuple) default_permissions = () """ empty_tuple = () one_item_tuple = ('a',) many_items_tuple = ('a', 'b', 'c') self.assertSerializedEqual(empty_tuple) self.assertSerializedEqual(one_item_tuple) self.assertSerializedEqual(many_items_tuple) def test_serialize_range(self): string, imports = MigrationWriter.serialize(range(1, 5)) self.assertEqual(string, 'range(1, 5)') self.assertEqual(imports, set()) def test_serialize_builtins(self): string, imports = MigrationWriter.serialize(range) self.assertEqual(string, 'range') self.assertEqual(imports, set()) def test_serialize_unbound_method_reference(self): """An unbound method used within a class body can be serialized.""" self.serialize_round_trip(TestModel1.thing) def test_serialize_local_function_reference(self): """A reference in a local scope can't be serialized.""" class TestModel2: def upload_to(self): return "somewhere dynamic" thing = models.FileField(upload_to=upload_to) with self.assertRaisesMessage(ValueError, 'Could not find function upload_to in migrations.test_writer'): self.serialize_round_trip(TestModel2.thing) def test_serialize_managers(self): self.assertSerializedEqual(models.Manager()) self.assertSerializedResultEqual( FoodQuerySet.as_manager(), 
            ('migrations.models.FoodQuerySet.as_manager()', {'import migrations.models'})
        )
        self.assertSerializedEqual(FoodManager('a', 'b'))
        self.assertSerializedEqual(FoodManager('x', 'y', c=3, d=4))

    def test_serialize_frozensets(self):
        self.assertSerializedEqual(frozenset())
        self.assertSerializedEqual(frozenset("let it go"))

    def test_serialize_set(self):
        self.assertSerializedEqual(set())
        self.assertSerializedResultEqual(set(), ('set()', set()))
        self.assertSerializedEqual({'a'})
        self.assertSerializedResultEqual({'a'}, ("{'a'}", set()))

    def test_serialize_timedelta(self):
        self.assertSerializedEqual(datetime.timedelta())
        self.assertSerializedEqual(datetime.timedelta(minutes=42))

    def test_serialize_functools_partial(self):
        value = functools.partial(datetime.timedelta, 1, seconds=2)
        result = self.serialize_round_trip(value)
        self.assertEqual(result.func, value.func)
        self.assertEqual(result.args, value.args)
        self.assertEqual(result.keywords, value.keywords)

    def test_serialize_functools_partialmethod(self):
        value = functools.partialmethod(datetime.timedelta, 1, seconds=2)
        result = self.serialize_round_trip(value)
        self.assertIsInstance(result, functools.partialmethod)
        self.assertEqual(result.func, value.func)
        self.assertEqual(result.args, value.args)
        self.assertEqual(result.keywords, value.keywords)

    def test_serialize_type_none(self):
        self.assertSerializedEqual(type(None))

    def test_serialize_type_model(self):
        self.assertSerializedEqual(models.Model)
        self.assertSerializedResultEqual(
            models.Model,
            ("models.Model", {'from django.db import models'}),
        )

    def test_simple_migration(self):
        """
        Tests serializing a simple migration.
        """
        fields = {
            'charfield': models.DateTimeField(default=datetime.datetime.now),
            'datetimefield': models.DateTimeField(default=datetime.datetime.now),
        }

        options = {
            'verbose_name': 'My model',
            'verbose_name_plural': 'My models',
        }

        migration = type("Migration", (migrations.Migration,), {
            "operations": [
                migrations.CreateModel("MyModel", tuple(fields.items()), options, (models.Model,)),
                migrations.CreateModel("MyModel2", tuple(fields.items()), bases=(models.Model,)),
                migrations.CreateModel(
                    name="MyModel3", fields=tuple(fields.items()), options=options, bases=(models.Model,)
                ),
                migrations.DeleteModel("MyModel"),
                migrations.AddField("OtherModel", "datetimefield", fields["datetimefield"]),
            ],
            "dependencies": [("testapp", "some_other_one")],
        })
        writer = MigrationWriter(migration)
        output = writer.as_string()
        # We don't test the output formatting - that's too fragile.
        # Just make sure it runs for now, and that things look alright.
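        # safe_exec() (defined on this class) exec()s the generated source and
        # fails the test with a descriptive message if it doesn't run.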
result = self.safe_exec(output) self.assertIn("Migration", result) def test_migration_path(self): test_apps = [ 'migrations.migrations_test_apps.normal', 'migrations.migrations_test_apps.with_package_model', 'migrations.migrations_test_apps.without_init_file', ] base_dir = os.path.dirname(os.path.dirname(__file__)) for app in test_apps: with self.modify_settings(INSTALLED_APPS={'append': app}): migration = migrations.Migration('0001_initial', app.split('.')[-1]) expected_path = os.path.join(base_dir, *(app.split('.') + ['migrations', '0001_initial.py'])) writer = MigrationWriter(migration) self.assertEqual(writer.path, expected_path) def test_custom_operation(self): migration = type("Migration", (migrations.Migration,), { "operations": [ custom_migration_operations.operations.TestOperation(), custom_migration_operations.operations.CreateModel(), migrations.CreateModel("MyModel", (), {}, (models.Model,)), custom_migration_operations.more_operations.TestOperation() ], "dependencies": [] }) writer = MigrationWriter(migration) output = writer.as_string() result = self.safe_exec(output) self.assertIn("custom_migration_operations", result) self.assertNotEqual( result['custom_migration_operations'].operations.TestOperation, result['custom_migration_operations'].more_operations.TestOperation ) def test_sorted_imports(self): """ #24155 - Tests ordering of imports. """ migration = type("Migration", (migrations.Migration,), { "operations": [ migrations.AddField("mymodel", "myfield", models.DateTimeField( default=datetime.datetime(2012, 1, 1, 1, 1, tzinfo=utc), )), ] }) writer = MigrationWriter(migration) output = writer.as_string() self.assertIn( "import datetime\n" "from django.db import migrations, models\n" "from django.utils.timezone import utc\n", output ) def test_migration_file_header_comments(self): """ Test comments at top of file. """ migration = type("Migration", (migrations.Migration,), { "operations": [] }) dt = datetime.datetime(2015, 7, 31, 4, 40, 0, 0, tzinfo=utc) with mock.patch('django.db.migrations.writer.now', lambda: dt): for include_header in (True, False): with self.subTest(include_header=include_header): writer = MigrationWriter(migration, include_header) output = writer.as_string() self.assertEqual( include_header, output.startswith( "# Generated by Django %s on 2015-07-31 04:40\n\n" % get_version() ) ) if not include_header: # Make sure the output starts with something that's not # a comment or indentation or blank line self.assertRegex(output.splitlines(keepends=True)[0], r"^[^#\s]+") def test_models_import_omitted(self): """ django.db.models shouldn't be imported if unused. """ migration = type("Migration", (migrations.Migration,), { "operations": [ migrations.AlterModelOptions( name='model', options={'verbose_name': 'model', 'verbose_name_plural': 'models'}, ), ] }) writer = MigrationWriter(migration) output = writer.as_string() self.assertIn("from django.db import migrations\n", output) def test_deconstruct_class_arguments(self): # Yes, it doesn't make sense to use a class as a default for a # CharField. It does make sense for custom fields though, for example # an enumfield that takes the enum class as an argument. 
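        # A class used as a default (rather than an instance of it) serializes
        # to a dotted-path reference to the class itself.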
string = MigrationWriter.serialize(models.CharField(default=DeconstructibleInstances))[0] self.assertEqual(string, "models.CharField(default=migrations.test_writer.DeconstructibleInstances)") def test_register_serializer(self): class ComplexSerializer(BaseSerializer): def serialize(self): return 'complex(%r)' % self.value, {} MigrationWriter.register_serializer(complex, ComplexSerializer) self.assertSerializedEqual(complex(1, 2)) MigrationWriter.unregister_serializer(complex) with self.assertRaisesMessage(ValueError, 'Cannot serialize: (1+2j)'): self.assertSerializedEqual(complex(1, 2)) def test_register_non_serializer(self): with self.assertRaisesMessage(ValueError, "'TestModel1' must inherit from 'BaseSerializer'."): MigrationWriter.register_serializer(complex, TestModel1)
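

# A minimal sketch, not part of Django's test suite, of the registration API
# exercised by WriterTests.test_register_serializer above. The function and
# serializer names here are illustrative only.
def _example_register_custom_serializer():
    class ComplexLiteralSerializer(BaseSerializer):
        def serialize(self):
            # Return the source string for the value and the set of import
            # statements that source needs to run.
            return 'complex(%r, %r)' % (self.value.real, self.value.imag), set()

    MigrationWriter.register_serializer(complex, ComplexLiteralSerializer)
    try:
        # Without the registration, serializing (1+2j) raises ValueError.
        return MigrationWriter.serialize(complex(1, 2))
    finally:
        MigrationWriter.unregister_serializer(complex)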
47a93f98fb2be0848d41b3525ec8d315b88087354c9d1fa122096dcb7b427bf7
import compileall import os from importlib import import_module from django.db import connection, connections from django.db.migrations.exceptions import ( AmbiguityError, InconsistentMigrationHistory, NodeNotFoundError, ) from django.db.migrations.loader import MigrationLoader from django.db.migrations.recorder import MigrationRecorder from django.test import TestCase, modify_settings, override_settings from .test_base import MigrationTestBase class RecorderTests(TestCase): """ Tests recording migrations as applied or not. """ databases = {'default', 'other'} def test_apply(self): """ Tests marking migrations as applied/unapplied. """ recorder = MigrationRecorder(connection) self.assertEqual( {(x, y) for (x, y) in recorder.applied_migrations() if x == "myapp"}, set(), ) recorder.record_applied("myapp", "0432_ponies") self.assertEqual( {(x, y) for (x, y) in recorder.applied_migrations() if x == "myapp"}, {("myapp", "0432_ponies")}, ) # That should not affect records of another database recorder_other = MigrationRecorder(connections['other']) self.assertEqual( {(x, y) for (x, y) in recorder_other.applied_migrations() if x == "myapp"}, set(), ) recorder.record_unapplied("myapp", "0432_ponies") self.assertEqual( {(x, y) for (x, y) in recorder.applied_migrations() if x == "myapp"}, set(), ) class LoaderTests(TestCase): """ Tests the disk and database loader, and running through migrations in memory. """ def setUp(self): self.applied_records = [] def tearDown(self): # Unapply records on databases that don't roll back changes after each # test method. if not connection.features.supports_transactions: for recorder, app, name in self.applied_records: recorder.record_unapplied(app, name) def record_applied(self, recorder, app, name): recorder.record_applied(app, name) self.applied_records.append((recorder, app, name)) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) @modify_settings(INSTALLED_APPS={'append': 'basic'}) def test_load(self): """ Makes sure the loader can load the migrations for the test apps, and then render them out to a new Apps. """ # Load and test the plan migration_loader = MigrationLoader(connection) self.assertEqual( migration_loader.graph.forwards_plan(("migrations", "0002_second")), [ ("migrations", "0001_initial"), ("migrations", "0002_second"), ], ) # Now render it out! project_state = migration_loader.project_state(("migrations", "0002_second")) self.assertEqual(len(project_state.models), 2) author_state = project_state.models["migrations", "author"] self.assertEqual( list(author_state.fields), ["id", "name", "slug", "age", "rating"] ) book_state = project_state.models["migrations", "book"] self.assertEqual(list(book_state.fields), ['id', 'author']) # Ensure we've included unmigrated apps in there too self.assertIn("basic", project_state.real_apps) @override_settings(MIGRATION_MODULES={ 'migrations': 'migrations.test_migrations', 'migrations2': 'migrations2.test_migrations_2', }) @modify_settings(INSTALLED_APPS={'append': 'migrations2'}) def test_plan_handles_repeated_migrations(self): """ _generate_plan() doesn't readd migrations already in the plan (#29180). 
""" migration_loader = MigrationLoader(connection) nodes = [('migrations', '0002_second'), ('migrations2', '0001_initial')] self.assertEqual( migration_loader.graph._generate_plan(nodes, at_end=True), [('migrations', '0001_initial'), ('migrations', '0002_second'), ('migrations2', '0001_initial')] ) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_unmigdep"}) def test_load_unmigrated_dependency(self): """ Makes sure the loader can load migrations with a dependency on an unmigrated app. """ # Load and test the plan migration_loader = MigrationLoader(connection) self.assertEqual( migration_loader.graph.forwards_plan(("migrations", "0001_initial")), [ ('contenttypes', '0001_initial'), ('auth', '0001_initial'), ("migrations", "0001_initial"), ], ) # Now render it out! project_state = migration_loader.project_state(("migrations", "0001_initial")) self.assertEqual(len([m for a, m in project_state.models if a == "migrations"]), 1) book_state = project_state.models["migrations", "book"] self.assertEqual(list(book_state.fields), ['id', 'user']) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_run_before"}) def test_run_before(self): """ Makes sure the loader uses Migration.run_before. """ # Load and test the plan migration_loader = MigrationLoader(connection) self.assertEqual( migration_loader.graph.forwards_plan(("migrations", "0002_second")), [ ("migrations", "0001_initial"), ("migrations", "0003_third"), ("migrations", "0002_second"), ], ) @override_settings(MIGRATION_MODULES={ "migrations": "migrations.test_migrations_first", "migrations2": "migrations2.test_migrations_2_first", }) @modify_settings(INSTALLED_APPS={'append': 'migrations2'}) def test_first(self): """ Makes sure the '__first__' migrations build correctly. """ migration_loader = MigrationLoader(connection) self.assertEqual( migration_loader.graph.forwards_plan(("migrations", "second")), [ ("migrations", "thefirst"), ("migrations2", "0001_initial"), ("migrations2", "0002_second"), ("migrations", "second"), ], ) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_name_match(self): "Tests prefix name matching" migration_loader = MigrationLoader(connection) self.assertEqual( migration_loader.get_migration_by_prefix("migrations", "0001").name, "0001_initial", ) msg = "There is more than one migration for 'migrations' with the prefix '0'" with self.assertRaisesMessage(AmbiguityError, msg): migration_loader.get_migration_by_prefix("migrations", "0") msg = "There is no migration for 'migrations' with the prefix 'blarg'" with self.assertRaisesMessage(KeyError, msg): migration_loader.get_migration_by_prefix("migrations", "blarg") def test_load_import_error(self): with override_settings(MIGRATION_MODULES={"migrations": "import_error_package"}): with self.assertRaises(ImportError): MigrationLoader(connection) def test_load_module_file(self): with override_settings(MIGRATION_MODULES={"migrations": "migrations.faulty_migrations.file"}): loader = MigrationLoader(connection) self.assertIn( "migrations", loader.unmigrated_apps, "App with migrations module file not in unmigrated apps." ) def test_load_empty_dir(self): with override_settings(MIGRATION_MODULES={"migrations": "migrations.faulty_migrations.namespace"}): loader = MigrationLoader(connection) self.assertIn( "migrations", loader.unmigrated_apps, "App missing __init__.py in migrations module not in unmigrated apps." 
) @override_settings( INSTALLED_APPS=['migrations.migrations_test_apps.migrated_app'], ) def test_marked_as_migrated(self): """ Undefined MIGRATION_MODULES implies default migration module. """ migration_loader = MigrationLoader(connection) self.assertEqual(migration_loader.migrated_apps, {'migrated_app'}) self.assertEqual(migration_loader.unmigrated_apps, set()) @override_settings( INSTALLED_APPS=['migrations.migrations_test_apps.migrated_app'], MIGRATION_MODULES={"migrated_app": None}, ) def test_marked_as_unmigrated(self): """ MIGRATION_MODULES allows disabling of migrations for a particular app. """ migration_loader = MigrationLoader(connection) self.assertEqual(migration_loader.migrated_apps, set()) self.assertEqual(migration_loader.unmigrated_apps, {'migrated_app'}) @override_settings( INSTALLED_APPS=['migrations.migrations_test_apps.migrated_app'], MIGRATION_MODULES={'migrated_app': 'missing-module'}, ) def test_explicit_missing_module(self): """ If a MIGRATION_MODULES override points to a missing module, the error raised during the importation attempt should be propagated unless `ignore_no_migrations=True`. """ with self.assertRaisesMessage(ImportError, 'missing-module'): migration_loader = MigrationLoader(connection) migration_loader = MigrationLoader(connection, ignore_no_migrations=True) self.assertEqual(migration_loader.migrated_apps, set()) self.assertEqual(migration_loader.unmigrated_apps, {'migrated_app'}) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}) def test_loading_squashed(self): "Tests loading a squashed migration" migration_loader = MigrationLoader(connection) recorder = MigrationRecorder(connection) self.addCleanup(recorder.flush) # Loading with nothing applied should just give us the one node self.assertEqual( len([x for x in migration_loader.graph.nodes if x[0] == "migrations"]), 1, ) # However, fake-apply one migration and it should now use the old two self.record_applied(recorder, 'migrations', '0001_initial') migration_loader.build_graph() self.assertEqual( len([x for x in migration_loader.graph.nodes if x[0] == "migrations"]), 2, ) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed_complex"}) def test_loading_squashed_complex(self): "Tests loading a complex set of squashed migrations" loader = MigrationLoader(connection) recorder = MigrationRecorder(connection) self.addCleanup(recorder.flush) def num_nodes(): plan = set(loader.graph.forwards_plan(('migrations', '7_auto'))) return len(plan - loader.applied_migrations.keys()) # Empty database: use squashed migration loader.build_graph() self.assertEqual(num_nodes(), 5) # Starting at 1 or 2 should use the squashed migration too self.record_applied(recorder, 'migrations', '1_auto') loader.build_graph() self.assertEqual(num_nodes(), 4) self.record_applied(recorder, 'migrations', '2_auto') loader.build_graph() self.assertEqual(num_nodes(), 3) # However, starting at 3 to 5 cannot use the squashed migration self.record_applied(recorder, 'migrations', '3_auto') loader.build_graph() self.assertEqual(num_nodes(), 4) self.record_applied(recorder, 'migrations', '4_auto') loader.build_graph() self.assertEqual(num_nodes(), 3) # Starting at 5 to 7 we are past the squashed migrations. 
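        # (once 5_auto is applied, everything the squashed migration replaces
        # has been applied, so only the trailing migrations remain in the plan)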
self.record_applied(recorder, 'migrations', '5_auto') loader.build_graph() self.assertEqual(num_nodes(), 2) self.record_applied(recorder, 'migrations', '6_auto') loader.build_graph() self.assertEqual(num_nodes(), 1) self.record_applied(recorder, 'migrations', '7_auto') loader.build_graph() self.assertEqual(num_nodes(), 0) @override_settings(MIGRATION_MODULES={ "app1": "migrations.test_migrations_squashed_complex_multi_apps.app1", "app2": "migrations.test_migrations_squashed_complex_multi_apps.app2", }) @modify_settings(INSTALLED_APPS={'append': [ "migrations.test_migrations_squashed_complex_multi_apps.app1", "migrations.test_migrations_squashed_complex_multi_apps.app2", ]}) def test_loading_squashed_complex_multi_apps(self): loader = MigrationLoader(connection) loader.build_graph() plan = set(loader.graph.forwards_plan(('app1', '4_auto'))) expected_plan = { ('app1', '1_auto'), ('app2', '1_squashed_2'), ('app1', '2_squashed_3'), ('app1', '4_auto'), } self.assertEqual(plan, expected_plan) @override_settings(MIGRATION_MODULES={ "app1": "migrations.test_migrations_squashed_complex_multi_apps.app1", "app2": "migrations.test_migrations_squashed_complex_multi_apps.app2", }) @modify_settings(INSTALLED_APPS={'append': [ "migrations.test_migrations_squashed_complex_multi_apps.app1", "migrations.test_migrations_squashed_complex_multi_apps.app2", ]}) def test_loading_squashed_complex_multi_apps_partially_applied(self): loader = MigrationLoader(connection) recorder = MigrationRecorder(connection) self.record_applied(recorder, 'app1', '1_auto') self.record_applied(recorder, 'app1', '2_auto') loader.build_graph() plan = set(loader.graph.forwards_plan(('app1', '4_auto'))) plan = plan - loader.applied_migrations.keys() expected_plan = { ('app2', '1_squashed_2'), ('app1', '3_auto'), ('app1', '4_auto'), } self.assertEqual(plan, expected_plan) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed_erroneous"}) def test_loading_squashed_erroneous(self): "Tests loading a complex but erroneous set of squashed migrations" loader = MigrationLoader(connection) recorder = MigrationRecorder(connection) self.addCleanup(recorder.flush) def num_nodes(): plan = set(loader.graph.forwards_plan(('migrations', '7_auto'))) return len(plan - loader.applied_migrations.keys()) # Empty database: use squashed migration loader.build_graph() self.assertEqual(num_nodes(), 5) # Starting at 1 or 2 should use the squashed migration too self.record_applied(recorder, 'migrations', '1_auto') loader.build_graph() self.assertEqual(num_nodes(), 4) self.record_applied(recorder, 'migrations', '2_auto') loader.build_graph() self.assertEqual(num_nodes(), 3) # However, starting at 3 or 4, nonexistent migrations would be needed. msg = ("Migration migrations.6_auto depends on nonexistent node ('migrations', '5_auto'). 
" "Django tried to replace migration migrations.5_auto with any of " "[migrations.3_squashed_5] but wasn't able to because some of the replaced " "migrations are already applied.") self.record_applied(recorder, 'migrations', '3_auto') with self.assertRaisesMessage(NodeNotFoundError, msg): loader.build_graph() self.record_applied(recorder, 'migrations', '4_auto') with self.assertRaisesMessage(NodeNotFoundError, msg): loader.build_graph() # Starting at 5 to 7 we are passed the squashed migrations self.record_applied(recorder, 'migrations', '5_auto') loader.build_graph() self.assertEqual(num_nodes(), 2) self.record_applied(recorder, 'migrations', '6_auto') loader.build_graph() self.assertEqual(num_nodes(), 1) self.record_applied(recorder, 'migrations', '7_auto') loader.build_graph() self.assertEqual(num_nodes(), 0) @override_settings( MIGRATION_MODULES={'migrations': 'migrations.test_migrations'}, INSTALLED_APPS=['migrations'], ) def test_check_consistent_history(self): loader = MigrationLoader(connection=None) loader.check_consistent_history(connection) recorder = MigrationRecorder(connection) self.record_applied(recorder, 'migrations', '0002_second') msg = ( "Migration migrations.0002_second is applied before its dependency " "migrations.0001_initial on database 'default'." ) with self.assertRaisesMessage(InconsistentMigrationHistory, msg): loader.check_consistent_history(connection) @override_settings( MIGRATION_MODULES={'migrations': 'migrations.test_migrations_squashed_extra'}, INSTALLED_APPS=['migrations'], ) def test_check_consistent_history_squashed(self): """ MigrationLoader.check_consistent_history() should ignore unapplied squashed migrations that have all of their `replaces` applied. """ loader = MigrationLoader(connection=None) recorder = MigrationRecorder(connection) self.record_applied(recorder, 'migrations', '0001_initial') self.record_applied(recorder, 'migrations', '0002_second') loader.check_consistent_history(connection) self.record_applied(recorder, 'migrations', '0003_third') loader.check_consistent_history(connection) @override_settings(MIGRATION_MODULES={ "app1": "migrations.test_migrations_squashed_ref_squashed.app1", "app2": "migrations.test_migrations_squashed_ref_squashed.app2", }) @modify_settings(INSTALLED_APPS={'append': [ "migrations.test_migrations_squashed_ref_squashed.app1", "migrations.test_migrations_squashed_ref_squashed.app2", ]}) def test_loading_squashed_ref_squashed(self): "Tests loading a squashed migration with a new migration referencing it" r""" The sample migrations are structured like this: app_1 1 --> 2 ---------------------*--> 3 *--> 4 \ / / *-------------------*----/--> 2_sq_3 --* \ / / =============== \ ============= / == / ====================== app_2 *--> 1_sq_2 --* / \ / *--> 1 --> 2 --* Where 2_sq_3 is a replacing migration for 2 and 3 in app_1, as 1_sq_2 is a replacing migration for 1 and 2 in app_2. """ loader = MigrationLoader(connection) recorder = MigrationRecorder(connection) self.addCleanup(recorder.flush) # Load with nothing applied: both migrations squashed. loader.build_graph() plan = set(loader.graph.forwards_plan(('app1', '4_auto'))) plan = plan - loader.applied_migrations.keys() expected_plan = { ('app1', '1_auto'), ('app2', '1_squashed_2'), ('app1', '2_squashed_3'), ('app1', '4_auto'), } self.assertEqual(plan, expected_plan) # Fake-apply a few from app1: unsquashes migration in app1. 
self.record_applied(recorder, 'app1', '1_auto') self.record_applied(recorder, 'app1', '2_auto') loader.build_graph() plan = set(loader.graph.forwards_plan(('app1', '4_auto'))) plan = plan - loader.applied_migrations.keys() expected_plan = { ('app2', '1_squashed_2'), ('app1', '3_auto'), ('app1', '4_auto'), } self.assertEqual(plan, expected_plan) # Fake-apply one from app2: unsquashes migration in app2 too. self.record_applied(recorder, 'app2', '1_auto') loader.build_graph() plan = set(loader.graph.forwards_plan(('app1', '4_auto'))) plan = plan - loader.applied_migrations.keys() expected_plan = { ('app2', '2_auto'), ('app1', '3_auto'), ('app1', '4_auto'), } self.assertEqual(plan, expected_plan) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_private'}) def test_ignore_files(self): """Files prefixed with underscore, tilde, or dot aren't loaded.""" loader = MigrationLoader(connection) loader.load_disk() migrations = [name for app, name in loader.disk_migrations if app == 'migrations'] self.assertEqual(migrations, ['0001_initial']) @override_settings( MIGRATION_MODULES={'migrations': 'migrations.test_migrations_namespace_package'}, ) def test_loading_namespace_package(self): """Migration directories without an __init__.py file are ignored.""" loader = MigrationLoader(connection) loader.load_disk() migrations = [name for app, name in loader.disk_migrations if app == 'migrations'] self.assertEqual(migrations, []) @override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations'}) def test_loading_package_without__file__(self): """ To support frozen environments, MigrationLoader loads migrations from regular packages with no __file__ attribute. """ test_module = import_module('migrations.test_migrations') loader = MigrationLoader(connection) # __file__ == __spec__.origin or the latter is None and former is # undefined. module_file = test_module.__file__ module_origin = test_module.__spec__.origin module_has_location = test_module.__spec__.has_location try: del test_module.__file__ test_module.__spec__.origin = None test_module.__spec__.has_location = False loader.load_disk() migrations = [ name for app, name in loader.disk_migrations if app == 'migrations' ] self.assertCountEqual(migrations, ['0001_initial', '0002_second']) finally: test_module.__file__ = module_file test_module.__spec__.origin = module_origin test_module.__spec__.has_location = module_has_location class PycLoaderTests(MigrationTestBase): def test_valid(self): """ To support frozen environments, MigrationLoader loads .pyc migrations. """ with self.temporary_migration_module(module='migrations.test_migrations') as migration_dir: # Compile .py files to .pyc files and delete .py files. compileall.compile_dir(migration_dir, force=True, quiet=1, legacy=True) for name in os.listdir(migration_dir): if name.endswith('.py'): os.remove(os.path.join(migration_dir, name)) loader = MigrationLoader(connection) self.assertIn(('migrations', '0001_initial'), loader.disk_migrations) def test_invalid(self): """ MigrationLoader reraises ImportErrors caused by "bad magic number" pyc files with a more helpful message. """ with self.temporary_migration_module(module='migrations.test_migrations_bad_pyc') as migration_dir: # The -tpl suffix is to avoid the pyc exclusion in MANIFEST.in. os.rename( os.path.join(migration_dir, '0001_initial.pyc-tpl'), os.path.join(migration_dir, '0001_initial.pyc'), ) msg = ( r"Couldn't import '\w+.migrations.0001_initial' as it appears " "to be a stale .pyc file." 
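            # (The \w+ part keeps the app name flexible, since the temporary
            # migration module is created under a generated package name.)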
) with self.assertRaisesRegex(ImportError, msg): MigrationLoader(connection)
from django.core.exceptions import FieldDoesNotExist from django.db import ( IntegrityError, connection, migrations, models, transaction, ) from django.db.migrations.migration import Migration from django.db.migrations.operations.fields import FieldOperation from django.db.migrations.state import ModelState, ProjectState from django.db.models.functions import Abs from django.db.transaction import atomic from django.test import SimpleTestCase, override_settings, skipUnlessDBFeature from django.test.utils import CaptureQueriesContext from .models import FoodManager, FoodQuerySet, UnicodeModel from .test_base import OperationTestBase class Mixin: pass class OperationTests(OperationTestBase): """ Tests running the operations and making sure they do what they say they do. Each test looks at their state changing, and then their database operation - both forwards and backwards. """ def test_create_model(self): """ Tests the CreateModel operation. Most other tests use this operation as part of setup, so check failures here first. """ operation = migrations.CreateModel( "Pony", [ ("id", models.AutoField(primary_key=True)), ("pink", models.IntegerField(default=1)), ], ) self.assertEqual(operation.describe(), "Create model Pony") self.assertEqual(operation.migration_name_fragment, 'pony') # Test the state alteration project_state = ProjectState() new_state = project_state.clone() operation.state_forwards("test_crmo", new_state) self.assertEqual(new_state.models["test_crmo", "pony"].name, "Pony") self.assertEqual(len(new_state.models["test_crmo", "pony"].fields), 2) # Test the database alteration self.assertTableNotExists("test_crmo_pony") with connection.schema_editor() as editor: operation.database_forwards("test_crmo", editor, project_state, new_state) self.assertTableExists("test_crmo_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crmo", editor, new_state, project_state) self.assertTableNotExists("test_crmo_pony") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "CreateModel") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["fields", "name"]) # And default manager not in set operation = migrations.CreateModel("Foo", fields=[], managers=[("objects", models.Manager())]) definition = operation.deconstruct() self.assertNotIn('managers', definition[2]) def test_create_model_with_duplicate_field_name(self): with self.assertRaisesMessage(ValueError, 'Found duplicate value pink in CreateModel fields argument.'): migrations.CreateModel( "Pony", [ ("id", models.AutoField(primary_key=True)), ("pink", models.TextField()), ("pink", models.IntegerField(default=1)), ], ) def test_create_model_with_duplicate_base(self): message = 'Found duplicate value test_crmo.pony in CreateModel bases argument.' with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=("test_crmo.Pony", "test_crmo.Pony",), ) with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=("test_crmo.Pony", "test_crmo.pony",), ) message = 'Found duplicate value migrations.unicodemodel in CreateModel bases argument.' 
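        # Duplicate detection should treat model classes and
        # "app_label.ModelName" strings as the same base, compared
        # case-insensitively, as the combinations below exercise.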
with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=(UnicodeModel, UnicodeModel,), ) with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=(UnicodeModel, 'migrations.unicodemodel',), ) with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=(UnicodeModel, 'migrations.UnicodeModel',), ) message = "Found duplicate value <class 'django.db.models.base.Model'> in CreateModel bases argument." with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=(models.Model, models.Model,), ) message = "Found duplicate value <class 'migrations.test_operations.Mixin'> in CreateModel bases argument." with self.assertRaisesMessage(ValueError, message): migrations.CreateModel( "Pony", fields=[], bases=(Mixin, Mixin,), ) def test_create_model_with_duplicate_manager_name(self): with self.assertRaisesMessage(ValueError, 'Found duplicate value objects in CreateModel managers argument.'): migrations.CreateModel( "Pony", fields=[], managers=[ ("objects", models.Manager()), ("objects", models.Manager()), ], ) def test_create_model_with_unique_after(self): """ Tests the CreateModel operation directly followed by an AlterUniqueTogether (bug #22844 - sqlite remake issues) """ operation1 = migrations.CreateModel( "Pony", [ ("id", models.AutoField(primary_key=True)), ("pink", models.IntegerField(default=1)), ], ) operation2 = migrations.CreateModel( "Rider", [ ("id", models.AutoField(primary_key=True)), ("number", models.IntegerField(default=1)), ("pony", models.ForeignKey("test_crmoua.Pony", models.CASCADE)), ], ) operation3 = migrations.AlterUniqueTogether( "Rider", [ ("number", "pony"), ], ) # Test the database alteration project_state = ProjectState() self.assertTableNotExists("test_crmoua_pony") self.assertTableNotExists("test_crmoua_rider") with connection.schema_editor() as editor: new_state = project_state.clone() operation1.state_forwards("test_crmoua", new_state) operation1.database_forwards("test_crmoua", editor, project_state, new_state) project_state, new_state = new_state, new_state.clone() operation2.state_forwards("test_crmoua", new_state) operation2.database_forwards("test_crmoua", editor, project_state, new_state) project_state, new_state = new_state, new_state.clone() operation3.state_forwards("test_crmoua", new_state) operation3.database_forwards("test_crmoua", editor, project_state, new_state) self.assertTableExists("test_crmoua_pony") self.assertTableExists("test_crmoua_rider") def test_create_model_m2m(self): """ Test the creation of a model with a ManyToMany field and the auto-created "through" model. 
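        The auto-created through table should appear on the forwards operation
        and be dropped again on reversal.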
""" project_state = self.set_up_test_model("test_crmomm") operation = migrations.CreateModel( "Stable", [ ("id", models.AutoField(primary_key=True)), ("ponies", models.ManyToManyField("Pony", related_name="stables")) ] ) # Test the state alteration new_state = project_state.clone() operation.state_forwards("test_crmomm", new_state) # Test the database alteration self.assertTableNotExists("test_crmomm_stable_ponies") with connection.schema_editor() as editor: operation.database_forwards("test_crmomm", editor, project_state, new_state) self.assertTableExists("test_crmomm_stable") self.assertTableExists("test_crmomm_stable_ponies") self.assertColumnNotExists("test_crmomm_stable", "ponies") # Make sure the M2M field actually works with atomic(): Pony = new_state.apps.get_model("test_crmomm", "Pony") Stable = new_state.apps.get_model("test_crmomm", "Stable") stable = Stable.objects.create() p1 = Pony.objects.create(pink=False, weight=4.55) p2 = Pony.objects.create(pink=True, weight=5.43) stable.ponies.add(p1, p2) self.assertEqual(stable.ponies.count(), 2) stable.ponies.all().delete() # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crmomm", editor, new_state, project_state) self.assertTableNotExists("test_crmomm_stable") self.assertTableNotExists("test_crmomm_stable_ponies") def test_create_model_inheritance(self): """ Tests the CreateModel operation on a multi-table inheritance setup. """ project_state = self.set_up_test_model("test_crmoih") # Test the state alteration operation = migrations.CreateModel( "ShetlandPony", [ ('pony_ptr', models.OneToOneField( 'test_crmoih.Pony', models.CASCADE, auto_created=True, primary_key=True, to_field='id', serialize=False, )), ("cuteness", models.IntegerField(default=1)), ], ) new_state = project_state.clone() operation.state_forwards("test_crmoih", new_state) self.assertIn(("test_crmoih", "shetlandpony"), new_state.models) # Test the database alteration self.assertTableNotExists("test_crmoih_shetlandpony") with connection.schema_editor() as editor: operation.database_forwards("test_crmoih", editor, project_state, new_state) self.assertTableExists("test_crmoih_shetlandpony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crmoih", editor, new_state, project_state) self.assertTableNotExists("test_crmoih_shetlandpony") def test_create_proxy_model(self): """ CreateModel ignores proxy models. 
""" project_state = self.set_up_test_model("test_crprmo") # Test the state alteration operation = migrations.CreateModel( "ProxyPony", [], options={"proxy": True}, bases=("test_crprmo.Pony",), ) self.assertEqual(operation.describe(), "Create proxy model ProxyPony") new_state = project_state.clone() operation.state_forwards("test_crprmo", new_state) self.assertIn(("test_crprmo", "proxypony"), new_state.models) # Test the database alteration self.assertTableNotExists("test_crprmo_proxypony") self.assertTableExists("test_crprmo_pony") with connection.schema_editor() as editor: operation.database_forwards("test_crprmo", editor, project_state, new_state) self.assertTableNotExists("test_crprmo_proxypony") self.assertTableExists("test_crprmo_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crprmo", editor, new_state, project_state) self.assertTableNotExists("test_crprmo_proxypony") self.assertTableExists("test_crprmo_pony") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "CreateModel") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["bases", "fields", "name", "options"]) def test_create_unmanaged_model(self): """ CreateModel ignores unmanaged models. """ project_state = self.set_up_test_model("test_crummo") # Test the state alteration operation = migrations.CreateModel( "UnmanagedPony", [], options={"proxy": True}, bases=("test_crummo.Pony",), ) self.assertEqual(operation.describe(), "Create proxy model UnmanagedPony") new_state = project_state.clone() operation.state_forwards("test_crummo", new_state) self.assertIn(("test_crummo", "unmanagedpony"), new_state.models) # Test the database alteration self.assertTableNotExists("test_crummo_unmanagedpony") self.assertTableExists("test_crummo_pony") with connection.schema_editor() as editor: operation.database_forwards("test_crummo", editor, project_state, new_state) self.assertTableNotExists("test_crummo_unmanagedpony") self.assertTableExists("test_crummo_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crummo", editor, new_state, project_state) self.assertTableNotExists("test_crummo_unmanagedpony") self.assertTableExists("test_crummo_pony") @skipUnlessDBFeature('supports_table_check_constraints') def test_create_model_with_constraint(self): where = models.Q(pink__gt=2) check_constraint = models.CheckConstraint(check=where, name='test_constraint_pony_pink_gt_2') operation = migrations.CreateModel( "Pony", [ ("id", models.AutoField(primary_key=True)), ("pink", models.IntegerField(default=3)), ], options={'constraints': [check_constraint]}, ) # Test the state alteration project_state = ProjectState() new_state = project_state.clone() operation.state_forwards("test_crmo", new_state) self.assertEqual(len(new_state.models['test_crmo', 'pony'].options['constraints']), 1) # Test database alteration self.assertTableNotExists("test_crmo_pony") with connection.schema_editor() as editor: operation.database_forwards("test_crmo", editor, project_state, new_state) self.assertTableExists("test_crmo_pony") with connection.cursor() as cursor: with self.assertRaises(IntegrityError): cursor.execute("INSERT INTO test_crmo_pony (id, pink) VALUES (1, 1)") # Test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crmo", editor, new_state, project_state) self.assertTableNotExists("test_crmo_pony") # Test deconstruction definition = operation.deconstruct() 
self.assertEqual(definition[0], "CreateModel") self.assertEqual(definition[1], []) self.assertEqual(definition[2]['options']['constraints'], [check_constraint]) def test_create_model_with_partial_unique_constraint(self): partial_unique_constraint = models.UniqueConstraint( fields=['pink'], condition=models.Q(weight__gt=5), name='test_constraint_pony_pink_for_weight_gt_5_uniq', ) operation = migrations.CreateModel( 'Pony', [ ('id', models.AutoField(primary_key=True)), ('pink', models.IntegerField(default=3)), ('weight', models.FloatField()), ], options={'constraints': [partial_unique_constraint]}, ) # Test the state alteration project_state = ProjectState() new_state = project_state.clone() operation.state_forwards('test_crmo', new_state) self.assertEqual(len(new_state.models['test_crmo', 'pony'].options['constraints']), 1) # Test database alteration self.assertTableNotExists('test_crmo_pony') with connection.schema_editor() as editor: operation.database_forwards('test_crmo', editor, project_state, new_state) self.assertTableExists('test_crmo_pony') # Test constraint works Pony = new_state.apps.get_model('test_crmo', 'Pony') Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=6.0) if connection.features.supports_partial_indexes: with self.assertRaises(IntegrityError): Pony.objects.create(pink=1, weight=7.0) else: Pony.objects.create(pink=1, weight=7.0) # Test reversal with connection.schema_editor() as editor: operation.database_backwards('test_crmo', editor, new_state, project_state) self.assertTableNotExists('test_crmo_pony') # Test deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], 'CreateModel') self.assertEqual(definition[1], []) self.assertEqual(definition[2]['options']['constraints'], [partial_unique_constraint]) def test_create_model_with_deferred_unique_constraint(self): deferred_unique_constraint = models.UniqueConstraint( fields=['pink'], name='deferrable_pink_constraint', deferrable=models.Deferrable.DEFERRED, ) operation = migrations.CreateModel( 'Pony', [ ('id', models.AutoField(primary_key=True)), ('pink', models.IntegerField(default=3)), ], options={'constraints': [deferred_unique_constraint]}, ) project_state = ProjectState() new_state = project_state.clone() operation.state_forwards('test_crmo', new_state) self.assertEqual(len(new_state.models['test_crmo', 'pony'].options['constraints']), 1) self.assertTableNotExists('test_crmo_pony') # Create table. with connection.schema_editor() as editor: operation.database_forwards('test_crmo', editor, project_state, new_state) self.assertTableExists('test_crmo_pony') Pony = new_state.apps.get_model('test_crmo', 'Pony') Pony.objects.create(pink=1) if connection.features.supports_deferrable_unique_constraints: # Unique constraint is deferred. with transaction.atomic(): obj = Pony.objects.create(pink=1) obj.pink = 2 obj.save() # Constraint behavior can be changed with SET CONSTRAINTS. with self.assertRaises(IntegrityError): with transaction.atomic(), connection.cursor() as cursor: quoted_name = connection.ops.quote_name(deferred_unique_constraint.name) cursor.execute('SET CONSTRAINTS %s IMMEDIATE' % quoted_name) obj = Pony.objects.create(pink=1) obj.pink = 3 obj.save() else: Pony.objects.create(pink=1) # Reversal. with connection.schema_editor() as editor: operation.database_backwards('test_crmo', editor, new_state, project_state) self.assertTableNotExists('test_crmo_pony') # Deconstruction. 
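        # (Deconstruction is what the migration writer relies on to serialize
        # the operation back into a migration file.)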
definition = operation.deconstruct() self.assertEqual(definition[0], 'CreateModel') self.assertEqual(definition[1], []) self.assertEqual( definition[2]['options']['constraints'], [deferred_unique_constraint], ) @skipUnlessDBFeature('supports_covering_indexes') def test_create_model_with_covering_unique_constraint(self): covering_unique_constraint = models.UniqueConstraint( fields=['pink'], include=['weight'], name='test_constraint_pony_pink_covering_weight', ) operation = migrations.CreateModel( 'Pony', [ ('id', models.AutoField(primary_key=True)), ('pink', models.IntegerField(default=3)), ('weight', models.FloatField()), ], options={'constraints': [covering_unique_constraint]}, ) project_state = ProjectState() new_state = project_state.clone() operation.state_forwards('test_crmo', new_state) self.assertEqual(len(new_state.models['test_crmo', 'pony'].options['constraints']), 1) self.assertTableNotExists('test_crmo_pony') # Create table. with connection.schema_editor() as editor: operation.database_forwards('test_crmo', editor, project_state, new_state) self.assertTableExists('test_crmo_pony') Pony = new_state.apps.get_model('test_crmo', 'Pony') Pony.objects.create(pink=1, weight=4.0) with self.assertRaises(IntegrityError): Pony.objects.create(pink=1, weight=7.0) # Reversal. with connection.schema_editor() as editor: operation.database_backwards('test_crmo', editor, new_state, project_state) self.assertTableNotExists('test_crmo_pony') # Deconstruction. definition = operation.deconstruct() self.assertEqual(definition[0], 'CreateModel') self.assertEqual(definition[1], []) self.assertEqual( definition[2]['options']['constraints'], [covering_unique_constraint], ) def test_create_model_managers(self): """ The managers on a model are set. """ project_state = self.set_up_test_model("test_cmoma") # Test the state alteration operation = migrations.CreateModel( "Food", fields=[ ("id", models.AutoField(primary_key=True)), ], managers=[ ("food_qs", FoodQuerySet.as_manager()), ("food_mgr", FoodManager("a", "b")), ("food_mgr_kwargs", FoodManager("x", "y", 3, 4)), ] ) self.assertEqual(operation.describe(), "Create model Food") new_state = project_state.clone() operation.state_forwards("test_cmoma", new_state) self.assertIn(("test_cmoma", "food"), new_state.models) managers = new_state.models["test_cmoma", "food"].managers self.assertEqual(managers[0][0], "food_qs") self.assertIsInstance(managers[0][1], models.Manager) self.assertEqual(managers[1][0], "food_mgr") self.assertIsInstance(managers[1][1], FoodManager) self.assertEqual(managers[1][1].args, ("a", "b", 1, 2)) self.assertEqual(managers[2][0], "food_mgr_kwargs") self.assertIsInstance(managers[2][1], FoodManager) self.assertEqual(managers[2][1].args, ("x", "y", 3, 4)) def test_delete_model(self): """ Tests the DeleteModel operation. 
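        Deleting the model should drop its table forwards and recreate it on
        reversal.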
""" project_state = self.set_up_test_model("test_dlmo") # Test the state alteration operation = migrations.DeleteModel("Pony") self.assertEqual(operation.describe(), "Delete model Pony") self.assertEqual(operation.migration_name_fragment, 'delete_pony') new_state = project_state.clone() operation.state_forwards("test_dlmo", new_state) self.assertNotIn(("test_dlmo", "pony"), new_state.models) # Test the database alteration self.assertTableExists("test_dlmo_pony") with connection.schema_editor() as editor: operation.database_forwards("test_dlmo", editor, project_state, new_state) self.assertTableNotExists("test_dlmo_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_dlmo", editor, new_state, project_state) self.assertTableExists("test_dlmo_pony") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "DeleteModel") self.assertEqual(definition[1], []) self.assertEqual(list(definition[2]), ["name"]) def test_delete_proxy_model(self): """ Tests the DeleteModel operation ignores proxy models. """ project_state = self.set_up_test_model("test_dlprmo", proxy_model=True) # Test the state alteration operation = migrations.DeleteModel("ProxyPony") new_state = project_state.clone() operation.state_forwards("test_dlprmo", new_state) self.assertIn(("test_dlprmo", "proxypony"), project_state.models) self.assertNotIn(("test_dlprmo", "proxypony"), new_state.models) # Test the database alteration self.assertTableExists("test_dlprmo_pony") self.assertTableNotExists("test_dlprmo_proxypony") with connection.schema_editor() as editor: operation.database_forwards("test_dlprmo", editor, project_state, new_state) self.assertTableExists("test_dlprmo_pony") self.assertTableNotExists("test_dlprmo_proxypony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_dlprmo", editor, new_state, project_state) self.assertTableExists("test_dlprmo_pony") self.assertTableNotExists("test_dlprmo_proxypony") def test_delete_mti_model(self): project_state = self.set_up_test_model('test_dlmtimo', mti_model=True) # Test the state alteration operation = migrations.DeleteModel('ShetlandPony') new_state = project_state.clone() operation.state_forwards('test_dlmtimo', new_state) self.assertIn(('test_dlmtimo', 'shetlandpony'), project_state.models) self.assertNotIn(('test_dlmtimo', 'shetlandpony'), new_state.models) # Test the database alteration self.assertTableExists('test_dlmtimo_pony') self.assertTableExists('test_dlmtimo_shetlandpony') self.assertColumnExists('test_dlmtimo_shetlandpony', 'pony_ptr_id') with connection.schema_editor() as editor: operation.database_forwards('test_dlmtimo', editor, project_state, new_state) self.assertTableExists('test_dlmtimo_pony') self.assertTableNotExists('test_dlmtimo_shetlandpony') # And test reversal with connection.schema_editor() as editor: operation.database_backwards('test_dlmtimo', editor, new_state, project_state) self.assertTableExists('test_dlmtimo_pony') self.assertTableExists('test_dlmtimo_shetlandpony') self.assertColumnExists('test_dlmtimo_shetlandpony', 'pony_ptr_id') def test_rename_model(self): """ Tests the RenameModel operation. 
""" project_state = self.set_up_test_model("test_rnmo", related_model=True) # Test the state alteration operation = migrations.RenameModel("Pony", "Horse") self.assertEqual(operation.describe(), "Rename model Pony to Horse") self.assertEqual(operation.migration_name_fragment, 'rename_pony_horse') # Test initial state and database self.assertIn(("test_rnmo", "pony"), project_state.models) self.assertNotIn(("test_rnmo", "horse"), project_state.models) self.assertTableExists("test_rnmo_pony") self.assertTableNotExists("test_rnmo_horse") if connection.features.supports_foreign_keys: self.assertFKExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id")) self.assertFKNotExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id")) # Migrate forwards new_state = project_state.clone() atomic_rename = connection.features.supports_atomic_references_rename new_state = self.apply_operations("test_rnmo", new_state, [operation], atomic=atomic_rename) # Test new state and database self.assertNotIn(("test_rnmo", "pony"), new_state.models) self.assertIn(("test_rnmo", "horse"), new_state.models) # RenameModel also repoints all incoming FKs and M2Ms self.assertEqual( new_state.models['test_rnmo', 'rider'].fields['pony'].remote_field.model, 'test_rnmo.Horse', ) self.assertTableNotExists("test_rnmo_pony") self.assertTableExists("test_rnmo_horse") if connection.features.supports_foreign_keys: self.assertFKNotExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id")) self.assertFKExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id")) # Migrate backwards original_state = self.unapply_operations("test_rnmo", project_state, [operation], atomic=atomic_rename) # Test original state and database self.assertIn(("test_rnmo", "pony"), original_state.models) self.assertNotIn(("test_rnmo", "horse"), original_state.models) self.assertEqual( original_state.models['test_rnmo', 'rider'].fields['pony'].remote_field.model, 'Pony', ) self.assertTableExists("test_rnmo_pony") self.assertTableNotExists("test_rnmo_horse") if connection.features.supports_foreign_keys: self.assertFKExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id")) self.assertFKNotExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id")) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RenameModel") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'old_name': "Pony", 'new_name': "Horse"}) def test_rename_model_state_forwards(self): """ RenameModel operations shouldn't trigger the caching of rendered apps on state without prior apps. """ state = ProjectState() state.add_model(ModelState('migrations', 'Foo', [])) operation = migrations.RenameModel('Foo', 'Bar') operation.state_forwards('migrations', state) self.assertNotIn('apps', state.__dict__) self.assertNotIn(('migrations', 'foo'), state.models) self.assertIn(('migrations', 'bar'), state.models) # Now with apps cached. apps = state.apps operation = migrations.RenameModel('Bar', 'Foo') operation.state_forwards('migrations', state) self.assertIs(state.apps, apps) self.assertNotIn(('migrations', 'bar'), state.models) self.assertIn(('migrations', 'foo'), state.models) def test_rename_model_with_self_referential_fk(self): """ Tests the RenameModel operation on model with self referential FK. 
""" project_state = self.set_up_test_model("test_rmwsrf", related_model=True) # Test the state alteration operation = migrations.RenameModel("Rider", "HorseRider") self.assertEqual(operation.describe(), "Rename model Rider to HorseRider") new_state = project_state.clone() operation.state_forwards("test_rmwsrf", new_state) self.assertNotIn(("test_rmwsrf", "rider"), new_state.models) self.assertIn(("test_rmwsrf", "horserider"), new_state.models) # Remember, RenameModel also repoints all incoming FKs and M2Ms self.assertEqual( 'self', new_state.models["test_rmwsrf", "horserider"].fields['friend'].remote_field.model ) HorseRider = new_state.apps.get_model('test_rmwsrf', 'horserider') self.assertIs(HorseRider._meta.get_field('horserider').remote_field.model, HorseRider) # Test the database alteration self.assertTableExists("test_rmwsrf_rider") self.assertTableNotExists("test_rmwsrf_horserider") if connection.features.supports_foreign_keys: self.assertFKExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_rider", "id")) self.assertFKNotExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_horserider", "id")) atomic_rename = connection.features.supports_atomic_references_rename with connection.schema_editor(atomic=atomic_rename) as editor: operation.database_forwards("test_rmwsrf", editor, project_state, new_state) self.assertTableNotExists("test_rmwsrf_rider") self.assertTableExists("test_rmwsrf_horserider") if connection.features.supports_foreign_keys: self.assertFKNotExists("test_rmwsrf_horserider", ["friend_id"], ("test_rmwsrf_rider", "id")) self.assertFKExists("test_rmwsrf_horserider", ["friend_id"], ("test_rmwsrf_horserider", "id")) # And test reversal with connection.schema_editor(atomic=atomic_rename) as editor: operation.database_backwards("test_rmwsrf", editor, new_state, project_state) self.assertTableExists("test_rmwsrf_rider") self.assertTableNotExists("test_rmwsrf_horserider") if connection.features.supports_foreign_keys: self.assertFKExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_rider", "id")) self.assertFKNotExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_horserider", "id")) def test_rename_model_with_superclass_fk(self): """ Tests the RenameModel operation on a model which has a superclass that has a foreign key. 
""" project_state = self.set_up_test_model("test_rmwsc", related_model=True, mti_model=True) # Test the state alteration operation = migrations.RenameModel("ShetlandPony", "LittleHorse") self.assertEqual(operation.describe(), "Rename model ShetlandPony to LittleHorse") new_state = project_state.clone() operation.state_forwards("test_rmwsc", new_state) self.assertNotIn(("test_rmwsc", "shetlandpony"), new_state.models) self.assertIn(("test_rmwsc", "littlehorse"), new_state.models) # RenameModel shouldn't repoint the superclass's relations, only local ones self.assertEqual( project_state.models['test_rmwsc', 'rider'].fields['pony'].remote_field.model, new_state.models['test_rmwsc', 'rider'].fields['pony'].remote_field.model, ) # Before running the migration we have a table for Shetland Pony, not Little Horse self.assertTableExists("test_rmwsc_shetlandpony") self.assertTableNotExists("test_rmwsc_littlehorse") if connection.features.supports_foreign_keys: # and the foreign key on rider points to pony, not shetland pony self.assertFKExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_pony", "id")) self.assertFKNotExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_shetlandpony", "id")) with connection.schema_editor(atomic=connection.features.supports_atomic_references_rename) as editor: operation.database_forwards("test_rmwsc", editor, project_state, new_state) # Now we have a little horse table, not shetland pony self.assertTableNotExists("test_rmwsc_shetlandpony") self.assertTableExists("test_rmwsc_littlehorse") if connection.features.supports_foreign_keys: # but the Foreign keys still point at pony, not little horse self.assertFKExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_pony", "id")) self.assertFKNotExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_littlehorse", "id")) def test_rename_model_with_self_referential_m2m(self): app_label = "test_rename_model_with_self_referential_m2m" project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel("ReflexivePony", fields=[ ("id", models.AutoField(primary_key=True)), ("ponies", models.ManyToManyField("self")), ]), ]) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.RenameModel("ReflexivePony", "ReflexivePony2"), ], atomic=connection.features.supports_atomic_references_rename) Pony = project_state.apps.get_model(app_label, "ReflexivePony2") pony = Pony.objects.create() pony.ponies.add(pony) def test_rename_model_with_m2m(self): app_label = "test_rename_model_with_m2m" project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel("Rider", fields=[ ("id", models.AutoField(primary_key=True)), ]), migrations.CreateModel("Pony", fields=[ ("id", models.AutoField(primary_key=True)), ("riders", models.ManyToManyField("Rider")), ]), ]) Pony = project_state.apps.get_model(app_label, "Pony") Rider = project_state.apps.get_model(app_label, "Rider") pony = Pony.objects.create() rider = Rider.objects.create() pony.riders.add(rider) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.RenameModel("Pony", "Pony2"), ], atomic=connection.features.supports_atomic_references_rename) Pony = project_state.apps.get_model(app_label, "Pony2") Rider = project_state.apps.get_model(app_label, "Rider") pony = Pony.objects.create() rider = Rider.objects.create() pony.riders.add(rider) self.assertEqual(Pony.objects.count(), 2) self.assertEqual(Rider.objects.count(), 2) 
self.assertEqual(Pony._meta.get_field('riders').remote_field.through.objects.count(), 2) def test_rename_m2m_target_model(self): app_label = "test_rename_m2m_target_model" project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel("Rider", fields=[ ("id", models.AutoField(primary_key=True)), ]), migrations.CreateModel("Pony", fields=[ ("id", models.AutoField(primary_key=True)), ("riders", models.ManyToManyField("Rider")), ]), ]) Pony = project_state.apps.get_model(app_label, "Pony") Rider = project_state.apps.get_model(app_label, "Rider") pony = Pony.objects.create() rider = Rider.objects.create() pony.riders.add(rider) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.RenameModel("Rider", "Rider2"), ], atomic=connection.features.supports_atomic_references_rename) Pony = project_state.apps.get_model(app_label, "Pony") Rider = project_state.apps.get_model(app_label, "Rider2") pony = Pony.objects.create() rider = Rider.objects.create() pony.riders.add(rider) self.assertEqual(Pony.objects.count(), 2) self.assertEqual(Rider.objects.count(), 2) self.assertEqual(Pony._meta.get_field('riders').remote_field.through.objects.count(), 2) def test_rename_m2m_through_model(self): app_label = "test_rename_through" project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel("Rider", fields=[ ("id", models.AutoField(primary_key=True)), ]), migrations.CreateModel("Pony", fields=[ ("id", models.AutoField(primary_key=True)), ]), migrations.CreateModel("PonyRider", fields=[ ("id", models.AutoField(primary_key=True)), ("rider", models.ForeignKey("test_rename_through.Rider", models.CASCADE)), ("pony", models.ForeignKey("test_rename_through.Pony", models.CASCADE)), ]), migrations.AddField( "Pony", "riders", models.ManyToManyField("test_rename_through.Rider", through="test_rename_through.PonyRider"), ), ]) Pony = project_state.apps.get_model(app_label, "Pony") Rider = project_state.apps.get_model(app_label, "Rider") PonyRider = project_state.apps.get_model(app_label, "PonyRider") pony = Pony.objects.create() rider = Rider.objects.create() PonyRider.objects.create(pony=pony, rider=rider) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.RenameModel("PonyRider", "PonyRider2"), ]) Pony = project_state.apps.get_model(app_label, "Pony") Rider = project_state.apps.get_model(app_label, "Rider") PonyRider = project_state.apps.get_model(app_label, "PonyRider2") pony = Pony.objects.first() rider = Rider.objects.create() PonyRider.objects.create(pony=pony, rider=rider) self.assertEqual(Pony.objects.count(), 1) self.assertEqual(Rider.objects.count(), 2) self.assertEqual(PonyRider.objects.count(), 2) self.assertEqual(pony.riders.count(), 2) def test_rename_m2m_model_after_rename_field(self): """RenameModel renames a many-to-many column after a RenameField.""" app_label = 'test_rename_multiple' project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel('Pony', fields=[ ('id', models.AutoField(primary_key=True)), ('name', models.CharField(max_length=20)), ]), migrations.CreateModel('Rider', fields=[ ('id', models.AutoField(primary_key=True)), ('pony', models.ForeignKey('test_rename_multiple.Pony', models.CASCADE)), ]), migrations.CreateModel('PonyRider', fields=[ ('id', models.AutoField(primary_key=True)), ('riders', models.ManyToManyField('Rider')), ]), migrations.RenameField(model_name='pony', old_name='name', 
                                   new_name='fancy_name'),
            migrations.RenameModel(old_name='Rider', new_name='Jockey'),
        ], atomic=connection.features.supports_atomic_references_rename)
        Pony = project_state.apps.get_model(app_label, 'Pony')
        Jockey = project_state.apps.get_model(app_label, 'Jockey')
        PonyRider = project_state.apps.get_model(app_label, 'PonyRider')
        # No "no such column" error means the column was renamed correctly.
        pony = Pony.objects.create(fancy_name='a good name')
        jockey = Jockey.objects.create(pony=pony)
        ponyrider = PonyRider.objects.create()
        ponyrider.riders.add(jockey)

    def test_add_field(self):
        """
        Tests the AddField operation.
        """
        # Test the state alteration
        operation = migrations.AddField(
            "Pony",
            "height",
            models.FloatField(null=True, default=5),
        )
        self.assertEqual(operation.describe(), "Add field height to Pony")
        self.assertEqual(operation.migration_name_fragment, 'pony_height')
        project_state, new_state = self.make_test_state("test_adfl", operation)
        self.assertEqual(len(new_state.models["test_adfl", "pony"].fields), 4)
        field = new_state.models['test_adfl', 'pony'].fields['height']
        self.assertEqual(field.default, 5)
        # Test the database alteration
        self.assertColumnNotExists("test_adfl_pony", "height")
        with connection.schema_editor() as editor:
            operation.database_forwards("test_adfl", editor, project_state, new_state)
        self.assertColumnExists("test_adfl_pony", "height")
        # And test reversal
        with connection.schema_editor() as editor:
            operation.database_backwards("test_adfl", editor, new_state, project_state)
        self.assertColumnNotExists("test_adfl_pony", "height")
        # And deconstruction
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "AddField")
        self.assertEqual(definition[1], [])
        self.assertEqual(sorted(definition[2]), ["field", "model_name", "name"])

    def test_add_charfield(self):
        """
        Tests the AddField operation on CharField.
        """
        project_state = self.set_up_test_model("test_adchfl")

        Pony = project_state.apps.get_model("test_adchfl", "Pony")
        pony = Pony.objects.create(weight=42)

        new_state = self.apply_operations("test_adchfl", project_state, [
            migrations.AddField(
                "Pony",
                "text",
                models.CharField(max_length=10, default="some text"),
            ),
            migrations.AddField(
                "Pony",
                "empty",
                models.CharField(max_length=10, default=""),
            ),
            # If not properly quoted digits would be interpreted as an int.
            migrations.AddField(
                "Pony",
                "digits",
                models.CharField(max_length=10, default="42"),
            ),
            # Manual quoting is fragile and could trip on quotes. Refs #xyz.
            migrations.AddField(
                "Pony",
                "quotes",
                models.CharField(max_length=10, default='"\'"'),
            ),
        ])

        Pony = new_state.apps.get_model("test_adchfl", "Pony")
        pony = Pony.objects.get(pk=pony.pk)
        self.assertEqual(pony.text, "some text")
        self.assertEqual(pony.empty, "")
        self.assertEqual(pony.digits, "42")
        self.assertEqual(pony.quotes, '"\'"')

    def test_add_textfield(self):
        """
        Tests the AddField operation on TextField.
        """
        project_state = self.set_up_test_model("test_adtxtfl")

        Pony = project_state.apps.get_model("test_adtxtfl", "Pony")
        pony = Pony.objects.create(weight=42)

        new_state = self.apply_operations("test_adtxtfl", project_state, [
            migrations.AddField(
                "Pony",
                "text",
                models.TextField(default="some text"),
            ),
            migrations.AddField(
                "Pony",
                "empty",
                models.TextField(default=""),
            ),
            # If not properly quoted digits would be interpreted as an int.
            migrations.AddField(
                "Pony",
                "digits",
                models.TextField(default="42"),
            ),
            # Manual quoting is fragile and could trip on quotes. Refs #xyz.
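            # A default containing both quote characters exercises the
            # backend's escaping end to end.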
migrations.AddField( "Pony", "quotes", models.TextField(default='"\'"'), ), ]) Pony = new_state.apps.get_model("test_adtxtfl", "Pony") pony = Pony.objects.get(pk=pony.pk) self.assertEqual(pony.text, "some text") self.assertEqual(pony.empty, "") self.assertEqual(pony.digits, "42") self.assertEqual(pony.quotes, '"\'"') def test_add_binaryfield(self): """ Tests the AddField operation on TextField/BinaryField. """ project_state = self.set_up_test_model("test_adbinfl") Pony = project_state.apps.get_model("test_adbinfl", "Pony") pony = Pony.objects.create(weight=42) new_state = self.apply_operations("test_adbinfl", project_state, [ migrations.AddField( "Pony", "blob", models.BinaryField(default=b"some text"), ), migrations.AddField( "Pony", "empty", models.BinaryField(default=b""), ), # If not properly quoted digits would be interpreted as an int. migrations.AddField( "Pony", "digits", models.BinaryField(default=b"42"), ), # Manual quoting is fragile and could trip on quotes. Refs #xyz. migrations.AddField( "Pony", "quotes", models.BinaryField(default=b'"\'"'), ), ]) Pony = new_state.apps.get_model("test_adbinfl", "Pony") pony = Pony.objects.get(pk=pony.pk) # SQLite returns buffer/memoryview, cast to bytes for checking. self.assertEqual(bytes(pony.blob), b"some text") self.assertEqual(bytes(pony.empty), b"") self.assertEqual(bytes(pony.digits), b"42") self.assertEqual(bytes(pony.quotes), b'"\'"') def test_column_name_quoting(self): """ Column names that are SQL keywords shouldn't cause problems when used in migrations (#22168). """ project_state = self.set_up_test_model("test_regr22168") operation = migrations.AddField( "Pony", "order", models.IntegerField(default=0), ) new_state = project_state.clone() operation.state_forwards("test_regr22168", new_state) with connection.schema_editor() as editor: operation.database_forwards("test_regr22168", editor, project_state, new_state) self.assertColumnExists("test_regr22168_pony", "order") def test_add_field_preserve_default(self): """ Tests the AddField operation's state alteration when preserve_default = False. """ project_state = self.set_up_test_model("test_adflpd") # Test the state alteration operation = migrations.AddField( "Pony", "height", models.FloatField(null=True, default=4), preserve_default=False, ) new_state = project_state.clone() operation.state_forwards("test_adflpd", new_state) self.assertEqual(len(new_state.models["test_adflpd", "pony"].fields), 4) field = new_state.models['test_adflpd', 'pony'].fields['height'] self.assertEqual(field.default, models.NOT_PROVIDED) # Test the database alteration project_state.apps.get_model("test_adflpd", "pony").objects.create( weight=4, ) self.assertColumnNotExists("test_adflpd_pony", "height") with connection.schema_editor() as editor: operation.database_forwards("test_adflpd", editor, project_state, new_state) self.assertColumnExists("test_adflpd_pony", "height") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AddField") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["field", "model_name", "name", "preserve_default"]) def test_add_field_m2m(self): """ Tests the AddField operation with a ManyToManyField. 
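        Adding an M2M should create the auto-generated through table rather
        than a column on the model's own table.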
""" project_state = self.set_up_test_model("test_adflmm", second_model=True) # Test the state alteration operation = migrations.AddField("Pony", "stables", models.ManyToManyField("Stable", related_name="ponies")) new_state = project_state.clone() operation.state_forwards("test_adflmm", new_state) self.assertEqual(len(new_state.models["test_adflmm", "pony"].fields), 4) # Test the database alteration self.assertTableNotExists("test_adflmm_pony_stables") with connection.schema_editor() as editor: operation.database_forwards("test_adflmm", editor, project_state, new_state) self.assertTableExists("test_adflmm_pony_stables") self.assertColumnNotExists("test_adflmm_pony", "stables") # Make sure the M2M field actually works with atomic(): Pony = new_state.apps.get_model("test_adflmm", "Pony") p = Pony.objects.create(pink=False, weight=4.55) p.stables.create() self.assertEqual(p.stables.count(), 1) p.stables.all().delete() # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_adflmm", editor, new_state, project_state) self.assertTableNotExists("test_adflmm_pony_stables") def test_alter_field_m2m(self): project_state = self.set_up_test_model("test_alflmm", second_model=True) project_state = self.apply_operations("test_alflmm", project_state, operations=[ migrations.AddField("Pony", "stables", models.ManyToManyField("Stable", related_name="ponies")) ]) Pony = project_state.apps.get_model("test_alflmm", "Pony") self.assertFalse(Pony._meta.get_field('stables').blank) project_state = self.apply_operations("test_alflmm", project_state, operations=[ migrations.AlterField( "Pony", "stables", models.ManyToManyField(to="Stable", related_name="ponies", blank=True) ) ]) Pony = project_state.apps.get_model("test_alflmm", "Pony") self.assertTrue(Pony._meta.get_field('stables').blank) def test_repoint_field_m2m(self): project_state = self.set_up_test_model("test_alflmm", second_model=True, third_model=True) project_state = self.apply_operations("test_alflmm", project_state, operations=[ migrations.AddField("Pony", "places", models.ManyToManyField("Stable", related_name="ponies")) ]) Pony = project_state.apps.get_model("test_alflmm", "Pony") project_state = self.apply_operations("test_alflmm", project_state, operations=[ migrations.AlterField("Pony", "places", models.ManyToManyField(to="Van", related_name="ponies")) ]) # Ensure the new field actually works Pony = project_state.apps.get_model("test_alflmm", "Pony") p = Pony.objects.create(pink=False, weight=4.55) p.places.create() self.assertEqual(p.places.count(), 1) p.places.all().delete() def test_remove_field_m2m(self): project_state = self.set_up_test_model("test_rmflmm", second_model=True) project_state = self.apply_operations("test_rmflmm", project_state, operations=[ migrations.AddField("Pony", "stables", models.ManyToManyField("Stable", related_name="ponies")) ]) self.assertTableExists("test_rmflmm_pony_stables") with_field_state = project_state.clone() operations = [migrations.RemoveField("Pony", "stables")] project_state = self.apply_operations("test_rmflmm", project_state, operations=operations) self.assertTableNotExists("test_rmflmm_pony_stables") # And test reversal self.unapply_operations("test_rmflmm", with_field_state, operations=operations) self.assertTableExists("test_rmflmm_pony_stables") def test_remove_field_m2m_with_through(self): project_state = self.set_up_test_model("test_rmflmmwt", second_model=True) self.assertTableNotExists("test_rmflmmwt_ponystables") project_state = 
self.apply_operations("test_rmflmmwt", project_state, operations=[ migrations.CreateModel("PonyStables", fields=[ ("pony", models.ForeignKey('test_rmflmmwt.Pony', models.CASCADE)), ("stable", models.ForeignKey('test_rmflmmwt.Stable', models.CASCADE)), ]), migrations.AddField( "Pony", "stables", models.ManyToManyField("Stable", related_name="ponies", through='test_rmflmmwt.PonyStables') ) ]) self.assertTableExists("test_rmflmmwt_ponystables") operations = [migrations.RemoveField("Pony", "stables"), migrations.DeleteModel("PonyStables")] self.apply_operations("test_rmflmmwt", project_state, operations=operations) def test_remove_field(self): """ Tests the RemoveField operation. """ project_state = self.set_up_test_model("test_rmfl") # Test the state alteration operation = migrations.RemoveField("Pony", "pink") self.assertEqual(operation.describe(), "Remove field pink from Pony") self.assertEqual(operation.migration_name_fragment, 'remove_pony_pink') new_state = project_state.clone() operation.state_forwards("test_rmfl", new_state) self.assertEqual(len(new_state.models["test_rmfl", "pony"].fields), 2) # Test the database alteration self.assertColumnExists("test_rmfl_pony", "pink") with connection.schema_editor() as editor: operation.database_forwards("test_rmfl", editor, project_state, new_state) self.assertColumnNotExists("test_rmfl_pony", "pink") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_rmfl", editor, new_state, project_state) self.assertColumnExists("test_rmfl_pony", "pink") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RemoveField") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': "Pony", 'name': 'pink'}) def test_remove_fk(self): """ Tests the RemoveField operation on a foreign key. """ project_state = self.set_up_test_model("test_rfk", related_model=True) self.assertColumnExists("test_rfk_rider", "pony_id") operation = migrations.RemoveField("Rider", "pony") new_state = project_state.clone() operation.state_forwards("test_rfk", new_state) with connection.schema_editor() as editor: operation.database_forwards("test_rfk", editor, project_state, new_state) self.assertColumnNotExists("test_rfk_rider", "pony_id") with connection.schema_editor() as editor: operation.database_backwards("test_rfk", editor, new_state, project_state) self.assertColumnExists("test_rfk_rider", "pony_id") def test_alter_model_table(self): """ Tests the AlterModelTable operation. 
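        The table should be renamed forwards and restored to its old name on
        reversal.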
""" project_state = self.set_up_test_model("test_almota") # Test the state alteration operation = migrations.AlterModelTable("Pony", "test_almota_pony_2") self.assertEqual(operation.describe(), "Rename table for Pony to test_almota_pony_2") self.assertEqual(operation.migration_name_fragment, 'alter_pony_table') new_state = project_state.clone() operation.state_forwards("test_almota", new_state) self.assertEqual(new_state.models["test_almota", "pony"].options["db_table"], "test_almota_pony_2") # Test the database alteration self.assertTableExists("test_almota_pony") self.assertTableNotExists("test_almota_pony_2") with connection.schema_editor() as editor: operation.database_forwards("test_almota", editor, project_state, new_state) self.assertTableNotExists("test_almota_pony") self.assertTableExists("test_almota_pony_2") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_almota", editor, new_state, project_state) self.assertTableExists("test_almota_pony") self.assertTableNotExists("test_almota_pony_2") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterModelTable") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'name': "Pony", 'table': "test_almota_pony_2"}) def test_alter_model_table_none(self): """ Tests the AlterModelTable operation if the table name is set to None. """ operation = migrations.AlterModelTable("Pony", None) self.assertEqual(operation.describe(), "Rename table for Pony to (default)") def test_alter_model_table_noop(self): """ Tests the AlterModelTable operation if the table name is not changed. """ project_state = self.set_up_test_model("test_almota") # Test the state alteration operation = migrations.AlterModelTable("Pony", "test_almota_pony") new_state = project_state.clone() operation.state_forwards("test_almota", new_state) self.assertEqual(new_state.models["test_almota", "pony"].options["db_table"], "test_almota_pony") # Test the database alteration self.assertTableExists("test_almota_pony") with connection.schema_editor() as editor: operation.database_forwards("test_almota", editor, project_state, new_state) self.assertTableExists("test_almota_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_almota", editor, new_state, project_state) self.assertTableExists("test_almota_pony") def test_alter_model_table_m2m(self): """ AlterModelTable should rename auto-generated M2M tables. """ app_label = "test_talflmltlm2m" pony_db_table = 'pony_foo' project_state = self.set_up_test_model(app_label, second_model=True, db_table=pony_db_table) # Add the M2M field first_state = project_state.clone() operation = migrations.AddField("Pony", "stables", models.ManyToManyField("Stable")) operation.state_forwards(app_label, first_state) with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, first_state) original_m2m_table = "%s_%s" % (pony_db_table, "stables") new_m2m_table = "%s_%s" % (app_label, "pony_stables") self.assertTableExists(original_m2m_table) self.assertTableNotExists(new_m2m_table) # Rename the Pony db_table which should also rename the m2m table. 
second_state = first_state.clone() operation = migrations.AlterModelTable(name='pony', table=None) operation.state_forwards(app_label, second_state) atomic_rename = connection.features.supports_atomic_references_rename with connection.schema_editor(atomic=atomic_rename) as editor: operation.database_forwards(app_label, editor, first_state, second_state) self.assertTableExists(new_m2m_table) self.assertTableNotExists(original_m2m_table) # And test reversal with connection.schema_editor(atomic=atomic_rename) as editor: operation.database_backwards(app_label, editor, second_state, first_state) self.assertTableExists(original_m2m_table) self.assertTableNotExists(new_m2m_table) def test_alter_field(self): """ Tests the AlterField operation. """ project_state = self.set_up_test_model("test_alfl") # Test the state alteration operation = migrations.AlterField("Pony", "pink", models.IntegerField(null=True)) self.assertEqual(operation.describe(), "Alter field pink on Pony") self.assertEqual(operation.migration_name_fragment, 'alter_pony_pink') new_state = project_state.clone() operation.state_forwards("test_alfl", new_state) self.assertIs(project_state.models['test_alfl', 'pony'].fields['pink'].null, False) self.assertIs(new_state.models['test_alfl', 'pony'].fields['pink'].null, True) # Test the database alteration self.assertColumnNotNull("test_alfl_pony", "pink") with connection.schema_editor() as editor: operation.database_forwards("test_alfl", editor, project_state, new_state) self.assertColumnNull("test_alfl_pony", "pink") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alfl", editor, new_state, project_state) self.assertColumnNotNull("test_alfl_pony", "pink") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterField") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["field", "model_name", "name"]) def test_alter_field_add_db_column_noop(self): """ AlterField operation is a noop when adding only a db_column and the column name is not changed. 
""" app_label = 'test_afadbn' project_state = self.set_up_test_model(app_label, related_model=True) pony_table = '%s_pony' % app_label new_state = project_state.clone() operation = migrations.AlterField('Pony', 'weight', models.FloatField(db_column='weight')) operation.state_forwards(app_label, new_state) self.assertIsNone( project_state.models[app_label, 'pony'].fields['weight'].db_column, ) self.assertEqual( new_state.models[app_label, 'pony'].fields['weight'].db_column, 'weight', ) self.assertColumnExists(pony_table, 'weight') with connection.schema_editor() as editor: with self.assertNumQueries(0): operation.database_forwards(app_label, editor, project_state, new_state) self.assertColumnExists(pony_table, 'weight') with connection.schema_editor() as editor: with self.assertNumQueries(0): operation.database_backwards(app_label, editor, new_state, project_state) self.assertColumnExists(pony_table, 'weight') rider_table = '%s_rider' % app_label new_state = project_state.clone() operation = migrations.AlterField( 'Rider', 'pony', models.ForeignKey('Pony', models.CASCADE, db_column='pony_id'), ) operation.state_forwards(app_label, new_state) self.assertIsNone( project_state.models[app_label, 'rider'].fields['pony'].db_column, ) self.assertIs( new_state.models[app_label, 'rider'].fields['pony'].db_column, 'pony_id', ) self.assertColumnExists(rider_table, 'pony_id') with connection.schema_editor() as editor: with self.assertNumQueries(0): operation.database_forwards(app_label, editor, project_state, new_state) self.assertColumnExists(rider_table, 'pony_id') with connection.schema_editor() as editor: with self.assertNumQueries(0): operation.database_forwards(app_label, editor, new_state, project_state) self.assertColumnExists(rider_table, 'pony_id') def test_alter_field_pk(self): """ Tests the AlterField operation on primary keys (for things like PostgreSQL's SERIAL weirdness) """ project_state = self.set_up_test_model("test_alflpk") # Test the state alteration operation = migrations.AlterField("Pony", "id", models.IntegerField(primary_key=True)) new_state = project_state.clone() operation.state_forwards("test_alflpk", new_state) self.assertIsInstance( project_state.models['test_alflpk', 'pony'].fields['id'], models.AutoField, ) self.assertIsInstance( new_state.models['test_alflpk', 'pony'].fields['id'], models.IntegerField, ) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_alflpk", editor, project_state, new_state) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alflpk", editor, new_state, project_state) @skipUnlessDBFeature('supports_foreign_keys') def test_alter_field_pk_fk(self): """ Tests the AlterField operation on primary keys changes any FKs pointing to it. 
""" project_state = self.set_up_test_model("test_alflpkfk", related_model=True) project_state = self.apply_operations('test_alflpkfk', project_state, [ migrations.CreateModel('Stable', fields=[ ('ponies', models.ManyToManyField('Pony')), ]), migrations.AddField( 'Pony', 'stables', models.ManyToManyField('Stable'), ), ]) # Test the state alteration operation = migrations.AlterField("Pony", "id", models.FloatField(primary_key=True)) new_state = project_state.clone() operation.state_forwards("test_alflpkfk", new_state) self.assertIsInstance( project_state.models['test_alflpkfk', 'pony'].fields['id'], models.AutoField, ) self.assertIsInstance( new_state.models['test_alflpkfk', 'pony'].fields['id'], models.FloatField, ) def assertIdTypeEqualsFkType(): with connection.cursor() as cursor: id_type, id_null = [ (c.type_code, c.null_ok) for c in connection.introspection.get_table_description(cursor, "test_alflpkfk_pony") if c.name == "id" ][0] fk_type, fk_null = [ (c.type_code, c.null_ok) for c in connection.introspection.get_table_description(cursor, "test_alflpkfk_rider") if c.name == "pony_id" ][0] m2m_fk_type, m2m_fk_null = [ (c.type_code, c.null_ok) for c in connection.introspection.get_table_description( cursor, 'test_alflpkfk_pony_stables', ) if c.name == 'pony_id' ][0] remote_m2m_fk_type, remote_m2m_fk_null = [ (c.type_code, c.null_ok) for c in connection.introspection.get_table_description( cursor, 'test_alflpkfk_stable_ponies', ) if c.name == 'pony_id' ][0] self.assertEqual(id_type, fk_type) self.assertEqual(id_type, m2m_fk_type) self.assertEqual(id_type, remote_m2m_fk_type) self.assertEqual(id_null, fk_null) self.assertEqual(id_null, m2m_fk_null) self.assertEqual(id_null, remote_m2m_fk_null) assertIdTypeEqualsFkType() # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_alflpkfk", editor, project_state, new_state) assertIdTypeEqualsFkType() # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alflpkfk", editor, new_state, project_state) assertIdTypeEqualsFkType() def test_alter_field_pk_mti_fk(self): app_label = 'test_alflpkmtifk' project_state = self.set_up_test_model(app_label, mti_model=True) project_state = self.apply_operations(app_label, project_state, [ migrations.CreateModel('ShetlandRider', fields=[ ( 'pony', models.ForeignKey(f'{app_label}.ShetlandPony', models.CASCADE), ), ]), ]) operation = migrations.AlterField( 'Pony', 'id', models.BigAutoField(primary_key=True), ) new_state = project_state.clone() operation.state_forwards(app_label, new_state) self.assertIsInstance( new_state.models[app_label, 'pony'].fields['id'], models.BigAutoField, ) def _get_column_id_type(cursor, table, column): return [ c.type_code for c in connection.introspection.get_table_description( cursor, f'{app_label}_{table}', ) if c.name == column ][0] def assertIdTypeEqualsMTIFkType(): with connection.cursor() as cursor: parent_id_type = _get_column_id_type(cursor, 'pony', 'id') child_id_type = _get_column_id_type(cursor, 'shetlandpony', 'pony_ptr_id') mti_id_type = _get_column_id_type(cursor, 'shetlandrider', 'pony_id') self.assertEqual(parent_id_type, child_id_type) self.assertEqual(parent_id_type, mti_id_type) assertIdTypeEqualsMTIFkType() # Alter primary key. 
with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) assertIdTypeEqualsMTIFkType() if connection.features.supports_foreign_keys: self.assertFKExists( f'{app_label}_shetlandpony', ['pony_ptr_id'], (f'{app_label}_pony', 'id'), ) self.assertFKExists( f'{app_label}_shetlandrider', ['pony_id'], (f'{app_label}_shetlandpony', 'pony_ptr_id'), ) # Reversal. with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) assertIdTypeEqualsMTIFkType() if connection.features.supports_foreign_keys: self.assertFKExists( f'{app_label}_shetlandpony', ['pony_ptr_id'], (f'{app_label}_pony', 'id'), ) self.assertFKExists( f'{app_label}_shetlandrider', ['pony_id'], (f'{app_label}_shetlandpony', 'pony_ptr_id'), ) @skipUnlessDBFeature('supports_foreign_keys') def test_alter_field_reloads_state_on_fk_with_to_field_target_type_change(self): app_label = 'test_alflrsfkwtflttc' project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel('Rider', fields=[ ('id', models.AutoField(primary_key=True)), ('code', models.IntegerField(unique=True)), ]), migrations.CreateModel('Pony', fields=[ ('id', models.AutoField(primary_key=True)), ('rider', models.ForeignKey('%s.Rider' % app_label, models.CASCADE, to_field='code')), ]), ]) operation = migrations.AlterField( 'Rider', 'code', models.CharField(max_length=100, unique=True), ) self.apply_operations(app_label, project_state, operations=[operation]) id_type, id_null = [ (c.type_code, c.null_ok) for c in self.get_table_description('%s_rider' % app_label) if c.name == 'code' ][0] fk_type, fk_null = [ (c.type_code, c.null_ok) for c in self.get_table_description('%s_pony' % app_label) if c.name == 'rider_id' ][0] self.assertEqual(id_type, fk_type) self.assertEqual(id_null, fk_null) @skipUnlessDBFeature('supports_foreign_keys') def test_alter_field_reloads_state_on_fk_with_to_field_related_name_target_type_change(self): app_label = 'test_alflrsfkwtflrnttc' project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel('Rider', fields=[ ('id', models.AutoField(primary_key=True)), ('code', models.PositiveIntegerField(unique=True)), ]), migrations.CreateModel('Pony', fields=[ ('id', models.AutoField(primary_key=True)), ('rider', models.ForeignKey( '%s.Rider' % app_label, models.CASCADE, to_field='code', related_name='+', )), ]), ]) operation = migrations.AlterField( 'Rider', 'code', models.CharField(max_length=100, unique=True), ) self.apply_operations(app_label, project_state, operations=[operation]) def test_alter_field_reloads_state_on_fk_target_changes(self): """ If AlterField doesn't reload state appropriately, the second AlterField crashes on MySQL due to not dropping the PonyRider.pony foreign key constraint before modifying the column. 
""" app_label = 'alter_alter_field_reloads_state_on_fk_target_changes' project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel('Rider', fields=[ ('id', models.CharField(primary_key=True, max_length=100)), ]), migrations.CreateModel('Pony', fields=[ ('id', models.CharField(primary_key=True, max_length=100)), ('rider', models.ForeignKey('%s.Rider' % app_label, models.CASCADE)), ]), migrations.CreateModel('PonyRider', fields=[ ('id', models.AutoField(primary_key=True)), ('pony', models.ForeignKey('%s.Pony' % app_label, models.CASCADE)), ]), ]) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.AlterField('Rider', 'id', models.CharField(primary_key=True, max_length=99)), migrations.AlterField('Pony', 'id', models.CharField(primary_key=True, max_length=99)), ]) def test_alter_field_reloads_state_on_fk_with_to_field_target_changes(self): """ If AlterField doesn't reload state appropriately, the second AlterField crashes on MySQL due to not dropping the PonyRider.pony foreign key constraint before modifying the column. """ app_label = 'alter_alter_field_reloads_state_on_fk_with_to_field_target_changes' project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel('Rider', fields=[ ('id', models.CharField(primary_key=True, max_length=100)), ('slug', models.CharField(unique=True, max_length=100)), ]), migrations.CreateModel('Pony', fields=[ ('id', models.CharField(primary_key=True, max_length=100)), ('rider', models.ForeignKey('%s.Rider' % app_label, models.CASCADE, to_field='slug')), ('slug', models.CharField(unique=True, max_length=100)), ]), migrations.CreateModel('PonyRider', fields=[ ('id', models.AutoField(primary_key=True)), ('pony', models.ForeignKey('%s.Pony' % app_label, models.CASCADE, to_field='slug')), ]), ]) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.AlterField('Rider', 'slug', models.CharField(unique=True, max_length=99)), migrations.AlterField('Pony', 'slug', models.CharField(unique=True, max_length=99)), ]) def test_rename_field_reloads_state_on_fk_target_changes(self): """ If RenameField doesn't reload state appropriately, the AlterField crashes on MySQL due to not dropping the PonyRider.pony foreign key constraint before modifying the column. """ app_label = 'alter_rename_field_reloads_state_on_fk_target_changes' project_state = self.apply_operations(app_label, ProjectState(), operations=[ migrations.CreateModel('Rider', fields=[ ('id', models.CharField(primary_key=True, max_length=100)), ]), migrations.CreateModel('Pony', fields=[ ('id', models.CharField(primary_key=True, max_length=100)), ('rider', models.ForeignKey('%s.Rider' % app_label, models.CASCADE)), ]), migrations.CreateModel('PonyRider', fields=[ ('id', models.AutoField(primary_key=True)), ('pony', models.ForeignKey('%s.Pony' % app_label, models.CASCADE)), ]), ]) project_state = self.apply_operations(app_label, project_state, operations=[ migrations.RenameField('Rider', 'id', 'id2'), migrations.AlterField('Pony', 'id', models.CharField(primary_key=True, max_length=99)), ], atomic=connection.features.supports_atomic_references_rename) def test_rename_field(self): """ Tests the RenameField operation. 
""" project_state = self.set_up_test_model("test_rnfl", unique_together=True, index_together=True) # Test the state alteration operation = migrations.RenameField("Pony", "pink", "blue") self.assertEqual(operation.describe(), "Rename field pink on Pony to blue") self.assertEqual(operation.migration_name_fragment, 'rename_pink_pony_blue') new_state = project_state.clone() operation.state_forwards("test_rnfl", new_state) self.assertIn("blue", new_state.models["test_rnfl", "pony"].fields) self.assertNotIn("pink", new_state.models["test_rnfl", "pony"].fields) # Make sure the unique_together has the renamed column too self.assertIn("blue", new_state.models["test_rnfl", "pony"].options['unique_together'][0]) self.assertNotIn("pink", new_state.models["test_rnfl", "pony"].options['unique_together'][0]) # Make sure the index_together has the renamed column too self.assertIn("blue", new_state.models["test_rnfl", "pony"].options['index_together'][0]) self.assertNotIn("pink", new_state.models["test_rnfl", "pony"].options['index_together'][0]) # Test the database alteration self.assertColumnExists("test_rnfl_pony", "pink") self.assertColumnNotExists("test_rnfl_pony", "blue") with connection.schema_editor() as editor: operation.database_forwards("test_rnfl", editor, project_state, new_state) self.assertColumnExists("test_rnfl_pony", "blue") self.assertColumnNotExists("test_rnfl_pony", "pink") # Ensure the unique constraint has been ported over with connection.cursor() as cursor: cursor.execute("INSERT INTO test_rnfl_pony (blue, weight) VALUES (1, 1)") with self.assertRaises(IntegrityError): with atomic(): cursor.execute("INSERT INTO test_rnfl_pony (blue, weight) VALUES (1, 1)") cursor.execute("DELETE FROM test_rnfl_pony") # Ensure the index constraint has been ported over self.assertIndexExists("test_rnfl_pony", ["weight", "blue"]) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_rnfl", editor, new_state, project_state) self.assertColumnExists("test_rnfl_pony", "pink") self.assertColumnNotExists("test_rnfl_pony", "blue") # Ensure the index constraint has been reset self.assertIndexExists("test_rnfl_pony", ["weight", "pink"]) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RenameField") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': "Pony", 'old_name': "pink", 'new_name': "blue"}) def test_rename_field_with_db_column(self): project_state = self.apply_operations('test_rfwdbc', ProjectState(), operations=[ migrations.CreateModel('Pony', fields=[ ('id', models.AutoField(primary_key=True)), ('field', models.IntegerField(db_column='db_field')), ('fk_field', models.ForeignKey( 'Pony', models.CASCADE, db_column='db_fk_field', )), ]), ]) new_state = project_state.clone() operation = migrations.RenameField('Pony', 'field', 'renamed_field') operation.state_forwards('test_rfwdbc', new_state) self.assertIn('renamed_field', new_state.models['test_rfwdbc', 'pony'].fields) self.assertNotIn('field', new_state.models['test_rfwdbc', 'pony'].fields) self.assertColumnExists('test_rfwdbc_pony', 'db_field') with connection.schema_editor() as editor: with self.assertNumQueries(0): operation.database_forwards('test_rfwdbc', editor, project_state, new_state) self.assertColumnExists('test_rfwdbc_pony', 'db_field') with connection.schema_editor() as editor: with self.assertNumQueries(0): operation.database_backwards('test_rfwdbc', editor, new_state, project_state) 
self.assertColumnExists('test_rfwdbc_pony', 'db_field') new_state = project_state.clone() operation = migrations.RenameField('Pony', 'fk_field', 'renamed_fk_field') operation.state_forwards('test_rfwdbc', new_state) self.assertIn('renamed_fk_field', new_state.models['test_rfwdbc', 'pony'].fields) self.assertNotIn('fk_field', new_state.models['test_rfwdbc', 'pony'].fields) self.assertColumnExists('test_rfwdbc_pony', 'db_fk_field') with connection.schema_editor() as editor: with self.assertNumQueries(0): operation.database_forwards('test_rfwdbc', editor, project_state, new_state) self.assertColumnExists('test_rfwdbc_pony', 'db_fk_field') with connection.schema_editor() as editor: with self.assertNumQueries(0): operation.database_backwards('test_rfwdbc', editor, new_state, project_state) self.assertColumnExists('test_rfwdbc_pony', 'db_fk_field') def test_rename_field_case(self): project_state = self.apply_operations('test_rfmx', ProjectState(), operations=[ migrations.CreateModel('Pony', fields=[ ('id', models.AutoField(primary_key=True)), ('field', models.IntegerField()), ]), ]) new_state = project_state.clone() operation = migrations.RenameField('Pony', 'field', 'FiElD') operation.state_forwards('test_rfmx', new_state) self.assertIn('FiElD', new_state.models['test_rfmx', 'pony'].fields) self.assertColumnExists('test_rfmx_pony', 'field') with connection.schema_editor() as editor: operation.database_forwards('test_rfmx', editor, project_state, new_state) self.assertColumnExists( 'test_rfmx_pony', connection.introspection.identifier_converter('FiElD'), ) with connection.schema_editor() as editor: operation.database_backwards('test_rfmx', editor, new_state, project_state) self.assertColumnExists('test_rfmx_pony', 'field') def test_rename_missing_field(self): state = ProjectState() state.add_model(ModelState('app', 'model', [])) with self.assertRaisesMessage(FieldDoesNotExist, "app.model has no field named 'field'"): migrations.RenameField('model', 'field', 'new_field').state_forwards('app', state) def test_rename_referenced_field_state_forward(self): state = ProjectState() state.add_model(ModelState('app', 'Model', [ ('id', models.AutoField(primary_key=True)), ('field', models.IntegerField(unique=True)), ])) state.add_model(ModelState('app', 'OtherModel', [ ('id', models.AutoField(primary_key=True)), ('fk', models.ForeignKey('Model', models.CASCADE, to_field='field')), ('fo', models.ForeignObject('Model', models.CASCADE, from_fields=('fk',), to_fields=('field',))), ])) operation = migrations.RenameField('Model', 'field', 'renamed') new_state = state.clone() operation.state_forwards('app', new_state) self.assertEqual(new_state.models['app', 'othermodel'].fields['fk'].remote_field.field_name, 'renamed') self.assertEqual(new_state.models['app', 'othermodel'].fields['fk'].from_fields, ['self']) self.assertEqual(new_state.models['app', 'othermodel'].fields['fk'].to_fields, ('renamed',)) self.assertEqual(new_state.models['app', 'othermodel'].fields['fo'].from_fields, ('fk',)) self.assertEqual(new_state.models['app', 'othermodel'].fields['fo'].to_fields, ('renamed',)) operation = migrations.RenameField('OtherModel', 'fk', 'renamed_fk') new_state = state.clone() operation.state_forwards('app', new_state) self.assertEqual(new_state.models['app', 'othermodel'].fields['renamed_fk'].remote_field.field_name, 'renamed') self.assertEqual(new_state.models['app', 'othermodel'].fields['renamed_fk'].from_fields, ('self',)) self.assertEqual(new_state.models['app', 'othermodel'].fields['renamed_fk'].to_fields, 
('renamed',)) self.assertEqual(new_state.models['app', 'othermodel'].fields['fo'].from_fields, ('renamed_fk',)) self.assertEqual(new_state.models['app', 'othermodel'].fields['fo'].to_fields, ('renamed',)) def test_alter_unique_together(self): """ Tests the AlterUniqueTogether operation. """ project_state = self.set_up_test_model("test_alunto") # Test the state alteration operation = migrations.AlterUniqueTogether("Pony", [("pink", "weight")]) self.assertEqual(operation.describe(), "Alter unique_together for Pony (1 constraint(s))") self.assertEqual( operation.migration_name_fragment, 'alter_pony_unique_together', ) new_state = project_state.clone() operation.state_forwards("test_alunto", new_state) self.assertEqual(len(project_state.models["test_alunto", "pony"].options.get("unique_together", set())), 0) self.assertEqual(len(new_state.models["test_alunto", "pony"].options.get("unique_together", set())), 1) # Make sure we can insert duplicate rows with connection.cursor() as cursor: cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)") cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)") cursor.execute("DELETE FROM test_alunto_pony") # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_alunto", editor, project_state, new_state) cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)") with self.assertRaises(IntegrityError): with atomic(): cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)") cursor.execute("DELETE FROM test_alunto_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alunto", editor, new_state, project_state) cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)") cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)") cursor.execute("DELETE FROM test_alunto_pony") # Test flat unique_together operation = migrations.AlterUniqueTogether("Pony", ("pink", "weight")) operation.state_forwards("test_alunto", new_state) self.assertEqual(len(new_state.models["test_alunto", "pony"].options.get("unique_together", set())), 1) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterUniqueTogether") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'name': "Pony", 'unique_together': {("pink", "weight")}}) def test_alter_unique_together_remove(self): operation = migrations.AlterUniqueTogether("Pony", None) self.assertEqual(operation.describe(), "Alter unique_together for Pony (0 constraint(s))") def test_add_index(self): """ Test the AddIndex operation. """ project_state = self.set_up_test_model("test_adin") msg = ( "Indexes passed to AddIndex operations require a name argument. " "<Index: fields=['pink']> doesn't have one." 
) with self.assertRaisesMessage(ValueError, msg): migrations.AddIndex("Pony", models.Index(fields=["pink"])) index = models.Index(fields=["pink"], name="test_adin_pony_pink_idx") operation = migrations.AddIndex("Pony", index) self.assertEqual(operation.describe(), "Create index test_adin_pony_pink_idx on field(s) pink of model Pony") self.assertEqual( operation.migration_name_fragment, 'pony_test_adin_pony_pink_idx', ) new_state = project_state.clone() operation.state_forwards("test_adin", new_state) # Test the database alteration self.assertEqual(len(new_state.models["test_adin", "pony"].options['indexes']), 1) self.assertIndexNotExists("test_adin_pony", ["pink"]) with connection.schema_editor() as editor: operation.database_forwards("test_adin", editor, project_state, new_state) self.assertIndexExists("test_adin_pony", ["pink"]) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_adin", editor, new_state, project_state) self.assertIndexNotExists("test_adin_pony", ["pink"]) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AddIndex") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': "Pony", 'index': index}) def test_remove_index(self): """ Test the RemoveIndex operation. """ project_state = self.set_up_test_model("test_rmin", multicol_index=True) self.assertTableExists("test_rmin_pony") self.assertIndexExists("test_rmin_pony", ["pink", "weight"]) operation = migrations.RemoveIndex("Pony", "pony_test_idx") self.assertEqual(operation.describe(), "Remove index pony_test_idx from Pony") self.assertEqual( operation.migration_name_fragment, 'remove_pony_pony_test_idx', ) new_state = project_state.clone() operation.state_forwards("test_rmin", new_state) # Test the state alteration self.assertEqual(len(new_state.models["test_rmin", "pony"].options['indexes']), 0) self.assertIndexExists("test_rmin_pony", ["pink", "weight"]) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_rmin", editor, project_state, new_state) self.assertIndexNotExists("test_rmin_pony", ["pink", "weight"]) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_rmin", editor, new_state, project_state) self.assertIndexExists("test_rmin_pony", ["pink", "weight"]) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RemoveIndex") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': "Pony", 'name': "pony_test_idx"}) # Also test a field dropped with index - sqlite remake issue operations = [ migrations.RemoveIndex("Pony", "pony_test_idx"), migrations.RemoveField("Pony", "pink"), ] self.assertColumnExists("test_rmin_pony", "pink") self.assertIndexExists("test_rmin_pony", ["pink", "weight"]) # Test database alteration new_state = project_state.clone() self.apply_operations('test_rmin', new_state, operations=operations) self.assertColumnNotExists("test_rmin_pony", "pink") self.assertIndexNotExists("test_rmin_pony", ["pink", "weight"]) # And test reversal self.unapply_operations("test_rmin", project_state, operations=operations) self.assertIndexExists("test_rmin_pony", ["pink", "weight"]) def test_add_index_state_forwards(self): project_state = self.set_up_test_model('test_adinsf') index = models.Index(fields=['pink'], name='test_adinsf_pony_pink_idx') old_model = project_state.apps.get_model('test_adinsf', 'Pony') new_state = project_state.clone() 
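# Applying state_forwards() to the clone should re-render the Pony model; assertIsNot() below checks that a fresh model class is produced rather than the cached one being reused.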
operation = migrations.AddIndex('Pony', index) operation.state_forwards('test_adinsf', new_state) new_model = new_state.apps.get_model('test_adinsf', 'Pony') self.assertIsNot(old_model, new_model) def test_remove_index_state_forwards(self): project_state = self.set_up_test_model('test_rminsf') index = models.Index(fields=['pink'], name='test_rminsf_pony_pink_idx') migrations.AddIndex('Pony', index).state_forwards('test_rminsf', project_state) old_model = project_state.apps.get_model('test_rminsf', 'Pony') new_state = project_state.clone() operation = migrations.RemoveIndex('Pony', 'test_rminsf_pony_pink_idx') operation.state_forwards('test_rminsf', new_state) new_model = new_state.apps.get_model('test_rminsf', 'Pony') self.assertIsNot(old_model, new_model) @skipUnlessDBFeature('supports_expression_indexes') def test_add_func_index(self): app_label = 'test_addfuncin' index_name = f'{app_label}_pony_abs_idx' table_name = f'{app_label}_pony' project_state = self.set_up_test_model(app_label) index = models.Index(Abs('weight'), name=index_name) operation = migrations.AddIndex('Pony', index) self.assertEqual( operation.describe(), 'Create index test_addfuncin_pony_abs_idx on Abs(F(weight)) on model Pony', ) self.assertEqual( operation.migration_name_fragment, 'pony_test_addfuncin_pony_abs_idx', ) new_state = project_state.clone() operation.state_forwards(app_label, new_state) self.assertEqual(len(new_state.models[app_label, 'pony'].options['indexes']), 1) self.assertIndexNameNotExists(table_name, index_name) # Add index. with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) self.assertIndexNameExists(table_name, index_name) # Reversal. with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) self.assertIndexNameNotExists(table_name, index_name) # Deconstruction. definition = operation.deconstruct() self.assertEqual(definition[0], 'AddIndex') self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': 'Pony', 'index': index}) @skipUnlessDBFeature('supports_expression_indexes') def test_remove_func_index(self): app_label = 'test_rmfuncin' index_name = f'{app_label}_pony_abs_idx' table_name = f'{app_label}_pony' project_state = self.set_up_test_model(app_label, indexes=[ models.Index(Abs('weight'), name=index_name), ]) self.assertTableExists(table_name) self.assertIndexNameExists(table_name, index_name) operation = migrations.RemoveIndex('Pony', index_name) self.assertEqual( operation.describe(), 'Remove index test_rmfuncin_pony_abs_idx from Pony', ) self.assertEqual( operation.migration_name_fragment, 'remove_pony_test_rmfuncin_pony_abs_idx', ) new_state = project_state.clone() operation.state_forwards(app_label, new_state) self.assertEqual(len(new_state.models[app_label, 'pony'].options['indexes']), 0) # Remove index. with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) self.assertIndexNameNotExists(table_name, index_name) # Reversal. with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) self.assertIndexNameExists(table_name, index_name) # Deconstruction. 
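# deconstruct() returns a (name, args, kwargs) triple that the migrations writer uses to serialize the operation back into a migration file.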
definition = operation.deconstruct() self.assertEqual(definition[0], 'RemoveIndex') self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': 'Pony', 'name': index_name}) def test_alter_field_with_index(self): """ Test AlterField operation with an index to ensure indexes created via Meta.indexes don't get dropped with sqlite3 remake. """ project_state = self.set_up_test_model("test_alflin", index=True) operation = migrations.AlterField("Pony", "pink", models.IntegerField(null=True)) new_state = project_state.clone() operation.state_forwards("test_alflin", new_state) # Test the database alteration self.assertColumnNotNull("test_alflin_pony", "pink") with connection.schema_editor() as editor: operation.database_forwards("test_alflin", editor, project_state, new_state) # Index hasn't been dropped self.assertIndexExists("test_alflin_pony", ["pink"]) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alflin", editor, new_state, project_state) # Ensure the index is still there self.assertIndexExists("test_alflin_pony", ["pink"]) def test_alter_index_together(self): """ Tests the AlterIndexTogether operation. """ project_state = self.set_up_test_model("test_alinto") # Test the state alteration operation = migrations.AlterIndexTogether("Pony", [("pink", "weight")]) self.assertEqual(operation.describe(), "Alter index_together for Pony (1 constraint(s))") self.assertEqual( operation.migration_name_fragment, 'alter_pony_index_together', ) new_state = project_state.clone() operation.state_forwards("test_alinto", new_state) self.assertEqual(len(project_state.models["test_alinto", "pony"].options.get("index_together", set())), 0) self.assertEqual(len(new_state.models["test_alinto", "pony"].options.get("index_together", set())), 1) # Make sure there's no matching index self.assertIndexNotExists("test_alinto_pony", ["pink", "weight"]) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_alinto", editor, project_state, new_state) self.assertIndexExists("test_alinto_pony", ["pink", "weight"]) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alinto", editor, new_state, project_state) self.assertIndexNotExists("test_alinto_pony", ["pink", "weight"]) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterIndexTogether") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'name': "Pony", 'index_together': {("pink", "weight")}}) def test_alter_index_together_remove(self): operation = migrations.AlterIndexTogether("Pony", None) self.assertEqual(operation.describe(), "Alter index_together for Pony (0 constraint(s))") @skipUnlessDBFeature('allows_multiple_constraints_on_same_fields') def test_alter_index_together_remove_with_unique_together(self): app_label = 'test_alintoremove_wunto' table_name = '%s_pony' % app_label project_state = self.set_up_test_model(app_label, unique_together=True) self.assertUniqueConstraintExists(table_name, ['pink', 'weight']) # Add index together. new_state = project_state.clone() operation = migrations.AlterIndexTogether('Pony', [('pink', 'weight')]) operation.state_forwards(app_label, new_state) with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) self.assertIndexExists(table_name, ['pink', 'weight']) # Remove index together. 
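# unique_together still covers the same fields, so dropping index_together must remove only the index and leave the unique constraint in place.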
project_state = new_state new_state = project_state.clone() operation = migrations.AlterIndexTogether('Pony', set()) operation.state_forwards(app_label, new_state) with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) self.assertIndexNotExists(table_name, ['pink', 'weight']) self.assertUniqueConstraintExists(table_name, ['pink', 'weight']) @skipUnlessDBFeature('supports_table_check_constraints') def test_add_constraint(self): project_state = self.set_up_test_model("test_addconstraint") gt_check = models.Q(pink__gt=2) gt_constraint = models.CheckConstraint(check=gt_check, name="test_add_constraint_pony_pink_gt_2") gt_operation = migrations.AddConstraint("Pony", gt_constraint) self.assertEqual( gt_operation.describe(), "Create constraint test_add_constraint_pony_pink_gt_2 on model Pony" ) self.assertEqual( gt_operation.migration_name_fragment, 'pony_test_add_constraint_pony_pink_gt_2', ) # Test the state alteration new_state = project_state.clone() gt_operation.state_forwards("test_addconstraint", new_state) self.assertEqual(len(new_state.models["test_addconstraint", "pony"].options["constraints"]), 1) Pony = new_state.apps.get_model("test_addconstraint", "Pony") self.assertEqual(len(Pony._meta.constraints), 1) # Test the database alteration with connection.schema_editor() as editor: gt_operation.database_forwards("test_addconstraint", editor, project_state, new_state) with self.assertRaises(IntegrityError), transaction.atomic(): Pony.objects.create(pink=1, weight=1.0) # Add another one. lt_check = models.Q(pink__lt=100) lt_constraint = models.CheckConstraint(check=lt_check, name="test_add_constraint_pony_pink_lt_100") lt_operation = migrations.AddConstraint("Pony", lt_constraint) lt_operation.state_forwards("test_addconstraint", new_state) self.assertEqual(len(new_state.models["test_addconstraint", "pony"].options["constraints"]), 2) Pony = new_state.apps.get_model("test_addconstraint", "Pony") self.assertEqual(len(Pony._meta.constraints), 2) with connection.schema_editor() as editor: lt_operation.database_forwards("test_addconstraint", editor, project_state, new_state) with self.assertRaises(IntegrityError), transaction.atomic(): Pony.objects.create(pink=100, weight=1.0) # Test reversal with connection.schema_editor() as editor: gt_operation.database_backwards("test_addconstraint", editor, new_state, project_state) Pony.objects.create(pink=1, weight=1.0) # Test deconstruction definition = gt_operation.deconstruct() self.assertEqual(definition[0], "AddConstraint") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': "Pony", 'constraint': gt_constraint}) @skipUnlessDBFeature('supports_table_check_constraints') def test_add_constraint_percent_escaping(self): app_label = 'add_constraint_string_quoting' operations = [ migrations.CreateModel( 'Author', fields=[ ('id', models.AutoField(primary_key=True)), ('name', models.CharField(max_length=100)), ('surname', models.CharField(max_length=100, default='')), ('rebate', models.CharField(max_length=100)), ], ), ] from_state = self.apply_operations(app_label, ProjectState(), operations) # "%" generated in startswith lookup should be escaped in a way that is # still treated as a trailing wildcard.
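# e.g. name__startswith='Albert' compiles to "name LIKE 'Albert%'"; the generated "%" must survive the schema editor's escaping as a wildcard rather than being treated as a parameter placeholder.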
check = models.Q(name__startswith='Albert') constraint = models.CheckConstraint(check=check, name='name_constraint') operation = migrations.AddConstraint('Author', constraint) to_state = from_state.clone() operation.state_forwards(app_label, to_state) with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, from_state, to_state) Author = to_state.apps.get_model(app_label, 'Author') with self.assertRaises(IntegrityError), transaction.atomic(): Author.objects.create(name='Artur') # Literal "%" should be escaped in a way that is not considered a # wildcard. check = models.Q(rebate__endswith='%') constraint = models.CheckConstraint(check=check, name='rebate_constraint') operation = migrations.AddConstraint('Author', constraint) from_state = to_state to_state = from_state.clone() operation.state_forwards(app_label, to_state) with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, from_state, to_state) Author = to_state.apps.get_model(app_label, 'Author') with self.assertRaises(IntegrityError), transaction.atomic(): Author.objects.create(name='Albert', rebate='10$') author = Author.objects.create(name='Albert', rebate='10%') self.assertEqual(Author.objects.get(), author) # Right-hand-side baked "%" literals should not be used for parameter # interpolation. check = ~models.Q(surname__startswith=models.F('name')) constraint = models.CheckConstraint(check=check, name='name_constraint_rhs') operation = migrations.AddConstraint('Author', constraint) from_state = to_state to_state = from_state.clone() operation.state_forwards(app_label, to_state) with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, from_state, to_state) Author = to_state.apps.get_model(app_label, 'Author') with self.assertRaises(IntegrityError), transaction.atomic(): Author.objects.create(name='Albert', surname='Alberto') @skipUnlessDBFeature('supports_table_check_constraints') def test_add_or_constraint(self): app_label = 'test_addorconstraint' constraint_name = 'add_constraint_or' from_state = self.set_up_test_model(app_label) check = models.Q(pink__gt=2, weight__gt=2) | models.Q(weight__lt=0) constraint = models.CheckConstraint(check=check, name=constraint_name) operation = migrations.AddConstraint('Pony', constraint) to_state = from_state.clone() operation.state_forwards(app_label, to_state) with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, from_state, to_state) Pony = to_state.apps.get_model(app_label, 'Pony') with self.assertRaises(IntegrityError), transaction.atomic(): Pony.objects.create(pink=2, weight=3.0) with self.assertRaises(IntegrityError), transaction.atomic(): Pony.objects.create(pink=3, weight=1.0) Pony.objects.bulk_create([ Pony(pink=3, weight=-1.0), Pony(pink=1, weight=-1.0), Pony(pink=3, weight=3.0), ]) @skipUnlessDBFeature('supports_table_check_constraints') def test_add_constraint_combinable(self): app_label = 'test_addconstraint_combinable' operations = [ migrations.CreateModel( 'Book', fields=[ ('id', models.AutoField(primary_key=True)), ('read', models.PositiveIntegerField()), ('unread', models.PositiveIntegerField()), ], ), ] from_state = self.apply_operations(app_label, ProjectState(), operations) constraint = models.CheckConstraint( check=models.Q(read=(100 - models.F('unread'))), name='test_addconstraint_combinable_sum_100', ) operation = migrations.AddConstraint('Book', constraint) to_state =
from_state.clone() operation.state_forwards(app_label, to_state) with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, from_state, to_state) Book = to_state.apps.get_model(app_label, 'Book') with self.assertRaises(IntegrityError), transaction.atomic(): Book.objects.create(read=70, unread=10) Book.objects.create(read=70, unread=30) @skipUnlessDBFeature('supports_table_check_constraints') def test_remove_constraint(self): project_state = self.set_up_test_model("test_removeconstraint", constraints=[ models.CheckConstraint(check=models.Q(pink__gt=2), name="test_remove_constraint_pony_pink_gt_2"), models.CheckConstraint(check=models.Q(pink__lt=100), name="test_remove_constraint_pony_pink_lt_100"), ]) gt_operation = migrations.RemoveConstraint("Pony", "test_remove_constraint_pony_pink_gt_2") self.assertEqual( gt_operation.describe(), "Remove constraint test_remove_constraint_pony_pink_gt_2 from model Pony" ) self.assertEqual( gt_operation.migration_name_fragment, 'remove_pony_test_remove_constraint_pony_pink_gt_2', ) # Test state alteration new_state = project_state.clone() gt_operation.state_forwards("test_removeconstraint", new_state) self.assertEqual(len(new_state.models["test_removeconstraint", "pony"].options['constraints']), 1) Pony = new_state.apps.get_model("test_removeconstraint", "Pony") self.assertEqual(len(Pony._meta.constraints), 1) # Test database alteration with connection.schema_editor() as editor: gt_operation.database_forwards("test_removeconstraint", editor, project_state, new_state) Pony.objects.create(pink=1, weight=1.0).delete() with self.assertRaises(IntegrityError), transaction.atomic(): Pony.objects.create(pink=100, weight=1.0) # Remove the other one. lt_operation = migrations.RemoveConstraint("Pony", "test_remove_constraint_pony_pink_lt_100") lt_operation.state_forwards("test_removeconstraint", new_state) self.assertEqual(len(new_state.models["test_removeconstraint", "pony"].options['constraints']), 0) Pony = new_state.apps.get_model("test_removeconstraint", "Pony") self.assertEqual(len(Pony._meta.constraints), 0) with connection.schema_editor() as editor: lt_operation.database_forwards("test_removeconstraint", editor, project_state, new_state) Pony.objects.create(pink=100, weight=1.0).delete() # Test reversal with connection.schema_editor() as editor: gt_operation.database_backwards("test_removeconstraint", editor, new_state, project_state) with self.assertRaises(IntegrityError), transaction.atomic(): Pony.objects.create(pink=1, weight=1.0) # Test deconstruction definition = gt_operation.deconstruct() self.assertEqual(definition[0], "RemoveConstraint") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': "Pony", 'name': "test_remove_constraint_pony_pink_gt_2"}) def test_add_partial_unique_constraint(self): project_state = self.set_up_test_model('test_addpartialuniqueconstraint') partial_unique_constraint = models.UniqueConstraint( fields=['pink'], condition=models.Q(weight__gt=5), name='test_constraint_pony_pink_for_weight_gt_5_uniq', ) operation = migrations.AddConstraint('Pony', partial_unique_constraint) self.assertEqual( operation.describe(), 'Create constraint test_constraint_pony_pink_for_weight_gt_5_uniq ' 'on model Pony' ) # Test the state alteration new_state = project_state.clone() operation.state_forwards('test_addpartialuniqueconstraint', new_state) self.assertEqual(len(new_state.models['test_addpartialuniqueconstraint', 'pony'].options['constraints']), 1) Pony = 
new_state.apps.get_model('test_addpartialuniqueconstraint', 'Pony') self.assertEqual(len(Pony._meta.constraints), 1) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards('test_addpartialuniqueconstraint', editor, project_state, new_state) # Test constraint works Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=6.0) if connection.features.supports_partial_indexes: with self.assertRaises(IntegrityError), transaction.atomic(): Pony.objects.create(pink=1, weight=7.0) else: Pony.objects.create(pink=1, weight=7.0) # Test reversal with connection.schema_editor() as editor: operation.database_backwards('test_addpartialuniqueconstraint', editor, new_state, project_state) # Test constraint doesn't work Pony.objects.create(pink=1, weight=7.0) # Test deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], 'AddConstraint') self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': 'Pony', 'constraint': partial_unique_constraint}) def test_remove_partial_unique_constraint(self): project_state = self.set_up_test_model('test_removepartialuniqueconstraint', constraints=[ models.UniqueConstraint( fields=['pink'], condition=models.Q(weight__gt=5), name='test_constraint_pony_pink_for_weight_gt_5_uniq', ), ]) gt_operation = migrations.RemoveConstraint('Pony', 'test_constraint_pony_pink_for_weight_gt_5_uniq') self.assertEqual( gt_operation.describe(), 'Remove constraint test_constraint_pony_pink_for_weight_gt_5_uniq from model Pony' ) # Test state alteration new_state = project_state.clone() gt_operation.state_forwards('test_removepartialuniqueconstraint', new_state) self.assertEqual(len(new_state.models['test_removepartialuniqueconstraint', 'pony'].options['constraints']), 0) Pony = new_state.apps.get_model('test_removepartialuniqueconstraint', 'Pony') self.assertEqual(len(Pony._meta.constraints), 0) # Test database alteration with connection.schema_editor() as editor: gt_operation.database_forwards('test_removepartialuniqueconstraint', editor, project_state, new_state) # Test constraint doesn't work Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=6.0) Pony.objects.create(pink=1, weight=7.0).delete() # Test reversal with connection.schema_editor() as editor: gt_operation.database_backwards('test_removepartialuniqueconstraint', editor, new_state, project_state) # Test constraint works if connection.features.supports_partial_indexes: with self.assertRaises(IntegrityError), transaction.atomic(): Pony.objects.create(pink=1, weight=7.0) else: Pony.objects.create(pink=1, weight=7.0) # Test deconstruction definition = gt_operation.deconstruct() self.assertEqual(definition[0], 'RemoveConstraint') self.assertEqual(definition[1], []) self.assertEqual(definition[2], { 'model_name': 'Pony', 'name': 'test_constraint_pony_pink_for_weight_gt_5_uniq', }) def test_add_deferred_unique_constraint(self): app_label = 'test_adddeferred_uc' project_state = self.set_up_test_model(app_label) deferred_unique_constraint = models.UniqueConstraint( fields=['pink'], name='deferred_pink_constraint_add', deferrable=models.Deferrable.DEFERRED, ) operation = migrations.AddConstraint('Pony', deferred_unique_constraint) self.assertEqual( operation.describe(), 'Create constraint deferred_pink_constraint_add on model Pony', ) # Add constraint. 
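# A constraint declared with Deferrable.DEFERRED is enforced only at transaction commit, so a duplicate may exist transiently inside a transaction; "SET CONSTRAINTS ... IMMEDIATE" switches it back to per-statement enforcement.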
new_state = project_state.clone() operation.state_forwards(app_label, new_state) self.assertEqual(len(new_state.models[app_label, 'pony'].options['constraints']), 1) Pony = new_state.apps.get_model(app_label, 'Pony') self.assertEqual(len(Pony._meta.constraints), 1) with connection.schema_editor() as editor, CaptureQueriesContext(connection) as ctx: operation.database_forwards(app_label, editor, project_state, new_state) Pony.objects.create(pink=1, weight=4.0) if connection.features.supports_deferrable_unique_constraints: # Unique constraint is deferred. with transaction.atomic(): obj = Pony.objects.create(pink=1, weight=4.0) obj.pink = 2 obj.save() # Constraint behavior can be changed with SET CONSTRAINTS. with self.assertRaises(IntegrityError): with transaction.atomic(), connection.cursor() as cursor: quoted_name = connection.ops.quote_name(deferred_unique_constraint.name) cursor.execute('SET CONSTRAINTS %s IMMEDIATE' % quoted_name) obj = Pony.objects.create(pink=1, weight=4.0) obj.pink = 3 obj.save() else: self.assertEqual(len(ctx), 0) Pony.objects.create(pink=1, weight=4.0) # Reversal. with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) # Constraint doesn't work. Pony.objects.create(pink=1, weight=4.0) # Deconstruction. definition = operation.deconstruct() self.assertEqual(definition[0], 'AddConstraint') self.assertEqual(definition[1], []) self.assertEqual( definition[2], {'model_name': 'Pony', 'constraint': deferred_unique_constraint}, ) def test_remove_deferred_unique_constraint(self): app_label = 'test_removedeferred_uc' deferred_unique_constraint = models.UniqueConstraint( fields=['pink'], name='deferred_pink_constraint_rm', deferrable=models.Deferrable.DEFERRED, ) project_state = self.set_up_test_model(app_label, constraints=[deferred_unique_constraint]) operation = migrations.RemoveConstraint('Pony', deferred_unique_constraint.name) self.assertEqual( operation.describe(), 'Remove constraint deferred_pink_constraint_rm from model Pony', ) # Remove constraint. new_state = project_state.clone() operation.state_forwards(app_label, new_state) self.assertEqual(len(new_state.models[app_label, 'pony'].options['constraints']), 0) Pony = new_state.apps.get_model(app_label, 'Pony') self.assertEqual(len(Pony._meta.constraints), 0) with connection.schema_editor() as editor, CaptureQueriesContext(connection) as ctx: operation.database_forwards(app_label, editor, project_state, new_state) # Constraint doesn't work. Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=4.0).delete() if not connection.features.supports_deferrable_unique_constraints: self.assertEqual(len(ctx), 0) # Reversal. with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) if connection.features.supports_deferrable_unique_constraints: # Unique constraint is deferred. with transaction.atomic(): obj = Pony.objects.create(pink=1, weight=4.0) obj.pink = 2 obj.save() # Constraint behavior can be changed with SET CONSTRAINTS. with self.assertRaises(IntegrityError): with transaction.atomic(), connection.cursor() as cursor: quoted_name = connection.ops.quote_name(deferred_unique_constraint.name) cursor.execute('SET CONSTRAINTS %s IMMEDIATE' % quoted_name) obj = Pony.objects.create(pink=1, weight=4.0) obj.pink = 3 obj.save() else: Pony.objects.create(pink=1, weight=4.0) # Deconstruction. 
definition = operation.deconstruct() self.assertEqual(definition[0], 'RemoveConstraint') self.assertEqual(definition[1], []) self.assertEqual(definition[2], { 'model_name': 'Pony', 'name': 'deferred_pink_constraint_rm', }) def test_add_covering_unique_constraint(self): app_label = 'test_addcovering_uc' project_state = self.set_up_test_model(app_label) covering_unique_constraint = models.UniqueConstraint( fields=['pink'], name='covering_pink_constraint_add', include=['weight'], ) operation = migrations.AddConstraint('Pony', covering_unique_constraint) self.assertEqual( operation.describe(), 'Create constraint covering_pink_constraint_add on model Pony', ) # Add constraint. new_state = project_state.clone() operation.state_forwards(app_label, new_state) self.assertEqual(len(new_state.models[app_label, 'pony'].options['constraints']), 1) Pony = new_state.apps.get_model(app_label, 'Pony') self.assertEqual(len(Pony._meta.constraints), 1) with connection.schema_editor() as editor, CaptureQueriesContext(connection) as ctx: operation.database_forwards(app_label, editor, project_state, new_state) Pony.objects.create(pink=1, weight=4.0) if connection.features.supports_covering_indexes: with self.assertRaises(IntegrityError): Pony.objects.create(pink=1, weight=4.0) else: self.assertEqual(len(ctx), 0) Pony.objects.create(pink=1, weight=4.0) # Reversal. with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) # Constraint doesn't work. Pony.objects.create(pink=1, weight=4.0) # Deconstruction. definition = operation.deconstruct() self.assertEqual(definition[0], 'AddConstraint') self.assertEqual(definition[1], []) self.assertEqual( definition[2], {'model_name': 'Pony', 'constraint': covering_unique_constraint}, ) def test_remove_covering_unique_constraint(self): app_label = 'test_removecovering_uc' covering_unique_constraint = models.UniqueConstraint( fields=['pink'], name='covering_pink_constraint_rm', include=['weight'], ) project_state = self.set_up_test_model(app_label, constraints=[covering_unique_constraint]) operation = migrations.RemoveConstraint('Pony', covering_unique_constraint.name) self.assertEqual( operation.describe(), 'Remove constraint covering_pink_constraint_rm from model Pony', ) # Remove constraint. new_state = project_state.clone() operation.state_forwards(app_label, new_state) self.assertEqual(len(new_state.models[app_label, 'pony'].options['constraints']), 0) Pony = new_state.apps.get_model(app_label, 'Pony') self.assertEqual(len(Pony._meta.constraints), 0) with connection.schema_editor() as editor, CaptureQueriesContext(connection) as ctx: operation.database_forwards(app_label, editor, project_state, new_state) # Constraint doesn't work. Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=4.0).delete() if not connection.features.supports_covering_indexes: self.assertEqual(len(ctx), 0) # Reversal. with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) if connection.features.supports_covering_indexes: with self.assertRaises(IntegrityError): Pony.objects.create(pink=1, weight=4.0) else: Pony.objects.create(pink=1, weight=4.0) # Deconstruction. 
definition = operation.deconstruct() self.assertEqual(definition[0], 'RemoveConstraint') self.assertEqual(definition[1], []) self.assertEqual(definition[2], { 'model_name': 'Pony', 'name': 'covering_pink_constraint_rm', }) def test_add_func_unique_constraint(self): app_label = 'test_adfuncuc' constraint_name = f'{app_label}_pony_abs_uq' table_name = f'{app_label}_pony' project_state = self.set_up_test_model(app_label) constraint = models.UniqueConstraint(Abs('weight'), name=constraint_name) operation = migrations.AddConstraint('Pony', constraint) self.assertEqual( operation.describe(), 'Create constraint test_adfuncuc_pony_abs_uq on model Pony', ) self.assertEqual( operation.migration_name_fragment, 'pony_test_adfuncuc_pony_abs_uq', ) new_state = project_state.clone() operation.state_forwards(app_label, new_state) self.assertEqual(len(new_state.models[app_label, 'pony'].options['constraints']), 1) self.assertIndexNameNotExists(table_name, constraint_name) # Add constraint. with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) Pony = new_state.apps.get_model(app_label, 'Pony') Pony.objects.create(weight=4.0) if connection.features.supports_expression_indexes: self.assertIndexNameExists(table_name, constraint_name) with self.assertRaises(IntegrityError): Pony.objects.create(weight=-4.0) else: self.assertIndexNameNotExists(table_name, constraint_name) Pony.objects.create(weight=-4.0) # Reversal. with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) self.assertIndexNameNotExists(table_name, constraint_name) # Constraint doesn't work. Pony.objects.create(weight=-4.0) # Deconstruction. definition = operation.deconstruct() self.assertEqual(definition[0], 'AddConstraint') self.assertEqual(definition[1], []) self.assertEqual( definition[2], {'model_name': 'Pony', 'constraint': constraint}, ) def test_remove_func_unique_constraint(self): app_label = 'test_rmfuncuc' constraint_name = f'{app_label}_pony_abs_uq' table_name = f'{app_label}_pony' project_state = self.set_up_test_model(app_label, constraints=[ models.UniqueConstraint(Abs('weight'), name=constraint_name), ]) self.assertTableExists(table_name) if connection.features.supports_expression_indexes: self.assertIndexNameExists(table_name, constraint_name) operation = migrations.RemoveConstraint('Pony', constraint_name) self.assertEqual( operation.describe(), 'Remove constraint test_rmfuncuc_pony_abs_uq from model Pony', ) self.assertEqual( operation.migration_name_fragment, 'remove_pony_test_rmfuncuc_pony_abs_uq', ) new_state = project_state.clone() operation.state_forwards(app_label, new_state) self.assertEqual(len(new_state.models[app_label, 'pony'].options['constraints']), 0) Pony = new_state.apps.get_model(app_label, 'Pony') self.assertEqual(len(Pony._meta.constraints), 0) # Remove constraint. with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) self.assertIndexNameNotExists(table_name, constraint_name) # Constraint doesn't work. Pony.objects.create(pink=1, weight=4.0) Pony.objects.create(pink=1, weight=-4.0).delete() # Reversal. 
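# Reversal re-adds the functional unique constraint; where expression indexes are supported it comes back as a unique expression index, elsewhere it is skipped and duplicates remain allowed.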
with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) if connection.features.supports_expression_indexes: self.assertIndexNameExists(table_name, constraint_name) with self.assertRaises(IntegrityError): Pony.objects.create(weight=-4.0) else: self.assertIndexNameNotExists(table_name, constraint_name) Pony.objects.create(weight=-4.0) # Deconstruction. definition = operation.deconstruct() self.assertEqual(definition[0], 'RemoveConstraint') self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'model_name': 'Pony', 'name': constraint_name}) def test_alter_model_options(self): """ Tests the AlterModelOptions operation. """ project_state = self.set_up_test_model("test_almoop") # Test the state alteration (no DB alteration to test) operation = migrations.AlterModelOptions("Pony", {"permissions": [("can_groom", "Can groom")]}) self.assertEqual(operation.describe(), "Change Meta options on Pony") self.assertEqual(operation.migration_name_fragment, 'alter_pony_options') new_state = project_state.clone() operation.state_forwards("test_almoop", new_state) self.assertEqual(len(project_state.models["test_almoop", "pony"].options.get("permissions", [])), 0) self.assertEqual(len(new_state.models["test_almoop", "pony"].options.get("permissions", [])), 1) self.assertEqual(new_state.models["test_almoop", "pony"].options["permissions"][0][0], "can_groom") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterModelOptions") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'name': "Pony", 'options': {"permissions": [("can_groom", "Can groom")]}}) def test_alter_model_options_emptying(self): """ The AlterModelOptions operation removes keys from the dict (#23121) """ project_state = self.set_up_test_model("test_almoop", options=True) # Test the state alteration (no DB alteration to test) operation = migrations.AlterModelOptions("Pony", {}) self.assertEqual(operation.describe(), "Change Meta options on Pony") new_state = project_state.clone() operation.state_forwards("test_almoop", new_state) self.assertEqual(len(project_state.models["test_almoop", "pony"].options.get("permissions", [])), 1) self.assertEqual(len(new_state.models["test_almoop", "pony"].options.get("permissions", [])), 0) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterModelOptions") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'name': "Pony", 'options': {}}) def test_alter_order_with_respect_to(self): """ Tests the AlterOrderWithRespectTo operation. 
""" project_state = self.set_up_test_model("test_alorwrtto", related_model=True) # Test the state alteration operation = migrations.AlterOrderWithRespectTo("Rider", "pony") self.assertEqual(operation.describe(), "Set order_with_respect_to on Rider to pony") self.assertEqual( operation.migration_name_fragment, 'alter_rider_order_with_respect_to', ) new_state = project_state.clone() operation.state_forwards("test_alorwrtto", new_state) self.assertIsNone( project_state.models["test_alorwrtto", "rider"].options.get("order_with_respect_to", None) ) self.assertEqual( new_state.models["test_alorwrtto", "rider"].options.get("order_with_respect_to", None), "pony" ) # Make sure there's no matching index self.assertColumnNotExists("test_alorwrtto_rider", "_order") # Create some rows before alteration rendered_state = project_state.apps pony = rendered_state.get_model("test_alorwrtto", "Pony").objects.create(weight=50) rider1 = rendered_state.get_model("test_alorwrtto", "Rider").objects.create(pony=pony) rider1.friend = rider1 rider1.save() rider2 = rendered_state.get_model("test_alorwrtto", "Rider").objects.create(pony=pony) rider2.friend = rider2 rider2.save() # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_alorwrtto", editor, project_state, new_state) self.assertColumnExists("test_alorwrtto_rider", "_order") # Check for correct value in rows updated_riders = new_state.apps.get_model("test_alorwrtto", "Rider").objects.all() self.assertEqual(updated_riders[0]._order, 0) self.assertEqual(updated_riders[1]._order, 0) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_alorwrtto", editor, new_state, project_state) self.assertColumnNotExists("test_alorwrtto_rider", "_order") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "AlterOrderWithRespectTo") self.assertEqual(definition[1], []) self.assertEqual(definition[2], {'name': "Rider", 'order_with_respect_to': "pony"}) def test_alter_model_managers(self): """ The managers on a model are set. 
""" project_state = self.set_up_test_model("test_almoma") # Test the state alteration operation = migrations.AlterModelManagers( "Pony", managers=[ ("food_qs", FoodQuerySet.as_manager()), ("food_mgr", FoodManager("a", "b")), ("food_mgr_kwargs", FoodManager("x", "y", 3, 4)), ] ) self.assertEqual(operation.describe(), "Change managers on Pony") self.assertEqual(operation.migration_name_fragment, 'alter_pony_managers') managers = project_state.models["test_almoma", "pony"].managers self.assertEqual(managers, []) new_state = project_state.clone() operation.state_forwards("test_almoma", new_state) self.assertIn(("test_almoma", "pony"), new_state.models) managers = new_state.models["test_almoma", "pony"].managers self.assertEqual(managers[0][0], "food_qs") self.assertIsInstance(managers[0][1], models.Manager) self.assertEqual(managers[1][0], "food_mgr") self.assertIsInstance(managers[1][1], FoodManager) self.assertEqual(managers[1][1].args, ("a", "b", 1, 2)) self.assertEqual(managers[2][0], "food_mgr_kwargs") self.assertIsInstance(managers[2][1], FoodManager) self.assertEqual(managers[2][1].args, ("x", "y", 3, 4)) rendered_state = new_state.apps model = rendered_state.get_model('test_almoma', 'pony') self.assertIsInstance(model.food_qs, models.Manager) self.assertIsInstance(model.food_mgr, FoodManager) self.assertIsInstance(model.food_mgr_kwargs, FoodManager) def test_alter_model_managers_emptying(self): """ The managers on a model are set. """ project_state = self.set_up_test_model("test_almomae", manager_model=True) # Test the state alteration operation = migrations.AlterModelManagers("Food", managers=[]) self.assertEqual(operation.describe(), "Change managers on Food") self.assertIn(("test_almomae", "food"), project_state.models) managers = project_state.models["test_almomae", "food"].managers self.assertEqual(managers[0][0], "food_qs") self.assertIsInstance(managers[0][1], models.Manager) self.assertEqual(managers[1][0], "food_mgr") self.assertIsInstance(managers[1][1], FoodManager) self.assertEqual(managers[1][1].args, ("a", "b", 1, 2)) self.assertEqual(managers[2][0], "food_mgr_kwargs") self.assertIsInstance(managers[2][1], FoodManager) self.assertEqual(managers[2][1].args, ("x", "y", 3, 4)) new_state = project_state.clone() operation.state_forwards("test_almomae", new_state) managers = new_state.models["test_almomae", "food"].managers self.assertEqual(managers, []) def test_alter_fk(self): """ Creating and then altering an FK works correctly and deals with the pending SQL (#23091) """ project_state = self.set_up_test_model("test_alfk") # Test adding and then altering the FK in one go create_operation = migrations.CreateModel( name="Rider", fields=[ ("id", models.AutoField(primary_key=True)), ("pony", models.ForeignKey("Pony", models.CASCADE)), ], ) create_state = project_state.clone() create_operation.state_forwards("test_alfk", create_state) alter_operation = migrations.AlterField( model_name='Rider', name='pony', field=models.ForeignKey("Pony", models.CASCADE, editable=False), ) alter_state = create_state.clone() alter_operation.state_forwards("test_alfk", alter_state) with connection.schema_editor() as editor: create_operation.database_forwards("test_alfk", editor, project_state, create_state) alter_operation.database_forwards("test_alfk", editor, create_state, alter_state) def test_alter_fk_non_fk(self): """ Altering an FK to a non-FK works (#23244) """ # Test the state alteration operation = migrations.AlterField( model_name="Rider", name="pony", field=models.FloatField(), ) 
project_state, new_state = self.make_test_state("test_afknfk", operation, related_model=True) # Test the database alteration self.assertColumnExists("test_afknfk_rider", "pony_id") self.assertColumnNotExists("test_afknfk_rider", "pony") with connection.schema_editor() as editor: operation.database_forwards("test_afknfk", editor, project_state, new_state) self.assertColumnExists("test_afknfk_rider", "pony") self.assertColumnNotExists("test_afknfk_rider", "pony_id") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_afknfk", editor, new_state, project_state) self.assertColumnExists("test_afknfk_rider", "pony_id") self.assertColumnNotExists("test_afknfk_rider", "pony") def test_run_sql(self): """ Tests the RunSQL operation. """ project_state = self.set_up_test_model("test_runsql") # Create the operation operation = migrations.RunSQL( # Use a multi-line string with a comment to test splitting on SQLite and MySQL respectively "CREATE TABLE i_love_ponies (id int, special_thing varchar(15));\n" "INSERT INTO i_love_ponies (id, special_thing) VALUES (1, 'i love ponies'); -- this is magic!\n" "INSERT INTO i_love_ponies (id, special_thing) VALUES (2, 'i love django');\n" "UPDATE i_love_ponies SET special_thing = 'Ponies' WHERE special_thing LIKE '%%ponies';" "UPDATE i_love_ponies SET special_thing = 'Django' WHERE special_thing LIKE '%django';", # Run delete queries to test for parameter substitution failure # reported in #23426 "DELETE FROM i_love_ponies WHERE special_thing LIKE '%Django%';" "DELETE FROM i_love_ponies WHERE special_thing LIKE '%%Ponies%%';" "DROP TABLE i_love_ponies", state_operations=[migrations.CreateModel("SomethingElse", [("id", models.AutoField(primary_key=True))])], ) self.assertEqual(operation.describe(), "Raw SQL operation") # Test the state alteration new_state = project_state.clone() operation.state_forwards("test_runsql", new_state) self.assertEqual(len(new_state.models["test_runsql", "somethingelse"].fields), 1) # Make sure there's no table self.assertTableNotExists("i_love_ponies") # Test SQL collection with connection.schema_editor(collect_sql=True) as editor: operation.database_forwards("test_runsql", editor, project_state, new_state) self.assertIn("LIKE '%%ponies';", "\n".join(editor.collected_sql)) operation.database_backwards("test_runsql", editor, project_state, new_state) self.assertIn("LIKE '%%Ponies%%';", "\n".join(editor.collected_sql)) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_runsql", editor, project_state, new_state) self.assertTableExists("i_love_ponies") # Make sure all the SQL was processed with connection.cursor() as cursor: cursor.execute("SELECT COUNT(*) FROM i_love_ponies") self.assertEqual(cursor.fetchall()[0][0], 2) cursor.execute("SELECT COUNT(*) FROM i_love_ponies WHERE special_thing = 'Django'") self.assertEqual(cursor.fetchall()[0][0], 1) cursor.execute("SELECT COUNT(*) FROM i_love_ponies WHERE special_thing = 'Ponies'") self.assertEqual(cursor.fetchall()[0][0], 1) # And test reversal self.assertTrue(operation.reversible) with connection.schema_editor() as editor: operation.database_backwards("test_runsql", editor, new_state, project_state) self.assertTableNotExists("i_love_ponies") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RunSQL") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["reverse_sql", "sql", "state_operations"]) # And elidable reduction 
self.assertIs(False, operation.reduce(operation, [])) elidable_operation = migrations.RunSQL('SELECT 1 FROM void;', elidable=True) self.assertEqual(elidable_operation.reduce(operation, []), [operation]) def test_run_sql_params(self): """ #23426 - RunSQL should accept parameters. """ project_state = self.set_up_test_model("test_runsql") # Create the operation operation = migrations.RunSQL( ["CREATE TABLE i_love_ponies (id int, special_thing varchar(15));"], ["DROP TABLE i_love_ponies"], ) param_operation = migrations.RunSQL( # forwards ( "INSERT INTO i_love_ponies (id, special_thing) VALUES (1, 'Django');", ["INSERT INTO i_love_ponies (id, special_thing) VALUES (2, %s);", ['Ponies']], ("INSERT INTO i_love_ponies (id, special_thing) VALUES (%s, %s);", (3, 'Python',)), ), # backwards [ "DELETE FROM i_love_ponies WHERE special_thing = 'Django';", ["DELETE FROM i_love_ponies WHERE special_thing = 'Ponies';", None], ("DELETE FROM i_love_ponies WHERE id = %s OR special_thing = %s;", [3, 'Python']), ] ) # Make sure there's no table self.assertTableNotExists("i_love_ponies") new_state = project_state.clone() # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_runsql", editor, project_state, new_state) # Test parameter passing with connection.schema_editor() as editor: param_operation.database_forwards("test_runsql", editor, project_state, new_state) # Make sure all the SQL was processed with connection.cursor() as cursor: cursor.execute("SELECT COUNT(*) FROM i_love_ponies") self.assertEqual(cursor.fetchall()[0][0], 3) with connection.schema_editor() as editor: param_operation.database_backwards("test_runsql", editor, new_state, project_state) with connection.cursor() as cursor: cursor.execute("SELECT COUNT(*) FROM i_love_ponies") self.assertEqual(cursor.fetchall()[0][0], 0) # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_runsql", editor, new_state, project_state) self.assertTableNotExists("i_love_ponies") def test_run_sql_params_invalid(self): """ #23426 - RunSQL should fail when a list of statements with an incorrect number of tuples is given. """ project_state = self.set_up_test_model("test_runsql") new_state = project_state.clone() operation = migrations.RunSQL( # forwards [ ["INSERT INTO foo (bar) VALUES ('buz');"] ], # backwards ( ("DELETE FROM foo WHERE bar = 'buz';", 'invalid', 'parameter count'), ), ) with connection.schema_editor() as editor: with self.assertRaisesMessage(ValueError, "Expected a 2-tuple but got 1"): operation.database_forwards("test_runsql", editor, project_state, new_state) with connection.schema_editor() as editor: with self.assertRaisesMessage(ValueError, "Expected a 2-tuple but got 3"): operation.database_backwards("test_runsql", editor, new_state, project_state) def test_run_sql_noop(self): """ #24098 - Tests no-op RunSQL operations. 
""" operation = migrations.RunSQL(migrations.RunSQL.noop, migrations.RunSQL.noop) with connection.schema_editor() as editor: operation.database_forwards("test_runsql", editor, None, None) operation.database_backwards("test_runsql", editor, None, None) def test_run_sql_add_missing_semicolon_on_collect_sql(self): project_state = self.set_up_test_model('test_runsql') new_state = project_state.clone() tests = [ 'INSERT INTO test_runsql_pony (pink, weight) VALUES (1, 1);\n', 'INSERT INTO test_runsql_pony (pink, weight) VALUES (1, 1)\n', ] for sql in tests: with self.subTest(sql=sql): operation = migrations.RunSQL(sql, migrations.RunPython.noop) with connection.schema_editor(collect_sql=True) as editor: operation.database_forwards('test_runsql', editor, project_state, new_state) collected_sql = '\n'.join(editor.collected_sql) self.assertEqual(collected_sql.count(';'), 1) def test_run_python(self): """ Tests the RunPython operation """ project_state = self.set_up_test_model("test_runpython", mti_model=True) # Create the operation def inner_method(models, schema_editor): Pony = models.get_model("test_runpython", "Pony") Pony.objects.create(pink=1, weight=3.55) Pony.objects.create(weight=5) def inner_method_reverse(models, schema_editor): Pony = models.get_model("test_runpython", "Pony") Pony.objects.filter(pink=1, weight=3.55).delete() Pony.objects.filter(weight=5).delete() operation = migrations.RunPython(inner_method, reverse_code=inner_method_reverse) self.assertEqual(operation.describe(), "Raw Python operation") # Test the state alteration does nothing new_state = project_state.clone() operation.state_forwards("test_runpython", new_state) self.assertEqual(new_state, project_state) # Test the database alteration self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 0) with connection.schema_editor() as editor: operation.database_forwards("test_runpython", editor, project_state, new_state) self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 2) # Now test reversal self.assertTrue(operation.reversible) with connection.schema_editor() as editor: operation.database_backwards("test_runpython", editor, project_state, new_state) self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 0) # Now test we can't use a string with self.assertRaisesMessage(ValueError, 'RunPython must be supplied with a callable'): migrations.RunPython("print 'ahahaha'") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RunPython") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["code", "reverse_code"]) # Also test reversal fails, with an operation identical to above but without reverse_code set no_reverse_operation = migrations.RunPython(inner_method) self.assertFalse(no_reverse_operation.reversible) with connection.schema_editor() as editor: no_reverse_operation.database_forwards("test_runpython", editor, project_state, new_state) with self.assertRaises(NotImplementedError): no_reverse_operation.database_backwards("test_runpython", editor, new_state, project_state) self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 2) def create_ponies(models, schema_editor): Pony = models.get_model("test_runpython", "Pony") pony1 = Pony.objects.create(pink=1, weight=3.55) self.assertIsNot(pony1.pk, None) pony2 = Pony.objects.create(weight=5) self.assertIsNot(pony2.pk, None) self.assertNotEqual(pony1.pk, pony2.pk) operation = 
migrations.RunPython(create_ponies) with connection.schema_editor() as editor: operation.database_forwards("test_runpython", editor, project_state, new_state) self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 4) # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "RunPython") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["code"]) def create_shetlandponies(models, schema_editor): ShetlandPony = models.get_model("test_runpython", "ShetlandPony") pony1 = ShetlandPony.objects.create(weight=4.0) self.assertIsNot(pony1.pk, None) pony2 = ShetlandPony.objects.create(weight=5.0) self.assertIsNot(pony2.pk, None) self.assertNotEqual(pony1.pk, pony2.pk) operation = migrations.RunPython(create_shetlandponies) with connection.schema_editor() as editor: operation.database_forwards("test_runpython", editor, project_state, new_state) self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 6) self.assertEqual(project_state.apps.get_model("test_runpython", "ShetlandPony").objects.count(), 2) # And elidable reduction self.assertIs(False, operation.reduce(operation, [])) elidable_operation = migrations.RunPython(inner_method, elidable=True) self.assertEqual(elidable_operation.reduce(operation, []), [operation]) def test_run_python_atomic(self): """ Tests the RunPython operation correctly handles the "atomic" keyword """ project_state = self.set_up_test_model("test_runpythonatomic", mti_model=True) def inner_method(models, schema_editor): Pony = models.get_model("test_runpythonatomic", "Pony") Pony.objects.create(pink=1, weight=3.55) raise ValueError("Adrian hates ponies.") # Verify atomicity when applying. atomic_migration = Migration("test", "test_runpythonatomic") atomic_migration.operations = [migrations.RunPython(inner_method, reverse_code=inner_method)] non_atomic_migration = Migration("test", "test_runpythonatomic") non_atomic_migration.operations = [migrations.RunPython(inner_method, reverse_code=inner_method, atomic=False)] # If we're a fully-transactional database, both versions should rollback if connection.features.can_rollback_ddl: self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: atomic_migration.apply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: non_atomic_migration.apply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) # Otherwise, the non-atomic operation should leave a row there else: self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: atomic_migration.apply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: non_atomic_migration.apply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 1) # Reset object count to zero and verify atomicity when unapplying. 
project_state.apps.get_model("test_runpythonatomic", "Pony").objects.all().delete() # On a fully-transactional database, both versions rollback. if connection.features.can_rollback_ddl: self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: atomic_migration.unapply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: non_atomic_migration.unapply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) # Otherwise, the non-atomic operation leaves a row there. else: self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: atomic_migration.unapply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0) with self.assertRaises(ValueError): with connection.schema_editor() as editor: non_atomic_migration.unapply(project_state, editor) self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 1) # Verify deconstruction. definition = non_atomic_migration.operations[0].deconstruct() self.assertEqual(definition[0], "RunPython") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["atomic", "code", "reverse_code"]) def test_run_python_related_assignment(self): """ #24282 - Model changes to a FK reverse side update the model on the FK side as well. """ def inner_method(models, schema_editor): Author = models.get_model("test_authors", "Author") Book = models.get_model("test_books", "Book") author = Author.objects.create(name="Hemingway") Book.objects.create(title="Old Man and The Sea", author=author) create_author = migrations.CreateModel( "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=100)), ], options={}, ) create_book = migrations.CreateModel( "Book", [ ("id", models.AutoField(primary_key=True)), ("title", models.CharField(max_length=100)), ("author", models.ForeignKey("test_authors.Author", models.CASCADE)) ], options={}, ) add_hometown = migrations.AddField( "Author", "hometown", models.CharField(max_length=100), ) create_old_man = migrations.RunPython(inner_method, inner_method) project_state = ProjectState() new_state = project_state.clone() with connection.schema_editor() as editor: create_author.state_forwards("test_authors", new_state) create_author.database_forwards("test_authors", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: create_book.state_forwards("test_books", new_state) create_book.database_forwards("test_books", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: add_hometown.state_forwards("test_authors", new_state) add_hometown.database_forwards("test_authors", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: create_old_man.state_forwards("test_books", new_state) create_old_man.database_forwards("test_books", editor, project_state, new_state) def test_model_with_bigautofield(self): """ A model with BigAutoField can be 
created. """ def create_data(models, schema_editor): Author = models.get_model("test_author", "Author") Book = models.get_model("test_book", "Book") author1 = Author.objects.create(name="Hemingway") Book.objects.create(title="Old Man and The Sea", author=author1) Book.objects.create(id=2 ** 33, title="A farewell to arms", author=author1) author2 = Author.objects.create(id=2 ** 33, name="Remarque") Book.objects.create(title="All quiet on the western front", author=author2) Book.objects.create(title="Arc de Triomphe", author=author2) create_author = migrations.CreateModel( "Author", [ ("id", models.BigAutoField(primary_key=True)), ("name", models.CharField(max_length=100)), ], options={}, ) create_book = migrations.CreateModel( "Book", [ ("id", models.BigAutoField(primary_key=True)), ("title", models.CharField(max_length=100)), ("author", models.ForeignKey(to="test_author.Author", on_delete=models.CASCADE)) ], options={}, ) fill_data = migrations.RunPython(create_data) project_state = ProjectState() new_state = project_state.clone() with connection.schema_editor() as editor: create_author.state_forwards("test_author", new_state) create_author.database_forwards("test_author", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: create_book.state_forwards("test_book", new_state) create_book.database_forwards("test_book", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: fill_data.state_forwards("fill_data", new_state) fill_data.database_forwards("fill_data", editor, project_state, new_state) def _test_autofield_foreignfield_growth(self, source_field, target_field, target_value): """ A field may be migrated in the following ways: - AutoField to BigAutoField - SmallAutoField to AutoField - SmallAutoField to BigAutoField """ def create_initial_data(models, schema_editor): Article = models.get_model("test_article", "Article") Blog = models.get_model("test_blog", "Blog") blog = Blog.objects.create(name="web development done right") Article.objects.create(name="Frameworks", blog=blog) Article.objects.create(name="Programming Languages", blog=blog) def create_big_data(models, schema_editor): Article = models.get_model("test_article", "Article") Blog = models.get_model("test_blog", "Blog") blog2 = Blog.objects.create(name="Frameworks", id=target_value) Article.objects.create(name="Django", blog=blog2) Article.objects.create(id=target_value, name="Django2", blog=blog2) create_blog = migrations.CreateModel( "Blog", [ ("id", source_field(primary_key=True)), ("name", models.CharField(max_length=100)), ], options={}, ) create_article = migrations.CreateModel( "Article", [ ("id", source_field(primary_key=True)), ("blog", models.ForeignKey(to="test_blog.Blog", on_delete=models.CASCADE)), ("name", models.CharField(max_length=100)), ("data", models.TextField(default="")), ], options={}, ) fill_initial_data = migrations.RunPython(create_initial_data, create_initial_data) fill_big_data = migrations.RunPython(create_big_data, create_big_data) grow_article_id = migrations.AlterField('Article', 'id', target_field(primary_key=True)) grow_blog_id = migrations.AlterField('Blog', 'id', target_field(primary_key=True)) project_state = ProjectState() new_state = project_state.clone() with connection.schema_editor() as editor: create_blog.state_forwards("test_blog", new_state) create_blog.database_forwards("test_blog", editor, project_state, new_state) 
project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: create_article.state_forwards("test_article", new_state) create_article.database_forwards("test_article", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: fill_initial_data.state_forwards("fill_initial_data", new_state) fill_initial_data.database_forwards("fill_initial_data", editor, project_state, new_state) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: grow_article_id.state_forwards("test_article", new_state) grow_article_id.database_forwards("test_article", editor, project_state, new_state) state = new_state.clone() article = state.apps.get_model("test_article.Article") self.assertIsInstance(article._meta.pk, target_field) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: grow_blog_id.state_forwards("test_blog", new_state) grow_blog_id.database_forwards("test_blog", editor, project_state, new_state) state = new_state.clone() blog = state.apps.get_model("test_blog.Blog") self.assertIsInstance(blog._meta.pk, target_field) project_state = new_state new_state = new_state.clone() with connection.schema_editor() as editor: fill_big_data.state_forwards("fill_big_data", new_state) fill_big_data.database_forwards("fill_big_data", editor, project_state, new_state) def test_autofield__bigautofield_foreignfield_growth(self): """A field may be migrated from AutoField to BigAutoField.""" self._test_autofield_foreignfield_growth( models.AutoField, models.BigAutoField, 2 ** 33, ) def test_smallfield_autofield_foreignfield_growth(self): """A field may be migrated from SmallAutoField to AutoField.""" self._test_autofield_foreignfield_growth( models.SmallAutoField, models.AutoField, 2 ** 22, ) def test_smallfield_bigautofield_foreignfield_growth(self): """A field may be migrated from SmallAutoField to BigAutoField.""" self._test_autofield_foreignfield_growth( models.SmallAutoField, models.BigAutoField, 2 ** 33, ) def test_run_python_noop(self): """ #24098 - Tests no-op RunPython operations. """ project_state = ProjectState() new_state = project_state.clone() operation = migrations.RunPython(migrations.RunPython.noop, migrations.RunPython.noop) with connection.schema_editor() as editor: operation.database_forwards("test_runpython", editor, project_state, new_state) operation.database_backwards("test_runpython", editor, new_state, project_state) def test_separate_database_and_state(self): """ Tests the SeparateDatabaseAndState operation. 
""" project_state = self.set_up_test_model("test_separatedatabaseandstate") # Create the operation database_operation = migrations.RunSQL( "CREATE TABLE i_love_ponies (id int, special_thing int);", "DROP TABLE i_love_ponies;" ) state_operation = migrations.CreateModel("SomethingElse", [("id", models.AutoField(primary_key=True))]) operation = migrations.SeparateDatabaseAndState( state_operations=[state_operation], database_operations=[database_operation] ) self.assertEqual(operation.describe(), "Custom state/database change combination") # Test the state alteration new_state = project_state.clone() operation.state_forwards("test_separatedatabaseandstate", new_state) self.assertEqual(len(new_state.models["test_separatedatabaseandstate", "somethingelse"].fields), 1) # Make sure there's no table self.assertTableNotExists("i_love_ponies") # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards("test_separatedatabaseandstate", editor, project_state, new_state) self.assertTableExists("i_love_ponies") # And test reversal self.assertTrue(operation.reversible) with connection.schema_editor() as editor: operation.database_backwards("test_separatedatabaseandstate", editor, new_state, project_state) self.assertTableNotExists("i_love_ponies") # And deconstruction definition = operation.deconstruct() self.assertEqual(definition[0], "SeparateDatabaseAndState") self.assertEqual(definition[1], []) self.assertEqual(sorted(definition[2]), ["database_operations", "state_operations"]) def test_separate_database_and_state2(self): """ A complex SeparateDatabaseAndState operation: Multiple operations both for state and database. Verify the state dependencies within each list and that state ops don't affect the database. """ app_label = "test_separatedatabaseandstate2" project_state = self.set_up_test_model(app_label) # Create the operation database_operations = [ migrations.CreateModel( "ILovePonies", [("id", models.AutoField(primary_key=True))], options={"db_table": "iloveponies"}, ), migrations.CreateModel( "ILoveMorePonies", # We use IntegerField and not AutoField because # the model is going to be deleted immediately # and with an AutoField this fails on Oracle [("id", models.IntegerField(primary_key=True))], options={"db_table": "ilovemoreponies"}, ), migrations.DeleteModel("ILoveMorePonies"), migrations.CreateModel( "ILoveEvenMorePonies", [("id", models.AutoField(primary_key=True))], options={"db_table": "iloveevenmoreponies"}, ), ] state_operations = [ migrations.CreateModel( "SomethingElse", [("id", models.AutoField(primary_key=True))], options={"db_table": "somethingelse"}, ), migrations.DeleteModel("SomethingElse"), migrations.CreateModel( "SomethingCompletelyDifferent", [("id", models.AutoField(primary_key=True))], options={"db_table": "somethingcompletelydifferent"}, ), ] operation = migrations.SeparateDatabaseAndState( state_operations=state_operations, database_operations=database_operations, ) # Test the state alteration new_state = project_state.clone() operation.state_forwards(app_label, new_state) def assertModelsAndTables(after_db): # Tables and models exist, or don't, as they should: self.assertNotIn((app_label, "somethingelse"), new_state.models) self.assertEqual(len(new_state.models[app_label, "somethingcompletelydifferent"].fields), 1) self.assertNotIn((app_label, "iloveponiesonies"), new_state.models) self.assertNotIn((app_label, "ilovemoreponies"), new_state.models) self.assertNotIn((app_label, "iloveevenmoreponies"), new_state.models) 
self.assertTableNotExists("somethingelse") self.assertTableNotExists("somethingcompletelydifferent") self.assertTableNotExists("ilovemoreponies") if after_db: self.assertTableExists("iloveponies") self.assertTableExists("iloveevenmoreponies") else: self.assertTableNotExists("iloveponies") self.assertTableNotExists("iloveevenmoreponies") assertModelsAndTables(after_db=False) # Test the database alteration with connection.schema_editor() as editor: operation.database_forwards(app_label, editor, project_state, new_state) assertModelsAndTables(after_db=True) # And test reversal self.assertTrue(operation.reversible) with connection.schema_editor() as editor: operation.database_backwards(app_label, editor, new_state, project_state) assertModelsAndTables(after_db=False) class SwappableOperationTests(OperationTestBase): """ Key operations ignore swappable models (we don't want to replicate all of them here, as the functionality is in a common base class anyway) """ available_apps = ['migrations'] @override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel") def test_create_ignore_swapped(self): """ The CreateTable operation ignores swapped models. """ operation = migrations.CreateModel( "Pony", [ ("id", models.AutoField(primary_key=True)), ("pink", models.IntegerField(default=1)), ], options={ "swappable": "TEST_SWAP_MODEL", }, ) # Test the state alteration (it should still be there!) project_state = ProjectState() new_state = project_state.clone() operation.state_forwards("test_crigsw", new_state) self.assertEqual(new_state.models["test_crigsw", "pony"].name, "Pony") self.assertEqual(len(new_state.models["test_crigsw", "pony"].fields), 2) # Test the database alteration self.assertTableNotExists("test_crigsw_pony") with connection.schema_editor() as editor: operation.database_forwards("test_crigsw", editor, project_state, new_state) self.assertTableNotExists("test_crigsw_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_crigsw", editor, new_state, project_state) self.assertTableNotExists("test_crigsw_pony") @override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel") def test_delete_ignore_swapped(self): """ Tests the DeleteModel operation ignores swapped models. """ operation = migrations.DeleteModel("Pony") project_state, new_state = self.make_test_state("test_dligsw", operation) # Test the database alteration self.assertTableNotExists("test_dligsw_pony") with connection.schema_editor() as editor: operation.database_forwards("test_dligsw", editor, project_state, new_state) self.assertTableNotExists("test_dligsw_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_dligsw", editor, new_state, project_state) self.assertTableNotExists("test_dligsw_pony") @override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel") def test_add_field_ignore_swapped(self): """ Tests the AddField operation. 
""" # Test the state alteration operation = migrations.AddField( "Pony", "height", models.FloatField(null=True, default=5), ) project_state, new_state = self.make_test_state("test_adfligsw", operation) # Test the database alteration self.assertTableNotExists("test_adfligsw_pony") with connection.schema_editor() as editor: operation.database_forwards("test_adfligsw", editor, project_state, new_state) self.assertTableNotExists("test_adfligsw_pony") # And test reversal with connection.schema_editor() as editor: operation.database_backwards("test_adfligsw", editor, new_state, project_state) self.assertTableNotExists("test_adfligsw_pony") @override_settings(TEST_SWAP_MODEL='migrations.SomeFakeModel') def test_indexes_ignore_swapped(self): """ Add/RemoveIndex operations ignore swapped models. """ operation = migrations.AddIndex('Pony', models.Index(fields=['pink'], name='my_name_idx')) project_state, new_state = self.make_test_state('test_adinigsw', operation) with connection.schema_editor() as editor: # No database queries should be run for swapped models operation.database_forwards('test_adinigsw', editor, project_state, new_state) operation.database_backwards('test_adinigsw', editor, new_state, project_state) operation = migrations.RemoveIndex('Pony', models.Index(fields=['pink'], name='my_name_idx')) project_state, new_state = self.make_test_state("test_rminigsw", operation) with connection.schema_editor() as editor: operation.database_forwards('test_rminigsw', editor, project_state, new_state) operation.database_backwards('test_rminigsw', editor, new_state, project_state) class TestCreateModel(SimpleTestCase): def test_references_model_mixin(self): migrations.CreateModel( 'name', fields=[], bases=(Mixin, models.Model), ).references_model('other_model', 'migrations') class FieldOperationTests(SimpleTestCase): def test_references_model(self): operation = FieldOperation('MoDel', 'field', models.ForeignKey('Other', models.CASCADE)) # Model name match. self.assertIs(operation.references_model('mOdEl', 'migrations'), True) # Referenced field. self.assertIs(operation.references_model('oTher', 'migrations'), True) # Doesn't reference. 
self.assertIs(operation.references_model('Whatever', 'migrations'), False) def test_references_field_by_name(self): operation = FieldOperation('MoDel', 'field', models.BooleanField(default=False)) self.assertIs(operation.references_field('model', 'field', 'migrations'), True) def test_references_field_by_remote_field_model(self): operation = FieldOperation('Model', 'field', models.ForeignKey('Other', models.CASCADE)) self.assertIs(operation.references_field('Other', 'whatever', 'migrations'), True) self.assertIs(operation.references_field('Missing', 'whatever', 'migrations'), False) def test_references_field_by_from_fields(self): operation = FieldOperation( 'Model', 'field', models.fields.related.ForeignObject('Other', models.CASCADE, ['from'], ['to']) ) self.assertIs(operation.references_field('Model', 'from', 'migrations'), True) self.assertIs(operation.references_field('Model', 'to', 'migrations'), False) self.assertIs(operation.references_field('Other', 'from', 'migrations'), False) self.assertIs(operation.references_field('Model', 'to', 'migrations'), False) def test_references_field_by_to_fields(self): operation = FieldOperation('Model', 'field', models.ForeignKey('Other', models.CASCADE, to_field='field')) self.assertIs(operation.references_field('Other', 'field', 'migrations'), True) self.assertIs(operation.references_field('Other', 'whatever', 'migrations'), False) self.assertIs(operation.references_field('Missing', 'whatever', 'migrations'), False) def test_references_field_by_through(self): operation = FieldOperation('Model', 'field', models.ManyToManyField('Other', through='Through')) self.assertIs(operation.references_field('Other', 'whatever', 'migrations'), True) self.assertIs(operation.references_field('Through', 'whatever', 'migrations'), True) self.assertIs(operation.references_field('Missing', 'whatever', 'migrations'), False) def test_reference_field_by_through_fields(self): operation = FieldOperation( 'Model', 'field', models.ManyToManyField('Other', through='Through', through_fields=('first', 'second')) ) self.assertIs(operation.references_field('Other', 'whatever', 'migrations'), True) self.assertIs(operation.references_field('Through', 'whatever', 'migrations'), False) self.assertIs(operation.references_field('Through', 'first', 'migrations'), True) self.assertIs(operation.references_field('Through', 'second', 'migrations'), True)
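# A minimal sketch of the reference queries exercised above, outside the test
# harness; the expected values follow directly from the assertions in
# FieldOperationTests:
#
#   op = FieldOperation('Model', 'field', models.ForeignKey('Other', models.CASCADE))
#   op.references_model('mOdEl', 'migrations')    # True; the match is case-insensitive
#   op.references_field('Other', 'whatever', 'migrations')  # True for any field on an FK target
#   op.references_model('Whatever', 'migrations')  # False; unrelated model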
import functools import re from unittest import mock from django.apps import apps from django.conf import settings from django.contrib.auth.models import AbstractBaseUser from django.core.validators import RegexValidator, validate_slug from django.db import connection, migrations, models from django.db.migrations.autodetector import MigrationAutodetector from django.db.migrations.graph import MigrationGraph from django.db.migrations.loader import MigrationLoader from django.db.migrations.questioner import MigrationQuestioner from django.db.migrations.state import ModelState, ProjectState from django.test import SimpleTestCase, TestCase, override_settings from django.test.utils import isolate_lru_cache from .models import FoodManager, FoodQuerySet class DeconstructibleObject: """ A custom deconstructible object. """ def __init__(self, *args, **kwargs): self.args = args self.kwargs = kwargs def deconstruct(self): return ( self.__module__ + '.' + self.__class__.__name__, self.args, self.kwargs ) class AutodetectorTests(TestCase): """ Tests the migration autodetector. """ author_empty = ModelState("testapp", "Author", [("id", models.AutoField(primary_key=True))]) author_name = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ]) author_name_null = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, null=True)), ]) author_name_longer = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=400)), ]) author_name_renamed = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("names", models.CharField(max_length=200)), ]) author_name_default = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default='Ada Lovelace')), ]) author_name_check_constraint = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ], {'constraints': [models.CheckConstraint(check=models.Q(name__contains='Bob'), name='name_contains_bob')]}, ) author_dates_of_birth_auto_now = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("date_of_birth", models.DateField(auto_now=True)), ("date_time_of_birth", models.DateTimeField(auto_now=True)), ("time_of_birth", models.TimeField(auto_now=True)), ]) author_dates_of_birth_auto_now_add = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("date_of_birth", models.DateField(auto_now_add=True)), ("date_time_of_birth", models.DateTimeField(auto_now_add=True)), ("time_of_birth", models.TimeField(auto_now_add=True)), ]) author_name_deconstructible_1 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=DeconstructibleObject())), ]) author_name_deconstructible_2 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=DeconstructibleObject())), ]) author_name_deconstructible_3 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=models.IntegerField())), ]) author_name_deconstructible_4 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=models.IntegerField())), ]) 
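    # How fixture pairs like the two above are consumed (a sketch inlining what
    # the test methods further down do): two ProjectStates are diffed through
    # the autodetector and the emitted operations are inspected. Fixtures whose
    # defaults deconstruct identically must produce no changes at all:
    #
    #   before, after = ProjectState(), ProjectState()
    #   before.add_model(AutodetectorTests.author_name_deconstructible_1.clone())
    #   after.add_model(AutodetectorTests.author_name_deconstructible_2.clone())
    #   autodetector = MigrationAutodetector(before, after)
    #   assert autodetector.changes(graph=MigrationGraph()) == {}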
author_name_deconstructible_list_1 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=[DeconstructibleObject(), 123])), ]) author_name_deconstructible_list_2 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=[DeconstructibleObject(), 123])), ]) author_name_deconstructible_list_3 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=[DeconstructibleObject(), 999])), ]) author_name_deconstructible_tuple_1 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=(DeconstructibleObject(), 123))), ]) author_name_deconstructible_tuple_2 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=(DeconstructibleObject(), 123))), ]) author_name_deconstructible_tuple_3 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=(DeconstructibleObject(), 999))), ]) author_name_deconstructible_dict_1 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default={ 'item': DeconstructibleObject(), 'otheritem': 123 })), ]) author_name_deconstructible_dict_2 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default={ 'item': DeconstructibleObject(), 'otheritem': 123 })), ]) author_name_deconstructible_dict_3 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default={ 'item': DeconstructibleObject(), 'otheritem': 999 })), ]) author_name_nested_deconstructible_1 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=DeconstructibleObject( DeconstructibleObject(1), (DeconstructibleObject('t1'), DeconstructibleObject('t2'),), a=DeconstructibleObject('A'), b=DeconstructibleObject(B=DeconstructibleObject('c')), ))), ]) author_name_nested_deconstructible_2 = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=DeconstructibleObject( DeconstructibleObject(1), (DeconstructibleObject('t1'), DeconstructibleObject('t2'),), a=DeconstructibleObject('A'), b=DeconstructibleObject(B=DeconstructibleObject('c')), ))), ]) author_name_nested_deconstructible_changed_arg = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=DeconstructibleObject( DeconstructibleObject(1), (DeconstructibleObject('t1'), DeconstructibleObject('t2-changed'),), a=DeconstructibleObject('A'), b=DeconstructibleObject(B=DeconstructibleObject('c')), ))), ]) author_name_nested_deconstructible_extra_arg = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=DeconstructibleObject( DeconstructibleObject(1), (DeconstructibleObject('t1'), DeconstructibleObject('t2'),), None, a=DeconstructibleObject('A'), b=DeconstructibleObject(B=DeconstructibleObject('c')), ))), ]) author_name_nested_deconstructible_changed_kwarg = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", 
models.CharField(max_length=200, default=DeconstructibleObject( DeconstructibleObject(1), (DeconstructibleObject('t1'), DeconstructibleObject('t2'),), a=DeconstructibleObject('A'), b=DeconstructibleObject(B=DeconstructibleObject('c-changed')), ))), ]) author_name_nested_deconstructible_extra_kwarg = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200, default=DeconstructibleObject( DeconstructibleObject(1), (DeconstructibleObject('t1'), DeconstructibleObject('t2'),), a=DeconstructibleObject('A'), b=DeconstructibleObject(B=DeconstructibleObject('c')), c=None, ))), ]) author_custom_pk = ModelState("testapp", "Author", [("pk_field", models.IntegerField(primary_key=True))]) author_with_biography_non_blank = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField()), ("biography", models.TextField()), ]) author_with_biography_blank = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(blank=True)), ("biography", models.TextField(blank=True)), ]) author_with_book = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ("book", models.ForeignKey("otherapp.Book", models.CASCADE)), ]) author_with_book_order_wrt = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ("book", models.ForeignKey("otherapp.Book", models.CASCADE)), ], options={"order_with_respect_to": "book"}) author_renamed_with_book = ModelState("testapp", "Writer", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ("book", models.ForeignKey("otherapp.Book", models.CASCADE)), ]) author_with_publisher_string = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ("publisher_name", models.CharField(max_length=200)), ]) author_with_publisher = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ("publisher", models.ForeignKey("testapp.Publisher", models.CASCADE)), ]) author_with_user = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ("user", models.ForeignKey("auth.User", models.CASCADE)), ]) author_with_custom_user = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=200)), ("user", models.ForeignKey("thirdapp.CustomUser", models.CASCADE)), ]) author_proxy = ModelState("testapp", "AuthorProxy", [], {"proxy": True}, ("testapp.author",)) author_proxy_options = ModelState("testapp", "AuthorProxy", [], { "proxy": True, "verbose_name": "Super Author", }, ("testapp.author",)) author_proxy_notproxy = ModelState("testapp", "AuthorProxy", [], {}, ("testapp.author",)) author_proxy_third = ModelState("thirdapp", "AuthorProxy", [], {"proxy": True}, ("testapp.author",)) author_proxy_third_notproxy = ModelState("thirdapp", "AuthorProxy", [], {}, ("testapp.author",)) author_proxy_proxy = ModelState("testapp", "AAuthorProxyProxy", [], {"proxy": True}, ("testapp.authorproxy",)) author_unmanaged = ModelState("testapp", "AuthorUnmanaged", [], {"managed": False}, ("testapp.author",)) author_unmanaged_managed = ModelState("testapp", "AuthorUnmanaged", [], {}, ("testapp.author",)) author_unmanaged_default_pk = ModelState("testapp", "Author", [("id", 
models.AutoField(primary_key=True))]) author_unmanaged_custom_pk = ModelState("testapp", "Author", [ ("pk_field", models.IntegerField(primary_key=True)), ]) author_with_m2m = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("publishers", models.ManyToManyField("testapp.Publisher")), ]) author_with_m2m_blank = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("publishers", models.ManyToManyField("testapp.Publisher", blank=True)), ]) author_with_m2m_through = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("publishers", models.ManyToManyField("testapp.Publisher", through="testapp.Contract")), ]) author_with_renamed_m2m_through = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("publishers", models.ManyToManyField("testapp.Publisher", through="testapp.Deal")), ]) author_with_former_m2m = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("publishers", models.CharField(max_length=100)), ]) author_with_options = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ], { "permissions": [('can_hire', 'Can hire')], "verbose_name": "Authi", }) author_with_db_table_options = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ], {"db_table": "author_one"}) author_with_new_db_table_options = ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ], {"db_table": "author_two"}) author_renamed_with_db_table_options = ModelState("testapp", "NewAuthor", [ ("id", models.AutoField(primary_key=True)), ], {"db_table": "author_one"}) author_renamed_with_new_db_table_options = ModelState("testapp", "NewAuthor", [ ("id", models.AutoField(primary_key=True)), ], {"db_table": "author_three"}) contract = ModelState("testapp", "Contract", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("publisher", models.ForeignKey("testapp.Publisher", models.CASCADE)), ]) contract_renamed = ModelState("testapp", "Deal", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("publisher", models.ForeignKey("testapp.Publisher", models.CASCADE)), ]) publisher = ModelState("testapp", "Publisher", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=100)), ]) publisher_with_author = ModelState("testapp", "Publisher", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("name", models.CharField(max_length=100)), ]) publisher_with_aardvark_author = ModelState("testapp", "Publisher", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Aardvark", models.CASCADE)), ("name", models.CharField(max_length=100)), ]) publisher_with_book = ModelState("testapp", "Publisher", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("otherapp.Book", models.CASCADE)), ("name", models.CharField(max_length=100)), ]) other_pony = ModelState("otherapp", "Pony", [ ("id", models.AutoField(primary_key=True)), ]) other_pony_food = ModelState("otherapp", "Pony", [ ("id", models.AutoField(primary_key=True)), ], managers=[ ('food_qs', FoodQuerySet.as_manager()), ('food_mgr', FoodManager('a', 'b')), ('food_mgr_kwargs', FoodManager('x', 'y', 3, 4)), ]) other_stable = ModelState("otherapp", "Stable", [("id", models.AutoField(primary_key=True))]) third_thing = ModelState("thirdapp", "Thing", 
[("id", models.AutoField(primary_key=True))]) book = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("title", models.CharField(max_length=200)), ]) book_proxy_fk = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("thirdapp.AuthorProxy", models.CASCADE)), ("title", models.CharField(max_length=200)), ]) book_proxy_proxy_fk = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.AAuthorProxyProxy", models.CASCADE)), ]) book_migrations_fk = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("migrations.UnmigratedModel", models.CASCADE)), ("title", models.CharField(max_length=200)), ]) book_with_no_author_fk = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.IntegerField()), ("title", models.CharField(max_length=200)), ]) book_with_no_author = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("title", models.CharField(max_length=200)), ]) book_with_author_renamed = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Writer", models.CASCADE)), ("title", models.CharField(max_length=200)), ]) book_with_field_and_author_renamed = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("writer", models.ForeignKey("testapp.Writer", models.CASCADE)), ("title", models.CharField(max_length=200)), ]) book_with_multiple_authors = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("authors", models.ManyToManyField("testapp.Author")), ("title", models.CharField(max_length=200)), ]) book_with_multiple_authors_through_attribution = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("authors", models.ManyToManyField("testapp.Author", through="otherapp.Attribution")), ("title", models.CharField(max_length=200)), ]) book_indexes = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("title", models.CharField(max_length=200)), ], { "indexes": [models.Index(fields=["author", "title"], name="book_title_author_idx")], }) book_unordered_indexes = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("title", models.CharField(max_length=200)), ], { "indexes": [models.Index(fields=["title", "author"], name="book_author_title_idx")], }) book_foo_together = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("title", models.CharField(max_length=200)), ], { "index_together": {("author", "title")}, "unique_together": {("author", "title")}, }) book_foo_together_2 = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("title", models.CharField(max_length=200)), ], { "index_together": {("title", "author")}, "unique_together": {("title", "author")}, }) book_foo_together_3 = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("newfield", models.IntegerField()), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("title", models.CharField(max_length=200)), ], { 
"index_together": {("title", "newfield")}, "unique_together": {("title", "newfield")}, }) book_foo_together_4 = ModelState("otherapp", "Book", [ ("id", models.AutoField(primary_key=True)), ("newfield2", models.IntegerField()), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("title", models.CharField(max_length=200)), ], { "index_together": {("title", "newfield2")}, "unique_together": {("title", "newfield2")}, }) attribution = ModelState("otherapp", "Attribution", [ ("id", models.AutoField(primary_key=True)), ("author", models.ForeignKey("testapp.Author", models.CASCADE)), ("book", models.ForeignKey("otherapp.Book", models.CASCADE)), ]) edition = ModelState("thirdapp", "Edition", [ ("id", models.AutoField(primary_key=True)), ("book", models.ForeignKey("otherapp.Book", models.CASCADE)), ]) custom_user = ModelState("thirdapp", "CustomUser", [ ("id", models.AutoField(primary_key=True)), ("username", models.CharField(max_length=255)), ], bases=(AbstractBaseUser,)) custom_user_no_inherit = ModelState("thirdapp", "CustomUser", [ ("id", models.AutoField(primary_key=True)), ("username", models.CharField(max_length=255)), ]) aardvark = ModelState("thirdapp", "Aardvark", [("id", models.AutoField(primary_key=True))]) aardvark_testapp = ModelState("testapp", "Aardvark", [("id", models.AutoField(primary_key=True))]) aardvark_based_on_author = ModelState("testapp", "Aardvark", [], bases=("testapp.Author",)) aardvark_pk_fk_author = ModelState("testapp", "Aardvark", [ ("id", models.OneToOneField("testapp.Author", models.CASCADE, primary_key=True)), ]) knight = ModelState("eggs", "Knight", [("id", models.AutoField(primary_key=True))]) rabbit = ModelState("eggs", "Rabbit", [ ("id", models.AutoField(primary_key=True)), ("knight", models.ForeignKey("eggs.Knight", models.CASCADE)), ("parent", models.ForeignKey("eggs.Rabbit", models.CASCADE)), ], { "unique_together": {("parent", "knight")}, "indexes": [models.Index(fields=["parent", "knight"], name='rabbit_circular_fk_index')], }) def repr_changes(self, changes, include_dependencies=False): output = "" for app_label, migrations_ in sorted(changes.items()): output += " %s:\n" % app_label for migration in migrations_: output += " %s\n" % migration.name for operation in migration.operations: output += " %s\n" % operation if include_dependencies: output += " Dependencies:\n" if migration.dependencies: for dep in migration.dependencies: output += " %s\n" % (dep,) else: output += " None\n" return output def assertNumberMigrations(self, changes, app_label, number): if len(changes.get(app_label, [])) != number: self.fail("Incorrect number of migrations (%s) for %s (expected %s)\n%s" % ( len(changes.get(app_label, [])), app_label, number, self.repr_changes(changes), )) def assertMigrationDependencies(self, changes, app_label, position, dependencies): if not changes.get(app_label): self.fail("No migrations found for %s\n%s" % (app_label, self.repr_changes(changes))) if len(changes[app_label]) < position + 1: self.fail("No migration at index %s for %s\n%s" % (position, app_label, self.repr_changes(changes))) migration = changes[app_label][position] if set(migration.dependencies) != set(dependencies): self.fail("Migration dependencies mismatch for %s.%s (expected %s):\n%s" % ( app_label, migration.name, dependencies, self.repr_changes(changes, include_dependencies=True), )) def assertOperationTypes(self, changes, app_label, position, types): if not changes.get(app_label): self.fail("No migrations found for %s\n%s" % (app_label, self.repr_changes(changes))) 
        if len(changes[app_label]) < position + 1:
            self.fail("No migration at index %s for %s\n%s" % (position, app_label, self.repr_changes(changes)))
        migration = changes[app_label][position]
        real_types = [operation.__class__.__name__ for operation in migration.operations]
        if types != real_types:
            self.fail("Operation type mismatch for %s.%s (expected %s):\n%s" % (
                app_label,
                migration.name,
                types,
                self.repr_changes(changes),
            ))

    def assertOperationAttributes(self, changes, app_label, position, operation_position, **attrs):
        if not changes.get(app_label):
            self.fail("No migrations found for %s\n%s" % (app_label, self.repr_changes(changes)))
        if len(changes[app_label]) < position + 1:
            self.fail("No migration at index %s for %s\n%s" % (position, app_label, self.repr_changes(changes)))
        migration = changes[app_label][position]
        # Bounds-check the operation index (the original re-checked the
        # migration index here, so this failure could never trigger).
        if len(migration.operations) < operation_position + 1:
            self.fail("No operation at index %s for %s.%s\n%s" % (
                operation_position,
                app_label,
                migration.name,
                self.repr_changes(changes),
            ))
        operation = migration.operations[operation_position]
        for attr, value in attrs.items():
            if getattr(operation, attr, None) != value:
                self.fail("Attribute mismatch for %s.%s op #%s, %s (expected %r, got %r):\n%s" % (
                    app_label,
                    migration.name,
                    operation_position,
                    attr,
                    value,
                    getattr(operation, attr, None),
                    self.repr_changes(changes),
                ))

    def assertOperationFieldAttributes(self, changes, app_label, position, operation_position, **attrs):
        if not changes.get(app_label):
            self.fail("No migrations found for %s\n%s" % (app_label, self.repr_changes(changes)))
        if len(changes[app_label]) < position + 1:
            self.fail("No migration at index %s for %s\n%s" % (position, app_label, self.repr_changes(changes)))
        migration = changes[app_label][position]
        # Same operation-index check as in assertOperationAttributes.
        if len(migration.operations) < operation_position + 1:
            self.fail("No operation at index %s for %s.%s\n%s" % (
                operation_position,
                app_label,
                migration.name,
                self.repr_changes(changes),
            ))
        operation = migration.operations[operation_position]
        if not hasattr(operation, 'field'):
            self.fail("No field attribute for %s.%s op #%s." % (
                app_label,
                migration.name,
                operation_position,
            ))
        field = operation.field
        for attr, value in attrs.items():
            if getattr(field, attr, None) != value:
                self.fail("Field attribute mismatch for %s.%s op #%s, field.%s (expected %r, got %r):\n%s" % (
                    app_label,
                    migration.name,
                    operation_position,
                    attr,
                    value,
                    getattr(field, attr, None),
                    self.repr_changes(changes),
                ))

    def make_project_state(self, model_states):
        "Shortcut to make ProjectStates from lists of predefined models"
        project_state = ProjectState()
        for model_state in model_states:
            project_state.add_model(model_state.clone())
        return project_state

    def get_changes(self, before_states, after_states, questioner=None):
        if not isinstance(before_states, ProjectState):
            before_states = self.make_project_state(before_states)
        if not isinstance(after_states, ProjectState):
            after_states = self.make_project_state(after_states)
        return MigrationAutodetector(
            before_states,
            after_states,
            questioner,
        )._detect_changes()

    def test_arrange_for_graph(self):
        """Tests auto-naming of migrations for graph matching."""
        # Make a fake graph
        graph = MigrationGraph()
        graph.add_node(("testapp", "0001_initial"), None)
        graph.add_node(("testapp", "0002_foobar"), None)
        graph.add_node(("otherapp", "0001_initial"), None)
        graph.add_dependency("testapp.0002_foobar", ("testapp", "0002_foobar"), ("testapp", "0001_initial"))
        graph.add_dependency("testapp.0002_foobar", ("testapp", "0002_foobar"), ("otherapp", "0001_initial"))
        # Use project state to make a new migration change set
        before = self.make_project_state([self.publisher, self.other_pony])
        after = self.make_project_state([
            self.author_empty,
            self.publisher,
            self.other_pony,
            self.other_stable,
        ])
        autodetector = MigrationAutodetector(before, after)
        changes = autodetector._detect_changes()
        # Run through arrange_for_graph
        changes = autodetector.arrange_for_graph(changes, graph)
        # Make sure there's a new name, deps match, etc.
        self.assertEqual(changes["testapp"][0].name, "0003_author")
        self.assertEqual(changes["testapp"][0].dependencies, [("testapp", "0002_foobar")])
        self.assertEqual(changes["otherapp"][0].name, '0002_stable')
        self.assertEqual(changes["otherapp"][0].dependencies, [("otherapp", "0001_initial")])

    def test_arrange_for_graph_with_multiple_initial(self):
        # Make a fake graph.
        graph = MigrationGraph()
        # Use project state to make a new migration change set.
        before = self.make_project_state([])
        after = self.make_project_state([self.author_with_book, self.book, self.attribution])
        autodetector = MigrationAutodetector(before, after, MigrationQuestioner({'ask_initial': True}))
        changes = autodetector._detect_changes()
        changes = autodetector.arrange_for_graph(changes, graph)
        self.assertEqual(changes['otherapp'][0].name, '0001_initial')
        self.assertEqual(changes['otherapp'][0].dependencies, [])
        self.assertEqual(changes['otherapp'][1].name, '0002_initial')
        self.assertCountEqual(
            changes['otherapp'][1].dependencies,
            [('testapp', '0001_initial'), ('otherapp', '0001_initial')],
        )
        self.assertEqual(changes['testapp'][0].name, '0001_initial')
        self.assertEqual(changes['testapp'][0].dependencies, [('otherapp', '0001_initial')])
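    # A minimal end-to-end sketch of the API the helpers above wrap (not one
    # of the original tests): build two project states, run the autodetector,
    # and read back the generated operations. The "Widget" model and app label
    # are hypothetical.
    def _example_autodetector_round_trip(self):
        before = self.make_project_state([])
        after = self.make_project_state([
            ModelState("exampleapp", "Widget", [
                ("id", models.AutoField(primary_key=True)),
            ]),
        ])
        # Returns a dict mapping app labels to lists of Migration objects,
        # here {'exampleapp': [...]} with a single CreateModel operation.
        return MigrationAutodetector(before, after)._detect_changes()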
""" # Use project state to make a new migration change set before = self.make_project_state([]) after = self.make_project_state([self.author_empty, self.other_pony, self.other_stable, self.third_thing]) autodetector = MigrationAutodetector(before, after, MigrationQuestioner({"ask_initial": True})) changes = autodetector._detect_changes() # Run through arrange_for_graph graph = MigrationGraph() changes = autodetector.arrange_for_graph(changes, graph) changes["testapp"][0].dependencies.append(("otherapp", "0001_initial")) changes = autodetector._trim_to_apps(changes, {"testapp"}) # Make sure there's the right set of migrations self.assertEqual(changes["testapp"][0].name, "0001_initial") self.assertEqual(changes["otherapp"][0].name, "0001_initial") self.assertNotIn("thirdapp", changes) def test_custom_migration_name(self): """Tests custom naming of migrations for graph matching.""" # Make a fake graph graph = MigrationGraph() graph.add_node(("testapp", "0001_initial"), None) graph.add_node(("testapp", "0002_foobar"), None) graph.add_node(("otherapp", "0001_initial"), None) graph.add_dependency("testapp.0002_foobar", ("testapp", "0002_foobar"), ("testapp", "0001_initial")) # Use project state to make a new migration change set before = self.make_project_state([]) after = self.make_project_state([self.author_empty, self.other_pony, self.other_stable]) autodetector = MigrationAutodetector(before, after) changes = autodetector._detect_changes() # Run through arrange_for_graph migration_name = 'custom_name' changes = autodetector.arrange_for_graph(changes, graph, migration_name) # Make sure there's a new name, deps match, etc. self.assertEqual(changes["testapp"][0].name, "0003_%s" % migration_name) self.assertEqual(changes["testapp"][0].dependencies, [("testapp", "0002_foobar")]) self.assertEqual(changes["otherapp"][0].name, "0002_%s" % migration_name) self.assertEqual(changes["otherapp"][0].dependencies, [("otherapp", "0001_initial")]) def test_new_model(self): """Tests autodetection of new models.""" changes = self.get_changes([], [self.other_pony_food]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, "otherapp", 0, 0, name="Pony") self.assertEqual([name for name, mgr in changes['otherapp'][0].operations[0].managers], ['food_qs', 'food_mgr', 'food_mgr_kwargs']) def test_old_model(self): """Tests deletion of old models.""" changes = self.get_changes([self.author_empty], []) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["DeleteModel"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author") def test_add_field(self): """Tests autodetection of new fields.""" changes = self.get_changes([self.author_empty], [self.author_name]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="name") @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition', side_effect=AssertionError("Should not have prompted for not null addition")) def test_add_date_fields_with_auto_now_not_asking_for_default(self, mocked_ask_method): changes = self.get_changes([self.author_empty], [self.author_dates_of_birth_auto_now]) # Right number/type of migrations? 
    @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition',
                side_effect=AssertionError("Should not have prompted for not null addition"))
    def test_add_date_fields_with_auto_now_not_asking_for_default(self, mocked_ask_method):
        changes = self.get_changes([self.author_empty], [self.author_dates_of_birth_auto_now])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["AddField", "AddField", "AddField"])
        self.assertOperationFieldAttributes(changes, "testapp", 0, 0, auto_now=True)
        self.assertOperationFieldAttributes(changes, "testapp", 0, 1, auto_now=True)
        self.assertOperationFieldAttributes(changes, "testapp", 0, 2, auto_now=True)

    @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition',
                side_effect=AssertionError("Should not have prompted for not null addition"))
    def test_add_date_fields_with_auto_now_add_not_asking_for_null_addition(self, mocked_ask_method):
        changes = self.get_changes([self.author_empty], [self.author_dates_of_birth_auto_now_add])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["AddField", "AddField", "AddField"])
        self.assertOperationFieldAttributes(changes, "testapp", 0, 0, auto_now_add=True)
        self.assertOperationFieldAttributes(changes, "testapp", 0, 1, auto_now_add=True)
        self.assertOperationFieldAttributes(changes, "testapp", 0, 2, auto_now_add=True)

    @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_auto_now_add_addition')
    def test_add_date_fields_with_auto_now_add_asking_for_default(self, mocked_ask_method):
        changes = self.get_changes([self.author_empty], [self.author_dates_of_birth_auto_now_add])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["AddField", "AddField", "AddField"])
        self.assertOperationFieldAttributes(changes, "testapp", 0, 0, auto_now_add=True)
        self.assertOperationFieldAttributes(changes, "testapp", 0, 1, auto_now_add=True)
        self.assertOperationFieldAttributes(changes, "testapp", 0, 2, auto_now_add=True)
        self.assertEqual(mocked_ask_method.call_count, 3)

    def test_remove_field(self):
        """Tests autodetection of removed fields."""
        changes = self.get_changes([self.author_name], [self.author_empty])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["RemoveField"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="name")

    def test_alter_field(self):
        """Tests autodetection of altered fields."""
        changes = self.get_changes([self.author_name], [self.author_name_longer])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["AlterField"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="name", preserve_default=True)

    def test_supports_functools_partial(self):
        def _content_file_name(instance, filename, key, **kwargs):
            return '{}/{}'.format(instance, filename)

        def content_file_name(key, **kwargs):
            return functools.partial(_content_file_name, key, **kwargs)

        # An unchanged partial reference.
        before = [ModelState("testapp", "Author", [
            ("id", models.AutoField(primary_key=True)),
            ("file", models.FileField(max_length=200, upload_to=content_file_name('file'))),
        ])]
        after = [ModelState("testapp", "Author", [
            ("id", models.AutoField(primary_key=True)),
            ("file", models.FileField(max_length=200, upload_to=content_file_name('file'))),
        ])]
        changes = self.get_changes(before, after)
        self.assertNumberMigrations(changes, 'testapp', 0)

        # A changed partial reference.
args_changed = [ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("file", models.FileField(max_length=200, upload_to=content_file_name('other-file'))), ])] changes = self.get_changes(before, args_changed) self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ['AlterField']) # Can't use assertOperationFieldAttributes because we need the # deconstructed version, i.e., the exploded func/args/keywords rather # than the partial: we don't care if it's not the same instance of the # partial, only if it's the same source function, args, and keywords. value = changes['testapp'][0].operations[0].field.upload_to self.assertEqual( (_content_file_name, ('other-file',), {}), (value.func, value.args, value.keywords) ) kwargs_changed = [ModelState("testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("file", models.FileField(max_length=200, upload_to=content_file_name('file', spam='eggs'))), ])] changes = self.get_changes(before, kwargs_changed) self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ['AlterField']) value = changes['testapp'][0].operations[0].field.upload_to self.assertEqual( (_content_file_name, ('file',), {'spam': 'eggs'}), (value.func, value.args, value.keywords) ) @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_not_null_alteration', side_effect=AssertionError("Should not have prompted for not null addition")) def test_alter_field_to_not_null_with_default(self, mocked_ask_method): """ #23609 - Tests autodetection of nullable to non-nullable alterations. """ changes = self.get_changes([self.author_name_null], [self.author_name_default]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterField"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="name", preserve_default=True) self.assertOperationFieldAttributes(changes, "testapp", 0, 0, default='Ada Lovelace') @mock.patch( 'django.db.migrations.questioner.MigrationQuestioner.ask_not_null_alteration', return_value=models.NOT_PROVIDED, ) def test_alter_field_to_not_null_without_default(self, mocked_ask_method): """ #23609 - Tests autodetection of nullable to non-nullable alterations. """ changes = self.get_changes([self.author_name_null], [self.author_name]) self.assertEqual(mocked_ask_method.call_count, 1) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterField"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="name", preserve_default=True) self.assertOperationFieldAttributes(changes, "testapp", 0, 0, default=models.NOT_PROVIDED) @mock.patch( 'django.db.migrations.questioner.MigrationQuestioner.ask_not_null_alteration', return_value='Some Name', ) def test_alter_field_to_not_null_oneoff_default(self, mocked_ask_method): """ #23609 - Tests autodetection of nullable to non-nullable alterations. """ changes = self.get_changes([self.author_name_null], [self.author_name]) self.assertEqual(mocked_ask_method.call_count, 1) # Right number/type of migrations? 
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["AlterField"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="name", preserve_default=False)
        self.assertOperationFieldAttributes(changes, "testapp", 0, 0, default="Some Name")

    def test_rename_field(self):
        """Tests autodetection of renamed fields."""
        changes = self.get_changes(
            [self.author_name], [self.author_name_renamed], MigrationQuestioner({"ask_rename": True})
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["RenameField"])
        self.assertOperationAttributes(changes, 'testapp', 0, 0, old_name="name", new_name="names")

    def test_rename_field_foreign_key_to_field(self):
        before = [
            ModelState('app', 'Foo', [
                ('id', models.AutoField(primary_key=True)),
                ('field', models.IntegerField(unique=True)),
            ]),
            ModelState('app', 'Bar', [
                ('id', models.AutoField(primary_key=True)),
                ('foo', models.ForeignKey('app.Foo', models.CASCADE, to_field='field')),
            ]),
        ]
        after = [
            ModelState('app', 'Foo', [
                ('id', models.AutoField(primary_key=True)),
                ('renamed_field', models.IntegerField(unique=True)),
            ]),
            ModelState('app', 'Bar', [
                ('id', models.AutoField(primary_key=True)),
                ('foo', models.ForeignKey('app.Foo', models.CASCADE, to_field='renamed_field')),
            ]),
        ]
        changes = self.get_changes(before, after, MigrationQuestioner({'ask_rename': True}))
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'app', 1)
        self.assertOperationTypes(changes, 'app', 0, ['RenameField'])
        self.assertOperationAttributes(changes, 'app', 0, 0, old_name='field', new_name='renamed_field')

    def test_rename_foreign_object_fields(self):
        fields = ('first', 'second')
        renamed_fields = ('first_renamed', 'second_renamed')
        before = [
            ModelState('app', 'Foo', [
                ('id', models.AutoField(primary_key=True)),
                ('first', models.IntegerField()),
                ('second', models.IntegerField()),
            ], options={'unique_together': {fields}}),
            ModelState('app', 'Bar', [
                ('id', models.AutoField(primary_key=True)),
                ('first', models.IntegerField()),
                ('second', models.IntegerField()),
                ('foo', models.ForeignObject(
                    'app.Foo', models.CASCADE, from_fields=fields, to_fields=fields,
                )),
            ]),
        ]
        # Case 1: to_fields renames.
        after = [
            ModelState('app', 'Foo', [
                ('id', models.AutoField(primary_key=True)),
                ('first_renamed', models.IntegerField()),
                ('second_renamed', models.IntegerField()),
            ], options={'unique_together': {renamed_fields}}),
            ModelState('app', 'Bar', [
                ('id', models.AutoField(primary_key=True)),
                ('first', models.IntegerField()),
                ('second', models.IntegerField()),
                ('foo', models.ForeignObject(
                    'app.Foo', models.CASCADE, from_fields=fields, to_fields=renamed_fields,
                )),
            ]),
        ]
        changes = self.get_changes(before, after, MigrationQuestioner({'ask_rename': True}))
        self.assertNumberMigrations(changes, 'app', 1)
        self.assertOperationTypes(changes, 'app', 0, ['RenameField', 'RenameField', 'AlterUniqueTogether'])
        self.assertOperationAttributes(
            changes, 'app', 0, 0, model_name='foo', old_name='first', new_name='first_renamed',
        )
        self.assertOperationAttributes(
            changes, 'app', 0, 1, model_name='foo', old_name='second', new_name='second_renamed',
        )
        # Case 2: from_fields renames.
        after = [
            ModelState('app', 'Foo', [
                ('id', models.AutoField(primary_key=True)),
                ('first', models.IntegerField()),
                ('second', models.IntegerField()),
            ], options={'unique_together': {fields}}),
            ModelState('app', 'Bar', [
                ('id', models.AutoField(primary_key=True)),
                ('first_renamed', models.IntegerField()),
                ('second_renamed', models.IntegerField()),
                ('foo', models.ForeignObject(
                    'app.Foo', models.CASCADE, from_fields=renamed_fields, to_fields=fields,
                )),
            ]),
        ]
        changes = self.get_changes(before, after, MigrationQuestioner({'ask_rename': True}))
        self.assertNumberMigrations(changes, 'app', 1)
        self.assertOperationTypes(changes, 'app', 0, ['RenameField', 'RenameField'])
        self.assertOperationAttributes(
            changes, 'app', 0, 0, model_name='bar', old_name='first', new_name='first_renamed',
        )
        self.assertOperationAttributes(
            changes, 'app', 0, 1, model_name='bar', old_name='second', new_name='second_renamed',
        )

    def test_rename_referenced_primary_key(self):
        before = [
            ModelState('app', 'Foo', [
                ('id', models.CharField(primary_key=True, serialize=False)),
            ]),
            ModelState('app', 'Bar', [
                ('id', models.AutoField(primary_key=True)),
                ('foo', models.ForeignKey('app.Foo', models.CASCADE)),
            ]),
        ]
        after = [
            ModelState('app', 'Foo', [
                ('renamed_id', models.CharField(primary_key=True, serialize=False))
            ]),
            ModelState('app', 'Bar', [
                ('id', models.AutoField(primary_key=True)),
                ('foo', models.ForeignKey('app.Foo', models.CASCADE)),
            ]),
        ]
        changes = self.get_changes(before, after, MigrationQuestioner({'ask_rename': True}))
        self.assertNumberMigrations(changes, 'app', 1)
        self.assertOperationTypes(changes, 'app', 0, ['RenameField'])
        self.assertOperationAttributes(changes, 'app', 0, 0, old_name='id', new_name='renamed_id')

    def test_rename_field_preserved_db_column(self):
        """
        RenameField is used if a field is renamed and db_column equal to the
        old field's column is added.
""" before = [ ModelState('app', 'Foo', [ ('id', models.AutoField(primary_key=True)), ('field', models.IntegerField()), ]), ] after = [ ModelState('app', 'Foo', [ ('id', models.AutoField(primary_key=True)), ('renamed_field', models.IntegerField(db_column='field')), ]), ] changes = self.get_changes(before, after, MigrationQuestioner({'ask_rename': True})) self.assertNumberMigrations(changes, 'app', 1) self.assertOperationTypes(changes, 'app', 0, ['RenameField', 'AlterField']) self.assertOperationAttributes( changes, 'app', 0, 0, model_name='foo', old_name='field', new_name='renamed_field', ) self.assertOperationAttributes(changes, 'app', 0, 1, model_name='foo', name='renamed_field') self.assertEqual(changes['app'][0].operations[-1].field.deconstruct(), ( 'renamed_field', 'django.db.models.IntegerField', [], {'db_column': 'field'}, )) def test_rename_related_field_preserved_db_column(self): before = [ ModelState('app', 'Foo', [ ('id', models.AutoField(primary_key=True)), ]), ModelState('app', 'Bar', [ ('id', models.AutoField(primary_key=True)), ('foo', models.ForeignKey('app.Foo', models.CASCADE)), ]), ] after = [ ModelState('app', 'Foo', [ ('id', models.AutoField(primary_key=True)), ]), ModelState('app', 'Bar', [ ('id', models.AutoField(primary_key=True)), ('renamed_foo', models.ForeignKey('app.Foo', models.CASCADE, db_column='foo_id')), ]), ] changes = self.get_changes(before, after, MigrationQuestioner({'ask_rename': True})) self.assertNumberMigrations(changes, 'app', 1) self.assertOperationTypes(changes, 'app', 0, ['RenameField', 'AlterField']) self.assertOperationAttributes( changes, 'app', 0, 0, model_name='bar', old_name='foo', new_name='renamed_foo', ) self.assertOperationAttributes(changes, 'app', 0, 1, model_name='bar', name='renamed_foo') self.assertEqual(changes['app'][0].operations[-1].field.deconstruct(), ( 'renamed_foo', 'django.db.models.ForeignKey', [], {'to': 'app.foo', 'on_delete': models.CASCADE, 'db_column': 'foo_id'}, )) def test_rename_model(self): """Tests autodetection of renamed models.""" changes = self.get_changes( [self.author_with_book, self.book], [self.author_renamed_with_book, self.book_with_author_renamed], MigrationQuestioner({"ask_rename_model": True}), ) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["RenameModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, old_name="Author", new_name="Writer") # Now that RenameModel handles related fields too, there should be # no AlterField for the related field. self.assertNumberMigrations(changes, 'otherapp', 0) def test_rename_model_case(self): """ Model name is case-insensitive. Changing case doesn't lead to any autodetected operations. """ author_renamed = ModelState('testapp', 'author', [ ('id', models.AutoField(primary_key=True)), ]) changes = self.get_changes( [self.author_empty, self.book], [author_renamed, self.book], questioner=MigrationQuestioner({'ask_rename_model': True}), ) self.assertNumberMigrations(changes, 'testapp', 0) self.assertNumberMigrations(changes, 'otherapp', 0) def test_rename_m2m_through_model(self): """ Tests autodetection of renamed models that are used in M2M relations as through models. """ changes = self.get_changes( [self.author_with_m2m_through, self.publisher, self.contract], [self.author_with_renamed_m2m_through, self.publisher, self.contract_renamed], MigrationQuestioner({'ask_rename_model': True}) ) # Right number/type of migrations? 
    def test_rename_m2m_through_model(self):
        """
        Tests autodetection of renamed models that are used in M2M relations
        as through models.
        """
        changes = self.get_changes(
            [self.author_with_m2m_through, self.publisher, self.contract],
            [self.author_with_renamed_m2m_through, self.publisher, self.contract_renamed],
            MigrationQuestioner({'ask_rename_model': True})
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ['RenameModel'])
        self.assertOperationAttributes(changes, 'testapp', 0, 0, old_name='Contract', new_name='Deal')

    def test_rename_model_with_renamed_rel_field(self):
        """
        Tests autodetection of renamed models while simultaneously renaming one
        of the fields that relate to the renamed model.
        """
        changes = self.get_changes(
            [self.author_with_book, self.book],
            [self.author_renamed_with_book, self.book_with_field_and_author_renamed],
            MigrationQuestioner({"ask_rename": True, "ask_rename_model": True}),
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["RenameModel"])
        self.assertOperationAttributes(changes, 'testapp', 0, 0, old_name="Author", new_name="Writer")
        # Right number/type of migrations for related field rename?
        # Alter is already taken care of.
        self.assertNumberMigrations(changes, 'otherapp', 1)
        self.assertOperationTypes(changes, 'otherapp', 0, ["RenameField"])
        self.assertOperationAttributes(changes, 'otherapp', 0, 0, old_name="author", new_name="writer")

    def test_rename_model_with_fks_in_different_position(self):
        """
        #24537 - The order of fields in a model does not influence
        the RenameModel detection.
        """
        before = [
            ModelState("testapp", "EntityA", [
                ("id", models.AutoField(primary_key=True)),
            ]),
            ModelState("testapp", "EntityB", [
                ("id", models.AutoField(primary_key=True)),
                ("some_label", models.CharField(max_length=255)),
                ("entity_a", models.ForeignKey("testapp.EntityA", models.CASCADE)),
            ]),
        ]
        after = [
            ModelState("testapp", "EntityA", [
                ("id", models.AutoField(primary_key=True)),
            ]),
            ModelState("testapp", "RenamedEntityB", [
                ("id", models.AutoField(primary_key=True)),
                ("entity_a", models.ForeignKey("testapp.EntityA", models.CASCADE)),
                ("some_label", models.CharField(max_length=255)),
            ]),
        ]
        changes = self.get_changes(before, after, MigrationQuestioner({"ask_rename_model": True}))
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["RenameModel"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, old_name="EntityB", new_name="RenamedEntityB")

    def test_rename_model_reverse_relation_dependencies(self):
        """
        The migration to rename a model pointed to by a foreign key in another
        app must run after the other app's migration that adds the foreign key
        with the model's original name. Therefore, the renaming migration has
        a dependency on that other migration.
""" before = [ ModelState('testapp', 'EntityA', [ ('id', models.AutoField(primary_key=True)), ]), ModelState('otherapp', 'EntityB', [ ('id', models.AutoField(primary_key=True)), ('entity_a', models.ForeignKey('testapp.EntityA', models.CASCADE)), ]), ] after = [ ModelState('testapp', 'RenamedEntityA', [ ('id', models.AutoField(primary_key=True)), ]), ModelState('otherapp', 'EntityB', [ ('id', models.AutoField(primary_key=True)), ('entity_a', models.ForeignKey('testapp.RenamedEntityA', models.CASCADE)), ]), ] changes = self.get_changes(before, after, MigrationQuestioner({'ask_rename_model': True})) self.assertNumberMigrations(changes, 'testapp', 1) self.assertMigrationDependencies(changes, 'testapp', 0, [('otherapp', '__first__')]) self.assertOperationTypes(changes, 'testapp', 0, ['RenameModel']) self.assertOperationAttributes(changes, 'testapp', 0, 0, old_name='EntityA', new_name='RenamedEntityA') def test_fk_dependency(self): """Having a ForeignKey automatically adds a dependency.""" # Note that testapp (author) has no dependencies, # otherapp (book) depends on testapp (author), # thirdapp (edition) depends on otherapp (book) changes = self.get_changes([], [self.author_name, self.book, self.edition]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Author") self.assertMigrationDependencies(changes, 'testapp', 0, []) # Right number/type of migrations? self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, 'otherapp', 0, 0, name="Book") self.assertMigrationDependencies(changes, 'otherapp', 0, [("testapp", "auto_1")]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'thirdapp', 1) self.assertOperationTypes(changes, 'thirdapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, 'thirdapp', 0, 0, name="Edition") self.assertMigrationDependencies(changes, 'thirdapp', 0, [("otherapp", "auto_1")]) def test_proxy_fk_dependency(self): """FK dependencies still work on proxy models.""" # Note that testapp (author) has no dependencies, # otherapp (book) depends on testapp (authorproxy) changes = self.get_changes([], [self.author_empty, self.author_proxy_third, self.book_proxy_fk]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Author") self.assertMigrationDependencies(changes, 'testapp', 0, []) # Right number/type of migrations? self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, 'otherapp', 0, 0, name="Book") self.assertMigrationDependencies(changes, 'otherapp', 0, [("thirdapp", "auto_1")]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'thirdapp', 1) self.assertOperationTypes(changes, 'thirdapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, 'thirdapp', 0, 0, name="AuthorProxy") self.assertMigrationDependencies(changes, 'thirdapp', 0, [("testapp", "auto_1")]) def test_same_app_no_fk_dependency(self): """ A migration with a FK between two models of the same app does not have a dependency to itself. 
""" changes = self.get_changes([], [self.author_with_publisher, self.publisher]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "CreateModel"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="Publisher") self.assertOperationAttributes(changes, "testapp", 0, 1, name="Author") self.assertMigrationDependencies(changes, 'testapp', 0, []) def test_circular_fk_dependency(self): """ Having a circular ForeignKey dependency automatically resolves the situation into 2 migrations on one side and 1 on the other. """ changes = self.get_changes([], [self.author_with_book, self.book, self.publisher_with_book]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "CreateModel"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="Publisher") self.assertOperationAttributes(changes, "testapp", 0, 1, name="Author") self.assertMigrationDependencies(changes, 'testapp', 0, [("otherapp", "auto_1")]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'otherapp', 2) self.assertOperationTypes(changes, 'otherapp', 0, ["CreateModel"]) self.assertOperationTypes(changes, 'otherapp', 1, ["AddField"]) self.assertMigrationDependencies(changes, 'otherapp', 0, []) self.assertMigrationDependencies(changes, 'otherapp', 1, [("otherapp", "auto_1"), ("testapp", "auto_1")]) # both split migrations should be `initial` self.assertTrue(changes['otherapp'][0].initial) self.assertTrue(changes['otherapp'][1].initial) def test_same_app_circular_fk_dependency(self): """ A migration with a FK between two models of the same app does not have a dependency to itself. """ changes = self.get_changes([], [self.author_with_publisher, self.publisher_with_author]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "CreateModel", "AddField"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author") self.assertOperationAttributes(changes, "testapp", 0, 1, name="Publisher") self.assertOperationAttributes(changes, "testapp", 0, 2, name="publisher") self.assertMigrationDependencies(changes, 'testapp', 0, []) def test_same_app_circular_fk_dependency_with_unique_together_and_indexes(self): """ #22275 - A migration with circular FK dependency does not try to create unique together constraint and indexes before creating all required fields first. """ changes = self.get_changes([], [self.knight, self.rabbit]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'eggs', 1) self.assertOperationTypes( changes, 'eggs', 0, ["CreateModel", "CreateModel", "AddIndex", "AlterUniqueTogether"] ) self.assertNotIn("unique_together", changes['eggs'][0].operations[0].options) self.assertNotIn("unique_together", changes['eggs'][0].operations[1].options) self.assertMigrationDependencies(changes, 'eggs', 0, []) def test_alter_db_table_add(self): """Tests detection for adding db_table in model's options.""" changes = self.get_changes([self.author_empty], [self.author_with_db_table_options]) # Right number/type of migrations? 
    def test_alter_db_table_add(self):
        """Tests detection for adding db_table in model's options."""
        changes = self.get_changes([self.author_empty], [self.author_with_db_table_options])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["AlterModelTable"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="author", table="author_one")

    def test_alter_db_table_change(self):
        """Tests detection for changing db_table in model's options."""
        changes = self.get_changes([self.author_with_db_table_options], [self.author_with_new_db_table_options])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["AlterModelTable"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="author", table="author_two")

    def test_alter_db_table_remove(self):
        """Tests detection for removing db_table in model's options."""
        changes = self.get_changes([self.author_with_db_table_options], [self.author_empty])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["AlterModelTable"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="author", table=None)

    def test_alter_db_table_no_changes(self):
        """
        Alter_db_table doesn't generate a migration if no changes have been made.
        """
        changes = self.get_changes([self.author_with_db_table_options], [self.author_with_db_table_options])
        # Right number of migrations?
        self.assertEqual(len(changes), 0)

    def test_keep_db_table_with_model_change(self):
        """
        Tests when model changes but db_table stays as-is, autodetector must not
        create more than one operation.
        """
        changes = self.get_changes(
            [self.author_with_db_table_options],
            [self.author_renamed_with_db_table_options],
            MigrationQuestioner({"ask_rename_model": True}),
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["RenameModel"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, old_name="Author", new_name="NewAuthor")

    def test_alter_db_table_with_model_change(self):
        """
        Tests when model and db_table change, autodetector must create two
        operations.
        """
        changes = self.get_changes(
            [self.author_with_db_table_options],
            [self.author_renamed_with_new_db_table_options],
            MigrationQuestioner({"ask_rename_model": True}),
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["RenameModel", "AlterModelTable"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, old_name="Author", new_name="NewAuthor")
        self.assertOperationAttributes(changes, "testapp", 0, 1, name="newauthor", table="author_three")

    def test_identical_regex_doesnt_alter(self):
        from_state = ModelState(
            "testapp", "model", [("id", models.AutoField(primary_key=True, validators=[
                RegexValidator(
                    re.compile('^[-a-zA-Z0-9_]+\\Z'),
                    'Enter a valid “slug” consisting of letters, numbers, underscores or hyphens.',
                    'invalid'
                )
            ]))]
        )
        to_state = ModelState(
            "testapp", "model", [("id", models.AutoField(primary_key=True, validators=[validate_slug]))]
        )
        changes = self.get_changes([from_state], [to_state])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "testapp", 0)

    def test_different_regex_does_alter(self):
        from_state = ModelState(
            "testapp", "model", [("id", models.AutoField(primary_key=True, validators=[
                RegexValidator(
                    # A different pattern, compiled with flags=32 (re.UNICODE),
                    # so the validator deconstructs differently from
                    # validate_slug and triggers an AlterField.
                    re.compile('^[a-z]+\\Z', 32),
                    'Enter a valid “slug” consisting of letters, numbers, underscores or hyphens.',
                    'invalid'
                )
            ]))]
        )
        to_state = ModelState(
            "testapp", "model", [("id", models.AutoField(primary_key=True, validators=[validate_slug]))]
        )
        changes = self.get_changes([from_state], [to_state])
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])

    def test_empty_foo_together(self):
        """
        #23452 - Empty unique/index_together shouldn't generate a migration.
        """
        # Explicitly testing for not specified, since this is the case after
        # a CreateModel operation w/o any definition on the original model
        model_state_not_specified = ModelState("a", "model", [("id", models.AutoField(primary_key=True))])
        # Explicitly testing for None, since this was the issue in #23452 after
        # an AlterFooTogether operation with e.g. () as value
        model_state_none = ModelState("a", "model", [
            ("id", models.AutoField(primary_key=True))
        ], {
            "index_together": None,
            "unique_together": None,
        })
        # Explicitly testing for the empty set, since we now always have sets.
        # During removal (('col1', 'col2'),) --> () this becomes set([])
        model_state_empty = ModelState("a", "model", [
            ("id", models.AutoField(primary_key=True))
        ], {
            "index_together": set(),
            "unique_together": set(),
        })

        def test(from_state, to_state, msg):
            changes = self.get_changes([from_state], [to_state])
            if changes:
                ops = ', '.join(o.__class__.__name__ for o in changes['a'][0].operations)
                self.fail('Created operation(s) %s from %s' % (ops, msg))

        tests = (
            (model_state_not_specified, model_state_not_specified, '"not specified" to "not specified"'),
            (model_state_not_specified, model_state_none, '"not specified" to "None"'),
            (model_state_not_specified, model_state_empty, '"not specified" to "empty"'),
            (model_state_none, model_state_not_specified, '"None" to "not specified"'),
            (model_state_none, model_state_none, '"None" to "None"'),
            (model_state_none, model_state_empty, '"None" to "empty"'),
            (model_state_empty, model_state_not_specified, '"empty" to "not specified"'),
            (model_state_empty, model_state_none, '"empty" to "None"'),
            (model_state_empty, model_state_empty, '"empty" to "empty"'),
        )
        for t in tests:
            test(*t)
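    # Added note: the regex tests above rely on the autodetector comparing
    # deconstructed output rather than field (or validator) instances. A field
    # counts as changed only when its deconstructed path/args/kwargs differ,
    # e.g. (illustrative values):
    #
    #   models.CharField(max_length=100).deconstruct()
    #   # -> (None, 'django.db.models.CharField', [], {'max_length': 100})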
    def test_create_model_with_indexes(self):
        """Test creation of new model with indexes already defined."""
        author = ModelState('otherapp', 'Author', [
            ('id', models.AutoField(primary_key=True)),
            ('name', models.CharField(max_length=200)),
        ], {'indexes': [models.Index(fields=['name'], name='create_model_with_indexes_idx')]})
        changes = self.get_changes([], [author])
        added_index = models.Index(fields=['name'], name='create_model_with_indexes_idx')
        # Right number of migrations?
        self.assertEqual(len(changes['otherapp']), 1)
        # Right number of actions?
        migration = changes['otherapp'][0]
        self.assertEqual(len(migration.operations), 2)
        # Right actions order?
        self.assertOperationTypes(changes, 'otherapp', 0, ['CreateModel', 'AddIndex'])
        self.assertOperationAttributes(changes, 'otherapp', 0, 0, name='Author')
        self.assertOperationAttributes(changes, 'otherapp', 0, 1, model_name='author', index=added_index)

    def test_add_indexes(self):
        """Test change detection of new indexes."""
        changes = self.get_changes([self.author_empty, self.book], [self.author_empty, self.book_indexes])
        self.assertNumberMigrations(changes, 'otherapp', 1)
        self.assertOperationTypes(changes, 'otherapp', 0, ['AddIndex'])
        added_index = models.Index(fields=['author', 'title'], name='book_title_author_idx')
        self.assertOperationAttributes(changes, 'otherapp', 0, 0, model_name='book', index=added_index)

    def test_remove_indexes(self):
        """Test change detection of removed indexes."""
        changes = self.get_changes([self.author_empty, self.book_indexes], [self.author_empty, self.book])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'otherapp', 1)
        self.assertOperationTypes(changes, 'otherapp', 0, ['RemoveIndex'])
        self.assertOperationAttributes(changes, 'otherapp', 0, 0, model_name='book', name='book_title_author_idx')

    def test_order_fields_indexes(self):
        """Test change detection of reordering of fields in indexes."""
        changes = self.get_changes(
            [self.author_empty, self.book_indexes], [self.author_empty, self.book_unordered_indexes]
        )
        self.assertNumberMigrations(changes, 'otherapp', 1)
        self.assertOperationTypes(changes, 'otherapp', 0, ['RemoveIndex', 'AddIndex'])
        self.assertOperationAttributes(changes, 'otherapp', 0, 0, model_name='book', name='book_title_author_idx')
        added_index = models.Index(fields=['title', 'author'], name='book_author_title_idx')
        self.assertOperationAttributes(changes, 'otherapp', 0, 1, model_name='book', index=added_index)

    def test_create_model_with_check_constraint(self):
        """Test creation of new model with constraints already defined."""
        author = ModelState('otherapp', 'Author', [
            ('id', models.AutoField(primary_key=True)),
            ('name', models.CharField(max_length=200)),
        ], {'constraints': [models.CheckConstraint(check=models.Q(name__contains='Bob'), name='name_contains_bob')]})
        changes = self.get_changes([], [author])
        added_constraint = models.CheckConstraint(check=models.Q(name__contains='Bob'), name='name_contains_bob')
        # Right number of migrations?
        self.assertEqual(len(changes['otherapp']), 1)
        # Right number of actions?
        migration = changes['otherapp'][0]
        self.assertEqual(len(migration.operations), 2)
        # Right actions order?
        self.assertOperationTypes(changes, 'otherapp', 0, ['CreateModel', 'AddConstraint'])
        self.assertOperationAttributes(changes, 'otherapp', 0, 0, name='Author')
        self.assertOperationAttributes(changes, 'otherapp', 0, 1, model_name='author', constraint=added_constraint)

    def test_add_constraints(self):
        """Test change detection of new constraints."""
        changes = self.get_changes([self.author_name], [self.author_name_check_constraint])
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ['AddConstraint'])
        added_constraint = models.CheckConstraint(check=models.Q(name__contains='Bob'), name='name_contains_bob')
        self.assertOperationAttributes(changes, 'testapp', 0, 0, model_name='author', constraint=added_constraint)

    def test_remove_constraints(self):
        """Test change detection of removed constraints."""
        changes = self.get_changes([self.author_name_check_constraint], [self.author_name])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ['RemoveConstraint'])
        self.assertOperationAttributes(changes, 'testapp', 0, 0, model_name='author', name='name_contains_bob')

    def test_add_foo_together(self):
        """Tests index/unique_together detection."""
        changes = self.get_changes([self.author_empty, self.book], [self.author_empty, self.book_foo_together])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "otherapp", 1)
        self.assertOperationTypes(changes, "otherapp", 0, ["AlterUniqueTogether", "AlterIndexTogether"])
        self.assertOperationAttributes(changes, "otherapp", 0, 0, name="book", unique_together={("author", "title")})
        self.assertOperationAttributes(changes, "otherapp", 0, 1, name="book", index_together={("author", "title")})

    def test_remove_foo_together(self):
        """Tests index/unique_together detection."""
        changes = self.get_changes([self.author_empty, self.book_foo_together], [self.author_empty, self.book])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "otherapp", 1)
        self.assertOperationTypes(changes, "otherapp", 0, ["AlterUniqueTogether", "AlterIndexTogether"])
        self.assertOperationAttributes(changes, "otherapp", 0, 0, name="book", unique_together=set())
        self.assertOperationAttributes(changes, "otherapp", 0, 1, name="book", index_together=set())

    def test_foo_together_remove_fk(self):
        """Tests unique_together and field removal detection & ordering"""
        changes = self.get_changes(
            [self.author_empty, self.book_foo_together], [self.author_empty, self.book_with_no_author]
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "otherapp", 1)
        self.assertOperationTypes(changes, "otherapp", 0, [
            "AlterUniqueTogether", "AlterIndexTogether", "RemoveField"
        ])
        self.assertOperationAttributes(changes, "otherapp", 0, 0, name="book", unique_together=set())
        self.assertOperationAttributes(changes, "otherapp", 0, 1, name="book", index_together=set())
        self.assertOperationAttributes(changes, "otherapp", 0, 2, model_name="book", name="author")

    def test_foo_together_no_changes(self):
        """
        index/unique_together doesn't generate a migration if no
        changes have been made.
        """
        changes = self.get_changes(
            [self.author_empty, self.book_foo_together], [self.author_empty, self.book_foo_together]
        )
        # Right number of migrations?
        self.assertEqual(len(changes), 0)

    def test_foo_together_ordering(self):
        """
        index/unique_together also triggers on ordering changes.
        """
        changes = self.get_changes(
            [self.author_empty, self.book_foo_together], [self.author_empty, self.book_foo_together_2]
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "otherapp", 1)
        self.assertOperationTypes(changes, "otherapp", 0, ["AlterUniqueTogether", "AlterIndexTogether"])
        self.assertOperationAttributes(changes, "otherapp", 0, 0, name="book", unique_together={("title", "author")})
        self.assertOperationAttributes(changes, "otherapp", 0, 1, name="book", index_together={("title", "author")})

    def test_add_field_and_foo_together(self):
        """
        Added fields will be created before using them in index/unique_together.
        """
        changes = self.get_changes([self.author_empty, self.book], [self.author_empty, self.book_foo_together_3])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "otherapp", 1)
        self.assertOperationTypes(changes, "otherapp", 0, ["AddField", "AlterUniqueTogether", "AlterIndexTogether"])
        self.assertOperationAttributes(changes, "otherapp", 0, 1, name="book", unique_together={("title", "newfield")})
        self.assertOperationAttributes(changes, "otherapp", 0, 2, name="book", index_together={("title", "newfield")})

    def test_create_model_and_unique_together(self):
        author = ModelState("otherapp", "Author", [
            ("id", models.AutoField(primary_key=True)),
            ("name", models.CharField(max_length=200)),
        ])
        book_with_author = ModelState("otherapp", "Book", [
            ("id", models.AutoField(primary_key=True)),
            ("author", models.ForeignKey("otherapp.Author", models.CASCADE)),
            ("title", models.CharField(max_length=200)),
        ], {
            "index_together": {("title", "author")},
            "unique_together": {("title", "author")},
        })
        changes = self.get_changes([self.book_with_no_author], [author, book_with_author])
        # Right number of migrations?
        self.assertEqual(len(changes['otherapp']), 1)
        # Right number of actions?
        migration = changes['otherapp'][0]
        self.assertEqual(len(migration.operations), 4)
        # Right actions order?
        self.assertOperationTypes(
            changes, 'otherapp', 0, ['CreateModel', 'AddField', 'AlterUniqueTogether', 'AlterIndexTogether']
        )

    def test_remove_field_and_foo_together(self):
        """
        Removed fields will be removed after updating index/unique_together.
        """
        changes = self.get_changes(
            [self.author_empty, self.book_foo_together_3], [self.author_empty, self.book_foo_together]
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "otherapp", 1)
        self.assertOperationTypes(changes, "otherapp", 0, ["AlterUniqueTogether", "AlterIndexTogether", "RemoveField"])
        self.assertOperationAttributes(changes, "otherapp", 0, 0, name="book", unique_together={("author", "title")})
        self.assertOperationAttributes(changes, "otherapp", 0, 1, name="book", index_together={("author", "title")})
        self.assertOperationAttributes(changes, "otherapp", 0, 2, model_name="book", name="newfield")

    def test_rename_field_and_foo_together(self):
        """
        Fields are renamed before updating index/unique_together.
        """
        changes = self.get_changes(
            [self.author_empty, self.book_foo_together_3],
            [self.author_empty, self.book_foo_together_4],
            MigrationQuestioner({"ask_rename": True}),
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "otherapp", 1)
        self.assertOperationTypes(changes, "otherapp", 0, ["RenameField", "AlterUniqueTogether", "AlterIndexTogether"])
        self.assertOperationAttributes(changes, "otherapp", 0, 1, name="book", unique_together={
            ("title", "newfield2")
        })
        self.assertOperationAttributes(changes, "otherapp", 0, 2, name="book", index_together={("title", "newfield2")})
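    # Added note on the ordering contract exercised above: operations that
    # reference a field are sequenced around it -- AddField and RenameField
    # come before AlterUniqueTogether/AlterIndexTogether, while RemoveField
    # comes after -- so the together options never point at a field that does
    # not exist yet (or no longer exists).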
    def test_proxy(self):
        """The autodetector correctly deals with proxy models."""
        # First, we test adding a proxy model
        changes = self.get_changes([self.author_empty], [self.author_empty, self.author_proxy])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
        self.assertOperationAttributes(
            changes, "testapp", 0, 0, name="AuthorProxy",
            options={"proxy": True, "indexes": [], "constraints": []}
        )
        # Now, we test turning a proxy model into a non-proxy model
        # It should delete the proxy then make the real one
        changes = self.get_changes(
            [self.author_empty, self.author_proxy], [self.author_empty, self.author_proxy_notproxy]
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["DeleteModel", "CreateModel"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="AuthorProxy")
        self.assertOperationAttributes(changes, "testapp", 0, 1, name="AuthorProxy", options={})

    def test_proxy_custom_pk(self):
        """
        #23415 - The autodetector must correctly deal with custom FK on proxy
        models.
        """
        # First, we test the default pk field name
        changes = self.get_changes([], [self.author_empty, self.author_proxy_third, self.book_proxy_fk])
        # The model the FK is pointing from and to.
        self.assertEqual(
            changes['otherapp'][0].operations[0].fields[2][1].remote_field.model,
            'thirdapp.AuthorProxy',
        )
        # Now, we test the custom pk field name
        changes = self.get_changes([], [self.author_custom_pk, self.author_proxy_third, self.book_proxy_fk])
        # The model the FK is pointing from and to.
        self.assertEqual(
            changes['otherapp'][0].operations[0].fields[2][1].remote_field.model,
            'thirdapp.AuthorProxy',
        )

    def test_proxy_to_mti_with_fk_to_proxy(self):
        # First, test the pk table and field name.
        to_state = self.make_project_state(
            [self.author_empty, self.author_proxy_third, self.book_proxy_fk],
        )
        changes = self.get_changes([], to_state)
        fk_field = changes['otherapp'][0].operations[0].fields[2][1]
        self.assertEqual(
            to_state.get_concrete_model_key(fk_field.remote_field.model),
            ('testapp', 'author'),
        )
        self.assertEqual(fk_field.remote_field.model, 'thirdapp.AuthorProxy')

        # Change AuthorProxy to use MTI.
        from_state = to_state.clone()
        to_state = self.make_project_state(
            [self.author_empty, self.author_proxy_third_notproxy, self.book_proxy_fk],
        )
        changes = self.get_changes(from_state, to_state)
        # Right number/type of migrations for the AuthorProxy model?
        self.assertNumberMigrations(changes, 'thirdapp', 1)
        self.assertOperationTypes(changes, 'thirdapp', 0, ['DeleteModel', 'CreateModel'])
        # Right number/type of migrations for the Book model with a FK to
        # AuthorProxy?
        self.assertNumberMigrations(changes, 'otherapp', 1)
        self.assertOperationTypes(changes, 'otherapp', 0, ['AlterField'])
        # otherapp should depend on thirdapp.
        self.assertMigrationDependencies(changes, 'otherapp', 0, [('thirdapp', 'auto_1')])
        # Now, test the pk table and field name.
        fk_field = changes['otherapp'][0].operations[0].field
        self.assertEqual(
            to_state.get_concrete_model_key(fk_field.remote_field.model),
            ('thirdapp', 'authorproxy'),
        )
        self.assertEqual(fk_field.remote_field.model, 'thirdapp.AuthorProxy')

    def test_proxy_to_mti_with_fk_to_proxy_proxy(self):
        # First, test the pk table and field name.
        to_state = self.make_project_state([
            self.author_empty,
            self.author_proxy,
            self.author_proxy_proxy,
            self.book_proxy_proxy_fk,
        ])
        changes = self.get_changes([], to_state)
        fk_field = changes['otherapp'][0].operations[0].fields[1][1]
        self.assertEqual(
            to_state.get_concrete_model_key(fk_field.remote_field.model),
            ('testapp', 'author'),
        )
        self.assertEqual(fk_field.remote_field.model, 'testapp.AAuthorProxyProxy')

        # Change AuthorProxy to use MTI. FK still points to AAuthorProxyProxy,
        # a proxy of AuthorProxy.
        from_state = to_state.clone()
        to_state = self.make_project_state([
            self.author_empty,
            self.author_proxy_notproxy,
            self.author_proxy_proxy,
            self.book_proxy_proxy_fk,
        ])
        changes = self.get_changes(from_state, to_state)
        # Right number/type of migrations for the AuthorProxy model?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ['DeleteModel', 'CreateModel'])
        # Right number/type of migrations for the Book model with a FK to
        # AAuthorProxyProxy?
        self.assertNumberMigrations(changes, 'otherapp', 1)
        self.assertOperationTypes(changes, 'otherapp', 0, ['AlterField'])
        # otherapp should depend on testapp.
        self.assertMigrationDependencies(changes, 'otherapp', 0, [('testapp', 'auto_1')])
        # Now, test the pk table and field name.
        fk_field = changes['otherapp'][0].operations[0].field
        self.assertEqual(
            to_state.get_concrete_model_key(fk_field.remote_field.model),
            ('testapp', 'authorproxy'),
        )
        self.assertEqual(fk_field.remote_field.model, 'testapp.AAuthorProxyProxy')

    def test_unmanaged_create(self):
        """The autodetector correctly deals with unmanaged models."""
        # First, we test adding an unmanaged model
        changes = self.get_changes([self.author_empty], [self.author_empty, self.author_unmanaged])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel"])
        self.assertOperationAttributes(changes, 'testapp', 0, 0, name="AuthorUnmanaged", options={"managed": False})

    def test_unmanaged_delete(self):
        changes = self.get_changes([self.author_empty, self.author_unmanaged], [self.author_empty])
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ['DeleteModel'])

    def test_unmanaged_to_managed(self):
        # Now, we test turning an unmanaged model into a managed model
        changes = self.get_changes(
            [self.author_empty, self.author_unmanaged], [self.author_empty, self.author_unmanaged_managed]
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, 'testapp', 0, ["AlterModelOptions"])
        self.assertOperationAttributes(changes, 'testapp', 0, 0, name="authorunmanaged", options={})

    def test_managed_to_unmanaged(self):
        # Now, we turn managed to unmanaged.
        changes = self.get_changes(
            [self.author_empty, self.author_unmanaged_managed], [self.author_empty, self.author_unmanaged]
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, 'testapp', 1)
        self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="authorunmanaged", options={"managed": False})

    def test_unmanaged_custom_pk(self):
        """
        #23415 - The autodetector must correctly deal with custom FK on
        unmanaged models.
        """
        # First, we test the default pk field name
        changes = self.get_changes([], [self.author_unmanaged_default_pk, self.book])
        # The model the FK on the book model points to.
        fk_field = changes['otherapp'][0].operations[0].fields[2][1]
        self.assertEqual(fk_field.remote_field.model, 'testapp.Author')
        # Now, we test the custom pk field name
        changes = self.get_changes([], [self.author_unmanaged_custom_pk, self.book])
        # The model the FK on the book model points to.
        fk_field = changes['otherapp'][0].operations[0].fields[2][1]
        self.assertEqual(fk_field.remote_field.model, 'testapp.Author')
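    # Added note for the swappable tests below: an FK declared against
    # settings.AUTH_USER_MODEL is serialized with the special dependency
    # ("__setting__", "AUTH_USER_MODEL"), so the generated migration follows
    # the setting instead of hard-coding whichever user model is installed.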
self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Author") self.assertMigrationDependencies(changes, 'testapp', 0, [("__setting__", "AUTH_USER_MODEL")]) def test_swappable_changed(self): with isolate_lru_cache(apps.get_swappable_settings_name): before = self.make_project_state([self.custom_user, self.author_with_user]) with override_settings(AUTH_USER_MODEL="thirdapp.CustomUser"): after = self.make_project_state([self.custom_user, self.author_with_custom_user]) autodetector = MigrationAutodetector(before, after) changes = autodetector._detect_changes() # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, model_name="author", name='user') fk_field = changes['testapp'][0].operations[0].field self.assertEqual(fk_field.remote_field.model, 'thirdapp.CustomUser') def test_add_field_with_default(self): """#22030 - Adding a field with a default should work.""" changes = self.get_changes([self.author_empty], [self.author_name_default]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="name") def test_custom_deconstructible(self): """ Two instances which deconstruct to the same value aren't considered a change. """ changes = self.get_changes([self.author_name_deconstructible_1], [self.author_name_deconstructible_2]) # Right number of migrations? self.assertEqual(len(changes), 0) def test_deconstruct_field_kwarg(self): """Field instances are handled correctly by nested deconstruction.""" changes = self.get_changes([self.author_name_deconstructible_3], [self.author_name_deconstructible_4]) self.assertEqual(changes, {}) def test_deconstructible_list(self): """Nested deconstruction descends into lists.""" # When lists contain items that deconstruct to identical values, those lists # should be considered equal for the purpose of detecting state changes # (even if the original items are unequal). changes = self.get_changes( [self.author_name_deconstructible_list_1], [self.author_name_deconstructible_list_2] ) self.assertEqual(changes, {}) # Legitimate differences within the deconstructed lists should be reported # as a change changes = self.get_changes( [self.author_name_deconstructible_list_1], [self.author_name_deconstructible_list_3] ) self.assertEqual(len(changes), 1) def test_deconstructible_tuple(self): """Nested deconstruction descends into tuples.""" # When tuples contain items that deconstruct to identical values, those tuples # should be considered equal for the purpose of detecting state changes # (even if the original items are unequal). 
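        # For example (hypothetical class, for illustration only): two distinct
        # RangeValidator instances that both deconstruct to
        # ('myapp.validators.RangeValidator', (1, 10), {}) would be treated as
        # equal here, even though the instances themselves compare unequal.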
changes = self.get_changes( [self.author_name_deconstructible_tuple_1], [self.author_name_deconstructible_tuple_2] ) self.assertEqual(changes, {}) # Legitimate differences within the deconstructed tuples should be reported # as a change changes = self.get_changes( [self.author_name_deconstructible_tuple_1], [self.author_name_deconstructible_tuple_3] ) self.assertEqual(len(changes), 1) def test_deconstructible_dict(self): """Nested deconstruction descends into dict values.""" # When dicts contain items whose values deconstruct to identical values, # those dicts should be considered equal for the purpose of detecting # state changes (even if the original values are unequal). changes = self.get_changes( [self.author_name_deconstructible_dict_1], [self.author_name_deconstructible_dict_2] ) self.assertEqual(changes, {}) # Legitimate differences within the deconstructed dicts should be reported # as a change changes = self.get_changes( [self.author_name_deconstructible_dict_1], [self.author_name_deconstructible_dict_3] ) self.assertEqual(len(changes), 1) def test_nested_deconstructible_objects(self): """ Nested deconstruction is applied recursively to the args/kwargs of deconstructed objects. """ # If the items within a deconstructed object's args/kwargs have the same # deconstructed values - whether or not the items themselves are different # instances - then the object as a whole is regarded as unchanged. changes = self.get_changes( [self.author_name_nested_deconstructible_1], [self.author_name_nested_deconstructible_2] ) self.assertEqual(changes, {}) # Differences that exist solely within the args list of a deconstructed object # should be reported as changes changes = self.get_changes( [self.author_name_nested_deconstructible_1], [self.author_name_nested_deconstructible_changed_arg] ) self.assertEqual(len(changes), 1) # Additional args should also be reported as a change changes = self.get_changes( [self.author_name_nested_deconstructible_1], [self.author_name_nested_deconstructible_extra_arg] ) self.assertEqual(len(changes), 1) # Differences that exist solely within the kwargs dict of a deconstructed object # should be reported as changes changes = self.get_changes( [self.author_name_nested_deconstructible_1], [self.author_name_nested_deconstructible_changed_kwarg] ) self.assertEqual(len(changes), 1) # Additional kwargs should also be reported as a change changes = self.get_changes( [self.author_name_nested_deconstructible_1], [self.author_name_nested_deconstructible_extra_kwarg] ) self.assertEqual(len(changes), 1) def test_deconstruct_type(self): """ #22951 -- Uninstantiated classes with deconstruct are correctly returned by deep_deconstruct during serialization. """ author = ModelState( "testapp", "Author", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField( max_length=200, # IntegerField intentionally not instantiated. default=models.IntegerField, )) ], ) changes = self.get_changes([], [author]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel"]) def test_replace_string_with_foreignkey(self): """ #22300 - Adding an FK in the same "spot" as a deleted CharField should work. """ changes = self.get_changes([self.author_with_publisher_string], [self.author_with_publisher, self.publisher]) # Right number/type of migrations? 
self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "RemoveField", "AddField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Publisher") self.assertOperationAttributes(changes, 'testapp', 0, 1, name="publisher_name") self.assertOperationAttributes(changes, 'testapp', 0, 2, name="publisher") def test_foreign_key_removed_before_target_model(self): """ Removing an FK and the model it targets in the same change must remove the FK field before the model to maintain consistency. """ changes = self.get_changes( [self.author_with_publisher, self.publisher], [self.author_name] ) # removes both the model and FK # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["RemoveField", "DeleteModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="publisher") self.assertOperationAttributes(changes, 'testapp', 0, 1, name="Publisher") @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition', side_effect=AssertionError("Should not have prompted for not null addition")) def test_add_many_to_many(self, mocked_ask_method): """#22435 - Adding a ManyToManyField should not prompt for a default.""" changes = self.get_changes([self.author_empty, self.publisher], [self.author_with_m2m, self.publisher]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="publishers") def test_alter_many_to_many(self): changes = self.get_changes( [self.author_with_m2m, self.publisher], [self.author_with_m2m_blank, self.publisher] ) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="publishers") def test_create_with_through_model(self): """ Adding a m2m with a through model and the models that use it should be ordered correctly. """ changes = self.get_changes([], [self.author_with_m2m_through, self.publisher, self.contract]) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, [ 'CreateModel', 'CreateModel', 'CreateModel', 'AddField', ]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name='Author') self.assertOperationAttributes(changes, 'testapp', 0, 1, name='Publisher') self.assertOperationAttributes(changes, 'testapp', 0, 2, name='Contract') self.assertOperationAttributes(changes, 'testapp', 0, 3, model_name='author', name='publishers') def test_many_to_many_removed_before_through_model(self): """ Removing a ManyToManyField and the "through" model in the same change must remove the field before the model to maintain consistency. """ changes = self.get_changes( [self.book_with_multiple_authors_through_attribution, self.author_name, self.attribution], [self.book_with_no_author, self.author_name], ) # Remove both the through model and ManyToMany # Right number/type of migrations? 
self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, 'otherapp', 0, ['RemoveField', 'DeleteModel']) self.assertOperationAttributes(changes, 'otherapp', 0, 0, name='authors', model_name='book') self.assertOperationAttributes(changes, 'otherapp', 0, 1, name='Attribution') def test_many_to_many_removed_before_through_model_2(self): """ Removing a model that contains a ManyToManyField and the "through" model in the same change must remove the field before the model to maintain consistency. """ changes = self.get_changes( [self.book_with_multiple_authors_through_attribution, self.author_name, self.attribution], [self.author_name], ) # Remove both the through model and ManyToMany # Right number/type of migrations? self.assertNumberMigrations(changes, "otherapp", 1) self.assertOperationTypes(changes, 'otherapp', 0, ['RemoveField', 'DeleteModel', 'DeleteModel']) self.assertOperationAttributes(changes, 'otherapp', 0, 0, name='authors', model_name='book') self.assertOperationAttributes(changes, 'otherapp', 0, 1, name='Attribution') self.assertOperationAttributes(changes, 'otherapp', 0, 2, name='Book') def test_m2m_w_through_multistep_remove(self): """ A model with a m2m field that specifies a "through" model cannot be removed in the same migration as that through model as the schema will pass through an inconsistent state. The autodetector should produce two migrations to avoid this issue. """ changes = self.get_changes([self.author_with_m2m_through, self.publisher, self.contract], [self.publisher]) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, [ "RemoveField", "RemoveField", "DeleteModel", "DeleteModel" ]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="author", model_name='contract') self.assertOperationAttributes(changes, "testapp", 0, 1, name="publisher", model_name='contract') self.assertOperationAttributes(changes, "testapp", 0, 2, name="Author") self.assertOperationAttributes(changes, "testapp", 0, 3, name="Contract") def test_concrete_field_changed_to_many_to_many(self): """ #23938 - Changing a concrete field into a ManyToManyField first removes the concrete field and then adds the m2m field. """ changes = self.get_changes([self.author_with_former_m2m], [self.author_with_m2m, self.publisher]) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["CreateModel", "RemoveField", "AddField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name='Publisher') self.assertOperationAttributes(changes, 'testapp', 0, 1, name="publishers", model_name='author') self.assertOperationAttributes(changes, 'testapp', 0, 2, name="publishers", model_name='author') def test_many_to_many_changed_to_concrete_field(self): """ #23938 - Changing a ManyToManyField into a concrete field first removes the m2m field and then adds the concrete field. """ changes = self.get_changes([self.author_with_m2m, self.publisher], [self.author_with_former_m2m]) # Right number/type of migrations? 
self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["RemoveField", "AddField", "DeleteModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="publishers", model_name='author') self.assertOperationAttributes(changes, 'testapp', 0, 1, name="publishers", model_name='author') self.assertOperationAttributes(changes, 'testapp', 0, 2, name='Publisher') self.assertOperationFieldAttributes(changes, 'testapp', 0, 1, max_length=100) def test_non_circular_foreignkey_dependency_removal(self): """ If two models with a ForeignKey from one to the other are removed at the same time, the autodetector should remove them in the correct order. """ changes = self.get_changes([self.author_with_publisher, self.publisher_with_author], []) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["RemoveField", "DeleteModel", "DeleteModel"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="author", model_name='publisher') self.assertOperationAttributes(changes, "testapp", 0, 1, name="Author") self.assertOperationAttributes(changes, "testapp", 0, 2, name="Publisher") def test_alter_model_options(self): """Changing a model's options should make a change.""" changes = self.get_changes([self.author_empty], [self.author_with_options]) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"]) self.assertOperationAttributes(changes, "testapp", 0, 0, options={ "permissions": [('can_hire', 'Can hire')], "verbose_name": "Authi", }) # Changing them back to empty should also make a change changes = self.get_changes([self.author_with_options], [self.author_empty]) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="author", options={}) def test_alter_model_options_proxy(self): """Changing a proxy model's options should also make a change.""" changes = self.get_changes( [self.author_proxy, self.author_empty], [self.author_proxy_options, self.author_empty] ) # Right number/type of migrations? self.assertNumberMigrations(changes, "testapp", 1) self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"]) self.assertOperationAttributes(changes, "testapp", 0, 0, name="authorproxy", options={ "verbose_name": "Super Author" }) def test_set_alter_order_with_respect_to(self): """Setting order_with_respect_to adds a field.""" changes = self.get_changes([self.book, self.author_with_book], [self.book, self.author_with_book_order_wrt]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterOrderWithRespectTo"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="author", order_with_respect_to="book") def test_add_alter_order_with_respect_to(self): """ Setting order_with_respect_to when adding the FK too does things in the right order. """ changes = self.get_changes([self.author_name], [self.book, self.author_with_book_order_wrt]) # Right number/type of migrations? 
self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField", "AlterOrderWithRespectTo"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, model_name="author", name="book") self.assertOperationAttributes(changes, 'testapp', 0, 1, name="author", order_with_respect_to="book") def test_remove_alter_order_with_respect_to(self): """ Removing order_with_respect_to when removing the FK too does things in the right order. """ changes = self.get_changes([self.book, self.author_with_book_order_wrt], [self.author_name]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AlterOrderWithRespectTo", "RemoveField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="author", order_with_respect_to=None) self.assertOperationAttributes(changes, 'testapp', 0, 1, model_name="author", name="book") def test_add_model_order_with_respect_to(self): """ Setting order_with_respect_to when adding the whole model does things in the right order. """ changes = self.get_changes([], [self.book, self.author_with_book_order_wrt]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel"]) self.assertOperationAttributes( changes, 'testapp', 0, 0, name="Author", options={'order_with_respect_to': 'book'} ) self.assertNotIn("_order", [name for name, field in changes['testapp'][0].operations[0].fields]) def test_add_model_order_with_respect_to_index_foo_together(self): changes = self.get_changes([], [ self.book, ModelState('testapp', 'Author', [ ('id', models.AutoField(primary_key=True)), ('name', models.CharField(max_length=200)), ('book', models.ForeignKey('otherapp.Book', models.CASCADE)), ], options={ 'order_with_respect_to': 'book', 'index_together': {('name', '_order')}, 'unique_together': {('id', '_order')}, }), ]) self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ['CreateModel']) self.assertOperationAttributes( changes, 'testapp', 0, 0, name='Author', options={ 'order_with_respect_to': 'book', 'index_together': {('name', '_order')}, 'unique_together': {('id', '_order')}, }, ) def test_add_model_order_with_respect_to_index_constraint(self): tests = [ ( 'AddIndex', {'indexes': [ models.Index(fields=['_order'], name='book_order_idx'), ]}, ), ( 'AddConstraint', {'constraints': [ models.CheckConstraint( check=models.Q(_order__gt=1), name='book_order_gt_1', ), ]}, ), ] for operation, extra_option in tests: with self.subTest(operation=operation): after = ModelState('testapp', 'Author', [ ('id', models.AutoField(primary_key=True)), ('name', models.CharField(max_length=200)), ('book', models.ForeignKey('otherapp.Book', models.CASCADE)), ], options={ 'order_with_respect_to': 'book', **extra_option, }) changes = self.get_changes([], [self.book, after]) self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, [ 'CreateModel', operation, ]) self.assertOperationAttributes( changes, 'testapp', 0, 0, name='Author', options={'order_with_respect_to': 'book'}, ) def test_set_alter_order_with_respect_to_index_constraint_foo_together(self): tests = [ ( 'AddIndex', {'indexes': [ models.Index(fields=['_order'], name='book_order_idx'), ]}, ), ( 'AddConstraint', {'constraints': [ models.CheckConstraint( check=models.Q(_order__gt=1), name='book_order_gt_1', ), ]}, ), ('AlterIndexTogether', {'index_together': 
{('name', '_order')}}), ('AlterUniqueTogether', {'unique_together': {('id', '_order')}}), ] for operation, extra_option in tests: with self.subTest(operation=operation): after = ModelState('testapp', 'Author', [ ('id', models.AutoField(primary_key=True)), ('name', models.CharField(max_length=200)), ('book', models.ForeignKey('otherapp.Book', models.CASCADE)), ], options={ 'order_with_respect_to': 'book', **extra_option, }) changes = self.get_changes( [self.book, self.author_with_book], [self.book, after], ) self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, [ 'AlterOrderWithRespectTo', operation, ]) def test_alter_model_managers(self): """ Changing the model managers adds a new operation. """ changes = self.get_changes([self.other_pony], [self.other_pony_food]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ["AlterModelManagers"]) self.assertOperationAttributes(changes, 'otherapp', 0, 0, name="pony") self.assertEqual([name for name, mgr in changes['otherapp'][0].operations[0].managers], ['food_qs', 'food_mgr', 'food_mgr_kwargs']) self.assertEqual(changes['otherapp'][0].operations[0].managers[1][1].args, ('a', 'b', 1, 2)) self.assertEqual(changes['otherapp'][0].operations[0].managers[2][1].args, ('x', 'y', 3, 4)) def test_swappable_first_inheritance(self): """Swappable models get their CreateModel first.""" changes = self.get_changes([], [self.custom_user, self.aardvark]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'thirdapp', 1) self.assertOperationTypes(changes, 'thirdapp', 0, ["CreateModel", "CreateModel"]) self.assertOperationAttributes(changes, 'thirdapp', 0, 0, name="CustomUser") self.assertOperationAttributes(changes, 'thirdapp', 0, 1, name="Aardvark") def test_default_related_name_option(self): model_state = ModelState('app', 'model', [ ('id', models.AutoField(primary_key=True)), ], options={'default_related_name': 'related_name'}) changes = self.get_changes([], [model_state]) self.assertNumberMigrations(changes, 'app', 1) self.assertOperationTypes(changes, 'app', 0, ['CreateModel']) self.assertOperationAttributes( changes, 'app', 0, 0, name='model', options={'default_related_name': 'related_name'}, ) altered_model_state = ModelState('app', 'Model', [ ('id', models.AutoField(primary_key=True)), ]) changes = self.get_changes([model_state], [altered_model_state]) self.assertNumberMigrations(changes, 'app', 1) self.assertOperationTypes(changes, 'app', 0, ['AlterModelOptions']) self.assertOperationAttributes(changes, 'app', 0, 0, name='model', options={}) @override_settings(AUTH_USER_MODEL="thirdapp.CustomUser") def test_swappable_first_setting(self): """Swappable models get their CreateModel first.""" with isolate_lru_cache(apps.get_swappable_settings_name): changes = self.get_changes([], [self.custom_user_no_inherit, self.aardvark]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'thirdapp', 1) self.assertOperationTypes(changes, 'thirdapp', 0, ["CreateModel", "CreateModel"]) self.assertOperationAttributes(changes, 'thirdapp', 0, 0, name="CustomUser") self.assertOperationAttributes(changes, 'thirdapp', 0, 1, name="Aardvark") def test_bases_first(self): """Bases of other models come first.""" changes = self.get_changes([], [self.aardvark_based_on_author, self.author_name]) # Right number/type of migrations? 
self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "CreateModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Author") self.assertOperationAttributes(changes, 'testapp', 0, 1, name="Aardvark") def test_bases_first_mixed_case_app_label(self): app_label = 'MiXedCaseApp' changes = self.get_changes([], [ ModelState(app_label, 'owner', [ ('id', models.AutoField(primary_key=True)), ]), ModelState(app_label, 'place', [ ('id', models.AutoField(primary_key=True)), ('owner', models.ForeignKey('MiXedCaseApp.owner', models.CASCADE)), ]), ModelState(app_label, 'restaurant', [], bases=('MiXedCaseApp.place',)), ]) self.assertNumberMigrations(changes, app_label, 1) self.assertOperationTypes(changes, app_label, 0, [ 'CreateModel', 'CreateModel', 'CreateModel', ]) self.assertOperationAttributes(changes, app_label, 0, 0, name='owner') self.assertOperationAttributes(changes, app_label, 0, 1, name='place') self.assertOperationAttributes(changes, app_label, 0, 2, name='restaurant') def test_multiple_bases(self): """#23956 - Inheriting models doesn't move *_ptr fields into AddField operations.""" A = ModelState("app", "A", [("a_id", models.AutoField(primary_key=True))]) B = ModelState("app", "B", [("b_id", models.AutoField(primary_key=True))]) C = ModelState("app", "C", [], bases=("app.A", "app.B")) D = ModelState("app", "D", [], bases=("app.A", "app.B")) E = ModelState("app", "E", [], bases=("app.A", "app.B")) changes = self.get_changes([], [A, B, C, D, E]) # Right number/type of migrations? self.assertNumberMigrations(changes, "app", 1) self.assertOperationTypes(changes, "app", 0, [ "CreateModel", "CreateModel", "CreateModel", "CreateModel", "CreateModel" ]) self.assertOperationAttributes(changes, "app", 0, 0, name="A") self.assertOperationAttributes(changes, "app", 0, 1, name="B") self.assertOperationAttributes(changes, "app", 0, 2, name="C") self.assertOperationAttributes(changes, "app", 0, 3, name="D") self.assertOperationAttributes(changes, "app", 0, 4, name="E") def test_proxy_bases_first(self): """Bases of proxies come first.""" changes = self.get_changes([], [self.author_empty, self.author_proxy, self.author_proxy_proxy]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "CreateModel", "CreateModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Author") self.assertOperationAttributes(changes, 'testapp', 0, 1, name="AuthorProxy") self.assertOperationAttributes(changes, 'testapp', 0, 2, name="AAuthorProxyProxy") def test_pk_fk_included(self): """ A relation used as the primary key is kept as part of CreateModel. """ changes = self.get_changes([], [self.aardvark_pk_fk_author, self.author_name]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "CreateModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Author") self.assertOperationAttributes(changes, 'testapp', 0, 1, name="Aardvark") def test_first_dependency(self): """ A dependency to an app with no migrations uses __first__. 
""" # Load graph loader = MigrationLoader(connection) before = self.make_project_state([]) after = self.make_project_state([self.book_migrations_fk]) after.real_apps = ["migrations"] autodetector = MigrationAutodetector(before, after) changes = autodetector._detect_changes(graph=loader.graph) # Right number/type of migrations? self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, 'otherapp', 0, 0, name="Book") self.assertMigrationDependencies(changes, 'otherapp', 0, [("migrations", "__first__")]) @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"}) def test_last_dependency(self): """ A dependency to an app with existing migrations uses the last migration of that app. """ # Load graph loader = MigrationLoader(connection) before = self.make_project_state([]) after = self.make_project_state([self.book_migrations_fk]) after.real_apps = ["migrations"] autodetector = MigrationAutodetector(before, after) changes = autodetector._detect_changes(graph=loader.graph) # Right number/type of migrations? self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ["CreateModel"]) self.assertOperationAttributes(changes, 'otherapp', 0, 0, name="Book") self.assertMigrationDependencies(changes, 'otherapp', 0, [("migrations", "0002_second")]) def test_alter_fk_before_model_deletion(self): """ ForeignKeys are altered _before_ the model they used to refer to are deleted. """ changes = self.get_changes( [self.author_name, self.publisher_with_author], [self.aardvark_testapp, self.publisher_with_aardvark_author] ) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["CreateModel", "AlterField", "DeleteModel"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="Aardvark") self.assertOperationAttributes(changes, 'testapp', 0, 1, name="author") self.assertOperationAttributes(changes, 'testapp', 0, 2, name="Author") def test_fk_dependency_other_app(self): """ #23100 - ForeignKeys correctly depend on other apps' models. """ changes = self.get_changes([self.author_name, self.book], [self.author_with_book, self.book]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0, name="book") self.assertMigrationDependencies(changes, 'testapp', 0, [("otherapp", "__first__")]) def test_alter_field_to_fk_dependency_other_app(self): changes = self.get_changes( [self.author_empty, self.book_with_no_author_fk], [self.author_empty, self.book], ) self.assertNumberMigrations(changes, 'otherapp', 1) self.assertOperationTypes(changes, 'otherapp', 0, ['AlterField']) self.assertMigrationDependencies(changes, 'otherapp', 0, [('testapp', '__first__')]) def test_circular_dependency_mixed_addcreate(self): """ #23315 - The dependency resolver knows to put all CreateModel before AddField and not become unsolvable. 
""" address = ModelState("a", "Address", [ ("id", models.AutoField(primary_key=True)), ("country", models.ForeignKey("b.DeliveryCountry", models.CASCADE)), ]) person = ModelState("a", "Person", [ ("id", models.AutoField(primary_key=True)), ]) apackage = ModelState("b", "APackage", [ ("id", models.AutoField(primary_key=True)), ("person", models.ForeignKey("a.Person", models.CASCADE)), ]) country = ModelState("b", "DeliveryCountry", [ ("id", models.AutoField(primary_key=True)), ]) changes = self.get_changes([], [address, person, apackage, country]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'a', 2) self.assertNumberMigrations(changes, 'b', 1) self.assertOperationTypes(changes, 'a', 0, ["CreateModel", "CreateModel"]) self.assertOperationTypes(changes, 'a', 1, ["AddField"]) self.assertOperationTypes(changes, 'b', 0, ["CreateModel", "CreateModel"]) @override_settings(AUTH_USER_MODEL="a.Tenant") def test_circular_dependency_swappable(self): """ #23322 - The dependency resolver knows to explicitly resolve swappable models. """ with isolate_lru_cache(apps.get_swappable_settings_name): tenant = ModelState("a", "Tenant", [ ("id", models.AutoField(primary_key=True)), ("primary_address", models.ForeignKey("b.Address", models.CASCADE))], bases=(AbstractBaseUser,) ) address = ModelState("b", "Address", [ ("id", models.AutoField(primary_key=True)), ("tenant", models.ForeignKey(settings.AUTH_USER_MODEL, models.CASCADE)), ]) changes = self.get_changes([], [address, tenant]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'a', 2) self.assertOperationTypes(changes, 'a', 0, ["CreateModel"]) self.assertOperationTypes(changes, 'a', 1, ["AddField"]) self.assertMigrationDependencies(changes, 'a', 0, []) self.assertMigrationDependencies(changes, 'a', 1, [('a', 'auto_1'), ('b', 'auto_1')]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'b', 1) self.assertOperationTypes(changes, 'b', 0, ["CreateModel"]) self.assertMigrationDependencies(changes, 'b', 0, [('__setting__', 'AUTH_USER_MODEL')]) @override_settings(AUTH_USER_MODEL="b.Tenant") def test_circular_dependency_swappable2(self): """ #23322 - The dependency resolver knows to explicitly resolve swappable models but with the swappable not being the first migrated model. """ with isolate_lru_cache(apps.get_swappable_settings_name): address = ModelState("a", "Address", [ ("id", models.AutoField(primary_key=True)), ("tenant", models.ForeignKey(settings.AUTH_USER_MODEL, models.CASCADE)), ]) tenant = ModelState("b", "Tenant", [ ("id", models.AutoField(primary_key=True)), ("primary_address", models.ForeignKey("a.Address", models.CASCADE))], bases=(AbstractBaseUser,) ) changes = self.get_changes([], [address, tenant]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'a', 2) self.assertOperationTypes(changes, 'a', 0, ["CreateModel"]) self.assertOperationTypes(changes, 'a', 1, ["AddField"]) self.assertMigrationDependencies(changes, 'a', 0, []) self.assertMigrationDependencies(changes, 'a', 1, [('__setting__', 'AUTH_USER_MODEL'), ('a', 'auto_1')]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'b', 1) self.assertOperationTypes(changes, 'b', 0, ["CreateModel"]) self.assertMigrationDependencies(changes, 'b', 0, [('a', 'auto_1')]) @override_settings(AUTH_USER_MODEL="a.Person") def test_circular_dependency_swappable_self(self): """ #23322 - The dependency resolver knows to explicitly resolve swappable models. 
""" with isolate_lru_cache(apps.get_swappable_settings_name): person = ModelState("a", "Person", [ ("id", models.AutoField(primary_key=True)), ("parent1", models.ForeignKey(settings.AUTH_USER_MODEL, models.CASCADE, related_name='children')) ]) changes = self.get_changes([], [person]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'a', 1) self.assertOperationTypes(changes, 'a', 0, ["CreateModel"]) self.assertMigrationDependencies(changes, 'a', 0, []) @override_settings(AUTH_USER_MODEL='a.User') def test_swappable_circular_multi_mti(self): with isolate_lru_cache(apps.get_swappable_settings_name): parent = ModelState('a', 'Parent', [ ('user', models.ForeignKey(settings.AUTH_USER_MODEL, models.CASCADE)) ]) child = ModelState('a', 'Child', [], bases=('a.Parent',)) user = ModelState('a', 'User', [], bases=(AbstractBaseUser, 'a.Child')) changes = self.get_changes([], [parent, child, user]) self.assertNumberMigrations(changes, 'a', 1) self.assertOperationTypes(changes, 'a', 0, ['CreateModel', 'CreateModel', 'CreateModel', 'AddField']) @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition', side_effect=AssertionError("Should not have prompted for not null addition")) def test_add_blank_textfield_and_charfield(self, mocked_ask_method): """ #23405 - Adding a NOT NULL and blank `CharField` or `TextField` without default should not prompt for a default. """ changes = self.get_changes([self.author_empty], [self.author_with_biography_blank]) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField", "AddField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0) @mock.patch('django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition') def test_add_non_blank_textfield_and_charfield(self, mocked_ask_method): """ #23405 - Adding a NOT NULL and non-blank `CharField` or `TextField` without default should prompt for a default. """ changes = self.get_changes([self.author_empty], [self.author_with_biography_non_blank]) self.assertEqual(mocked_ask_method.call_count, 2) # Right number/type of migrations? self.assertNumberMigrations(changes, 'testapp', 1) self.assertOperationTypes(changes, 'testapp', 0, ["AddField", "AddField"]) self.assertOperationAttributes(changes, 'testapp', 0, 0) def test_mti_inheritance_model_removal(self): Animal = ModelState('app', 'Animal', [ ("id", models.AutoField(primary_key=True)), ]) Dog = ModelState('app', 'Dog', [], bases=('app.Animal',)) changes = self.get_changes([Animal, Dog], [Animal]) self.assertNumberMigrations(changes, 'app', 1) self.assertOperationTypes(changes, 'app', 0, ['DeleteModel']) self.assertOperationAttributes(changes, 'app', 0, 0, name='Dog') def test_add_model_with_field_removed_from_base_model(self): """ Removing a base field takes place before adding a new inherited model that has a field with the same name. 
""" before = [ ModelState('app', 'readable', [ ('id', models.AutoField(primary_key=True)), ('title', models.CharField(max_length=200)), ]), ] after = [ ModelState('app', 'readable', [ ('id', models.AutoField(primary_key=True)), ]), ModelState('app', 'book', [ ('title', models.CharField(max_length=200)), ], bases=('app.readable',)), ] changes = self.get_changes(before, after) self.assertNumberMigrations(changes, 'app', 1) self.assertOperationTypes(changes, 'app', 0, ['RemoveField', 'CreateModel']) self.assertOperationAttributes(changes, 'app', 0, 0, name='title', model_name='readable') self.assertOperationAttributes(changes, 'app', 0, 1, name='book') class MigrationSuggestNameTests(SimpleTestCase): def test_no_operations(self): class Migration(migrations.Migration): operations = [] migration = Migration('some_migration', 'test_app') self.assertIs(migration.suggest_name().startswith('auto_'), True) def test_no_operations_initial(self): class Migration(migrations.Migration): initial = True operations = [] migration = Migration('some_migration', 'test_app') self.assertEqual(migration.suggest_name(), 'initial') def test_single_operation(self): class Migration(migrations.Migration): operations = [migrations.CreateModel('Person', fields=[])] migration = Migration('0001_initial', 'test_app') self.assertEqual(migration.suggest_name(), 'person') class Migration(migrations.Migration): operations = [migrations.DeleteModel('Person')] migration = Migration('0002_initial', 'test_app') self.assertEqual(migration.suggest_name(), 'delete_person') def test_single_operation_long_name(self): class Migration(migrations.Migration): operations = [migrations.CreateModel('A' * 53, fields=[])] migration = Migration('some_migration', 'test_app') self.assertEqual(migration.suggest_name(), 'a' * 53) def test_two_operations(self): class Migration(migrations.Migration): operations = [ migrations.CreateModel('Person', fields=[]), migrations.DeleteModel('Animal'), ] migration = Migration('some_migration', 'test_app') self.assertEqual(migration.suggest_name(), 'person_delete_animal') def test_two_create_models(self): class Migration(migrations.Migration): operations = [ migrations.CreateModel('Person', fields=[]), migrations.CreateModel('Animal', fields=[]), ] migration = Migration('0001_initial', 'test_app') self.assertEqual(migration.suggest_name(), 'person_animal') def test_two_create_models_with_initial_true(self): class Migration(migrations.Migration): initial = True operations = [ migrations.CreateModel('Person', fields=[]), migrations.CreateModel('Animal', fields=[]), ] migration = Migration('0001_initial', 'test_app') self.assertEqual(migration.suggest_name(), 'initial') def test_many_operations_suffix(self): class Migration(migrations.Migration): operations = [ migrations.CreateModel('Person1', fields=[]), migrations.CreateModel('Person2', fields=[]), migrations.CreateModel('Person3', fields=[]), migrations.DeleteModel('Person4'), migrations.DeleteModel('Person5'), ] migration = Migration('some_migration', 'test_app') self.assertEqual( migration.suggest_name(), 'person1_person2_person3_delete_person4_and_more', ) def test_operation_with_no_suggested_name(self): class Migration(migrations.Migration): operations = [ migrations.CreateModel('Person', fields=[]), migrations.RunSQL('SELECT 1 FROM person;'), ] migration = Migration('some_migration', 'test_app') self.assertIs(migration.suggest_name().startswith('auto_'), True) def test_none_name(self): class Migration(migrations.Migration): operations = 
[migrations.RunSQL('SELECT 1 FROM person;')] migration = Migration('0001_initial', 'test_app') suggest_name = migration.suggest_name() self.assertIs(suggest_name.startswith('auto_'), True) def test_none_name_with_initial_true(self): class Migration(migrations.Migration): initial = True operations = [migrations.RunSQL('SELECT 1 FROM person;')] migration = Migration('0001_initial', 'test_app') self.assertEqual(migration.suggest_name(), 'initial') def test_auto(self): migration = migrations.Migration('0001_initial', 'test_app') suggest_name = migration.suggest_name() self.assertIs(suggest_name.startswith('auto_'), True)
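
# Illustrative sketch (not part of the test suite above): the behavior
# MigrationSuggestNameTests exercises, shown as plain usage. The operation
# list and app label here are arbitrary examples.
#
#     migration = migrations.Migration('0002_example', 'test_app')
#     migration.operations = [
#         migrations.CreateModel('Person', fields=[]),
#         migrations.DeleteModel('Animal'),
#     ]
#     migration.suggest_name()  # -> 'person_delete_animal'
#
# When no operation contributes a name fragment (e.g. only RunSQL), the
# suggestion falls back to an 'auto_<timestamp>'-style name, unless the
# migration is marked initial, in which case 'initial' is suggested.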
86496a862e8c9c1abcb53493679f71e896ea57f40ab2488234a3ee2f0478cef3
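# A minimal sketch (illustrative only, not one of the tests below) of the API
# this module exercises, using the imports that follow: a ProjectState is an
# in-memory description of models that can be rendered into a real app
# registry through its .apps property.
#
#     project_state = ProjectState()
#     project_state.add_model(ModelState('migrations', 'Tag', [
#         ('id', models.AutoField(primary_key=True)),
#     ]))
#     Tag = project_state.apps.get_model('migrations', 'Tag')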
from django.apps.registry import Apps from django.contrib.contenttypes.fields import GenericForeignKey from django.db import models from django.db.migrations.exceptions import InvalidBasesError from django.db.migrations.operations import ( AddField, AlterField, DeleteModel, RemoveField, ) from django.db.migrations.state import ( ModelState, ProjectState, get_related_models_recursive, ) from django.test import SimpleTestCase, override_settings from django.test.utils import isolate_apps from .models import ( FoodManager, FoodQuerySet, ModelWithCustomBase, NoMigrationFoodManager, UnicodeModel, ) class StateTests(SimpleTestCase): """ Tests state construction, rendering and modification by operations. """ def test_create(self): """ Tests making a ProjectState from an Apps """ new_apps = Apps(["migrations"]) class Author(models.Model): name = models.CharField(max_length=255) bio = models.TextField() age = models.IntegerField(blank=True, null=True) class Meta: app_label = "migrations" apps = new_apps unique_together = ["name", "bio"] index_together = ["bio", "age"] class AuthorProxy(Author): class Meta: app_label = "migrations" apps = new_apps proxy = True ordering = ["name"] class SubAuthor(Author): width = models.FloatField(null=True) class Meta: app_label = "migrations" apps = new_apps class Book(models.Model): title = models.CharField(max_length=1000) author = models.ForeignKey(Author, models.CASCADE) contributors = models.ManyToManyField(Author) class Meta: app_label = "migrations" apps = new_apps verbose_name = "tome" db_table = "test_tome" indexes = [models.Index(fields=['title'])] class Food(models.Model): food_mgr = FoodManager('a', 'b') food_qs = FoodQuerySet.as_manager() food_no_mgr = NoMigrationFoodManager('x', 'y') class Meta: app_label = "migrations" apps = new_apps class FoodNoManagers(models.Model): class Meta: app_label = "migrations" apps = new_apps class FoodNoDefaultManager(models.Model): food_no_mgr = NoMigrationFoodManager('x', 'y') food_mgr = FoodManager('a', 'b') food_qs = FoodQuerySet.as_manager() class Meta: app_label = "migrations" apps = new_apps mgr1 = FoodManager('a', 'b') mgr2 = FoodManager('x', 'y', c=3, d=4) class FoodOrderedManagers(models.Model): # The managers on this model should be ordered by their creation # counter and not by the order in model body food_no_mgr = NoMigrationFoodManager('x', 'y') food_mgr2 = mgr2 food_mgr1 = mgr1 class Meta: app_label = "migrations" apps = new_apps project_state = ProjectState.from_apps(new_apps) author_state = project_state.models['migrations', 'author'] author_proxy_state = project_state.models['migrations', 'authorproxy'] sub_author_state = project_state.models['migrations', 'subauthor'] book_state = project_state.models['migrations', 'book'] food_state = project_state.models['migrations', 'food'] food_no_managers_state = project_state.models['migrations', 'foodnomanagers'] food_no_default_manager_state = project_state.models['migrations', 'foodnodefaultmanager'] food_order_manager_state = project_state.models['migrations', 'foodorderedmanagers'] book_index = models.Index(fields=['title']) book_index.set_name_with_model(Book) self.assertEqual(author_state.app_label, "migrations") self.assertEqual(author_state.name, "Author") self.assertEqual(list(author_state.fields), ["id", "name", "bio", "age"]) self.assertEqual(author_state.fields['name'].max_length, 255) self.assertIs(author_state.fields['bio'].null, False) self.assertIs(author_state.fields['age'].null, True) self.assertEqual( author_state.options, { 
"unique_together": {("name", "bio")}, "index_together": {("bio", "age")}, "indexes": [], "constraints": [], } ) self.assertEqual(author_state.bases, (models.Model,)) self.assertEqual(book_state.app_label, "migrations") self.assertEqual(book_state.name, "Book") self.assertEqual(list(book_state.fields), ["id", "title", "author", "contributors"]) self.assertEqual(book_state.fields['title'].max_length, 1000) self.assertIs(book_state.fields['author'].null, False) self.assertEqual(book_state.fields['contributors'].__class__.__name__, 'ManyToManyField') self.assertEqual( book_state.options, {"verbose_name": "tome", "db_table": "test_tome", "indexes": [book_index], "constraints": []}, ) self.assertEqual(book_state.bases, (models.Model,)) self.assertEqual(author_proxy_state.app_label, "migrations") self.assertEqual(author_proxy_state.name, "AuthorProxy") self.assertEqual(author_proxy_state.fields, {}) self.assertEqual( author_proxy_state.options, {"proxy": True, "ordering": ["name"], "indexes": [], "constraints": []}, ) self.assertEqual(author_proxy_state.bases, ("migrations.author",)) self.assertEqual(sub_author_state.app_label, "migrations") self.assertEqual(sub_author_state.name, "SubAuthor") self.assertEqual(len(sub_author_state.fields), 2) self.assertEqual(sub_author_state.bases, ("migrations.author",)) # The default manager is used in migrations self.assertEqual([name for name, mgr in food_state.managers], ['food_mgr']) self.assertTrue(all(isinstance(name, str) for name, mgr in food_state.managers)) self.assertEqual(food_state.managers[0][1].args, ('a', 'b', 1, 2)) # No explicit managers defined. Migrations will fall back to the default self.assertEqual(food_no_managers_state.managers, []) # food_mgr is used in migration but isn't the default mgr, hence add the # default self.assertEqual([name for name, mgr in food_no_default_manager_state.managers], ['food_no_mgr', 'food_mgr']) self.assertTrue(all(isinstance(name, str) for name, mgr in food_no_default_manager_state.managers)) self.assertEqual(food_no_default_manager_state.managers[0][1].__class__, models.Manager) self.assertIsInstance(food_no_default_manager_state.managers[1][1], FoodManager) self.assertEqual([name for name, mgr in food_order_manager_state.managers], ['food_mgr1', 'food_mgr2']) self.assertTrue(all(isinstance(name, str) for name, mgr in food_order_manager_state.managers)) self.assertEqual([mgr.args for name, mgr in food_order_manager_state.managers], [('a', 'b', 1, 2), ('x', 'y', 3, 4)]) def test_custom_default_manager_added_to_the_model_state(self): """ When the default manager of the model is a custom manager, it needs to be added to the model state. """ new_apps = Apps(['migrations']) custom_manager = models.Manager() class Author(models.Model): objects = models.TextField() authors = custom_manager class Meta: app_label = 'migrations' apps = new_apps project_state = ProjectState.from_apps(new_apps) author_state = project_state.models['migrations', 'author'] self.assertEqual(author_state.managers, [('authors', custom_manager)]) def test_custom_default_manager_named_objects_with_false_migration_flag(self): """ When a manager is added with a name of 'objects' but it does not have `use_in_migrations = True`, no migration should be added to the model state (#26643). 
""" new_apps = Apps(['migrations']) class Author(models.Model): objects = models.Manager() class Meta: app_label = 'migrations' apps = new_apps project_state = ProjectState.from_apps(new_apps) author_state = project_state.models['migrations', 'author'] self.assertEqual(author_state.managers, []) def test_no_duplicate_managers(self): """ When a manager is added with `use_in_migrations = True` and a parent model had a manager with the same name and `use_in_migrations = True`, the parent's manager shouldn't appear in the model state (#26881). """ new_apps = Apps(['migrations']) class PersonManager(models.Manager): use_in_migrations = True class Person(models.Model): objects = PersonManager() class Meta: abstract = True class BossManager(PersonManager): use_in_migrations = True class Boss(Person): objects = BossManager() class Meta: app_label = 'migrations' apps = new_apps project_state = ProjectState.from_apps(new_apps) boss_state = project_state.models['migrations', 'boss'] self.assertEqual(boss_state.managers, [('objects', Boss.objects)]) def test_custom_default_manager(self): new_apps = Apps(['migrations']) class Author(models.Model): manager1 = models.Manager() manager2 = models.Manager() class Meta: app_label = 'migrations' apps = new_apps default_manager_name = 'manager2' project_state = ProjectState.from_apps(new_apps) author_state = project_state.models['migrations', 'author'] self.assertEqual(author_state.options['default_manager_name'], 'manager2') self.assertEqual(author_state.managers, [('manager2', Author.manager1)]) def test_custom_base_manager(self): new_apps = Apps(['migrations']) class Author(models.Model): manager1 = models.Manager() manager2 = models.Manager() class Meta: app_label = 'migrations' apps = new_apps base_manager_name = 'manager2' class Author2(models.Model): manager1 = models.Manager() manager2 = models.Manager() class Meta: app_label = 'migrations' apps = new_apps base_manager_name = 'manager1' project_state = ProjectState.from_apps(new_apps) author_state = project_state.models['migrations', 'author'] self.assertEqual(author_state.options['base_manager_name'], 'manager2') self.assertEqual(author_state.managers, [ ('manager1', Author.manager1), ('manager2', Author.manager2), ]) author2_state = project_state.models['migrations', 'author2'] self.assertEqual(author2_state.options['base_manager_name'], 'manager1') self.assertEqual(author2_state.managers, [ ('manager1', Author2.manager1), ]) def test_apps_bulk_update(self): """ StateApps.bulk_update() should update apps.ready to False and reset the value afterward. """ project_state = ProjectState() apps = project_state.apps with apps.bulk_update(): self.assertFalse(apps.ready) self.assertTrue(apps.ready) with self.assertRaises(ValueError): with apps.bulk_update(): self.assertFalse(apps.ready) raise ValueError() self.assertTrue(apps.ready) def test_render(self): """ Tests rendering a ProjectState into an Apps. 
""" project_state = ProjectState() project_state.add_model(ModelState( app_label="migrations", name="Tag", fields=[ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=100)), ("hidden", models.BooleanField()), ], )) project_state.add_model(ModelState( app_label="migrations", name="SubTag", fields=[ ('tag_ptr', models.OneToOneField( 'migrations.Tag', models.CASCADE, auto_created=True, parent_link=True, primary_key=True, to_field='id', serialize=False, )), ("awesome", models.BooleanField()), ], bases=("migrations.Tag",), )) base_mgr = models.Manager() mgr1 = FoodManager('a', 'b') mgr2 = FoodManager('x', 'y', c=3, d=4) project_state.add_model(ModelState( app_label="migrations", name="Food", fields=[ ("id", models.AutoField(primary_key=True)), ], managers=[ # The ordering we really want is objects, mgr1, mgr2 ('default', base_mgr), ('food_mgr2', mgr2), ('food_mgr1', mgr1), ] )) new_apps = project_state.apps self.assertEqual(new_apps.get_model("migrations", "Tag")._meta.get_field("name").max_length, 100) self.assertIs(new_apps.get_model("migrations", "Tag")._meta.get_field("hidden").null, False) self.assertEqual(len(new_apps.get_model("migrations", "SubTag")._meta.local_fields), 2) Food = new_apps.get_model("migrations", "Food") self.assertEqual([mgr.name for mgr in Food._meta.managers], ['default', 'food_mgr1', 'food_mgr2']) self.assertTrue(all(isinstance(mgr.name, str) for mgr in Food._meta.managers)) self.assertEqual([mgr.__class__ for mgr in Food._meta.managers], [models.Manager, FoodManager, FoodManager]) def test_render_model_inheritance(self): class Book(models.Model): title = models.CharField(max_length=1000) class Meta: app_label = "migrations" apps = Apps() class Novel(Book): class Meta: app_label = "migrations" apps = Apps() # First, test rendering individually apps = Apps(["migrations"]) # We shouldn't be able to render yet ms = ModelState.from_model(Novel) with self.assertRaises(InvalidBasesError): ms.render(apps) # Once the parent model is in the app registry, it should be fine ModelState.from_model(Book).render(apps) ModelState.from_model(Novel).render(apps) def test_render_model_with_multiple_inheritance(self): class Foo(models.Model): class Meta: app_label = "migrations" apps = Apps() class Bar(models.Model): class Meta: app_label = "migrations" apps = Apps() class FooBar(Foo, Bar): class Meta: app_label = "migrations" apps = Apps() class AbstractSubFooBar(FooBar): class Meta: abstract = True apps = Apps() class SubFooBar(AbstractSubFooBar): class Meta: app_label = "migrations" apps = Apps() apps = Apps(["migrations"]) # We shouldn't be able to render yet ms = ModelState.from_model(FooBar) with self.assertRaises(InvalidBasesError): ms.render(apps) # Once the parent models are in the app registry, it should be fine ModelState.from_model(Foo).render(apps) self.assertSequenceEqual(ModelState.from_model(Foo).bases, [models.Model]) ModelState.from_model(Bar).render(apps) self.assertSequenceEqual(ModelState.from_model(Bar).bases, [models.Model]) ModelState.from_model(FooBar).render(apps) self.assertSequenceEqual(ModelState.from_model(FooBar).bases, ['migrations.foo', 'migrations.bar']) ModelState.from_model(SubFooBar).render(apps) self.assertSequenceEqual(ModelState.from_model(SubFooBar).bases, ['migrations.foobar']) def test_render_project_dependencies(self): """ The ProjectState render method correctly renders models to account for inter-model base dependencies. 
""" new_apps = Apps() class A(models.Model): class Meta: app_label = "migrations" apps = new_apps class B(A): class Meta: app_label = "migrations" apps = new_apps class C(B): class Meta: app_label = "migrations" apps = new_apps class D(A): class Meta: app_label = "migrations" apps = new_apps class E(B): class Meta: app_label = "migrations" apps = new_apps proxy = True class F(D): class Meta: app_label = "migrations" apps = new_apps proxy = True # Make a ProjectState and render it project_state = ProjectState() project_state.add_model(ModelState.from_model(A)) project_state.add_model(ModelState.from_model(B)) project_state.add_model(ModelState.from_model(C)) project_state.add_model(ModelState.from_model(D)) project_state.add_model(ModelState.from_model(E)) project_state.add_model(ModelState.from_model(F)) final_apps = project_state.apps self.assertEqual(len(final_apps.get_models()), 6) # Now make an invalid ProjectState and make sure it fails project_state = ProjectState() project_state.add_model(ModelState.from_model(A)) project_state.add_model(ModelState.from_model(B)) project_state.add_model(ModelState.from_model(C)) project_state.add_model(ModelState.from_model(F)) with self.assertRaises(InvalidBasesError): project_state.apps def test_render_unique_app_labels(self): """ The ProjectState render method doesn't raise an ImproperlyConfigured exception about unique labels if two dotted app names have the same last part. """ class A(models.Model): class Meta: app_label = "django.contrib.auth" class B(models.Model): class Meta: app_label = "vendor.auth" # Make a ProjectState and render it project_state = ProjectState() project_state.add_model(ModelState.from_model(A)) project_state.add_model(ModelState.from_model(B)) self.assertEqual(len(project_state.apps.get_models()), 2) def test_reload_related_model_on_non_relational_fields(self): """ The model is reloaded even on changes that are not involved in relations. Other models pointing to or from it are also reloaded. """ project_state = ProjectState() project_state.apps # Render project state. 
project_state.add_model(ModelState('migrations', 'A', [])) project_state.add_model(ModelState('migrations', 'B', [ ('a', models.ForeignKey('A', models.CASCADE)), ])) project_state.add_model(ModelState('migrations', 'C', [ ('b', models.ForeignKey('B', models.CASCADE)), ('name', models.TextField()), ])) project_state.add_model(ModelState('migrations', 'D', [ ('a', models.ForeignKey('A', models.CASCADE)), ])) operation = AlterField( model_name='C', name='name', field=models.TextField(blank=True), ) operation.state_forwards('migrations', project_state) project_state.reload_model('migrations', 'a', delay=True) A = project_state.apps.get_model('migrations.A') B = project_state.apps.get_model('migrations.B') D = project_state.apps.get_model('migrations.D') self.assertIs(B._meta.get_field('a').related_model, A) self.assertIs(D._meta.get_field('a').related_model, A) def test_reload_model_relationship_consistency(self): project_state = ProjectState() project_state.add_model(ModelState('migrations', 'A', [])) project_state.add_model(ModelState('migrations', 'B', [ ('a', models.ForeignKey('A', models.CASCADE)), ])) project_state.add_model(ModelState('migrations', 'C', [ ('b', models.ForeignKey('B', models.CASCADE)), ])) A = project_state.apps.get_model('migrations.A') B = project_state.apps.get_model('migrations.B') C = project_state.apps.get_model('migrations.C') self.assertEqual([r.related_model for r in A._meta.related_objects], [B]) self.assertEqual([r.related_model for r in B._meta.related_objects], [C]) self.assertEqual([r.related_model for r in C._meta.related_objects], []) project_state.reload_model('migrations', 'a', delay=True) A = project_state.apps.get_model('migrations.A') B = project_state.apps.get_model('migrations.B') C = project_state.apps.get_model('migrations.C') self.assertEqual([r.related_model for r in A._meta.related_objects], [B]) self.assertEqual([r.related_model for r in B._meta.related_objects], [C]) self.assertEqual([r.related_model for r in C._meta.related_objects], []) def test_add_relations(self): """ #24573 - Adding relations to existing models should reload the referenced models too. 
""" new_apps = Apps() class A(models.Model): class Meta: app_label = 'something' apps = new_apps class B(A): class Meta: app_label = 'something' apps = new_apps class C(models.Model): class Meta: app_label = 'something' apps = new_apps project_state = ProjectState() project_state.add_model(ModelState.from_model(A)) project_state.add_model(ModelState.from_model(B)) project_state.add_model(ModelState.from_model(C)) project_state.apps # We need to work with rendered models old_state = project_state.clone() model_a_old = old_state.apps.get_model('something', 'A') model_b_old = old_state.apps.get_model('something', 'B') model_c_old = old_state.apps.get_model('something', 'C') # The relations between the old models are correct self.assertIs(model_a_old._meta.get_field('b').related_model, model_b_old) self.assertIs(model_b_old._meta.get_field('a_ptr').related_model, model_a_old) operation = AddField('c', 'to_a', models.OneToOneField( 'something.A', models.CASCADE, related_name='from_c', )) operation.state_forwards('something', project_state) model_a_new = project_state.apps.get_model('something', 'A') model_b_new = project_state.apps.get_model('something', 'B') model_c_new = project_state.apps.get_model('something', 'C') # All models have changed self.assertIsNot(model_a_old, model_a_new) self.assertIsNot(model_b_old, model_b_new) self.assertIsNot(model_c_old, model_c_new) # The relations between the old models still hold self.assertIs(model_a_old._meta.get_field('b').related_model, model_b_old) self.assertIs(model_b_old._meta.get_field('a_ptr').related_model, model_a_old) # The relations between the new models correct self.assertIs(model_a_new._meta.get_field('b').related_model, model_b_new) self.assertIs(model_b_new._meta.get_field('a_ptr').related_model, model_a_new) self.assertIs(model_a_new._meta.get_field('from_c').related_model, model_c_new) self.assertIs(model_c_new._meta.get_field('to_a').related_model, model_a_new) def test_remove_relations(self): """ #24225 - Relations between models are updated while remaining the relations and references for models of an old state. 
""" new_apps = Apps() class A(models.Model): class Meta: app_label = "something" apps = new_apps class B(models.Model): to_a = models.ForeignKey(A, models.CASCADE) class Meta: app_label = "something" apps = new_apps def get_model_a(state): return [mod for mod in state.apps.get_models() if mod._meta.model_name == 'a'][0] project_state = ProjectState() project_state.add_model(ModelState.from_model(A)) project_state.add_model(ModelState.from_model(B)) self.assertEqual(len(get_model_a(project_state)._meta.related_objects), 1) old_state = project_state.clone() operation = RemoveField("b", "to_a") operation.state_forwards("something", project_state) # Model from old_state still has the relation model_a_old = get_model_a(old_state) model_a_new = get_model_a(project_state) self.assertIsNot(model_a_old, model_a_new) self.assertEqual(len(model_a_old._meta.related_objects), 1) self.assertEqual(len(model_a_new._meta.related_objects), 0) # Same test for deleted model project_state = ProjectState() project_state.add_model(ModelState.from_model(A)) project_state.add_model(ModelState.from_model(B)) old_state = project_state.clone() operation = DeleteModel("b") operation.state_forwards("something", project_state) model_a_old = get_model_a(old_state) model_a_new = get_model_a(project_state) self.assertIsNot(model_a_old, model_a_new) self.assertEqual(len(model_a_old._meta.related_objects), 1) self.assertEqual(len(model_a_new._meta.related_objects), 0) def test_self_relation(self): """ #24513 - Modifying an object pointing to itself would cause it to be rendered twice and thus breaking its related M2M through objects. """ class A(models.Model): to_a = models.ManyToManyField('something.A', symmetrical=False) class Meta: app_label = "something" def get_model_a(state): return [mod for mod in state.apps.get_models() if mod._meta.model_name == 'a'][0] project_state = ProjectState() project_state.add_model(ModelState.from_model(A)) self.assertEqual(len(get_model_a(project_state)._meta.related_objects), 1) old_state = project_state.clone() operation = AlterField( model_name="a", name="to_a", field=models.ManyToManyField("something.A", symmetrical=False, blank=True) ) # At this point the model would be rendered twice causing its related # M2M through objects to point to an old copy and thus breaking their # attribute lookup. operation.state_forwards("something", project_state) model_a_old = get_model_a(old_state) model_a_new = get_model_a(project_state) self.assertIsNot(model_a_old, model_a_new) # The old model's _meta is still consistent field_to_a_old = model_a_old._meta.get_field("to_a") self.assertEqual(field_to_a_old.m2m_field_name(), "from_a") self.assertEqual(field_to_a_old.m2m_reverse_field_name(), "to_a") self.assertIs(field_to_a_old.related_model, model_a_old) self.assertIs(field_to_a_old.remote_field.through._meta.get_field('to_a').related_model, model_a_old) self.assertIs(field_to_a_old.remote_field.through._meta.get_field('from_a').related_model, model_a_old) # The new model's _meta is still consistent field_to_a_new = model_a_new._meta.get_field("to_a") self.assertEqual(field_to_a_new.m2m_field_name(), "from_a") self.assertEqual(field_to_a_new.m2m_reverse_field_name(), "to_a") self.assertIs(field_to_a_new.related_model, model_a_new) self.assertIs(field_to_a_new.remote_field.through._meta.get_field('to_a').related_model, model_a_new) self.assertIs(field_to_a_new.remote_field.through._meta.get_field('from_a').related_model, model_a_new) def test_equality(self): """ == and != are implemented correctly. 
""" # Test two things that should be equal project_state = ProjectState() project_state.add_model(ModelState( "migrations", "Tag", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=100)), ("hidden", models.BooleanField()), ], {}, None, )) project_state.apps # Fill the apps cached property other_state = project_state.clone() self.assertEqual(project_state, project_state) self.assertEqual(project_state, other_state) self.assertIs(project_state != project_state, False) self.assertIs(project_state != other_state, False) self.assertNotEqual(project_state.apps, other_state.apps) # Make a very small change (max_len 99) and see if that affects it project_state = ProjectState() project_state.add_model(ModelState( "migrations", "Tag", [ ("id", models.AutoField(primary_key=True)), ("name", models.CharField(max_length=99)), ("hidden", models.BooleanField()), ], {}, None, )) self.assertNotEqual(project_state, other_state) self.assertIs(project_state == other_state, False) def test_dangling_references_throw_error(self): new_apps = Apps() class Author(models.Model): name = models.TextField() class Meta: app_label = "migrations" apps = new_apps class Publisher(models.Model): name = models.TextField() class Meta: app_label = "migrations" apps = new_apps class Book(models.Model): author = models.ForeignKey(Author, models.CASCADE) publisher = models.ForeignKey(Publisher, models.CASCADE) class Meta: app_label = "migrations" apps = new_apps class Magazine(models.Model): authors = models.ManyToManyField(Author) class Meta: app_label = "migrations" apps = new_apps # Make a valid ProjectState and render it project_state = ProjectState() project_state.add_model(ModelState.from_model(Author)) project_state.add_model(ModelState.from_model(Publisher)) project_state.add_model(ModelState.from_model(Book)) project_state.add_model(ModelState.from_model(Magazine)) self.assertEqual(len(project_state.apps.get_models()), 4) # now make an invalid one with a ForeignKey project_state = ProjectState() project_state.add_model(ModelState.from_model(Book)) msg = ( "The field migrations.Book.author was declared with a lazy reference " "to 'migrations.author', but app 'migrations' doesn't provide model 'author'.\n" "The field migrations.Book.publisher was declared with a lazy reference " "to 'migrations.publisher', but app 'migrations' doesn't provide model 'publisher'." ) with self.assertRaisesMessage(ValueError, msg): project_state.apps # And another with ManyToManyField. project_state = ProjectState() project_state.add_model(ModelState.from_model(Magazine)) msg = ( "The field migrations.Magazine.authors was declared with a lazy reference " "to 'migrations.author\', but app 'migrations' doesn't provide model 'author'.\n" "The field migrations.Magazine_authors.author was declared with a lazy reference " "to \'migrations.author\', but app 'migrations' doesn't provide model 'author'." ) with self.assertRaisesMessage(ValueError, msg): project_state.apps # And now with multiple models and multiple fields. 
project_state.add_model(ModelState.from_model(Book)) msg = ( "The field migrations.Book.author was declared with a lazy reference " "to 'migrations.author', but app 'migrations' doesn't provide model 'author'.\n" "The field migrations.Book.publisher was declared with a lazy reference " "to 'migrations.publisher', but app 'migrations' doesn't provide model 'publisher'.\n" "The field migrations.Magazine.authors was declared with a lazy reference " "to 'migrations.author', but app 'migrations' doesn't provide model 'author'.\n" "The field migrations.Magazine_authors.author was declared with a lazy reference " "to 'migrations.author', but app 'migrations' doesn't provide model 'author'." ) with self.assertRaisesMessage(ValueError, msg): project_state.apps def test_reference_mixed_case_app_label(self): new_apps = Apps() class Author(models.Model): class Meta: app_label = 'MiXedCase_migrations' apps = new_apps class Book(models.Model): author = models.ForeignKey(Author, models.CASCADE) class Meta: app_label = 'MiXedCase_migrations' apps = new_apps class Magazine(models.Model): authors = models.ManyToManyField(Author) class Meta: app_label = 'MiXedCase_migrations' apps = new_apps project_state = ProjectState() project_state.add_model(ModelState.from_model(Author)) project_state.add_model(ModelState.from_model(Book)) project_state.add_model(ModelState.from_model(Magazine)) self.assertEqual(len(project_state.apps.get_models()), 3) def test_real_apps(self): """ Including real apps can resolve dangling FK errors. This test relies on the fact that contenttypes is always loaded. """ new_apps = Apps() class TestModel(models.Model): ct = models.ForeignKey("contenttypes.ContentType", models.CASCADE) class Meta: app_label = "migrations" apps = new_apps # If we just stick it into an empty state it should fail project_state = ProjectState() project_state.add_model(ModelState.from_model(TestModel)) with self.assertRaises(ValueError): project_state.apps # If we include the real app it should succeed project_state = ProjectState(real_apps=["contenttypes"]) project_state.add_model(ModelState.from_model(TestModel)) rendered_state = project_state.apps self.assertEqual( len([x for x in rendered_state.get_models() if x._meta.app_label == "migrations"]), 1, ) def test_ignore_order_wrt(self): """ Makes sure ProjectState doesn't include OrderWrt fields when making from existing models. 
""" new_apps = Apps() class Author(models.Model): name = models.TextField() class Meta: app_label = "migrations" apps = new_apps class Book(models.Model): author = models.ForeignKey(Author, models.CASCADE) class Meta: app_label = "migrations" apps = new_apps order_with_respect_to = "author" # Make a valid ProjectState and render it project_state = ProjectState() project_state.add_model(ModelState.from_model(Author)) project_state.add_model(ModelState.from_model(Book)) self.assertEqual( list(project_state.models['migrations', 'book'].fields), ["id", "author"], ) def test_manager_refer_correct_model_version(self): """ #24147 - Managers refer to the correct version of a historical model """ project_state = ProjectState() project_state.add_model(ModelState( app_label="migrations", name="Tag", fields=[ ("id", models.AutoField(primary_key=True)), ("hidden", models.BooleanField()), ], managers=[ ('food_mgr', FoodManager('a', 'b')), ('food_qs', FoodQuerySet.as_manager()), ] )) old_model = project_state.apps.get_model('migrations', 'tag') new_state = project_state.clone() operation = RemoveField("tag", "hidden") operation.state_forwards("migrations", new_state) new_model = new_state.apps.get_model('migrations', 'tag') self.assertIsNot(old_model, new_model) self.assertIs(old_model, old_model.food_mgr.model) self.assertIs(old_model, old_model.food_qs.model) self.assertIs(new_model, new_model.food_mgr.model) self.assertIs(new_model, new_model.food_qs.model) self.assertIsNot(old_model.food_mgr, new_model.food_mgr) self.assertIsNot(old_model.food_qs, new_model.food_qs) self.assertIsNot(old_model.food_mgr.model, new_model.food_mgr.model) self.assertIsNot(old_model.food_qs.model, new_model.food_qs.model) def test_choices_iterator(self): """ #24483 - ProjectState.from_apps should not destructively consume Field.choices iterators. """ new_apps = Apps(["migrations"]) choices = [('a', 'A'), ('b', 'B')] class Author(models.Model): name = models.CharField(max_length=255) choice = models.CharField(max_length=255, choices=iter(choices)) class Meta: app_label = "migrations" apps = new_apps ProjectState.from_apps(new_apps) choices_field = Author._meta.get_field('choice') self.assertEqual(list(choices_field.choices), choices) class ModelStateTests(SimpleTestCase): def test_custom_model_base(self): state = ModelState.from_model(ModelWithCustomBase) self.assertEqual(state.bases, (models.Model,)) def test_bound_field_sanity_check(self): field = models.CharField(max_length=1) field.model = models.Model with self.assertRaisesMessage(ValueError, 'ModelState.fields cannot be bound to a model - "field" is.'): ModelState('app', 'Model', [('field', field)]) def test_sanity_check_to(self): field = models.ForeignKey(UnicodeModel, models.CASCADE) with self.assertRaisesMessage( ValueError, 'ModelState.fields cannot refer to a model class - "field.to" does. ' 'Use a string reference instead.' ): ModelState('app', 'Model', [('field', field)]) def test_sanity_check_through(self): field = models.ManyToManyField('UnicodeModel') field.remote_field.through = UnicodeModel with self.assertRaisesMessage( ValueError, 'ModelState.fields cannot refer to a model class - "field.through" does. ' 'Use a string reference instead.' ): ModelState('app', 'Model', [('field', field)]) def test_sanity_index_name(self): field = models.IntegerField() options = {'indexes': [models.Index(fields=['field'])]} msg = ( "Indexes passed to ModelState require a name attribute. <Index: " "fields=['field']> doesn't have one." 
) with self.assertRaisesMessage(ValueError, msg): ModelState('app', 'Model', [('field', field)], options=options) def test_fields_immutability(self): """ Rendering a model state doesn't alter its internal fields. """ apps = Apps() field = models.CharField(max_length=1) state = ModelState('app', 'Model', [('name', field)]) Model = state.render(apps) self.assertNotEqual(Model._meta.get_field('name'), field) def test_repr(self): field = models.CharField(max_length=1) state = ModelState('app', 'Model', [('name', field)], bases=['app.A', 'app.B', 'app.C']) self.assertEqual(repr(state), "<ModelState: 'app.Model'>") project_state = ProjectState() project_state.add_model(state) with self.assertRaisesMessage(InvalidBasesError, "Cannot resolve bases for [<ModelState: 'app.Model'>]"): project_state.apps def test_fields_ordering_equality(self): state = ModelState( 'migrations', 'Tag', [ ('id', models.AutoField(primary_key=True)), ('name', models.CharField(max_length=100)), ('hidden', models.BooleanField()), ], ) reordered_state = ModelState( 'migrations', 'Tag', [ ('id', models.AutoField(primary_key=True)), # Purposely re-ordered. ('hidden', models.BooleanField()), ('name', models.CharField(max_length=100)), ], ) self.assertEqual(state, reordered_state) @override_settings(TEST_SWAPPABLE_MODEL='migrations.SomeFakeModel') def test_create_swappable(self): """ Tests making a ProjectState from an Apps with a swappable model """ new_apps = Apps(['migrations']) class Author(models.Model): name = models.CharField(max_length=255) bio = models.TextField() age = models.IntegerField(blank=True, null=True) class Meta: app_label = 'migrations' apps = new_apps swappable = 'TEST_SWAPPABLE_MODEL' author_state = ModelState.from_model(Author) self.assertEqual(author_state.app_label, 'migrations') self.assertEqual(author_state.name, 'Author') self.assertEqual(list(author_state.fields), ['id', 'name', 'bio', 'age']) self.assertEqual(author_state.fields['name'].max_length, 255) self.assertIs(author_state.fields['bio'].null, False) self.assertIs(author_state.fields['age'].null, True) self.assertEqual(author_state.options, {'swappable': 'TEST_SWAPPABLE_MODEL', 'indexes': [], "constraints": []}) self.assertEqual(author_state.bases, (models.Model,)) self.assertEqual(author_state.managers, []) @override_settings(TEST_SWAPPABLE_MODEL='migrations.SomeFakeModel') def test_create_swappable_from_abstract(self): """ A swappable model inheriting from a hierarchy: concrete -> abstract -> concrete. 
""" new_apps = Apps(['migrations']) class SearchableLocation(models.Model): keywords = models.CharField(max_length=256) class Meta: app_label = 'migrations' apps = new_apps class Station(SearchableLocation): name = models.CharField(max_length=128) class Meta: abstract = True class BusStation(Station): bus_routes = models.CharField(max_length=128) inbound = models.BooleanField(default=False) class Meta(Station.Meta): app_label = 'migrations' apps = new_apps swappable = 'TEST_SWAPPABLE_MODEL' station_state = ModelState.from_model(BusStation) self.assertEqual(station_state.app_label, 'migrations') self.assertEqual(station_state.name, 'BusStation') self.assertEqual( list(station_state.fields), ['searchablelocation_ptr', 'name', 'bus_routes', 'inbound'] ) self.assertEqual(station_state.fields['name'].max_length, 128) self.assertIs(station_state.fields['bus_routes'].null, False) self.assertEqual( station_state.options, {'abstract': False, 'swappable': 'TEST_SWAPPABLE_MODEL', 'indexes': [], 'constraints': []} ) self.assertEqual(station_state.bases, ('migrations.searchablelocation',)) self.assertEqual(station_state.managers, []) @override_settings(TEST_SWAPPABLE_MODEL='migrations.SomeFakeModel') def test_custom_manager_swappable(self): """ Tests making a ProjectState from unused models with custom managers """ new_apps = Apps(['migrations']) class Food(models.Model): food_mgr = FoodManager('a', 'b') food_qs = FoodQuerySet.as_manager() food_no_mgr = NoMigrationFoodManager('x', 'y') class Meta: app_label = "migrations" apps = new_apps swappable = 'TEST_SWAPPABLE_MODEL' food_state = ModelState.from_model(Food) # The default manager is used in migrations self.assertEqual([name for name, mgr in food_state.managers], ['food_mgr']) self.assertEqual(food_state.managers[0][1].args, ('a', 'b', 1, 2)) @isolate_apps('migrations', 'django.contrib.contenttypes') def test_order_with_respect_to_private_field(self): class PrivateFieldModel(models.Model): content_type = models.ForeignKey('contenttypes.ContentType', models.CASCADE) object_id = models.PositiveIntegerField() private = GenericForeignKey() class Meta: order_with_respect_to = 'private' state = ModelState.from_model(PrivateFieldModel) self.assertNotIn('order_with_respect_to', state.options) @isolate_apps('migrations') def test_abstract_model_children_inherit_indexes(self): class Abstract(models.Model): name = models.CharField(max_length=50) class Meta: app_label = 'migrations' abstract = True indexes = [models.Index(fields=['name'])] class Child1(Abstract): pass class Child2(Abstract): pass child1_state = ModelState.from_model(Child1) child2_state = ModelState.from_model(Child2) index_names = [index.name for index in child1_state.options['indexes']] self.assertEqual(index_names, ['migrations__name_b0afd7_idx']) index_names = [index.name for index in child2_state.options['indexes']] self.assertEqual(index_names, ['migrations__name_016466_idx']) # Modifying the state doesn't modify the index on the model. 
child1_state.options['indexes'][0].name = 'bar' self.assertEqual(Child1._meta.indexes[0].name, 'migrations__name_b0afd7_idx') @isolate_apps('migrations') def test_explicit_index_name(self): class TestModel(models.Model): name = models.CharField(max_length=50) class Meta: app_label = 'migrations' indexes = [models.Index(fields=['name'], name='foo_idx')] model_state = ModelState.from_model(TestModel) index_names = [index.name for index in model_state.options['indexes']] self.assertEqual(index_names, ['foo_idx']) @isolate_apps('migrations') def test_from_model_constraints(self): class ModelWithConstraints(models.Model): size = models.IntegerField() class Meta: constraints = [models.CheckConstraint(check=models.Q(size__gt=1), name='size_gt_1')] state = ModelState.from_model(ModelWithConstraints) model_constraints = ModelWithConstraints._meta.constraints state_constraints = state.options['constraints'] self.assertEqual(model_constraints, state_constraints) self.assertIsNot(model_constraints, state_constraints) self.assertIsNot(model_constraints[0], state_constraints[0]) class RelatedModelsTests(SimpleTestCase): def setUp(self): self.apps = Apps(['migrations.related_models_app']) def create_model(self, name, foreign_keys=[], bases=(), abstract=False, proxy=False): test_name = 'related_models_app' assert not (abstract and proxy) meta_contents = { 'abstract': abstract, 'app_label': test_name, 'apps': self.apps, 'proxy': proxy, } meta = type("Meta", (), meta_contents) if not bases: bases = (models.Model,) body = { 'Meta': meta, '__module__': "__fake__", } fname_base = fname = '%s_%%d' % name.lower() for i, fk in enumerate(foreign_keys, 1): fname = fname_base % i body[fname] = fk return type(name, bases, body) def assertRelated(self, model, needle): self.assertEqual( get_related_models_recursive(model), {(n._meta.app_label, n._meta.model_name) for n in needle}, ) def test_unrelated(self): A = self.create_model("A") B = self.create_model("B") self.assertRelated(A, []) self.assertRelated(B, []) def test_direct_fk(self): A = self.create_model("A", foreign_keys=[models.ForeignKey('B', models.CASCADE)]) B = self.create_model("B") self.assertRelated(A, [B]) self.assertRelated(B, [A]) def test_direct_hidden_fk(self): A = self.create_model("A", foreign_keys=[models.ForeignKey('B', models.CASCADE, related_name='+')]) B = self.create_model("B") self.assertRelated(A, [B]) self.assertRelated(B, [A]) def test_fk_through_proxy(self): A = self.create_model("A") B = self.create_model("B", bases=(A,), proxy=True) C = self.create_model("C", bases=(B,), proxy=True) D = self.create_model("D", foreign_keys=[models.ForeignKey('C', models.CASCADE)]) self.assertRelated(A, [B, C, D]) self.assertRelated(B, [A, C, D]) self.assertRelated(C, [A, B, D]) self.assertRelated(D, [A, B, C]) def test_nested_fk(self): A = self.create_model("A", foreign_keys=[models.ForeignKey('B', models.CASCADE)]) B = self.create_model("B", foreign_keys=[models.ForeignKey('C', models.CASCADE)]) C = self.create_model("C") self.assertRelated(A, [B, C]) self.assertRelated(B, [A, C]) self.assertRelated(C, [A, B]) def test_two_sided(self): A = self.create_model("A", foreign_keys=[models.ForeignKey('B', models.CASCADE)]) B = self.create_model("B", foreign_keys=[models.ForeignKey('A', models.CASCADE)]) self.assertRelated(A, [B]) self.assertRelated(B, [A]) def test_circle(self): A = self.create_model("A", foreign_keys=[models.ForeignKey('B', models.CASCADE)]) B = self.create_model("B", foreign_keys=[models.ForeignKey('C', models.CASCADE)]) C = 
self.create_model("C", foreign_keys=[models.ForeignKey('A', models.CASCADE)]) self.assertRelated(A, [B, C]) self.assertRelated(B, [A, C]) self.assertRelated(C, [A, B]) def test_base(self): A = self.create_model("A") B = self.create_model("B", bases=(A,)) self.assertRelated(A, [B]) self.assertRelated(B, [A]) def test_nested_base(self): A = self.create_model("A") B = self.create_model("B", bases=(A,)) C = self.create_model("C", bases=(B,)) self.assertRelated(A, [B, C]) self.assertRelated(B, [A, C]) self.assertRelated(C, [A, B]) def test_multiple_bases(self): A = self.create_model("A") B = self.create_model("B") C = self.create_model("C", bases=(A, B,)) self.assertRelated(A, [B, C]) self.assertRelated(B, [A, C]) self.assertRelated(C, [A, B]) def test_multiple_nested_bases(self): A = self.create_model("A") B = self.create_model("B") C = self.create_model("C", bases=(A, B,)) D = self.create_model("D") E = self.create_model("E", bases=(D,)) F = self.create_model("F", bases=(C, E,)) Y = self.create_model("Y") Z = self.create_model("Z", bases=(Y,)) self.assertRelated(A, [B, C, D, E, F]) self.assertRelated(B, [A, C, D, E, F]) self.assertRelated(C, [A, B, D, E, F]) self.assertRelated(D, [A, B, C, E, F]) self.assertRelated(E, [A, B, C, D, F]) self.assertRelated(F, [A, B, C, D, E]) self.assertRelated(Y, [Z]) self.assertRelated(Z, [Y]) def test_base_to_base_fk(self): A = self.create_model("A", foreign_keys=[models.ForeignKey('Y', models.CASCADE)]) B = self.create_model("B", bases=(A,)) Y = self.create_model("Y") Z = self.create_model("Z", bases=(Y,)) self.assertRelated(A, [B, Y, Z]) self.assertRelated(B, [A, Y, Z]) self.assertRelated(Y, [A, B, Z]) self.assertRelated(Z, [A, B, Y]) def test_base_to_subclass_fk(self): A = self.create_model("A", foreign_keys=[models.ForeignKey('Z', models.CASCADE)]) B = self.create_model("B", bases=(A,)) Y = self.create_model("Y") Z = self.create_model("Z", bases=(Y,)) self.assertRelated(A, [B, Y, Z]) self.assertRelated(B, [A, Y, Z]) self.assertRelated(Y, [A, B, Z]) self.assertRelated(Z, [A, B, Y]) def test_direct_m2m(self): A = self.create_model("A", foreign_keys=[models.ManyToManyField('B')]) B = self.create_model("B") self.assertRelated(A, [A.a_1.rel.through, B]) self.assertRelated(B, [A, A.a_1.rel.through]) def test_direct_m2m_self(self): A = self.create_model("A", foreign_keys=[models.ManyToManyField('A')]) self.assertRelated(A, [A.a_1.rel.through]) def test_intermediate_m2m_self(self): A = self.create_model("A", foreign_keys=[models.ManyToManyField('A', through='T')]) T = self.create_model("T", foreign_keys=[ models.ForeignKey('A', models.CASCADE), models.ForeignKey('A', models.CASCADE), ]) self.assertRelated(A, [T]) self.assertRelated(T, [A]) def test_intermediate_m2m(self): A = self.create_model("A", foreign_keys=[models.ManyToManyField('B', through='T')]) B = self.create_model("B") T = self.create_model("T", foreign_keys=[ models.ForeignKey('A', models.CASCADE), models.ForeignKey('B', models.CASCADE), ]) self.assertRelated(A, [B, T]) self.assertRelated(B, [A, T]) self.assertRelated(T, [A, B]) def test_intermediate_m2m_extern_fk(self): A = self.create_model("A", foreign_keys=[models.ManyToManyField('B', through='T')]) B = self.create_model("B") Z = self.create_model("Z") T = self.create_model("T", foreign_keys=[ models.ForeignKey('A', models.CASCADE), models.ForeignKey('B', models.CASCADE), models.ForeignKey('Z', models.CASCADE), ]) self.assertRelated(A, [B, T, Z]) self.assertRelated(B, [A, T, Z]) self.assertRelated(T, [A, B, Z]) self.assertRelated(Z, [A, B, T]) 
def test_intermediate_m2m_base(self): A = self.create_model("A", foreign_keys=[models.ManyToManyField('B', through='T')]) B = self.create_model("B") S = self.create_model("S") T = self.create_model("T", foreign_keys=[ models.ForeignKey('A', models.CASCADE), models.ForeignKey('B', models.CASCADE), ], bases=(S,)) self.assertRelated(A, [B, S, T]) self.assertRelated(B, [A, S, T]) self.assertRelated(S, [A, B, T]) self.assertRelated(T, [A, B, S]) def test_generic_fk(self): A = self.create_model("A", foreign_keys=[ models.ForeignKey('B', models.CASCADE), GenericForeignKey(), ]) B = self.create_model("B", foreign_keys=[ models.ForeignKey('C', models.CASCADE), ]) self.assertRelated(A, [B]) self.assertRelated(B, [A]) def test_abstract_base(self): A = self.create_model("A", abstract=True) B = self.create_model("B", bases=(A,)) self.assertRelated(A, [B]) self.assertRelated(B, []) def test_nested_abstract_base(self): A = self.create_model("A", abstract=True) B = self.create_model("B", bases=(A,), abstract=True) C = self.create_model("C", bases=(B,)) self.assertRelated(A, [B, C]) self.assertRelated(B, [C]) self.assertRelated(C, []) def test_proxy_base(self): A = self.create_model("A") B = self.create_model("B", bases=(A,), proxy=True) self.assertRelated(A, [B]) self.assertRelated(B, []) def test_nested_proxy_base(self): A = self.create_model("A") B = self.create_model("B", bases=(A,), proxy=True) C = self.create_model("C", bases=(B,), proxy=True) self.assertRelated(A, [B, C]) self.assertRelated(B, [C]) self.assertRelated(C, []) def test_multiple_mixed_bases(self): A = self.create_model("A", abstract=True) M = self.create_model("M") P = self.create_model("P") Q = self.create_model("Q", bases=(P,), proxy=True) Z = self.create_model("Z", bases=(A, M, Q)) # M has a pointer O2O field p_ptr to P self.assertRelated(A, [M, P, Q, Z]) self.assertRelated(M, [P, Q, Z]) self.assertRelated(P, [M, Q, Z]) self.assertRelated(Q, [M, P, Z]) self.assertRelated(Z, [M, P, Q])
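# Illustrative sketch (not part of the original suite): get_related_models_recursive()
# returns a set of (app_label, model_name) tuples, which is exactly what
# assertRelated() above normalizes its expected models into. A minimal,
# self-contained example using throwaway models (the names are hypothetical):
def _related_models_example():
    test_apps = Apps(['migrations.related_models_app'])

    class Target(models.Model):
        class Meta:
            app_label = 'related_models_app'
            apps = test_apps

    class Source(models.Model):
        target = models.ForeignKey(Target, models.CASCADE)

        class Meta:
            app_label = 'related_models_app'
            apps = test_apps

    # A direct FK is reported from both ends, as in test_direct_fk above.
    assert get_related_models_recursive(Source) == {('related_models_app', 'target')}
    assert get_related_models_recursive(Target) == {('related_models_app', 'source')}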
import re from django.forms import CharField, Form, Media from django.http import HttpRequest, HttpResponse from django.middleware.csrf import ( CsrfViewMiddleware, _does_token_match as equivalent_tokens, get_token, ) from django.template import TemplateDoesNotExist, TemplateSyntaxError from django.template.backends.dummy import TemplateStrings from django.test import SimpleTestCase class TemplateStringsTests(SimpleTestCase): engine_class = TemplateStrings backend_name = 'dummy' options = {} @classmethod def setUpClass(cls): super().setUpClass() params = { 'DIRS': [], 'APP_DIRS': True, 'NAME': cls.backend_name, 'OPTIONS': cls.options, } cls.engine = cls.engine_class(params) def test_from_string(self): template = self.engine.from_string("Hello!\n") content = template.render() self.assertEqual(content, "Hello!\n") def test_get_template(self): template = self.engine.get_template('template_backends/hello.html') content = template.render({'name': 'world'}) self.assertEqual(content, "Hello world!\n") def test_get_template_nonexistent(self): with self.assertRaises(TemplateDoesNotExist) as e: self.engine.get_template('template_backends/nonexistent.html') self.assertEqual(e.exception.backend, self.engine) def test_get_template_syntax_error(self): # There's no way to trigger a syntax error with the dummy backend. # The test still lives here to factor it between other backends. if self.backend_name == 'dummy': self.skipTest("test doesn't apply to dummy backend") with self.assertRaises(TemplateSyntaxError): self.engine.get_template('template_backends/syntax_error.html') def test_html_escaping(self): template = self.engine.get_template('template_backends/hello.html') context = {'name': '<script>alert("XSS!");</script>'} content = template.render(context) self.assertIn('&lt;script&gt;', content) self.assertNotIn('<script>', content) def test_django_html_escaping(self): if self.backend_name == 'dummy': self.skipTest("test doesn't apply to dummy backend") class TestForm(Form): test_field = CharField() media = Media(js=['my-script.js']) form = TestForm() template = self.engine.get_template('template_backends/django_escaping.html') content = template.render({'media': media, 'test_form': form}) expected = '{}\n\n{}\n\n{}'.format(media, form, form['test_field']) self.assertHTMLEqual(content, expected) def test_csrf_token(self): request = HttpRequest() CsrfViewMiddleware(lambda req: HttpResponse()).process_view(request, lambda r: None, (), {}) template = self.engine.get_template('template_backends/csrf.html') content = template.render(request=request) expected = '<input type="hidden" name="csrfmiddlewaretoken" value="([^"]+)">' match = re.match(expected, content) or re.match(expected.replace('"', "'"), content) self.assertTrue(match, "hidden csrftoken field not found in output") self.assertTrue(equivalent_tokens(match[1], get_token(request))) def test_no_directory_traversal(self): with self.assertRaises(TemplateDoesNotExist): self.engine.get_template('../forbidden/template_backends/hello.html') def test_non_ascii_characters(self): template = self.engine.get_template('template_backends/hello.html') content = template.render({'name': 'Jérôme'}) self.assertEqual(content, "Hello Jérôme!\n")
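# Illustrative sketch (not part of this module): the class above is written to
# be reused by the other backend test modules, which subclass it and swap the
# engine, roughly like the following (names assumed):
#
#     from django.template.backends.django import DjangoTemplates
#
#     class DjangoTemplatesTests(TemplateStringsTests):
#         engine_class = DjangoTemplates
#         backend_name = 'django'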
from datetime import datetime from django.core.exceptions import ValidationError from django.db import models def validate_answer_to_universe(value): if value != 42: raise ValidationError('This is not the answer to life, universe and everything!', code='not42') class ModelToValidate(models.Model): name = models.CharField(max_length=100) created = models.DateTimeField(default=datetime.now) number = models.IntegerField(db_column='number_val') parent = models.ForeignKey( 'self', models.SET_NULL, blank=True, null=True, limit_choices_to={'number': 10}, ) email = models.EmailField(blank=True) ufm = models.ForeignKey( 'UniqueFieldsModel', models.SET_NULL, to_field='unique_charfield', blank=True, null=True, ) url = models.URLField(blank=True) f_with_custom_validator = models.IntegerField(blank=True, null=True, validators=[validate_answer_to_universe]) f_with_iterable_of_validators = models.IntegerField(blank=True, null=True, validators=(validate_answer_to_universe,)) slug = models.SlugField(blank=True) def clean(self): super().clean() if self.number == 11: raise ValidationError('Invalid number supplied!') class UniqueFieldsModel(models.Model): unique_charfield = models.CharField(max_length=100, unique=True) unique_integerfield = models.IntegerField(unique=True) non_unique_field = models.IntegerField() class CustomPKModel(models.Model): my_pk_field = models.CharField(max_length=100, primary_key=True) class UniqueTogetherModel(models.Model): cfield = models.CharField(max_length=100) ifield = models.IntegerField() efield = models.EmailField() class Meta: unique_together = (('ifield', 'cfield',), ['ifield', 'efield']) class UniqueForDateModel(models.Model): start_date = models.DateField() end_date = models.DateTimeField() count = models.IntegerField(unique_for_date="start_date", unique_for_year="end_date") order = models.IntegerField(unique_for_month="end_date") name = models.CharField(max_length=100) class CustomMessagesModel(models.Model): other = models.IntegerField(blank=True, null=True) number = models.IntegerField( db_column='number_val', error_messages={'null': 'NULL', 'not42': 'AAARGH', 'not_equal': '%s != me'}, validators=[validate_answer_to_universe] ) class AuthorManager(models.Manager): def get_queryset(self): qs = super().get_queryset() return qs.filter(archived=False) class Author(models.Model): name = models.CharField(max_length=100) archived = models.BooleanField(default=False) objects = AuthorManager() class Article(models.Model): title = models.CharField(max_length=100) author = models.ForeignKey(Author, models.CASCADE) pub_date = models.DateTimeField(blank=True) def clean(self): if self.pub_date is None: self.pub_date = datetime.now() class Post(models.Model): title = models.CharField(max_length=50, unique_for_date='posted', blank=True) slug = models.CharField(max_length=50, unique_for_year='posted', blank=True) subtitle = models.CharField(max_length=50, unique_for_month='posted', blank=True) posted = models.DateField() class FlexibleDatePost(models.Model): title = models.CharField(max_length=50, unique_for_date='posted', blank=True) slug = models.CharField(max_length=50, unique_for_year='posted', blank=True) subtitle = models.CharField(max_length=50, unique_for_month='posted', blank=True) posted = models.DateField(blank=True, null=True) class UniqueErrorsModel(models.Model): name = models.CharField(max_length=100, unique=True, error_messages={'unique': 'Custom unique name message.'}) no = models.IntegerField(unique=True, error_messages={'unique': 'Custom unique number message.'}) 
class GenericIPAddressTestModel(models.Model): generic_ip = models.GenericIPAddressField(blank=True, null=True, unique=True) v4_ip = models.GenericIPAddressField(blank=True, null=True, protocol="ipv4") v6_ip = models.GenericIPAddressField(blank=True, null=True, protocol="ipv6") ip_verbose_name = models.GenericIPAddressField("IP Address Verbose", blank=True, null=True) class GenericIPAddrUnpackUniqueTest(models.Model): generic_v4unpack_ip = models.GenericIPAddressField(null=True, blank=True, unique=True, unpack_ipv4=True)
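# Illustrative sketch (not part of the models above): how these models are
# typically exercised. full_clean() runs the field validators (e.g.
# validate_answer_to_universe, which rejects anything but 42 with code
# 'not42') and then the model-level clean() hook; it also checks uniqueness,
# so it needs a database. The values here are hypothetical:
#
#     m = ModelToValidate(name='x', number=12, f_with_custom_validator=12)
#     try:
#         m.full_clean()
#     except ValidationError as e:
#         assert 'f_with_custom_validator' in e.message_dict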
import os import sys import unittest from types import ModuleType, SimpleNamespace from unittest import mock from django.conf import ENVIRONMENT_VARIABLE, LazySettings, Settings, settings from django.core.exceptions import ImproperlyConfigured from django.http import HttpRequest from django.test import ( SimpleTestCase, TestCase, TransactionTestCase, modify_settings, override_settings, signals, ) from django.test.utils import requires_tz_support from django.urls import clear_script_prefix, set_script_prefix from django.utils.deprecation import RemovedInDjango50Warning @modify_settings(ITEMS={ 'prepend': ['b'], 'append': ['d'], 'remove': ['a', 'e'] }) @override_settings(ITEMS=['a', 'c', 'e'], ITEMS_OUTER=[1, 2, 3], TEST='override', TEST_OUTER='outer') class FullyDecoratedTranTestCase(TransactionTestCase): available_apps = [] def test_override(self): self.assertEqual(settings.ITEMS, ['b', 'c', 'd']) self.assertEqual(settings.ITEMS_OUTER, [1, 2, 3]) self.assertEqual(settings.TEST, 'override') self.assertEqual(settings.TEST_OUTER, 'outer') @modify_settings(ITEMS={ 'append': ['e', 'f'], 'prepend': ['a'], 'remove': ['d', 'c'], }) def test_method_list_override(self): self.assertEqual(settings.ITEMS, ['a', 'b', 'e', 'f']) self.assertEqual(settings.ITEMS_OUTER, [1, 2, 3]) @modify_settings(ITEMS={ 'append': ['b'], 'prepend': ['d'], 'remove': ['a', 'c', 'e'], }) def test_method_list_override_no_ops(self): self.assertEqual(settings.ITEMS, ['b', 'd']) @modify_settings(ITEMS={ 'append': 'e', 'prepend': 'a', 'remove': 'c', }) def test_method_list_override_strings(self): self.assertEqual(settings.ITEMS, ['a', 'b', 'd', 'e']) @modify_settings(ITEMS={'remove': ['b', 'd']}) @modify_settings(ITEMS={'append': ['b'], 'prepend': ['d']}) def test_method_list_override_nested_order(self): self.assertEqual(settings.ITEMS, ['d', 'c', 'b']) @override_settings(TEST='override2') def test_method_override(self): self.assertEqual(settings.TEST, 'override2') self.assertEqual(settings.TEST_OUTER, 'outer') def test_decorated_testcase_name(self): self.assertEqual(FullyDecoratedTranTestCase.__name__, 'FullyDecoratedTranTestCase') def test_decorated_testcase_module(self): self.assertEqual(FullyDecoratedTranTestCase.__module__, __name__) @modify_settings(ITEMS={ 'prepend': ['b'], 'append': ['d'], 'remove': ['a', 'e'] }) @override_settings(ITEMS=['a', 'c', 'e'], TEST='override') class FullyDecoratedTestCase(TestCase): def test_override(self): self.assertEqual(settings.ITEMS, ['b', 'c', 'd']) self.assertEqual(settings.TEST, 'override') @modify_settings(ITEMS={ 'append': 'e', 'prepend': 'a', 'remove': 'c', }) @override_settings(TEST='override2') def test_method_override(self): self.assertEqual(settings.ITEMS, ['a', 'b', 'd', 'e']) self.assertEqual(settings.TEST, 'override2') class ClassDecoratedTestCaseSuper(TestCase): """ Dummy class for testing max recursion error in child class call to super(). Refs #17011. 
""" def test_max_recursion_error(self): pass @override_settings(TEST='override') class ClassDecoratedTestCase(ClassDecoratedTestCaseSuper): @classmethod def setUpClass(cls): super().setUpClass() cls.foo = getattr(settings, 'TEST', 'BUG') def test_override(self): self.assertEqual(settings.TEST, 'override') def test_setupclass_override(self): """Settings are overridden within setUpClass (#21281).""" self.assertEqual(self.foo, 'override') @override_settings(TEST='override2') def test_method_override(self): self.assertEqual(settings.TEST, 'override2') def test_max_recursion_error(self): """ Overriding a method on a super class and then calling that method on the super class should not trigger infinite recursion. See #17011. """ super().test_max_recursion_error() @modify_settings(ITEMS={'append': 'mother'}) @override_settings(ITEMS=['father'], TEST='override-parent') class ParentDecoratedTestCase(TestCase): pass @modify_settings(ITEMS={'append': ['child']}) @override_settings(TEST='override-child') class ChildDecoratedTestCase(ParentDecoratedTestCase): def test_override_settings_inheritance(self): self.assertEqual(settings.ITEMS, ['father', 'mother', 'child']) self.assertEqual(settings.TEST, 'override-child') class SettingsTests(SimpleTestCase): def setUp(self): self.testvalue = None signals.setting_changed.connect(self.signal_callback) def tearDown(self): signals.setting_changed.disconnect(self.signal_callback) def signal_callback(self, sender, setting, value, **kwargs): if setting == 'TEST': self.testvalue = value def test_override(self): settings.TEST = 'test' self.assertEqual('test', settings.TEST) with self.settings(TEST='override'): self.assertEqual('override', settings.TEST) self.assertEqual('test', settings.TEST) del settings.TEST def test_override_change(self): settings.TEST = 'test' self.assertEqual('test', settings.TEST) with self.settings(TEST='override'): self.assertEqual('override', settings.TEST) settings.TEST = 'test2' self.assertEqual('test', settings.TEST) del settings.TEST def test_override_doesnt_leak(self): with self.assertRaises(AttributeError): getattr(settings, 'TEST') with self.settings(TEST='override'): self.assertEqual('override', settings.TEST) settings.TEST = 'test' with self.assertRaises(AttributeError): getattr(settings, 'TEST') @override_settings(TEST='override') def test_decorator(self): self.assertEqual('override', settings.TEST) def test_context_manager(self): with self.assertRaises(AttributeError): getattr(settings, 'TEST') override = override_settings(TEST='override') with self.assertRaises(AttributeError): getattr(settings, 'TEST') override.enable() self.assertEqual('override', settings.TEST) override.disable() with self.assertRaises(AttributeError): getattr(settings, 'TEST') def test_class_decorator(self): # SimpleTestCase can be decorated by override_settings, but not ut.TestCase class SimpleTestCaseSubclass(SimpleTestCase): pass class UnittestTestCaseSubclass(unittest.TestCase): pass decorated = override_settings(TEST='override')(SimpleTestCaseSubclass) self.assertIsInstance(decorated, type) self.assertTrue(issubclass(decorated, SimpleTestCase)) with self.assertRaisesMessage(Exception, "Only subclasses of Django SimpleTestCase"): decorated = override_settings(TEST='override')(UnittestTestCaseSubclass) def test_signal_callback_context_manager(self): with self.assertRaises(AttributeError): getattr(settings, 'TEST') with self.settings(TEST='override'): self.assertEqual(self.testvalue, 'override') self.assertIsNone(self.testvalue) 
@override_settings(TEST='override') def test_signal_callback_decorator(self): self.assertEqual(self.testvalue, 'override') # # Regression tests for #10130: deleting settings. # def test_settings_delete(self): settings.TEST = 'test' self.assertEqual('test', settings.TEST) del settings.TEST msg = "'Settings' object has no attribute 'TEST'" with self.assertRaisesMessage(AttributeError, msg): getattr(settings, 'TEST') def test_settings_delete_wrapped(self): with self.assertRaisesMessage(TypeError, "can't delete _wrapped."): delattr(settings, '_wrapped') def test_override_settings_delete(self): """ Allow deletion of a setting in an overridden settings set (#18824) """ previous_i18n = settings.USE_I18N previous_l10n = settings.USE_L10N with self.settings(USE_I18N=False): del settings.USE_I18N with self.assertRaises(AttributeError): getattr(settings, 'USE_I18N') # Should also work for a non-overridden setting del settings.USE_L10N with self.assertRaises(AttributeError): getattr(settings, 'USE_L10N') self.assertNotIn('USE_I18N', dir(settings)) self.assertNotIn('USE_L10N', dir(settings)) self.assertEqual(settings.USE_I18N, previous_i18n) self.assertEqual(settings.USE_L10N, previous_l10n) def test_override_settings_nested(self): """ override_settings uses the actual _wrapped attribute at runtime, not when it was instantiated. """ with self.assertRaises(AttributeError): getattr(settings, 'TEST') with self.assertRaises(AttributeError): getattr(settings, 'TEST2') inner = override_settings(TEST2='override') with override_settings(TEST='override'): self.assertEqual('override', settings.TEST) with inner: self.assertEqual('override', settings.TEST) self.assertEqual('override', settings.TEST2) # inner's __exit__ should have restored the settings of the outer # context manager, not those when the class was instantiated self.assertEqual('override', settings.TEST) with self.assertRaises(AttributeError): getattr(settings, 'TEST2') with self.assertRaises(AttributeError): getattr(settings, 'TEST') with self.assertRaises(AttributeError): getattr(settings, 'TEST2') @override_settings(SECRET_KEY='') def test_no_secret_key(self): msg = 'The SECRET_KEY setting must not be empty.' with self.assertRaisesMessage(ImproperlyConfigured, msg): settings.SECRET_KEY def test_no_settings_module(self): msg = ( 'Requested setting%s, but settings are not configured. You ' 'must either define the environment variable DJANGO_SETTINGS_MODULE ' 'or call settings.configure() before accessing settings.' 
) orig_settings = os.environ[ENVIRONMENT_VARIABLE] os.environ[ENVIRONMENT_VARIABLE] = '' try: with self.assertRaisesMessage(ImproperlyConfigured, msg % 's'): settings._setup() with self.assertRaisesMessage(ImproperlyConfigured, msg % ' TEST'): settings._setup('TEST') finally: os.environ[ENVIRONMENT_VARIABLE] = orig_settings def test_already_configured(self): with self.assertRaisesMessage(RuntimeError, 'Settings already configured.'): settings.configure() def test_nonupper_settings_prohibited_in_configure(self): s = LazySettings() with self.assertRaisesMessage(TypeError, "Setting 'foo' must be uppercase."): s.configure(foo='bar') def test_nonupper_settings_ignored_in_default_settings(self): s = LazySettings() s.configure(SimpleNamespace(foo='bar')) with self.assertRaises(AttributeError): getattr(s, 'foo') @requires_tz_support @mock.patch('django.conf.global_settings.TIME_ZONE', 'test') def test_incorrect_timezone(self): with self.assertRaisesMessage(ValueError, 'Incorrect timezone setting: test'): settings._setup() def test_use_tz_false_deprecation(self): settings_module = ModuleType('fake_settings_module') settings_module.SECRET_KEY = 'foo' sys.modules['fake_settings_module'] = settings_module msg = ( 'The default value of USE_TZ will change from False to True in ' 'Django 5.0. Set USE_TZ to False in your project settings if you ' 'want to keep the current default behavior.' ) try: with self.assertRaisesMessage(RemovedInDjango50Warning, msg): Settings('fake_settings_module') finally: del sys.modules['fake_settings_module'] class TestComplexSettingOverride(SimpleTestCase): def setUp(self): self.old_warn_override_settings = signals.COMPLEX_OVERRIDE_SETTINGS.copy() signals.COMPLEX_OVERRIDE_SETTINGS.add('TEST_WARN') def tearDown(self): signals.COMPLEX_OVERRIDE_SETTINGS = self.old_warn_override_settings self.assertNotIn('TEST_WARN', signals.COMPLEX_OVERRIDE_SETTINGS) def test_complex_override_warning(self): """Regression test for #19031""" msg = 'Overriding setting TEST_WARN can lead to unexpected behavior.' with self.assertWarnsMessage(UserWarning, msg) as cm: with override_settings(TEST_WARN='override'): self.assertEqual(settings.TEST_WARN, 'override') self.assertEqual(cm.filename, __file__) class SecureProxySslHeaderTest(SimpleTestCase): @override_settings(SECURE_PROXY_SSL_HEADER=None) def test_none(self): req = HttpRequest() self.assertIs(req.is_secure(), False) @override_settings(SECURE_PROXY_SSL_HEADER=('HTTP_X_FORWARDED_PROTO', 'https')) def test_set_without_xheader(self): req = HttpRequest() self.assertIs(req.is_secure(), False) @override_settings(SECURE_PROXY_SSL_HEADER=('HTTP_X_FORWARDED_PROTO', 'https')) def test_set_with_xheader_wrong(self): req = HttpRequest() req.META['HTTP_X_FORWARDED_PROTO'] = 'wrongvalue' self.assertIs(req.is_secure(), False) @override_settings(SECURE_PROXY_SSL_HEADER=('HTTP_X_FORWARDED_PROTO', 'https')) def test_set_with_xheader_right(self): req = HttpRequest() req.META['HTTP_X_FORWARDED_PROTO'] = 'https' self.assertIs(req.is_secure(), True) @override_settings(SECURE_PROXY_SSL_HEADER=('HTTP_X_FORWARDED_PROTO', 'https')) def test_xheader_preferred_to_underlying_request(self): class ProxyRequest(HttpRequest): def _get_scheme(self): """Proxy always connecting via HTTPS""" return 'https' # Client connects via HTTP. 
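        # The configured header must take precedence over the transport-level
        # scheme reported by _get_scheme(), so this request reads as insecure.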
req = ProxyRequest() req.META['HTTP_X_FORWARDED_PROTO'] = 'http' self.assertIs(req.is_secure(), False) class IsOverriddenTest(SimpleTestCase): def test_configure(self): s = LazySettings() s.configure(SECRET_KEY='foo') self.assertTrue(s.is_overridden('SECRET_KEY')) def test_module(self): settings_module = ModuleType('fake_settings_module') settings_module.SECRET_KEY = 'foo' settings_module.USE_TZ = False sys.modules['fake_settings_module'] = settings_module try: s = Settings('fake_settings_module') self.assertTrue(s.is_overridden('SECRET_KEY')) self.assertFalse(s.is_overridden('ALLOWED_HOSTS')) finally: del sys.modules['fake_settings_module'] def test_override(self): self.assertFalse(settings.is_overridden('ALLOWED_HOSTS')) with override_settings(ALLOWED_HOSTS=[]): self.assertTrue(settings.is_overridden('ALLOWED_HOSTS')) def test_unevaluated_lazysettings_repr(self): lazy_settings = LazySettings() expected = '<LazySettings [Unevaluated]>' self.assertEqual(repr(lazy_settings), expected) def test_evaluated_lazysettings_repr(self): lazy_settings = LazySettings() module = os.environ.get(ENVIRONMENT_VARIABLE) expected = '<LazySettings "%s">' % module # Force evaluation of the lazy object. lazy_settings.APPEND_SLASH self.assertEqual(repr(lazy_settings), expected) def test_usersettingsholder_repr(self): lazy_settings = LazySettings() lazy_settings.configure(APPEND_SLASH=False) expected = '<UserSettingsHolder>' self.assertEqual(repr(lazy_settings._wrapped), expected) def test_settings_repr(self): module = os.environ.get(ENVIRONMENT_VARIABLE) lazy_settings = Settings(module) expected = '<Settings "%s">' % module self.assertEqual(repr(lazy_settings), expected) class TestListSettings(SimpleTestCase): """ Make sure settings that should be lists or tuples throw ImproperlyConfigured if they are set to a string instead of a list or tuple. """ list_or_tuple_settings = ( 'ALLOWED_HOSTS', "INSTALLED_APPS", "TEMPLATE_DIRS", "LOCALE_PATHS", ) def test_tuple_settings(self): settings_module = ModuleType('fake_settings_module') settings_module.SECRET_KEY = 'foo' msg = 'The %s setting must be a list or a tuple.' for setting in self.list_or_tuple_settings: setattr(settings_module, setting, ('non_list_or_tuple_value')) sys.modules['fake_settings_module'] = settings_module try: with self.assertRaisesMessage(ImproperlyConfigured, msg % setting): Settings('fake_settings_module') finally: del sys.modules['fake_settings_module'] delattr(settings_module, setting) class SettingChangeEnterException(Exception): pass class SettingChangeExitException(Exception): pass class OverrideSettingsIsolationOnExceptionTests(SimpleTestCase): """ The override_settings context manager restore settings if one of the receivers of "setting_changed" signal fails. Check the three cases of receiver failure detailed in receiver(). In each case, ALL receivers are called when exiting the context manager. """ def setUp(self): signals.setting_changed.connect(self.receiver) self.addCleanup(signals.setting_changed.disconnect, self.receiver) # Create a spy that's connected to the `setting_changed` signal and # executed AFTER `self.receiver`. self.spy_receiver = mock.Mock() signals.setting_changed.connect(self.spy_receiver) self.addCleanup(signals.setting_changed.disconnect, self.spy_receiver) def receiver(self, **kwargs): """ A receiver that fails while certain settings are being changed. - SETTING_BOTH raises an error while receiving the signal on both entering and exiting the context manager. - SETTING_ENTER raises an error only on enter. 
- SETTING_EXIT raises an error only on exit. """ setting = kwargs['setting'] enter = kwargs['enter'] if setting in ('SETTING_BOTH', 'SETTING_ENTER') and enter: raise SettingChangeEnterException if setting in ('SETTING_BOTH', 'SETTING_EXIT') and not enter: raise SettingChangeExitException def check_settings(self): """Assert that settings for these tests aren't present.""" self.assertFalse(hasattr(settings, 'SETTING_BOTH')) self.assertFalse(hasattr(settings, 'SETTING_ENTER')) self.assertFalse(hasattr(settings, 'SETTING_EXIT')) self.assertFalse(hasattr(settings, 'SETTING_PASS')) def check_spy_receiver_exit_calls(self, call_count): """ Assert that `self.spy_receiver` was called exactly `call_count` times with the ``enter=False`` keyword argument. """ kwargs_with_exit = [ kwargs for args, kwargs in self.spy_receiver.call_args_list if ('enter', False) in kwargs.items() ] self.assertEqual(len(kwargs_with_exit), call_count) def test_override_settings_both(self): """Receiver fails on both enter and exit.""" with self.assertRaises(SettingChangeEnterException): with override_settings(SETTING_PASS='BOTH', SETTING_BOTH='BOTH'): pass self.check_settings() # Two settings were touched, so expect two calls of `spy_receiver`. self.check_spy_receiver_exit_calls(call_count=2) def test_override_settings_enter(self): """Receiver fails on enter only.""" with self.assertRaises(SettingChangeEnterException): with override_settings(SETTING_PASS='ENTER', SETTING_ENTER='ENTER'): pass self.check_settings() # Two settings were touched, so expect two calls of `spy_receiver`. self.check_spy_receiver_exit_calls(call_count=2) def test_override_settings_exit(self): """Receiver fails on exit only.""" with self.assertRaises(SettingChangeExitException): with override_settings(SETTING_PASS='EXIT', SETTING_EXIT='EXIT'): pass self.check_settings() # Two settings were touched, so expect two calls of `spy_receiver`. self.check_spy_receiver_exit_calls(call_count=2) def test_override_settings_reusable_on_enter(self): """ Error is raised correctly when reusing the same override_settings instance. """ @override_settings(SETTING_ENTER='ENTER') def decorated_function(): pass with self.assertRaises(SettingChangeEnterException): decorated_function() signals.setting_changed.disconnect(self.receiver) # This call shouldn't raise any errors. decorated_function() class MediaURLStaticURLPrefixTest(SimpleTestCase): def set_script_name(self, val): clear_script_prefix() if val is not None: set_script_prefix(val) def test_not_prefixed(self): # Don't add SCRIPT_NAME prefix to absolute paths, URLs, or None. tests = ( '/path/', 'http://myhost.com/path/', 'http://myhost/path/', 'https://myhost/path/', None, ) for setting in ('MEDIA_URL', 'STATIC_URL'): for path in tests: new_settings = {setting: path} with self.settings(**new_settings): for script_name in ['/somesubpath', '/somesubpath/', '/', '', None]: with self.subTest(script_name=script_name, **new_settings): try: self.set_script_name(script_name) self.assertEqual(getattr(settings, setting), path) finally: clear_script_prefix() def test_add_script_name_prefix(self): tests = ( # Relative paths. ('/somesubpath', 'path/', '/somesubpath/path/'), ('/somesubpath/', 'path/', '/somesubpath/path/'), ('/', 'path/', '/path/'), # Invalid URLs. ('/somesubpath/', 'htp://myhost.com/path/', '/somesubpath/htp://myhost.com/path/'), # Blank settings. 
('/somesubpath/', '', '/somesubpath/'), ) for setting in ('MEDIA_URL', 'STATIC_URL'): for script_name, path, expected_path in tests: new_settings = {setting: path} with self.settings(**new_settings): with self.subTest(script_name=script_name, **new_settings): try: self.set_script_name(script_name) self.assertEqual(getattr(settings, setting), expected_path) finally: clear_script_prefix()
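# Illustrative sketch (not part of this module): modify_settings applies its
# prepend/append/remove operations against the *current* value of the setting,
# which is why stacked decorators in the tests above accumulate. The setting
# name here is hypothetical:
#
#     @modify_settings(ITEMS={'prepend': ['x'], 'remove': ['y']})
#     class MyTests(SimpleTestCase):
#         def test_items(self):
#             self.assertNotIn('y', settings.ITEMS)
#             self.assertEqual(settings.ITEMS[0], 'x')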
import datetime import itertools import re from importlib import import_module from unittest import mock from urllib.parse import quote, urljoin from django.apps import apps from django.conf import settings from django.contrib.admin.models import LogEntry from django.contrib.auth import ( BACKEND_SESSION_KEY, REDIRECT_FIELD_NAME, SESSION_KEY, ) from django.contrib.auth.forms import ( AuthenticationForm, PasswordChangeForm, SetPasswordForm, ) from django.contrib.auth.models import Permission, User from django.contrib.auth.views import ( INTERNAL_RESET_SESSION_TOKEN, LoginView, logout_then_login, redirect_to_login, ) from django.contrib.contenttypes.models import ContentType from django.contrib.sessions.middleware import SessionMiddleware from django.contrib.sites.requests import RequestSite from django.core import mail from django.core.exceptions import ImproperlyConfigured from django.db import connection from django.http import HttpRequest, HttpResponse from django.middleware.csrf import CsrfViewMiddleware, get_token from django.test import Client, TestCase, override_settings from django.test.client import RedirectCycleError from django.urls import NoReverseMatch, reverse, reverse_lazy from django.utils.http import urlsafe_base64_encode from .client import PasswordResetConfirmClient from .models import CustomUser, UUIDUser from .settings import AUTH_TEMPLATES @override_settings( LANGUAGES=[('en', 'English')], LANGUAGE_CODE='en', TEMPLATES=AUTH_TEMPLATES, ROOT_URLCONF='auth_tests.urls', ) class AuthViewsTestCase(TestCase): """ Helper base class for the test classes that follow. """ @classmethod def setUpTestData(cls): cls.u1 = User.objects.create_user(username='testclient', password='password', email='[email protected]') cls.u3 = User.objects.create_user(username='staff', password='password', email='[email protected]') def login(self, username='testclient', password='password', url='/login/'): response = self.client.post(url, { 'username': username, 'password': password, }) self.assertIn(SESSION_KEY, self.client.session) return response def logout(self): response = self.client.get('/admin/logout/') self.assertEqual(response.status_code, 200) self.assertNotIn(SESSION_KEY, self.client.session) def assertFormError(self, response, error): """Assert that error is found in response.context['form'] errors""" form_errors = list(itertools.chain(*response.context['form'].errors.values())) self.assertIn(str(error), form_errors) @override_settings(ROOT_URLCONF='django.contrib.auth.urls') class AuthViewNamedURLTests(AuthViewsTestCase): def test_named_urls(self): "Named URLs should be reversible" expected_named_urls = [ ('login', [], {}), ('logout', [], {}), ('password_change', [], {}), ('password_change_done', [], {}), ('password_reset', [], {}), ('password_reset_done', [], {}), ('password_reset_confirm', [], { 'uidb64': 'aaaaaaa', 'token': '1111-aaaaa', }), ('password_reset_complete', [], {}), ] for name, args, kwargs in expected_named_urls: with self.subTest(name=name): try: reverse(name, args=args, kwargs=kwargs) except NoReverseMatch: self.fail("Reversal of url named '%s' failed with NoReverseMatch" % name) class PasswordResetTest(AuthViewsTestCase): def setUp(self): self.client = PasswordResetConfirmClient() def test_email_not_found(self): """If the provided email is not registered, don't raise any error but also don't send any email.""" response = self.client.get('/password_reset/') self.assertEqual(response.status_code, 200) response = self.client.post('/password_reset/', {'email': '[email 
class PasswordResetTest(AuthViewsTestCase):

    def setUp(self):
        self.client = PasswordResetConfirmClient()

    def test_email_not_found(self):
        """If the provided email is not registered, don't raise any error but
        also don't send any email."""
        response = self.client.get('/password_reset/')
        self.assertEqual(response.status_code, 200)
        response = self.client.post('/password_reset/', {'email': 'not_a_real_email@email.com'})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 0)

    def test_email_found(self):
        "Email is sent if a valid email address is provided for password reset"
        response = self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 1)
        self.assertIn("http://", mail.outbox[0].body)
        self.assertEqual(settings.DEFAULT_FROM_EMAIL, mail.outbox[0].from_email)
        # optional multipart text/html email has been added. Make sure original,
        # default functionality is 100% the same
        self.assertFalse(mail.outbox[0].message().is_multipart())

    def test_extra_email_context(self):
        """
        extra_email_context should be available in the email template context.
        """
        response = self.client.post(
            '/password_reset_extra_email_context/',
            {'email': 'staffmember@example.com'},
        )
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 1)
        self.assertIn('Email email context: "Hello!"', mail.outbox[0].body)
        self.assertIn('http://custom.example.com/reset/', mail.outbox[0].body)

    def test_html_mail_template(self):
        """
        A multipart email with text/plain and text/html is sent
        if the html_email_template parameter is passed to the view.
        """
        response = self.client.post('/password_reset/html_email_template/', {'email': 'staffmember@example.com'})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 1)
        message = mail.outbox[0].message()
        self.assertEqual(len(message.get_payload()), 2)
        self.assertTrue(message.is_multipart())
        self.assertEqual(message.get_payload(0).get_content_type(), 'text/plain')
        self.assertEqual(message.get_payload(1).get_content_type(), 'text/html')
        self.assertNotIn('<html>', message.get_payload(0).get_payload())
        self.assertIn('<html>', message.get_payload(1).get_payload())

    def test_email_found_custom_from(self):
        "Email is sent if a valid email address is provided for password reset when a custom from_email is provided."
        response = self.client.post('/password_reset_from_email/', {'email': 'staffmember@example.com'})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual("staffmember@example.com", mail.outbox[0].from_email)

    # Skip any 500 handler action (like sending more mail...)
    @override_settings(DEBUG_PROPAGATE_EXCEPTIONS=True)
    def test_poisoned_http_host(self):
        "Poisoned HTTP_HOST headers can't be used for reset emails"
        # This attack is based on the way browsers handle URLs. The colon
        # should be used to separate the port, but if the URL contains an @,
        # the colon is interpreted as part of a username for login purposes,
        # making 'evil.com' the request domain. Since HTTP_HOST is used to
        # produce a meaningful reset URL, we need to be certain that the
        # HTTP_HOST header isn't poisoned. This is done as a check when
        # get_host() is invoked, but we check here as a practical consequence.
        with self.assertLogs('django.security.DisallowedHost', 'ERROR'):
            response = self.client.post(
                '/password_reset/',
                {'email': 'staffmember@example.com'},
                HTTP_HOST='www.example:dr.frankenstein@evil.tld'
            )
        self.assertEqual(response.status_code, 400)
        self.assertEqual(len(mail.outbox), 0)
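
    # Added note (not in the original tests): the poisoning exercised above
    # can be reproduced with the standard library's URL parsing:
    #
    #   >>> from urllib.parse import urlsplit
    #   >>> urlsplit('http://www.example:dr.frankenstein@evil.tld').hostname
    #   'evil.tld'
    #
    # Everything before the '@' is treated as credentials, so the real host
    # is the attacker-controlled domain.
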
    # Skip any 500 handler action (like sending more mail...)
    @override_settings(DEBUG_PROPAGATE_EXCEPTIONS=True)
    def test_poisoned_http_host_admin_site(self):
        "Poisoned HTTP_HOST headers can't be used for reset emails on admin views"
        with self.assertLogs('django.security.DisallowedHost', 'ERROR'):
            response = self.client.post(
                '/admin_password_reset/',
                {'email': 'staffmember@example.com'},
                HTTP_HOST='www.example:dr.frankenstein@evil.tld'
            )
        self.assertEqual(response.status_code, 400)
        self.assertEqual(len(mail.outbox), 0)

    def _test_confirm_start(self):
        # Start by creating the email
        self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
        self.assertEqual(len(mail.outbox), 1)
        return self._read_signup_email(mail.outbox[0])

    def _read_signup_email(self, email):
        urlmatch = re.search(r"https?://[^/]*(/.*reset/\S*)", email.body)
        self.assertIsNotNone(urlmatch, "No URL found in sent email")
        return urlmatch[0], urlmatch[1]
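
    # Added note (not in the original tests): the regex above captures both
    # the absolute reset URL and its path. For an email body containing
    # 'http://testserver/reset/MQ/abc-123/', urlmatch[0] is the full URL and
    # urlmatch[1] is '/reset/MQ/abc-123/'.
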
    def test_confirm_valid(self):
        url, path = self._test_confirm_start()
        response = self.client.get(path)
        # redirect to a 'complete' page:
        self.assertContains(response, "Please enter your new password")

    def test_confirm_invalid(self):
        url, path = self._test_confirm_start()
        # Let's munge the token in the path, but keep the same length,
        # in case the URLconf will reject a different length.
        path = path[:-5] + ("0" * 4) + path[-1]
        response = self.client.get(path)
        self.assertContains(response, "The password reset link was invalid")

    def test_confirm_invalid_user(self):
        # A nonexistent user returns a 200 response, not a 404.
        response = self.client.get('/reset/123456/1-1/')
        self.assertContains(response, "The password reset link was invalid")

    def test_confirm_overflow_user(self):
        # A base36 user id that overflows int returns a 200 response.
        response = self.client.get('/reset/zzzzzzzzzzzzz/1-1/')
        self.assertContains(response, "The password reset link was invalid")

    def test_confirm_invalid_post(self):
        # Same as test_confirm_invalid, but trying to do a POST instead.
        url, path = self._test_confirm_start()
        path = path[:-5] + ("0" * 4) + path[-1]
        self.client.post(path, {
            'new_password1': 'anewpassword',
            'new_password2': ' anewpassword',
        })
        # Check the password has not been changed
        u = User.objects.get(email='staffmember@example.com')
        self.assertTrue(not u.check_password("anewpassword"))

    def test_confirm_invalid_hash(self):
        """A POST with an invalid token is rejected."""
        u = User.objects.get(email='staffmember@example.com')
        original_password = u.password
        url, path = self._test_confirm_start()
        path_parts = path.split('-')
        path_parts[-1] = ("0") * 20 + '/'
        path = '-'.join(path_parts)
        response = self.client.post(path, {
            'new_password1': 'anewpassword',
            'new_password2': 'anewpassword',
        })
        self.assertIs(response.context['validlink'], False)
        u.refresh_from_db()
        self.assertEqual(original_password, u.password)  # password hasn't changed

    def test_confirm_complete(self):
        url, path = self._test_confirm_start()
        response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'anewpassword'})
        # Check the password has been changed
        u = User.objects.get(email='staffmember@example.com')
        self.assertTrue(u.check_password("anewpassword"))
        # The reset token is deleted from the session.
        self.assertNotIn(INTERNAL_RESET_SESSION_TOKEN, self.client.session)
        # Check we can't use the link again
        response = self.client.get(path)
        self.assertContains(response, "The password reset link was invalid")

    def test_confirm_different_passwords(self):
        url, path = self._test_confirm_start()
        response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'x'})
        self.assertFormError(response, SetPasswordForm.error_messages['password_mismatch'])

    def test_reset_redirect_default(self):
        response = self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
        self.assertRedirects(response, '/password_reset/done/', fetch_redirect_response=False)

    def test_reset_custom_redirect(self):
        response = self.client.post('/password_reset/custom_redirect/', {'email': 'staffmember@example.com'})
        self.assertRedirects(response, '/custom/', fetch_redirect_response=False)

    def test_reset_custom_redirect_named(self):
        response = self.client.post('/password_reset/custom_redirect/named/', {'email': 'staffmember@example.com'})
        self.assertRedirects(response, '/password_reset/', fetch_redirect_response=False)

    def test_confirm_redirect_default(self):
        url, path = self._test_confirm_start()
        response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'anewpassword'})
        self.assertRedirects(response, '/reset/done/', fetch_redirect_response=False)

    def test_confirm_redirect_custom(self):
        url, path = self._test_confirm_start()
        path = path.replace('/reset/', '/reset/custom/')
        response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'anewpassword'})
        self.assertRedirects(response, '/custom/', fetch_redirect_response=False)

    def test_confirm_redirect_custom_named(self):
        url, path = self._test_confirm_start()
        path = path.replace('/reset/', '/reset/custom/named/')
        response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'anewpassword'})
        self.assertRedirects(response, '/password_reset/', fetch_redirect_response=False)

    def test_confirm_custom_reset_url_token(self):
        url, path = self._test_confirm_start()
        path = path.replace('/reset/', '/reset/custom/token/')
        self.client.reset_url_token = 'set-passwordcustom'
        response = self.client.post(
            path,
            {'new_password1': 'anewpassword', 'new_password2': 'anewpassword'},
        )
        self.assertRedirects(response, '/reset/done/', fetch_redirect_response=False)

    def test_confirm_login_post_reset(self):
        url, path = self._test_confirm_start()
        path = path.replace('/reset/', '/reset/post_reset_login/')
        response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'anewpassword'})
        self.assertRedirects(response, '/reset/done/', fetch_redirect_response=False)
        self.assertIn(SESSION_KEY, self.client.session)
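
    # Added note (not in the original tests): the confirm view never leaves a
    # valid token in the URL for long. On the first GET, the token is stashed
    # in the session (under INTERNAL_RESET_SESSION_TOKEN) and the client is
    # redirected to a token-less 'set-password' URL; the redirect tests below
    # exercise both halves of that flow.
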
    @override_settings(
        AUTHENTICATION_BACKENDS=[
            'django.contrib.auth.backends.ModelBackend',
            'django.contrib.auth.backends.AllowAllUsersModelBackend',
        ]
    )
    def test_confirm_login_post_reset_custom_backend(self):
        # This backend is specified in the URL pattern.
        backend = 'django.contrib.auth.backends.AllowAllUsersModelBackend'
        url, path = self._test_confirm_start()
        path = path.replace('/reset/', '/reset/post_reset_login_custom_backend/')
        response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'anewpassword'})
        self.assertRedirects(response, '/reset/done/', fetch_redirect_response=False)
        self.assertIn(SESSION_KEY, self.client.session)
        self.assertEqual(self.client.session[BACKEND_SESSION_KEY], backend)

    def test_confirm_login_post_reset_already_logged_in(self):
        url, path = self._test_confirm_start()
        path = path.replace('/reset/', '/reset/post_reset_login/')
        self.login()
        response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'anewpassword'})
        self.assertRedirects(response, '/reset/done/', fetch_redirect_response=False)
        self.assertIn(SESSION_KEY, self.client.session)

    def test_confirm_display_user_from_form(self):
        url, path = self._test_confirm_start()
        response = self.client.get(path)
        # The password_reset_confirm() view passes the user object to the
        # ``SetPasswordForm``, even on GET requests (#16919). For this test,
        # ``{{ form.user }}`` is rendered in the template
        # registration/password_reset_confirm.html.
        username = User.objects.get(email='staffmember@example.com').username
        self.assertContains(response, "Hello, %s." % username)
        # However, the view should NOT pass any user object on a form if the
        # password reset link was invalid.
        response = self.client.get('/reset/zzzzzzzzzzzzz/1-1/')
        self.assertContains(response, "Hello, .")

    def test_confirm_link_redirects_to_set_password_page(self):
        url, path = self._test_confirm_start()
        # Don't use PasswordResetConfirmClient (self.client) here which
        # automatically fetches the redirect page.
        client = Client()
        response = client.get(path)
        token = response.resolver_match.kwargs['token']
        uuidb64 = response.resolver_match.kwargs['uidb64']
        self.assertRedirects(response, '/reset/%s/set-password/' % uuidb64)
        self.assertEqual(client.session['_password_reset_token'], token)

    def test_confirm_custom_reset_url_token_link_redirects_to_set_password_page(self):
        url, path = self._test_confirm_start()
        path = path.replace('/reset/', '/reset/custom/token/')
        client = Client()
        response = client.get(path)
        token = response.resolver_match.kwargs['token']
        uuidb64 = response.resolver_match.kwargs['uidb64']
        self.assertRedirects(response, '/reset/custom/token/%s/set-passwordcustom/' % uuidb64)
        self.assertEqual(client.session['_password_reset_token'], token)

    def test_invalid_link_if_going_directly_to_the_final_reset_password_url(self):
        url, path = self._test_confirm_start()
        _, uuidb64, _ = path.strip('/').split('/')
        response = Client().get('/reset/%s/set-password/' % uuidb64)
        self.assertContains(response, 'The password reset link was invalid')

    def test_missing_kwargs(self):
        msg = "The URL path must contain 'uidb64' and 'token' parameters."
        with self.assertRaisesMessage(ImproperlyConfigured, msg):
            self.client.get('/reset/missing_parameters/')
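
def _example_uidb64(user):
    # Illustrative helper (added; not used by the tests): how the uidb64
    # component of a reset link is derived from a user's primary key, using
    # the same utilities the password-reset machinery relies on.
    from django.utils.encoding import force_bytes
    return urlsafe_base64_encode(force_bytes(user.pk))
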
@override_settings(AUTH_USER_MODEL='auth_tests.CustomUser')
class CustomUserPasswordResetTest(AuthViewsTestCase):
    user_email = 'staffmember@example.com'

    @classmethod
    def setUpTestData(cls):
        cls.u1 = CustomUser.custom_objects.create(
            email='staffmember@example.com',
            date_of_birth=datetime.date(1976, 11, 8),
        )
        cls.u1.set_password('password')
        cls.u1.save()

    def setUp(self):
        self.client = PasswordResetConfirmClient()

    def _test_confirm_start(self):
        # Start by creating the email
        response = self.client.post('/password_reset/', {'email': self.user_email})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 1)
        return self._read_signup_email(mail.outbox[0])

    def _read_signup_email(self, email):
        urlmatch = re.search(r"https?://[^/]*(/.*reset/\S*)", email.body)
        self.assertIsNotNone(urlmatch, "No URL found in sent email")
        return urlmatch[0], urlmatch[1]

    def test_confirm_valid_custom_user(self):
        url, path = self._test_confirm_start()
        response = self.client.get(path)
        # redirect to a 'complete' page:
        self.assertContains(response, "Please enter your new password")
        # then submit a new password
        response = self.client.post(path, {
            'new_password1': 'anewpassword',
            'new_password2': 'anewpassword',
        })
        self.assertRedirects(response, '/reset/done/')


@override_settings(AUTH_USER_MODEL='auth_tests.UUIDUser')
class UUIDUserPasswordResetTest(CustomUserPasswordResetTest):

    def _test_confirm_start(self):
        # instead of fixture
        UUIDUser.objects.create_user(
            email=self.user_email,
            username='foo',
            password='foo',
        )
        return super()._test_confirm_start()

    def test_confirm_invalid_uuid(self):
        """A uidb64 that decodes to a non-UUID doesn't crash."""
        _, path = self._test_confirm_start()
        invalid_uidb64 = urlsafe_base64_encode(b'INVALID_UUID')
        first, _uuidb64_, second = path.strip('/').split('/')
        response = self.client.get('/' + '/'.join((first, invalid_uidb64, second)) + '/')
        self.assertContains(response, 'The password reset link was invalid')


class ChangePasswordTest(AuthViewsTestCase):

    def fail_login(self):
        response = self.client.post('/login/', {
            'username': 'testclient',
            'password': 'password',
        })
        self.assertFormError(response, AuthenticationForm.error_messages['invalid_login'] % {
            'username': User._meta.get_field('username').verbose_name
        })

    def logout(self):
        self.client.get('/logout/')

    def test_password_change_fails_with_invalid_old_password(self):
        self.login()
        response = self.client.post('/password_change/', {
            'old_password': 'donuts',
            'new_password1': 'password1',
            'new_password2': 'password1',
        })
        self.assertFormError(response, PasswordChangeForm.error_messages['password_incorrect'])

    def test_password_change_fails_with_mismatched_passwords(self):
        self.login()
        response = self.client.post('/password_change/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'donuts',
        })
        self.assertFormError(response, SetPasswordForm.error_messages['password_mismatch'])

    def test_password_change_succeeds(self):
        self.login()
        self.client.post('/password_change/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'password1',
        })
        self.fail_login()
        self.login(password='password1')

    def test_password_change_done_succeeds(self):
        self.login()
        response = self.client.post('/password_change/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'password1',
        })
        self.assertRedirects(response, '/password_change/done/', fetch_redirect_response=False)
    @override_settings(LOGIN_URL='/login/')
    def test_password_change_done_fails(self):
        response = self.client.get('/password_change/done/')
        self.assertRedirects(response, '/login/?next=/password_change/done/', fetch_redirect_response=False)

    def test_password_change_redirect_default(self):
        self.login()
        response = self.client.post('/password_change/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'password1',
        })
        self.assertRedirects(response, '/password_change/done/', fetch_redirect_response=False)

    def test_password_change_redirect_custom(self):
        self.login()
        response = self.client.post('/password_change/custom/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'password1',
        })
        self.assertRedirects(response, '/custom/', fetch_redirect_response=False)

    def test_password_change_redirect_custom_named(self):
        self.login()
        response = self.client.post('/password_change/custom/named/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'password1',
        })
        self.assertRedirects(response, '/password_reset/', fetch_redirect_response=False)


class SessionAuthenticationTests(AuthViewsTestCase):
    def test_user_password_change_updates_session(self):
        """
        #21649 - Ensure contrib.auth.views.password_change updates the user's
        session auth hash after a password change so the session isn't logged out.
        """
        self.login()
        original_session_key = self.client.session.session_key
        response = self.client.post('/password_change/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'password1',
        })
        # if the hash isn't updated, retrieving the redirection page will fail.
        self.assertRedirects(response, '/password_change/done/')
        # The session key is rotated.
        self.assertNotEqual(original_session_key, self.client.session.session_key)
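
def _example_update_session_auth_hash(request, user, new_password):
    # Illustrative sketch (added; not part of the original tests): the
    # view-side behavior that SessionAuthenticationTests verifies. Calling
    # update_session_auth_hash() after a password change keeps the current
    # session valid instead of logging the user out.
    from django.contrib.auth import update_session_auth_hash
    user.set_password(new_password)
    user.save()
    update_session_auth_hash(request, user)
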
class LoginTest(AuthViewsTestCase):

    def test_current_site_in_context_after_login(self):
        response = self.client.get(reverse('login'))
        self.assertEqual(response.status_code, 200)
        if apps.is_installed('django.contrib.sites'):
            Site = apps.get_model('sites.Site')
            site = Site.objects.get_current()
            self.assertEqual(response.context['site'], site)
            self.assertEqual(response.context['site_name'], site.name)
        else:
            self.assertIsInstance(response.context['site'], RequestSite)
        self.assertIsInstance(response.context['form'], AuthenticationForm)

    def test_security_check(self):
        login_url = reverse('login')

        # These URLs should not pass the security check.
        bad_urls = (
            'http://example.com',
            'http:///example.com',
            'https://example.com',
            'ftp://example.com',
            '///example.com',
            '//example.com',
            'javascript:alert("XSS")',
        )
        for bad_url in bad_urls:
            with self.subTest(bad_url=bad_url):
                nasty_url = '%(url)s?%(next)s=%(bad_url)s' % {
                    'url': login_url,
                    'next': REDIRECT_FIELD_NAME,
                    'bad_url': quote(bad_url),
                }
                response = self.client.post(nasty_url, {
                    'username': 'testclient',
                    'password': 'password',
                })
                self.assertEqual(response.status_code, 302)
                self.assertNotIn(bad_url, response.url, '%s should be blocked' % bad_url)

        # These URLs should pass the security check.
        good_urls = (
            '/view/?param=http://example.com',
            '/view/?param=https://example.com',
            '/view?param=ftp://example.com',
            'view/?param=//example.com',
            'https://testserver/',
            'HTTPS://testserver/',
            '//testserver/',
            '/url%20with%20spaces/',
        )
        for good_url in good_urls:
            with self.subTest(good_url=good_url):
                safe_url = '%(url)s?%(next)s=%(good_url)s' % {
                    'url': login_url,
                    'next': REDIRECT_FIELD_NAME,
                    'good_url': quote(good_url),
                }
                response = self.client.post(safe_url, {
                    'username': 'testclient',
                    'password': 'password',
                })
                self.assertEqual(response.status_code, 302)
                self.assertIn(good_url, response.url, '%s should be allowed' % good_url)

    def test_security_check_https(self):
        login_url = reverse('login')
        non_https_next_url = 'http://testserver/path'
        not_secured_url = '%(url)s?%(next)s=%(next_url)s' % {
            'url': login_url,
            'next': REDIRECT_FIELD_NAME,
            'next_url': quote(non_https_next_url),
        }
        post_data = {
            'username': 'testclient',
            'password': 'password',
        }
        response = self.client.post(not_secured_url, post_data, secure=True)
        self.assertEqual(response.status_code, 302)
        self.assertNotEqual(response.url, non_https_next_url)
        self.assertEqual(response.url, settings.LOGIN_REDIRECT_URL)

    def test_login_form_contains_request(self):
        # The custom authentication form for this login requires a request to
        # initialize it.
        response = self.client.post('/custom_request_auth_login/', {
            'username': 'testclient',
            'password': 'password',
        })
        # The login was successful.
        self.assertRedirects(response, settings.LOGIN_REDIRECT_URL, fetch_redirect_response=False)

    def test_login_csrf_rotate(self):
        """
        Makes sure that a login rotates the currently-used CSRF token.
        """
        def get_response(request):
            return HttpResponse()

        # Do a GET to establish a CSRF token
        # The test client isn't used here as it's a test for middleware.
        req = HttpRequest()
        CsrfViewMiddleware(get_response).process_view(req, LoginView.as_view(), (), {})
        # get_token() triggers CSRF token inclusion in the response
        get_token(req)
        resp = CsrfViewMiddleware(LoginView.as_view())(req)
        csrf_cookie = resp.cookies.get(settings.CSRF_COOKIE_NAME, None)
        token1 = csrf_cookie.coded_value

        # Prepare the POST request
        req = HttpRequest()
        req.COOKIES[settings.CSRF_COOKIE_NAME] = token1
        req.method = "POST"
        req.POST = {'username': 'testclient', 'password': 'password', 'csrfmiddlewaretoken': token1}

        # Use POST request to log in
        SessionMiddleware(get_response).process_request(req)
        CsrfViewMiddleware(get_response).process_view(req, LoginView.as_view(), (), {})
        req.META["SERVER_NAME"] = "testserver"  # Required to have redirect work in login view
        req.META["SERVER_PORT"] = 80
        resp = CsrfViewMiddleware(LoginView.as_view())(req)
        csrf_cookie = resp.cookies.get(settings.CSRF_COOKIE_NAME, None)
        token2 = csrf_cookie.coded_value

        # Check the CSRF token switched
        self.assertNotEqual(token1, token2)
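
    # Added note (not in the original tests): the rotation asserted above
    # happens because django.contrib.auth.login() calls
    # django.middleware.csrf.rotate_token(), so a CSRF token issued before
    # login can't be replayed in the authenticated session.
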
    def test_session_key_flushed_on_login(self):
        """
        To avoid reusing another user's session, ensure a new, empty session is
        created if the existing session corresponds to a different
        authenticated user.
        """
        self.login()
        original_session_key = self.client.session.session_key

        self.login(username='staff')
        self.assertNotEqual(original_session_key, self.client.session.session_key)

    def test_session_key_flushed_on_login_after_password_change(self):
        """
        As above, but same user logging in after a password change.
        """
        self.login()
        original_session_key = self.client.session.session_key

        # If no password change, session key should not be flushed.
        self.login()
        self.assertEqual(original_session_key, self.client.session.session_key)

        user = User.objects.get(username='testclient')
        user.set_password('foobar')
        user.save()

        self.login(password='foobar')
        self.assertNotEqual(original_session_key, self.client.session.session_key)

    def test_login_session_without_hash_session_key(self):
        """
        Session without django.contrib.auth.HASH_SESSION_KEY should login
        without an exception.
        """
        user = User.objects.get(username='testclient')
        engine = import_module(settings.SESSION_ENGINE)
        session = engine.SessionStore()
        session[SESSION_KEY] = user.id
        session.save()
        original_session_key = session.session_key
        self.client.cookies[settings.SESSION_COOKIE_NAME] = original_session_key

        self.login()
        self.assertNotEqual(original_session_key, self.client.session.session_key)

    def test_login_get_default_redirect_url(self):
        response = self.login(url='/login/get_default_redirect_url/')
        self.assertRedirects(response, '/custom/', fetch_redirect_response=False)

    def test_login_next_page(self):
        response = self.login(url='/login/next_page/')
        self.assertRedirects(response, '/somewhere/', fetch_redirect_response=False)

    def test_login_named_next_page_named(self):
        response = self.login(url='/login/next_page/named/')
        self.assertRedirects(response, '/password_reset/', fetch_redirect_response=False)

    @override_settings(LOGIN_REDIRECT_URL='/custom/')
    def test_login_next_page_overrides_login_redirect_url_setting(self):
        response = self.login(url='/login/next_page/')
        self.assertRedirects(response, '/somewhere/', fetch_redirect_response=False)

    def test_login_redirect_url_overrides_next_page(self):
        response = self.login(url='/login/next_page/?next=/test/')
        self.assertRedirects(response, '/test/', fetch_redirect_response=False)

    def test_login_redirect_url_overrides_get_default_redirect_url(self):
        response = self.login(url='/login/get_default_redirect_url/?next=/test/')
        self.assertRedirects(response, '/test/', fetch_redirect_response=False)


class LoginURLSettings(AuthViewsTestCase):
    """Tests for settings.LOGIN_URL."""
    def assertLoginURLEquals(self, url):
        response = self.client.get('/login_required/')
        self.assertRedirects(response, url, fetch_redirect_response=False)

    @override_settings(LOGIN_URL='/login/')
    def test_standard_login_url(self):
        self.assertLoginURLEquals('/login/?next=/login_required/')

    @override_settings(LOGIN_URL='login')
    def test_named_login_url(self):
        self.assertLoginURLEquals('/login/?next=/login_required/')

    @override_settings(LOGIN_URL='http://remote.example.com/login')
    def test_remote_login_url(self):
        quoted_next = quote('http://testserver/login_required/')
        expected = 'http://remote.example.com/login?next=%s' % quoted_next
        self.assertLoginURLEquals(expected)

    @override_settings(LOGIN_URL='https:///login/')
    def test_https_login_url(self):
        quoted_next = quote('http://testserver/login_required/')
        expected = 'https:///login/?next=%s' % quoted_next
        self.assertLoginURLEquals(expected)

    @override_settings(LOGIN_URL='/login/?pretty=1')
    def test_login_url_with_querystring(self):
        self.assertLoginURLEquals('/login/?pretty=1&next=/login_required/')

    @override_settings(LOGIN_URL='http://remote.example.com/login/?next=/default/')
    def test_remote_login_url_with_next_querystring(self):
        quoted_next = quote('http://testserver/login_required/')
        expected = 'http://remote.example.com/login/?next=%s' % quoted_next
        self.assertLoginURLEquals(expected)

    @override_settings(LOGIN_URL=reverse_lazy('login'))
    def test_lazy_login_url(self):
        self.assertLoginURLEquals('/login/?next=/login_required/')
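
def _example_redirect_to_login(next_path='/login_required/'):
    # Illustrative sketch (added; not used by the tests): the redirect URLs
    # asserted in LoginURLSettings are built by redirect_to_login(), which
    # appends the 'next' parameter to the configured LOGIN_URL.
    return redirect_to_login(next_path, login_url='/login/').url
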
class LoginRedirectUrlTest(AuthViewsTestCase):
    """Tests for settings.LOGIN_REDIRECT_URL."""
    def assertLoginRedirectURLEqual(self, url):
        response = self.login()
        self.assertRedirects(response, url, fetch_redirect_response=False)

    def test_default(self):
        self.assertLoginRedirectURLEqual('/accounts/profile/')

    @override_settings(LOGIN_REDIRECT_URL='/custom/')
    def test_custom(self):
        self.assertLoginRedirectURLEqual('/custom/')

    @override_settings(LOGIN_REDIRECT_URL='password_reset')
    def test_named(self):
        self.assertLoginRedirectURLEqual('/password_reset/')

    @override_settings(LOGIN_REDIRECT_URL='http://remote.example.com/welcome/')
    def test_remote(self):
        self.assertLoginRedirectURLEqual('http://remote.example.com/welcome/')


class RedirectToLoginTests(AuthViewsTestCase):
    """Tests for the redirect_to_login view"""
    @override_settings(LOGIN_URL=reverse_lazy('login'))
    def test_redirect_to_login_with_lazy(self):
        login_redirect_response = redirect_to_login(next='/else/where/')
        expected = '/login/?next=/else/where/'
        self.assertEqual(expected, login_redirect_response.url)

    @override_settings(LOGIN_URL=reverse_lazy('login'))
    def test_redirect_to_login_with_lazy_and_unicode(self):
        login_redirect_response = redirect_to_login(next='/else/where/झ/')
        expected = '/login/?next=/else/where/%E0%A4%9D/'
        self.assertEqual(expected, login_redirect_response.url)


class LogoutThenLoginTests(AuthViewsTestCase):
    """Tests for the logout_then_login view"""
    def confirm_logged_out(self):
        self.assertNotIn(SESSION_KEY, self.client.session)

    @override_settings(LOGIN_URL='/login/')
    def test_default_logout_then_login(self):
        self.login()
        req = HttpRequest()
        req.method = 'GET'
        req.session = self.client.session
        response = logout_then_login(req)
        self.confirm_logged_out()
        self.assertRedirects(response, '/login/', fetch_redirect_response=False)

    def test_logout_then_login_with_custom_login(self):
        self.login()
        req = HttpRequest()
        req.method = 'GET'
        req.session = self.client.session
        response = logout_then_login(req, login_url='/custom/')
        self.confirm_logged_out()
        self.assertRedirects(response, '/custom/', fetch_redirect_response=False)


class LoginRedirectAuthenticatedUser(AuthViewsTestCase):
    dont_redirect_url = '/login/redirect_authenticated_user_default/'
    do_redirect_url = '/login/redirect_authenticated_user/'

    def test_default(self):
        """Stay on the login page by default."""
        self.login()
        response = self.client.get(self.dont_redirect_url)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['next'], '')

    def test_guest(self):
        """If not logged in, stay on the same page."""
        response = self.client.get(self.do_redirect_url)
        self.assertEqual(response.status_code, 200)

    def test_redirect(self):
        """If logged in, go to default redirected URL."""
        self.login()
        response = self.client.get(self.do_redirect_url)
        self.assertRedirects(response, '/accounts/profile/', fetch_redirect_response=False)

    @override_settings(LOGIN_REDIRECT_URL='/custom/')
    def test_redirect_url(self):
        """If logged in, go to custom redirected URL."""
        self.login()
        response = self.client.get(self.do_redirect_url)
        self.assertRedirects(response, '/custom/', fetch_redirect_response=False)

    def test_redirect_param(self):
        """If next is specified as a GET parameter, go there."""
        self.login()
        url = self.do_redirect_url + '?next=/custom_next/'
        response = self.client.get(url)
        self.assertRedirects(response, '/custom_next/', fetch_redirect_response=False)
    def test_redirect_loop(self):
        """
        Detect a redirect loop if LOGIN_REDIRECT_URL is not correctly set,
        with and without custom parameters.
        """
        self.login()
        msg = (
            "Redirection loop for authenticated user detected. Check that "
            "your LOGIN_REDIRECT_URL doesn't point to a login page."
        )
        with self.settings(LOGIN_REDIRECT_URL=self.do_redirect_url):
            with self.assertRaisesMessage(ValueError, msg):
                self.client.get(self.do_redirect_url)

            url = self.do_redirect_url + '?bla=2'
            with self.assertRaisesMessage(ValueError, msg):
                self.client.get(url)

    def test_permission_required_not_logged_in(self):
        # Not logged in ...
        with self.settings(LOGIN_URL=self.do_redirect_url):
            # redirected to login.
            response = self.client.get('/permission_required_redirect/', follow=True)
            self.assertEqual(response.status_code, 200)
            # exception raised.
            response = self.client.get('/permission_required_exception/', follow=True)
            self.assertEqual(response.status_code, 403)
            # redirected to login.
            response = self.client.get('/login_and_permission_required_exception/', follow=True)
            self.assertEqual(response.status_code, 200)

    def test_permission_required_logged_in(self):
        self.login()
        # Already logged in...
        with self.settings(LOGIN_URL=self.do_redirect_url):
            # redirect loop encountered.
            with self.assertRaisesMessage(RedirectCycleError, 'Redirect loop detected.'):
                self.client.get('/permission_required_redirect/', follow=True)
            # exception raised.
            response = self.client.get('/permission_required_exception/', follow=True)
            self.assertEqual(response.status_code, 403)
            # exception raised.
            response = self.client.get('/login_and_permission_required_exception/', follow=True)
            self.assertEqual(response.status_code, 403)


class LoginSuccessURLAllowedHostsTest(AuthViewsTestCase):
    def test_success_url_allowed_hosts_same_host(self):
        response = self.client.post('/login/allowed_hosts/', {
            'username': 'testclient',
            'password': 'password',
            'next': 'https://testserver/home',
        })
        self.assertIn(SESSION_KEY, self.client.session)
        self.assertRedirects(response, 'https://testserver/home', fetch_redirect_response=False)

    def test_success_url_allowed_hosts_safe_host(self):
        response = self.client.post('/login/allowed_hosts/', {
            'username': 'testclient',
            'password': 'password',
            'next': 'https://otherserver/home',
        })
        self.assertIn(SESSION_KEY, self.client.session)
        self.assertRedirects(response, 'https://otherserver/home', fetch_redirect_response=False)

    def test_success_url_allowed_hosts_unsafe_host(self):
        response = self.client.post('/login/allowed_hosts/', {
            'username': 'testclient',
            'password': 'password',
            'next': 'https://evil/home',
        })
        self.assertIn(SESSION_KEY, self.client.session)
        self.assertRedirects(response, '/accounts/profile/', fetch_redirect_response=False)


class LogoutTest(AuthViewsTestCase):

    def confirm_logged_out(self):
        self.assertNotIn(SESSION_KEY, self.client.session)

    def test_logout_default(self):
        "Logout without next_page option renders the default template"
        self.login()
        response = self.client.get('/logout/')
        self.assertContains(response, 'Logged out')
        self.confirm_logged_out()

    def test_logout_with_post(self):
        self.login()
        response = self.client.post('/logout/')
        self.assertContains(response, 'Logged out')
        self.confirm_logged_out()

    def test_14377(self):
        # Bug 14377
        self.login()
        response = self.client.get('/logout/')
        self.assertIn('site', response.context)
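
    # Added note (not in the original tests): LogoutView is decorated with
    # never_cache, which is why the test below can expect Cache-Control
    # headers that prevent the logged-out page from being cached.
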
""" response = self.client.get('/logout/') self.assertIn('no-store', response.headers['Cache-Control']) def test_logout_with_overridden_redirect_url(self): # Bug 11223 self.login() response = self.client.get('/logout/next_page/') self.assertRedirects(response, '/somewhere/', fetch_redirect_response=False) response = self.client.get('/logout/next_page/?next=/login/') self.assertRedirects(response, '/login/', fetch_redirect_response=False) self.confirm_logged_out() def test_logout_with_next_page_specified(self): "Logout with next_page option given redirects to specified resource" self.login() response = self.client.get('/logout/next_page/') self.assertRedirects(response, '/somewhere/', fetch_redirect_response=False) self.confirm_logged_out() def test_logout_with_redirect_argument(self): "Logout with query string redirects to specified resource" self.login() response = self.client.get('/logout/?next=/login/') self.assertRedirects(response, '/login/', fetch_redirect_response=False) self.confirm_logged_out() def test_logout_with_custom_redirect_argument(self): "Logout with custom query string redirects to specified resource" self.login() response = self.client.get('/logout/custom_query/?follow=/somewhere/') self.assertRedirects(response, '/somewhere/', fetch_redirect_response=False) self.confirm_logged_out() def test_logout_with_named_redirect(self): "Logout resolves names or URLs passed as next_page." self.login() response = self.client.get('/logout/next_page/named/') self.assertRedirects(response, '/password_reset/', fetch_redirect_response=False) self.confirm_logged_out() def test_success_url_allowed_hosts_same_host(self): self.login() response = self.client.get('/logout/allowed_hosts/?next=https://testserver/') self.assertRedirects(response, 'https://testserver/', fetch_redirect_response=False) self.confirm_logged_out() def test_success_url_allowed_hosts_safe_host(self): self.login() response = self.client.get('/logout/allowed_hosts/?next=https://otherserver/') self.assertRedirects(response, 'https://otherserver/', fetch_redirect_response=False) self.confirm_logged_out() def test_success_url_allowed_hosts_unsafe_host(self): self.login() response = self.client.get('/logout/allowed_hosts/?next=https://evil/') self.assertRedirects(response, '/logout/allowed_hosts/', fetch_redirect_response=False) self.confirm_logged_out() def test_security_check(self): logout_url = reverse('logout') # These URLs should not pass the security check. bad_urls = ( 'http://example.com', 'http:///example.com', 'https://example.com', 'ftp://example.com', '///example.com', '//example.com', 'javascript:alert("XSS")', ) for bad_url in bad_urls: with self.subTest(bad_url=bad_url): nasty_url = '%(url)s?%(next)s=%(bad_url)s' % { 'url': logout_url, 'next': REDIRECT_FIELD_NAME, 'bad_url': quote(bad_url), } self.login() response = self.client.get(nasty_url) self.assertEqual(response.status_code, 302) self.assertNotIn(bad_url, response.url, '%s should be blocked' % bad_url) self.confirm_logged_out() # These URLs should pass the security check. 
        good_urls = (
            '/view/?param=http://example.com',
            '/view/?param=https://example.com',
            '/view?param=ftp://example.com',
            'view/?param=//example.com',
            'https://testserver/',
            'HTTPS://testserver/',
            '//testserver/',
            '/url%20with%20spaces/',
        )
        for good_url in good_urls:
            with self.subTest(good_url=good_url):
                safe_url = '%(url)s?%(next)s=%(good_url)s' % {
                    'url': logout_url,
                    'next': REDIRECT_FIELD_NAME,
                    'good_url': quote(good_url),
                }
                self.login()
                response = self.client.get(safe_url)
                self.assertEqual(response.status_code, 302)
                self.assertIn(good_url, response.url, '%s should be allowed' % good_url)
                self.confirm_logged_out()

    def test_security_check_https(self):
        logout_url = reverse('logout')
        non_https_next_url = 'http://testserver/'
        url = '%(url)s?%(next)s=%(next_url)s' % {
            'url': logout_url,
            'next': REDIRECT_FIELD_NAME,
            'next_url': quote(non_https_next_url),
        }
        self.login()
        response = self.client.get(url, secure=True)
        self.assertRedirects(response, logout_url, fetch_redirect_response=False)
        self.confirm_logged_out()

    def test_logout_preserve_language(self):
        """Language is preserved after logout."""
        self.login()
        self.client.post('/setlang/', {'language': 'pl'})
        self.assertEqual(self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, 'pl')
        self.client.get('/logout/')
        self.assertEqual(self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, 'pl')

    @override_settings(LOGOUT_REDIRECT_URL='/custom/')
    def test_logout_redirect_url_setting(self):
        self.login()
        response = self.client.get('/logout/')
        self.assertRedirects(response, '/custom/', fetch_redirect_response=False)

    @override_settings(LOGOUT_REDIRECT_URL='logout')
    def test_logout_redirect_url_named_setting(self):
        self.login()
        response = self.client.get('/logout/')
        self.assertRedirects(response, '/logout/', fetch_redirect_response=False)


def get_perm(Model, perm):
    ct = ContentType.objects.get_for_model(Model)
    return Permission.objects.get(content_type=ct, codename=perm)


# Redirect in test_user_change_password will fail if session auth hash
# isn't updated after password change (#21649)
@override_settings(ROOT_URLCONF='auth_tests.urls_admin')
class ChangelistTests(AuthViewsTestCase):

    @classmethod
    def setUpTestData(cls):
        super().setUpTestData()
        # Make me a superuser before logging in.
        User.objects.filter(username='testclient').update(is_staff=True, is_superuser=True)

    def setUp(self):
        self.login()
        # Get the latest last_login value.
        self.admin = User.objects.get(pk=self.u1.pk)

    def get_user_data(self, user):
        return {
            'username': user.username,
            'password': user.password,
            'email': user.email,
            'is_active': user.is_active,
            'is_staff': user.is_staff,
            'is_superuser': user.is_superuser,
            'last_login_0': user.last_login.strftime('%Y-%m-%d'),
            'last_login_1': user.last_login.strftime('%H:%M:%S'),
            'initial-last_login_0': user.last_login.strftime('%Y-%m-%d'),
            'initial-last_login_1': user.last_login.strftime('%H:%M:%S'),
            'date_joined_0': user.date_joined.strftime('%Y-%m-%d'),
            'date_joined_1': user.date_joined.strftime('%H:%M:%S'),
            'initial-date_joined_0': user.date_joined.strftime('%Y-%m-%d'),
            'initial-date_joined_1': user.date_joined.strftime('%H:%M:%S'),
            'first_name': user.first_name,
            'last_name': user.last_name,
        }

    # #20078 - users shouldn't be allowed to guess password hashes via
    # repeated password__startswith queries.
    def test_changelist_disallows_password_lookups(self):
        # A lookup that tries to filter on password isn't OK
        with self.assertLogs('django.security.DisallowedModelAdminLookup', 'ERROR'):
            response = self.client.get(reverse('auth_test_admin:auth_user_changelist') + '?password__startswith=sha1$')
        self.assertEqual(response.status_code, 400)

    def test_user_change_email(self):
        data = self.get_user_data(self.admin)
        data['email'] = 'new_' + data['email']
        response = self.client.post(
            reverse('auth_test_admin:auth_user_change', args=(self.admin.pk,)),
            data
        )
        self.assertRedirects(response, reverse('auth_test_admin:auth_user_changelist'))
        row = LogEntry.objects.latest('id')
        self.assertEqual(row.get_change_message(), 'Changed Email address.')

    def test_user_not_change(self):
        response = self.client.post(
            reverse('auth_test_admin:auth_user_change', args=(self.admin.pk,)),
            self.get_user_data(self.admin)
        )
        self.assertRedirects(response, reverse('auth_test_admin:auth_user_changelist'))
        row = LogEntry.objects.latest('id')
        self.assertEqual(row.get_change_message(), 'No fields changed.')

    def test_user_change_password(self):
        user_change_url = reverse('auth_test_admin:auth_user_change', args=(self.admin.pk,))
        password_change_url = reverse('auth_test_admin:auth_user_password_change', args=(self.admin.pk,))

        response = self.client.get(user_change_url)
        # Test the link inside password field help_text.
        rel_link = re.search(
            r'you can change the password using <a href="([^"]*)">this form</a>',
            response.content.decode()
        )[1]
        self.assertEqual(urljoin(user_change_url, rel_link), password_change_url)

        response = self.client.post(
            password_change_url,
            {
                'password1': 'password1',
                'password2': 'password1',
            }
        )
        self.assertRedirects(response, user_change_url)
        row = LogEntry.objects.latest('id')
        self.assertEqual(row.get_change_message(), 'Changed password.')
        self.logout()
        self.login(password='password1')

    def test_user_change_different_user_password(self):
        u = User.objects.get(email='staffmember@example.com')
        response = self.client.post(
            reverse('auth_test_admin:auth_user_password_change', args=(u.pk,)),
            {
                'password1': 'password1',
                'password2': 'password1',
            }
        )
        self.assertRedirects(response, reverse('auth_test_admin:auth_user_change', args=(u.pk,)))
        row = LogEntry.objects.latest('id')
        self.assertEqual(row.user_id, self.admin.pk)
        self.assertEqual(row.object_id, str(u.pk))
        self.assertEqual(row.get_change_message(), 'Changed password.')

    def test_password_change_bad_url(self):
        response = self.client.get(reverse('auth_test_admin:auth_user_password_change', args=('foobar',)))
        self.assertEqual(response.status_code, 404)

    @mock.patch('django.contrib.auth.admin.UserAdmin.has_change_permission')
    def test_user_change_password_passes_user_to_has_change_permission(self, has_change_permission):
        url = reverse('auth_test_admin:auth_user_password_change', args=(self.admin.pk,))
        self.client.post(url, {'password1': 'password1', 'password2': 'password1'})
        (_request, user), _kwargs = has_change_permission.call_args
        self.assertEqual(user.pk, self.admin.pk)
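
    # Added note (not in the original tests): mock.call_args is an
    # (args, kwargs) pair, so the unpacking in the test above pulls out the
    # positional (request, user) arguments passed to has_change_permission().
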
    def test_view_user_password_is_readonly(self):
        u = User.objects.get(username='testclient')
        u.is_superuser = False
        u.save()
        original_password = u.password
        u.user_permissions.add(get_perm(User, 'view_user'))
        response = self.client.get(reverse('auth_test_admin:auth_user_change', args=(u.pk,)),)
        algo, salt, hash_string = (u.password.split('$'))
        self.assertContains(response, '<div class="readonly">testclient</div>')
        # ReadOnlyPasswordHashWidget is used to render the field.
        self.assertContains(
            response,
            '<strong>algorithm</strong>: %s\n\n'
            '<strong>salt</strong>: %s********************\n\n'
            '<strong>hash</strong>: %s**************************\n\n' % (
                algo, salt[:2], hash_string[:6],
            ),
            html=True,
        )
        # Value in POST data is ignored.
        data = self.get_user_data(u)
        data['password'] = 'shouldnotchange'
        change_url = reverse('auth_test_admin:auth_user_change', args=(u.pk,))
        response = self.client.post(change_url, data)
        self.assertEqual(response.status_code, 403)
        u.refresh_from_db()
        self.assertEqual(u.password, original_password)


@override_settings(
    AUTH_USER_MODEL='auth_tests.UUIDUser',
    ROOT_URLCONF='auth_tests.urls_custom_user_admin',
)
class UUIDUserTests(TestCase):

    def test_admin_password_change(self):
        u = UUIDUser.objects.create_superuser(username='uuid', email='foo@bar.com', password='test')
        self.assertTrue(self.client.login(username='uuid', password='test'))

        user_change_url = reverse('custom_user_admin:auth_tests_uuiduser_change', args=(u.pk,))
        response = self.client.get(user_change_url)
        self.assertEqual(response.status_code, 200)

        password_change_url = reverse('custom_user_admin:auth_user_password_change', args=(u.pk,))
        response = self.client.get(password_change_url)
        # The action attribute is omitted.
        self.assertContains(response, '<form method="post" id="uuiduser_form">')

        # A LogEntry is created with pk=1 which breaks a FK constraint on MySQL
        with connection.constraint_checks_disabled():
            response = self.client.post(password_change_url, {
                'password1': 'password1',
                'password2': 'password1',
            })
        self.assertRedirects(response, user_change_url)
        row = LogEntry.objects.latest('id')
        self.assertEqual(row.user_id, 1)  # hardcoded in CustomUserAdmin.log_change()
        self.assertEqual(row.object_id, str(u.pk))
        self.assertEqual(row.get_change_message(), 'Changed password.')

        # The LogEntry.user column isn't altered to a UUID type so it's set to
        # an integer manually in CustomUserAdmin to avoid an error. To avoid a
        # constraint error, delete the entry before constraints are checked
        # after the test.
        row.delete()
939391473dbc5fe2b050c2822d82ff8de866aad30b0c888898a8dc8f83cdd069
from datetime import datetime

from django.conf import settings
from django.contrib.auth import authenticate
from django.contrib.auth.backends import RemoteUserBackend
from django.contrib.auth.middleware import RemoteUserMiddleware
from django.contrib.auth.models import User
from django.middleware.csrf import _get_new_csrf_string, _mask_cipher_secret
from django.test import Client, TestCase, modify_settings, override_settings
from django.utils import timezone


@override_settings(ROOT_URLCONF='auth_tests.urls')
class RemoteUserTest(TestCase):
    middleware = 'django.contrib.auth.middleware.RemoteUserMiddleware'
    backend = 'django.contrib.auth.backends.RemoteUserBackend'
    header = 'REMOTE_USER'
    email_header = 'REMOTE_EMAIL'

    # Usernames to be passed in REMOTE_USER for the test_known_user test case.
    known_user = 'knownuser'
    known_user2 = 'knownuser2'

    def setUp(self):
        self.patched_settings = modify_settings(
            AUTHENTICATION_BACKENDS={'append': self.backend},
            MIDDLEWARE={'append': self.middleware},
        )
        self.patched_settings.enable()

    def tearDown(self):
        self.patched_settings.disable()

    def test_no_remote_user(self):
        """Users are not created when remote user is not specified."""
        num_users = User.objects.count()

        response = self.client.get('/remote_user/')
        self.assertTrue(response.context['user'].is_anonymous)
        self.assertEqual(User.objects.count(), num_users)

        response = self.client.get('/remote_user/', **{self.header: None})
        self.assertTrue(response.context['user'].is_anonymous)
        self.assertEqual(User.objects.count(), num_users)

        response = self.client.get('/remote_user/', **{self.header: ''})
        self.assertTrue(response.context['user'].is_anonymous)
        self.assertEqual(User.objects.count(), num_users)

    def test_csrf_validation_passes_after_process_request_login(self):
        """
        CSRF check must access the CSRF token from the session or cookie,
        rather than the request, as rotate_token() may have been called by an
        authentication middleware during the process_request() phase.
        """
        csrf_client = Client(enforce_csrf_checks=True)
        csrf_secret = _get_new_csrf_string()
        csrf_token = _mask_cipher_secret(csrf_secret)
        csrf_token_form = _mask_cipher_secret(csrf_secret)
        headers = {self.header: 'fakeuser'}
        data = {'csrfmiddlewaretoken': csrf_token_form}

        # Verify that CSRF is configured for the view
        csrf_client.cookies.load({settings.CSRF_COOKIE_NAME: csrf_token})
        response = csrf_client.post('/remote_user/', **headers)
        self.assertEqual(response.status_code, 403)
        self.assertIn(b'CSRF verification failed.', response.content)

        # This request will call django.contrib.auth.login() which will call
        # django.middleware.csrf.rotate_token() thus changing the value of
        # request.META['CSRF_COOKIE'] from the user submitted value set by
        # CsrfViewMiddleware.process_request() to the new csrftoken value set
        # by rotate_token(). Csrf validation should still pass when the view is
        # later processed by CsrfViewMiddleware.process_view()
        csrf_client.cookies.load({settings.CSRF_COOKIE_NAME: csrf_token})
        response = csrf_client.post('/remote_user/', data, **headers)
        self.assertEqual(response.status_code, 200)

    def test_unknown_user(self):
        """
        Tests the case where the username passed in the header does not exist
        as a User.
        """
        num_users = User.objects.count()
        response = self.client.get('/remote_user/', **{self.header: 'newuser'})
        self.assertEqual(response.context['user'].username, 'newuser')
        self.assertEqual(User.objects.count(), num_users + 1)
        User.objects.get(username='newuser')

        # Another request with same user should not create any new users.
        response = self.client.get('/remote_user/', **{self.header: 'newuser'})
        self.assertEqual(User.objects.count(), num_users + 1)

    def test_known_user(self):
        """
        Tests the case where the username passed in the header is a valid User.
        """
        User.objects.create(username='knownuser')
        User.objects.create(username='knownuser2')
        num_users = User.objects.count()
        response = self.client.get('/remote_user/', **{self.header: self.known_user})
        self.assertEqual(response.context['user'].username, 'knownuser')
        self.assertEqual(User.objects.count(), num_users)
        # A different user passed in the headers causes the new user
        # to be logged in.
        response = self.client.get('/remote_user/', **{self.header: self.known_user2})
        self.assertEqual(response.context['user'].username, 'knownuser2')
        self.assertEqual(User.objects.count(), num_users)

    def test_last_login(self):
        """
        A user's last_login is set the first time they make a
        request but not updated in subsequent requests with the same session.
        """
        user = User.objects.create(username='knownuser')
        # Set last_login to something so we can determine if it changes.
        default_login = datetime(2000, 1, 1)
        if settings.USE_TZ:
            default_login = default_login.replace(tzinfo=timezone.utc)
        user.last_login = default_login
        user.save()

        response = self.client.get('/remote_user/', **{self.header: self.known_user})
        self.assertNotEqual(default_login, response.context['user'].last_login)

        user = User.objects.get(username='knownuser')
        user.last_login = default_login
        user.save()
        response = self.client.get('/remote_user/', **{self.header: self.known_user})
        self.assertEqual(default_login, response.context['user'].last_login)

    def test_header_disappears(self):
        """
        A logged in user is logged out automatically when
        the REMOTE_USER header disappears during the same browser session.
        """
        User.objects.create(username='knownuser')
        # Known user authenticates
        response = self.client.get('/remote_user/', **{self.header: self.known_user})
        self.assertEqual(response.context['user'].username, 'knownuser')
        # During the session, the REMOTE_USER header disappears. Should trigger logout.
        response = self.client.get('/remote_user/')
        self.assertTrue(response.context['user'].is_anonymous)
        # verify the remoteuser middleware will not remove a user
        # authenticated via another backend
        User.objects.create_user(username='modeluser', password='foo')
        self.client.login(username='modeluser', password='foo')
        authenticate(username='modeluser', password='foo')
        response = self.client.get('/remote_user/')
        self.assertEqual(response.context['user'].username, 'modeluser')

    def test_user_switch_forces_new_login(self):
        """
        If the username in the header changes between requests, the original
        user is logged out.
        """
        User.objects.create(username='knownuser')
        # Known user authenticates
        response = self.client.get('/remote_user/', **{self.header: self.known_user})
        self.assertEqual(response.context['user'].username, 'knownuser')
        # During the session, the REMOTE_USER changes to a different user.
        response = self.client.get('/remote_user/', **{self.header: "newnewuser"})
        # The current user is not the prior remote_user.
        # In backends that create a new user, username is "newnewuser"
        # In backends that do not create new users, it is '' (anonymous user)
        self.assertNotEqual(response.context['user'].username, 'knownuser')

    def test_inactive_user(self):
        User.objects.create(username='knownuser', is_active=False)
        response = self.client.get('/remote_user/', **{self.header: 'knownuser'})
        self.assertTrue(response.context['user'].is_anonymous)


class RemoteUserNoCreateBackend(RemoteUserBackend):
    """Backend that doesn't create unknown users."""
    create_unknown_user = False


class RemoteUserNoCreateTest(RemoteUserTest):
    """
    Contains the same tests as RemoteUserTest,
    but using a custom auth backend class that doesn't create unknown users.
    """
    backend = 'auth_tests.test_remote_user.RemoteUserNoCreateBackend'

    def test_unknown_user(self):
        num_users = User.objects.count()
        response = self.client.get('/remote_user/', **{self.header: 'newuser'})
        self.assertTrue(response.context['user'].is_anonymous)
        self.assertEqual(User.objects.count(), num_users)


class AllowAllUsersRemoteUserBackendTest(RemoteUserTest):
    """Backend that allows inactive users."""
    backend = 'django.contrib.auth.backends.AllowAllUsersRemoteUserBackend'

    def test_inactive_user(self):
        user = User.objects.create(username='knownuser', is_active=False)
        response = self.client.get('/remote_user/', **{self.header: self.known_user})
        self.assertEqual(response.context['user'].username, user.username)


class CustomRemoteUserBackend(RemoteUserBackend):
    """
    Backend that overrides RemoteUserBackend methods.
    """

    def clean_username(self, username):
        """
        Grabs username before the @ character.
        """
        return username.split('@')[0]

    def configure_user(self, request, user):
        """
        Sets user's email address using the email specified in an HTTP header.
        """
        user.email = request.META.get(RemoteUserTest.email_header, '')
        user.save()
        return user


class RemoteUserCustomTest(RemoteUserTest):
    """
    Tests a custom RemoteUserBackend subclass that overrides the clean_username
    and configure_user methods.
    """
    backend = 'auth_tests.test_remote_user.CustomRemoteUserBackend'
    # REMOTE_USER strings with email addresses for the custom backend to
    # clean.
    known_user = 'knownuser@example.com'
    known_user2 = 'knownuser2@example.com'

    def test_known_user(self):
        """
        The strings passed in REMOTE_USER should be cleaned and the known users
        should not have been configured with an email address.
        """
        super().test_known_user()
        self.assertEqual(User.objects.get(username='knownuser').email, '')
        self.assertEqual(User.objects.get(username='knownuser2').email, '')

    def test_unknown_user(self):
        """
        The unknown user created should be configured with an email address
        provided in the request header.
        """
        num_users = User.objects.count()
        response = self.client.get('/remote_user/', **{
            self.header: 'newuser',
            self.email_header: 'user@example.com',
        })
        self.assertEqual(response.context['user'].username, 'newuser')
        self.assertEqual(response.context['user'].email, 'user@example.com')
        self.assertEqual(User.objects.count(), num_users + 1)
        newuser = User.objects.get(username='newuser')
        self.assertEqual(newuser.email, 'user@example.com')


class CustomHeaderMiddleware(RemoteUserMiddleware):
    """
    Middleware that overrides custom HTTP auth user header.
    """
    header = 'HTTP_AUTHUSER'
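
# Added note (not in the original tests): Django exposes an incoming
# "AUTHUSER" request header as request.META['HTTP_AUTHUSER'], which is why the
# middleware above sets header = 'HTTP_AUTHUSER' rather than 'AUTHUSER'.
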
""" middleware = ( 'auth_tests.test_remote_user.CustomHeaderMiddleware' ) header = 'HTTP_AUTHUSER' class PersistentRemoteUserTest(RemoteUserTest): """ PersistentRemoteUserMiddleware keeps the user logged in even if the subsequent calls do not contain the header value. """ middleware = 'django.contrib.auth.middleware.PersistentRemoteUserMiddleware' require_header = False def test_header_disappears(self): """ A logged in user is kept logged in even if the REMOTE_USER header disappears during the same browser session. """ User.objects.create(username='knownuser') # Known user authenticates response = self.client.get('/remote_user/', **{self.header: self.known_user}) self.assertEqual(response.context['user'].username, 'knownuser') # Should stay logged in if the REMOTE_USER header disappears. response = self.client.get('/remote_user/') self.assertFalse(response.context['user'].is_anonymous) self.assertEqual(response.context['user'].username, 'knownuser')
588490708a884b4c32a732ac08371978e23b8bcc1dbe3d9b88e8ba25f69a4524
from django.contrib.auth import authenticate
from django.contrib.auth.context_processors import PermLookupDict, PermWrapper
from django.contrib.auth.models import Permission, User
from django.contrib.contenttypes.models import ContentType
from django.db.models import Q
from django.test import SimpleTestCase, TestCase, override_settings

from .settings import AUTH_MIDDLEWARE, AUTH_TEMPLATES


class MockUser:
    def __repr__(self):
        return 'MockUser()'

    def has_module_perms(self, perm):
        return perm == 'mockapp'

    def has_perm(self, perm, obj=None):
        return perm == 'mockapp.someperm'


class PermWrapperTests(SimpleTestCase):
    """
    Test some details of the PermWrapper implementation.
    """
    class EQLimiterObject:
        """
        This object makes sure __eq__ will not be called endlessly.
        """
        def __init__(self):
            self.eq_calls = 0

        def __eq__(self, other):
            if self.eq_calls > 0:
                return True
            self.eq_calls += 1
            return False

    def test_repr(self):
        perms = PermWrapper(MockUser())
        self.assertEqual(repr(perms), 'PermWrapper(MockUser())')

    def test_permwrapper_in(self):
        """
        'something' in PermWrapper works as expected.
        """
        perms = PermWrapper(MockUser())
        # Works for modules and full permissions.
        self.assertIn('mockapp', perms)
        self.assertNotIn('nonexistent', perms)
        self.assertIn('mockapp.someperm', perms)
        self.assertNotIn('mockapp.nonexistent', perms)

    def test_permlookupdict_in(self):
        """
        No endless loops if accessed with 'in' - refs #18979.
        """
        pldict = PermLookupDict(MockUser(), 'mockapp')
        with self.assertRaises(TypeError):
            self.EQLimiterObject() in pldict

    def test_iter(self):
        with self.assertRaisesMessage(TypeError, 'PermWrapper is not iterable.'):
            iter(PermWrapper(MockUser()))
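
def _example_perm_lookups():
    # Illustrative sketch (added; not part of the original tests): the
    # membership checks above mirror template lookups such as
    # {% if perms.mockapp %} and {% if perms.mockapp.someperm %}, which map to
    # has_module_perms('mockapp') and has_perm('mockapp.someperm').
    perms = PermWrapper(MockUser())
    return 'mockapp' in perms, 'mockapp.someperm' in perms
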
""" response = self.client.get('/auth_processor_attr_access/') self.assertContains(response, "Session accessed") def test_perms_attrs(self): u = User.objects.create_user(username='normal', password='secret') u.user_permissions.add( Permission.objects.get( content_type=ContentType.objects.get_for_model(Permission), codename='add_permission')) self.client.force_login(u) response = self.client.get('/auth_processor_perms/') self.assertContains(response, "Has auth permissions") self.assertContains(response, "Has auth.add_permission permissions") self.assertNotContains(response, "nonexistent") def test_perm_in_perms_attrs(self): u = User.objects.create_user(username='normal', password='secret') u.user_permissions.add( Permission.objects.get( content_type=ContentType.objects.get_for_model(Permission), codename='add_permission')) self.client.login(username='normal', password='secret') response = self.client.get('/auth_processor_perm_in_perms/') self.assertContains(response, "Has auth permissions") self.assertContains(response, "Has auth.add_permission permissions") self.assertNotContains(response, "nonexistent") def test_message_attrs(self): self.client.force_login(self.superuser) response = self.client.get('/auth_processor_messages/') self.assertContains(response, "Message 1") def test_user_attrs(self): """ The lazy objects returned behave just like the wrapped objects. """ # These are 'functional' level tests for common use cases. Direct # testing of the implementation (SimpleLazyObject) is in the 'utils' # tests. self.client.login(username='super', password='secret') user = authenticate(username='super', password='secret') response = self.client.get('/auth_processor_user/') self.assertContains(response, "unicode: super") self.assertContains(response, "id: %d" % self.superuser.pk) self.assertContains(response, "username: super") # bug #12037 is tested by the {% url %} in the template: self.assertContains(response, "url: /userpage/super/") # A Q() comparing a user and with another Q() (in an AND or OR fashion). Q(user=response.context['user']) & Q(someflag=True) # Tests for user equality. This is hard because User defines # equality in a non-duck-typing way # See bug #12060 self.assertEqual(response.context['user'], user) self.assertEqual(user, response.context['user'])
from datetime import datetime, timedelta from django.conf import settings from django.contrib.auth.models import User from django.contrib.auth.tokens import PasswordResetTokenGenerator from django.core.exceptions import ImproperlyConfigured from django.test import TestCase from django.test.utils import override_settings from .models import CustomEmailField class MockedPasswordResetTokenGenerator(PasswordResetTokenGenerator): def __init__(self, now): self._now_val = now super().__init__() def _now(self): return self._now_val class TokenGeneratorTest(TestCase): def test_make_token(self): user = User.objects.create_user('tokentestuser', '[email protected]', 'testpw') p0 = PasswordResetTokenGenerator() tk1 = p0.make_token(user) self.assertIs(p0.check_token(user, tk1), True) def test_10265(self): """ The token generated for a user created in the same request will work correctly. """ user = User.objects.create_user('comebackkid', '[email protected]', 'testpw') user_reload = User.objects.get(username='comebackkid') p0 = MockedPasswordResetTokenGenerator(datetime.now()) tk1 = p0.make_token(user) tk2 = p0.make_token(user_reload) self.assertEqual(tk1, tk2) def test_token_with_different_email(self): """Updating the user email address invalidates the token.""" tests = [ (CustomEmailField, None), (CustomEmailField, '[email protected]'), (User, '[email protected]'), ] for model, email in tests: with self.subTest(model=model.__qualname__, email=email): user = model.objects.create_user( 'changeemailuser', email=email, password='testpw', ) p0 = PasswordResetTokenGenerator() tk1 = p0.make_token(user) self.assertIs(p0.check_token(user, tk1), True) setattr(user, user.get_email_field_name(), '[email protected]') user.save() self.assertIs(p0.check_token(user, tk1), False) def test_timeout(self): """The token is valid after n seconds, but no greater.""" # Uses a mocked version of PasswordResetTokenGenerator so we can change # the value of 'now'. user = User.objects.create_user('tokentestuser', '[email protected]', 'testpw') now = datetime.now() p0 = MockedPasswordResetTokenGenerator(now) tk1 = p0.make_token(user) p1 = MockedPasswordResetTokenGenerator( now + timedelta(seconds=settings.PASSWORD_RESET_TIMEOUT) ) self.assertIs(p1.check_token(user, tk1), True) p2 = MockedPasswordResetTokenGenerator( now + timedelta(seconds=(settings.PASSWORD_RESET_TIMEOUT + 1)) ) self.assertIs(p2.check_token(user, tk1), False) with self.settings(PASSWORD_RESET_TIMEOUT=60 * 60): p3 = MockedPasswordResetTokenGenerator( now + timedelta(seconds=settings.PASSWORD_RESET_TIMEOUT) ) self.assertIs(p3.check_token(user, tk1), True) p4 = MockedPasswordResetTokenGenerator( now + timedelta(seconds=(settings.PASSWORD_RESET_TIMEOUT + 1)) ) self.assertIs(p4.check_token(user, tk1), False) def test_check_token_with_nonexistent_token_and_user(self): user = User.objects.create_user('tokentestuser', '[email protected]', 'testpw') p0 = PasswordResetTokenGenerator() tk1 = p0.make_token(user) self.assertIs(p0.check_token(None, tk1), False) self.assertIs(p0.check_token(user, None), False) def test_token_with_different_secret(self): """ A valid token can be created with a secret other than SECRET_KEY by using the PasswordResetTokenGenerator.secret attribute. """ user = User.objects.create_user('tokentestuser', '[email protected]', 'testpw') new_secret = 'abcdefghijkl' # Create and check a token with a different secret. 
p0 = PasswordResetTokenGenerator() p0.secret = new_secret tk0 = p0.make_token(user) self.assertIs(p0.check_token(user, tk0), True) # Create and check a token with the default secret. p1 = PasswordResetTokenGenerator() self.assertEqual(p1.secret, settings.SECRET_KEY) self.assertNotEqual(p1.secret, new_secret) tk1 = p1.make_token(user) # Tokens created with a different secret don't validate. self.assertIs(p0.check_token(user, tk1), False) self.assertIs(p1.check_token(user, tk0), False) def test_token_with_different_secret_subclass(self): class CustomPasswordResetTokenGenerator(PasswordResetTokenGenerator): secret = 'test-secret' user = User.objects.create_user('tokentestuser', '[email protected]', 'testpw') custom_password_generator = CustomPasswordResetTokenGenerator() tk_custom = custom_password_generator.make_token(user) self.assertIs(custom_password_generator.check_token(user, tk_custom), True) default_password_generator = PasswordResetTokenGenerator() self.assertNotEqual( custom_password_generator.secret, default_password_generator.secret, ) self.assertEqual(default_password_generator.secret, settings.SECRET_KEY) # Tokens created with a different secret don't validate. tk_default = default_password_generator.make_token(user) self.assertIs(custom_password_generator.check_token(user, tk_default), False) self.assertIs(default_password_generator.check_token(user, tk_custom), False) @override_settings(SECRET_KEY='') def test_secret_lazy_validation(self): default_token_generator = PasswordResetTokenGenerator() msg = 'The SECRET_KEY setting must not be empty.' with self.assertRaisesMessage(ImproperlyConfigured, msg): default_token_generator.secret
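
# In application code the generator is used in a pair of views: make a token
# when mailing the reset link, check it when the link is visited. A compact
# sketch of the API the tests above rely on (assuming `user` is a saved User
# instance):
from django.contrib.auth.tokens import PasswordResetTokenGenerator

generator = PasswordResetTokenGenerator()
token = generator.make_token(user)
# The hash covers the user's pk, password hash, last_login, email, and a
# timestamp, so changing any of them (or exceeding PASSWORD_RESET_TIMEOUT)
# invalidates the token, as the tests above demonstrate.
assert generator.check_token(user, token)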
from django.contrib import admin from django.contrib.auth import get_user_model from django.contrib.auth.admin import UserAdmin from django.urls import path site = admin.AdminSite(name='custom_user_admin') class CustomUserAdmin(UserAdmin): def log_change(self, request, obj, message): # LogEntry.user column doesn't get altered to expect a UUID, so set an # integer manually to avoid causing an error. original_pk = request.user.pk request.user.pk = 1 super().log_change(request, obj, message) request.user.pk = original_pk site.register(get_user_model(), CustomUserAdmin) urlpatterns = [ path('admin/', site.urls), ]
from django.contrib import admin from django.contrib.auth import views from django.contrib.auth.decorators import login_required, permission_required from django.contrib.auth.forms import AuthenticationForm from django.contrib.auth.urls import urlpatterns as auth_urlpatterns from django.contrib.auth.views import LoginView from django.contrib.messages.api import info from django.http import HttpRequest, HttpResponse from django.shortcuts import render from django.template import RequestContext, Template from django.urls import path, re_path, reverse_lazy from django.views.decorators.cache import never_cache from django.views.i18n import set_language class CustomRequestAuthenticationForm(AuthenticationForm): def __init__(self, request, *args, **kwargs): assert isinstance(request, HttpRequest) super().__init__(request, *args, **kwargs) @never_cache def remote_user_auth_view(request): "Dummy view for remote user tests" t = Template("Username is {{ user }}.") c = RequestContext(request, {}) return HttpResponse(t.render(c)) def auth_processor_no_attr_access(request): render(request, 'context_processors/auth_attrs_no_access.html') # *After* rendering, we check whether the session was accessed return render(request, 'context_processors/auth_attrs_test_access.html', {'session_accessed': request.session.accessed}) def auth_processor_attr_access(request): render(request, 'context_processors/auth_attrs_access.html') return render(request, 'context_processors/auth_attrs_test_access.html', {'session_accessed': request.session.accessed}) def auth_processor_user(request): return render(request, 'context_processors/auth_attrs_user.html') def auth_processor_perms(request): return render(request, 'context_processors/auth_attrs_perms.html') def auth_processor_perm_in_perms(request): return render(request, 'context_processors/auth_attrs_perm_in_perms.html') def auth_processor_messages(request): info(request, "Message 1") return render(request, 'context_processors/auth_attrs_messages.html') def userpage(request): pass @permission_required('unknown.permission') def permission_required_redirect(request): pass @permission_required('unknown.permission', raise_exception=True) def permission_required_exception(request): pass @login_required @permission_required('unknown.permission', raise_exception=True) def login_and_permission_required_exception(request): pass class CustomDefaultRedirectURLLoginView(LoginView): def get_default_redirect_url(self): return '/custom/' # special urls for auth test cases urlpatterns = auth_urlpatterns + [ path('logout/custom_query/', views.LogoutView.as_view(redirect_field_name='follow')), path('logout/next_page/', views.LogoutView.as_view(next_page='/somewhere/')), path('logout/next_page/named/', views.LogoutView.as_view(next_page='password_reset')), path('logout/allowed_hosts/', views.LogoutView.as_view(success_url_allowed_hosts={'otherserver'})), path('remote_user/', remote_user_auth_view), path('password_reset_from_email/', views.PasswordResetView.as_view(from_email='[email protected]')), path( 'password_reset_extra_email_context/', views.PasswordResetView.as_view( extra_email_context={'greeting': 'Hello!', 'domain': 'custom.example.com'}, ), ), path( 'password_reset/custom_redirect/', views.PasswordResetView.as_view(success_url='/custom/')), path( 'password_reset/custom_redirect/named/', views.PasswordResetView.as_view(success_url=reverse_lazy('password_reset'))), path( 'password_reset/html_email_template/', views.PasswordResetView.as_view( 
html_email_template_name='registration/html_password_reset_email.html' )), path( 'reset/custom/<uidb64>/<token>/', views.PasswordResetConfirmView.as_view(success_url='/custom/'), ), path( 'reset/custom/named/<uidb64>/<token>/', views.PasswordResetConfirmView.as_view(success_url=reverse_lazy('password_reset')), ), path( 'reset/custom/token/<uidb64>/<token>/', views.PasswordResetConfirmView.as_view(reset_url_token='set-passwordcustom'), ), path( 'reset/post_reset_login/<uidb64>/<token>/', views.PasswordResetConfirmView.as_view(post_reset_login=True), ), path( 'reset/post_reset_login_custom_backend/<uidb64>/<token>/', views.PasswordResetConfirmView.as_view( post_reset_login=True, post_reset_login_backend='django.contrib.auth.backends.AllowAllUsersModelBackend', ), ), path('reset/missing_parameters/', views.PasswordResetConfirmView.as_view()), path('password_change/custom/', views.PasswordChangeView.as_view(success_url='/custom/')), path('password_change/custom/named/', views.PasswordChangeView.as_view(success_url=reverse_lazy('password_reset'))), path('login_required/', login_required(views.PasswordResetView.as_view())), path('login_required_login_url/', login_required(views.PasswordResetView.as_view(), login_url='/somewhere/')), path('auth_processor_no_attr_access/', auth_processor_no_attr_access), path('auth_processor_attr_access/', auth_processor_attr_access), path('auth_processor_user/', auth_processor_user), path('auth_processor_perms/', auth_processor_perms), path('auth_processor_perm_in_perms/', auth_processor_perm_in_perms), path('auth_processor_messages/', auth_processor_messages), path( 'custom_request_auth_login/', views.LoginView.as_view(authentication_form=CustomRequestAuthenticationForm)), re_path('^userpage/(.+)/$', userpage, name='userpage'), path('login/redirect_authenticated_user_default/', views.LoginView.as_view()), path('login/redirect_authenticated_user/', views.LoginView.as_view(redirect_authenticated_user=True)), path('login/allowed_hosts/', views.LoginView.as_view(success_url_allowed_hosts={'otherserver'})), path('login/get_default_redirect_url/', CustomDefaultRedirectURLLoginView.as_view()), path('login/next_page/', views.LoginView.as_view(next_page='/somewhere/')), path('login/next_page/named/', views.LoginView.as_view(next_page='password_reset')), path('permission_required_redirect/', permission_required_redirect), path('permission_required_exception/', permission_required_exception), path('login_and_permission_required_exception/', login_and_permission_required_exception), path('setlang/', set_language, name='set_language'), # This line is only required to render the password reset with is_admin=True path('admin/', admin.site.urls), ]
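
# Note on the patterns above: success_url values use reverse_lazy() rather
# than reverse() because this module is imported while the URLconf is still
# being loaded; a plain reverse('password_reset') at import time would raise.
# Sketch:
from django.urls import reverse_lazy

# Deferred: resolved to the mounted URL only when first coerced to text,
# e.g. by a redirect response.
deferred_url = reverse_lazy('password_reset')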
import datetime import re from unittest import mock from django.contrib.auth.forms import ( AdminPasswordChangeForm, AuthenticationForm, PasswordChangeForm, PasswordResetForm, ReadOnlyPasswordHashField, ReadOnlyPasswordHashWidget, SetPasswordForm, UserChangeForm, UserCreationForm, ) from django.contrib.auth.models import User from django.contrib.auth.signals import user_login_failed from django.contrib.sites.models import Site from django.core import mail from django.core.exceptions import ValidationError from django.core.mail import EmailMultiAlternatives from django.forms import forms from django.forms.fields import CharField, Field, IntegerField from django.test import SimpleTestCase, TestCase, override_settings from django.utils import translation from django.utils.text import capfirst from django.utils.translation import gettext as _ from .models.custom_user import ( CustomUser, CustomUserWithoutIsActiveField, ExtensionUser, ) from .models.with_custom_email_field import CustomEmailField from .models.with_integer_username import IntegerUsernameUser from .settings import AUTH_TEMPLATES class TestDataMixin: @classmethod def setUpTestData(cls): cls.u1 = User.objects.create_user(username='testclient', password='password', email='[email protected]') cls.u2 = User.objects.create_user(username='inactive', password='password', is_active=False) cls.u3 = User.objects.create_user(username='staff', password='password') cls.u4 = User.objects.create(username='empty_password', password='') cls.u5 = User.objects.create(username='unmanageable_password', password='$') cls.u6 = User.objects.create(username='unknown_password', password='foo$bar') class UserCreationFormTest(TestDataMixin, TestCase): def test_user_already_exists(self): data = { 'username': 'testclient', 'password1': 'test123', 'password2': 'test123', } form = UserCreationForm(data) self.assertFalse(form.is_valid()) self.assertEqual(form["username"].errors, [str(User._meta.get_field('username').error_messages['unique'])]) def test_invalid_data(self): data = { 'username': 'jsmith!', 'password1': 'test123', 'password2': 'test123', } form = UserCreationForm(data) self.assertFalse(form.is_valid()) validator = next(v for v in User._meta.get_field('username').validators if v.code == 'invalid') self.assertEqual(form["username"].errors, [str(validator.message)]) def test_password_verification(self): # The verification password is incorrect. data = { 'username': 'jsmith', 'password1': 'test123', 'password2': 'test', } form = UserCreationForm(data) self.assertFalse(form.is_valid()) self.assertEqual(form["password2"].errors, [str(form.error_messages['password_mismatch'])]) def test_both_passwords(self): # One (or both) passwords weren't given data = {'username': 'jsmith'} form = UserCreationForm(data) required_error = [str(Field.default_error_messages['required'])] self.assertFalse(form.is_valid()) self.assertEqual(form['password1'].errors, required_error) self.assertEqual(form['password2'].errors, required_error) data['password2'] = 'test123' form = UserCreationForm(data) self.assertFalse(form.is_valid()) self.assertEqual(form['password1'].errors, required_error) self.assertEqual(form['password2'].errors, []) @mock.patch('django.contrib.auth.password_validation.password_changed') def test_success(self, password_changed): # The success case. 
        data = {
            'username': '[email protected]',
            'password1': 'test123',
            'password2': 'test123',
        }
        form = UserCreationForm(data)
        self.assertTrue(form.is_valid())
        form.save(commit=False)
        self.assertEqual(password_changed.call_count, 0)
        u = form.save()
        self.assertEqual(password_changed.call_count, 1)
        self.assertEqual(repr(u), '<User: [email protected]>')

    def test_unicode_username(self):
        data = {
            'username': '宝',
            'password1': 'test123',
            'password2': 'test123',
        }
        form = UserCreationForm(data)
        self.assertTrue(form.is_valid())
        u = form.save()
        self.assertEqual(u.username, '宝')

    def test_normalize_username(self):
        # The normalization happens in AbstractBaseUser.clean() and ModelForm
        # validation calls Model.clean().
        ohm_username = 'testΩ'  # U+2126 OHM SIGN
        data = {
            'username': ohm_username,
            'password1': 'pwd2',
            'password2': 'pwd2',
        }
        form = UserCreationForm(data)
        self.assertTrue(form.is_valid())
        user = form.save()
        self.assertNotEqual(user.username, ohm_username)
        self.assertEqual(user.username, 'testΩ')  # U+03A9 GREEK CAPITAL LETTER OMEGA

    def test_duplicate_normalized_unicode(self):
        """
        To prevent almost identical usernames, visually identical but differing
        by their unicode code points only, Unicode NFKC normalization should
        make them appear equal to Django.
        """
        omega_username = 'iamtheΩ'  # U+03A9 GREEK CAPITAL LETTER OMEGA
        ohm_username = 'iamtheΩ'  # U+2126 OHM SIGN
        self.assertNotEqual(omega_username, ohm_username)
        User.objects.create_user(username=omega_username, password='pwd')
        data = {
            'username': ohm_username,
            'password1': 'pwd2',
            'password2': 'pwd2',
        }
        form = UserCreationForm(data)
        self.assertFalse(form.is_valid())
        self.assertEqual(
            form.errors['username'], ["A user with that username already exists."]
        )

    @override_settings(AUTH_PASSWORD_VALIDATORS=[
        {'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'},
        {'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', 'OPTIONS': {
            'min_length': 12,
        }},
    ])
    def test_validates_password(self):
        data = {
            'username': 'testclient',
            'password1': 'testclient',
            'password2': 'testclient',
        }
        form = UserCreationForm(data)
        self.assertFalse(form.is_valid())
        self.assertEqual(len(form['password2'].errors), 2)
        self.assertIn('The password is too similar to the username.', form['password2'].errors)
        self.assertIn(
            'This password is too short. It must contain at least 12 characters.',
            form['password2'].errors
        )

    def test_custom_form(self):

        class CustomUserCreationForm(UserCreationForm):
            class Meta(UserCreationForm.Meta):
                model = ExtensionUser
                fields = UserCreationForm.Meta.fields + ('date_of_birth',)

        data = {
            'username': 'testclient',
            'password1': 'testclient',
            'password2': 'testclient',
            'date_of_birth': '1988-02-24',
        }
        form = CustomUserCreationForm(data)
        self.assertTrue(form.is_valid())

    def test_custom_form_with_different_username_field(self):

        class CustomUserCreationForm(UserCreationForm):
            class Meta(UserCreationForm.Meta):
                model = CustomUser
                fields = ('email', 'date_of_birth')

        data = {
            'email': '[email protected]',
            'password1': 'testclient',
            'password2': 'testclient',
            'date_of_birth': '1988-02-24',
        }
        form = CustomUserCreationForm(data)
        self.assertTrue(form.is_valid())

    def test_custom_form_hidden_username_field(self):

        class CustomUserCreationForm(UserCreationForm):
            class Meta(UserCreationForm.Meta):
                model = CustomUserWithoutIsActiveField
                fields = ('email',)  # without USERNAME_FIELD

        data = {
            'email': '[email protected]',
            'password1': 'testclient',
            'password2': 'testclient',
        }
        form = CustomUserCreationForm(data)
        self.assertTrue(form.is_valid())

    def test_password_whitespace_not_stripped(self):
        data = {
            'username': 'testuser',
            'password1': ' testpassword ',
            'password2': ' testpassword ',
        }
        form = UserCreationForm(data)
        self.assertTrue(form.is_valid())
        self.assertEqual(form.cleaned_data['password1'], data['password1'])
        self.assertEqual(form.cleaned_data['password2'], data['password2'])

    @override_settings(AUTH_PASSWORD_VALIDATORS=[
        {'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'},
    ])
    def test_password_help_text(self):
        form = UserCreationForm()
        self.assertEqual(
            form.fields['password1'].help_text,
            '<ul><li>Your password can’t be too similar to your other personal information.</li></ul>'
        )

    @override_settings(AUTH_PASSWORD_VALIDATORS=[
        {'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'},
    ])
    def test_user_create_form_validates_password_with_all_data(self):
        """UserCreationForm password validation uses all of the form's data."""

        class CustomUserCreationForm(UserCreationForm):
            class Meta(UserCreationForm.Meta):
                model = User
                fields = ('username', 'email', 'first_name', 'last_name')

        form = CustomUserCreationForm({
            'username': 'testuser',
            'password1': 'testpassword',
            'password2': 'testpassword',
            'first_name': 'testpassword',
            'last_name': 'lastname',
        })
        self.assertFalse(form.is_valid())
        self.assertEqual(
            form.errors['password2'],
            ['The password is too similar to the first name.'],
        )

    def test_username_field_autocapitalize_none(self):
        form = UserCreationForm()
        self.assertEqual(form.fields['username'].widget.attrs.get('autocapitalize'), 'none')

    def test_html_autocomplete_attributes(self):
        form = UserCreationForm()
        tests = (
            ('username', 'username'),
            ('password1', 'new-password'),
            ('password2', 'new-password'),
        )
        for field_name, autocomplete in tests:
            with self.subTest(field_name=field_name, autocomplete=autocomplete):
                self.assertEqual(form.fields[field_name].widget.attrs['autocomplete'], autocomplete)


# To verify that the login form rejects inactive users, use an authentication
# backend that allows them.
@override_settings(AUTHENTICATION_BACKENDS=['django.contrib.auth.backends.AllowAllUsersModelBackend'])
class AuthenticationFormTest(TestDataMixin, TestCase):

    def test_invalid_username(self):
        # The user submits an invalid username.
data = { 'username': 'jsmith_does_not_exist', 'password': 'test123', } form = AuthenticationForm(None, data) self.assertFalse(form.is_valid()) self.assertEqual( form.non_field_errors(), [ form.error_messages['invalid_login'] % { 'username': User._meta.get_field('username').verbose_name } ] ) def test_inactive_user(self): # The user is inactive. data = { 'username': 'inactive', 'password': 'password', } form = AuthenticationForm(None, data) self.assertFalse(form.is_valid()) self.assertEqual(form.non_field_errors(), [str(form.error_messages['inactive'])]) # Use an authentication backend that rejects inactive users. @override_settings(AUTHENTICATION_BACKENDS=['django.contrib.auth.backends.ModelBackend']) def test_inactive_user_incorrect_password(self): """An invalid login doesn't leak the inactive status of a user.""" data = { 'username': 'inactive', 'password': 'incorrect', } form = AuthenticationForm(None, data) self.assertFalse(form.is_valid()) self.assertEqual( form.non_field_errors(), [ form.error_messages['invalid_login'] % { 'username': User._meta.get_field('username').verbose_name } ] ) def test_login_failed(self): signal_calls = [] def signal_handler(**kwargs): signal_calls.append(kwargs) user_login_failed.connect(signal_handler) fake_request = object() try: form = AuthenticationForm(fake_request, { 'username': 'testclient', 'password': 'incorrect', }) self.assertFalse(form.is_valid()) self.assertIs(signal_calls[0]['request'], fake_request) finally: user_login_failed.disconnect(signal_handler) def test_inactive_user_i18n(self): with self.settings(USE_I18N=True), translation.override('pt-br', deactivate=True): # The user is inactive. data = { 'username': 'inactive', 'password': 'password', } form = AuthenticationForm(None, data) self.assertFalse(form.is_valid()) self.assertEqual(form.non_field_errors(), [str(form.error_messages['inactive'])]) # Use an authentication backend that allows inactive users. @override_settings(AUTHENTICATION_BACKENDS=['django.contrib.auth.backends.AllowAllUsersModelBackend']) def test_custom_login_allowed_policy(self): # The user is inactive, but our custom form policy allows them to log in. data = { 'username': 'inactive', 'password': 'password', } class AuthenticationFormWithInactiveUsersOkay(AuthenticationForm): def confirm_login_allowed(self, user): pass form = AuthenticationFormWithInactiveUsersOkay(None, data) self.assertTrue(form.is_valid()) # Raise a ValidationError in the form to disallow some logins according # to custom logic. 
        class PickyAuthenticationForm(AuthenticationForm):
            def confirm_login_allowed(self, user):
                if user.username == "inactive":
                    raise ValidationError("This user is disallowed.")
                raise ValidationError("Sorry, nobody's allowed in.")

        form = PickyAuthenticationForm(None, data)
        self.assertFalse(form.is_valid())
        self.assertEqual(form.non_field_errors(), ['This user is disallowed.'])

        data = {
            'username': 'testclient',
            'password': 'password',
        }
        form = PickyAuthenticationForm(None, data)
        self.assertFalse(form.is_valid())
        self.assertEqual(form.non_field_errors(), ["Sorry, nobody's allowed in."])

    def test_success(self):
        # The success case
        data = {
            'username': 'testclient',
            'password': 'password',
        }
        form = AuthenticationForm(None, data)
        self.assertTrue(form.is_valid())
        self.assertEqual(form.non_field_errors(), [])

    def test_unicode_username(self):
        User.objects.create_user(username='Σαρα', password='pwd')
        data = {
            'username': 'Σαρα',
            'password': 'pwd',
        }
        form = AuthenticationForm(None, data)
        self.assertTrue(form.is_valid())
        self.assertEqual(form.non_field_errors(), [])

    @override_settings(AUTH_USER_MODEL='auth_tests.CustomEmailField')
    def test_username_field_max_length_matches_user_model(self):
        self.assertEqual(CustomEmailField._meta.get_field('username').max_length, 255)
        data = {
            'username': 'u' * 255,
            'password': 'pwd',
            'email': '[email protected]',
        }
        CustomEmailField.objects.create_user(**data)
        form = AuthenticationForm(None, data)
        self.assertEqual(form.fields['username'].max_length, 255)
        self.assertEqual(form.fields['username'].widget.attrs.get('maxlength'), 255)
        self.assertEqual(form.errors, {})

    @override_settings(AUTH_USER_MODEL='auth_tests.IntegerUsernameUser')
    def test_username_field_max_length_defaults_to_254(self):
        self.assertIsNone(IntegerUsernameUser._meta.get_field('username').max_length)
        data = {
            'username': '0123456',
            'password': 'password',
        }
        IntegerUsernameUser.objects.create_user(**data)
        form = AuthenticationForm(None, data)
        self.assertEqual(form.fields['username'].max_length, 254)
        self.assertEqual(form.fields['username'].widget.attrs.get('maxlength'), 254)
        self.assertEqual(form.errors, {})

    def test_username_field_label(self):

        class CustomAuthenticationForm(AuthenticationForm):
            username = CharField(label="Name", max_length=75)

        form = CustomAuthenticationForm()
        self.assertEqual(form['username'].label, "Name")

    def test_username_field_label_not_set(self):

        class CustomAuthenticationForm(AuthenticationForm):
            username = CharField()

        form = CustomAuthenticationForm()
        username_field = User._meta.get_field(User.USERNAME_FIELD)
        self.assertEqual(form.fields['username'].label, capfirst(username_field.verbose_name))

    def test_username_field_autocapitalize_none(self):
        form = AuthenticationForm()
        self.assertEqual(form.fields['username'].widget.attrs.get('autocapitalize'), 'none')

    def test_username_field_label_empty_string(self):

        class CustomAuthenticationForm(AuthenticationForm):
            username = CharField(label='')

        form = CustomAuthenticationForm()
        self.assertEqual(form.fields['username'].label, "")

    def test_password_whitespace_not_stripped(self):
        data = {
            'username': 'testuser',
            'password': ' pass ',
        }
        form = AuthenticationForm(None, data)
        form.is_valid()  # Not necessary to have valid credentials for the test.
        self.assertEqual(form.cleaned_data['password'], data['password'])

    @override_settings(AUTH_USER_MODEL='auth_tests.IntegerUsernameUser')
    def test_integer_username(self):

        class CustomAuthenticationForm(AuthenticationForm):
            username = IntegerField()

        user = IntegerUsernameUser.objects.create_user(username=0, password='pwd')
        data = {
            'username': 0,
            'password': 'pwd',
        }
        form = CustomAuthenticationForm(None, data)
        self.assertTrue(form.is_valid())
        self.assertEqual(form.cleaned_data['username'], data['username'])
        self.assertEqual(form.cleaned_data['password'], data['password'])
        self.assertEqual(form.errors, {})
        self.assertEqual(form.user_cache, user)

    def test_get_invalid_login_error(self):
        error = AuthenticationForm().get_invalid_login_error()
        self.assertIsInstance(error, ValidationError)
        self.assertEqual(
            error.message,
            'Please enter a correct %(username)s and password. Note that both '
            'fields may be case-sensitive.',
        )
        self.assertEqual(error.code, 'invalid_login')
        self.assertEqual(error.params, {'username': 'username'})

    def test_html_autocomplete_attributes(self):
        form = AuthenticationForm()
        tests = (
            ('username', 'username'),
            ('password', 'current-password'),
        )
        for field_name, autocomplete in tests:
            with self.subTest(field_name=field_name, autocomplete=autocomplete):
                self.assertEqual(form.fields[field_name].widget.attrs['autocomplete'], autocomplete)


class SetPasswordFormTest(TestDataMixin, TestCase):

    def test_password_verification(self):
        # The two new passwords do not match.
        user = User.objects.get(username='testclient')
        data = {
            'new_password1': 'abc123',
            'new_password2': 'abc',
        }
        form = SetPasswordForm(user, data)
        self.assertFalse(form.is_valid())
        self.assertEqual(
            form["new_password2"].errors,
            [str(form.error_messages['password_mismatch'])]
        )

    @mock.patch('django.contrib.auth.password_validation.password_changed')
    def test_success(self, password_changed):
        user = User.objects.get(username='testclient')
        data = {
            'new_password1': 'abc123',
            'new_password2': 'abc123',
        }
        form = SetPasswordForm(user, data)
        self.assertTrue(form.is_valid())
        form.save(commit=False)
        self.assertEqual(password_changed.call_count, 0)
        form.save()
        self.assertEqual(password_changed.call_count, 1)

    @override_settings(AUTH_PASSWORD_VALIDATORS=[
        {'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'},
        {'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', 'OPTIONS': {
            'min_length': 12,
        }},
    ])
    def test_validates_password(self):
        user = User.objects.get(username='testclient')
        data = {
            'new_password1': 'testclient',
            'new_password2': 'testclient',
        }
        form = SetPasswordForm(user, data)
        self.assertFalse(form.is_valid())
        self.assertEqual(len(form["new_password2"].errors), 2)
        self.assertIn('The password is too similar to the username.', form["new_password2"].errors)
        self.assertIn(
            'This password is too short. It must contain at least 12 characters.',
            form["new_password2"].errors
        )

    def test_password_whitespace_not_stripped(self):
        user = User.objects.get(username='testclient')
        data = {
            'new_password1': ' password ',
            'new_password2': ' password ',
        }
        form = SetPasswordForm(user, data)
        self.assertTrue(form.is_valid())
        self.assertEqual(form.cleaned_data['new_password1'], data['new_password1'])
        self.assertEqual(form.cleaned_data['new_password2'], data['new_password2'])

    @override_settings(AUTH_PASSWORD_VALIDATORS=[
        {'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'},
        {'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', 'OPTIONS': {
            'min_length': 12,
        }},
    ])
    def test_help_text_translation(self):
        french_help_texts = [
            'Votre mot de passe ne peut pas trop ressembler à vos autres informations personnelles.',
            'Votre mot de passe doit contenir au minimum 12 caractères.',
        ]
        form = SetPasswordForm(self.u1)
        with translation.override('fr'):
            html = form.as_p()
            for french_text in french_help_texts:
                self.assertIn(french_text, html)

    def test_html_autocomplete_attributes(self):
        form = SetPasswordForm(self.u1)
        tests = (
            ('new_password1', 'new-password'),
            ('new_password2', 'new-password'),
        )
        for field_name, autocomplete in tests:
            with self.subTest(field_name=field_name, autocomplete=autocomplete):
                self.assertEqual(form.fields[field_name].widget.attrs['autocomplete'], autocomplete)


class PasswordChangeFormTest(TestDataMixin, TestCase):

    def test_incorrect_password(self):
        user = User.objects.get(username='testclient')
        data = {
            'old_password': 'test',
            'new_password1': 'abc123',
            'new_password2': 'abc123',
        }
        form = PasswordChangeForm(user, data)
        self.assertFalse(form.is_valid())
        self.assertEqual(form["old_password"].errors, [str(form.error_messages['password_incorrect'])])

    def test_password_verification(self):
        # The two new passwords do not match.
        user = User.objects.get(username='testclient')
        data = {
            'old_password': 'password',
            'new_password1': 'abc123',
            'new_password2': 'abc',
        }
        form = PasswordChangeForm(user, data)
        self.assertFalse(form.is_valid())
        self.assertEqual(form["new_password2"].errors, [str(form.error_messages['password_mismatch'])])

    @mock.patch('django.contrib.auth.password_validation.password_changed')
    def test_success(self, password_changed):
        # The success case.
        user = User.objects.get(username='testclient')
        data = {
            'old_password': 'password',
            'new_password1': 'abc123',
            'new_password2': 'abc123',
        }
        form = PasswordChangeForm(user, data)
        self.assertTrue(form.is_valid())
        form.save(commit=False)
        self.assertEqual(password_changed.call_count, 0)
        form.save()
        self.assertEqual(password_changed.call_count, 1)

    def test_field_order(self):
        # Regression test - check the order of fields:
        user = User.objects.get(username='testclient')
        self.assertEqual(list(PasswordChangeForm(user, {}).fields),
                         ['old_password', 'new_password1', 'new_password2'])

    def test_password_whitespace_not_stripped(self):
        user = User.objects.get(username='testclient')
        user.set_password(' oldpassword ')
        data = {
            'old_password': ' oldpassword ',
            'new_password1': ' pass ',
            'new_password2': ' pass ',
        }
        form = PasswordChangeForm(user, data)
        self.assertTrue(form.is_valid())
        self.assertEqual(form.cleaned_data['old_password'], data['old_password'])
        self.assertEqual(form.cleaned_data['new_password1'], data['new_password1'])
        self.assertEqual(form.cleaned_data['new_password2'], data['new_password2'])

    def test_html_autocomplete_attributes(self):
        user = User.objects.get(username='testclient')
        form = PasswordChangeForm(user)
        self.assertEqual(form.fields['old_password'].widget.attrs['autocomplete'], 'current-password')


class UserChangeFormTest(TestDataMixin, TestCase):

    def test_username_validity(self):
        user = User.objects.get(username='testclient')
        data = {'username': 'not valid'}
        form = UserChangeForm(data, instance=user)
        self.assertFalse(form.is_valid())
        validator = next(v for v in User._meta.get_field('username').validators if v.code == 'invalid')
        self.assertEqual(form["username"].errors, [str(validator.message)])

    def test_bug_14242(self):
        # A regression test, introduced by adding an optimization for the
        # UserChangeForm.

        class MyUserForm(UserChangeForm):
            def __init__(self, *args, **kwargs):
                super().__init__(*args, **kwargs)
                self.fields['groups'].help_text = 'These groups give users different permissions'

            class Meta(UserChangeForm.Meta):
                fields = ('groups',)

        # Just check we can create it
        MyUserForm({})

    def test_unusable_password(self):
        user = User.objects.get(username='empty_password')
        user.set_unusable_password()
        user.save()
        form = UserChangeForm(instance=user)
        self.assertIn(_("No password set."), form.as_table())

    def test_bug_17944_empty_password(self):
        user = User.objects.get(username='empty_password')
        form = UserChangeForm(instance=user)
        self.assertIn(_("No password set."), form.as_table())

    def test_bug_17944_unmanageable_password(self):
        user = User.objects.get(username='unmanageable_password')
        form = UserChangeForm(instance=user)
        self.assertIn(_("Invalid password format or unknown hashing algorithm."), form.as_table())

    def test_bug_17944_unknown_password_algorithm(self):
        user = User.objects.get(username='unknown_password')
        form = UserChangeForm(instance=user)
        self.assertIn(_("Invalid password format or unknown hashing algorithm."), form.as_table())

    def test_bug_19133(self):
        "The change form does not return the password value"
        # Use the form to construct the POST data
        user = User.objects.get(username='testclient')
        form_for_data = UserChangeForm(instance=user)
        post_data = form_for_data.initial

        # The password field should be readonly, so anything
        # posted here should be ignored; the form will be
        # valid, and give back the 'initial' value for the
        # password field.
post_data['password'] = 'new password' form = UserChangeForm(instance=user, data=post_data) self.assertTrue(form.is_valid()) # original hashed password contains $ self.assertIn('$', form.cleaned_data['password']) def test_bug_19349_bound_password_field(self): user = User.objects.get(username='testclient') form = UserChangeForm(data={}, instance=user) # When rendering the bound password field, # ReadOnlyPasswordHashWidget needs the initial # value to render correctly self.assertEqual(form.initial['password'], form['password'].value()) def test_custom_form(self): class CustomUserChangeForm(UserChangeForm): class Meta(UserChangeForm.Meta): model = ExtensionUser fields = ('username', 'password', 'date_of_birth',) user = User.objects.get(username='testclient') data = { 'username': 'testclient', 'password': 'testclient', 'date_of_birth': '1998-02-24', } form = CustomUserChangeForm(data, instance=user) self.assertTrue(form.is_valid()) form.save() self.assertEqual(form.cleaned_data['username'], 'testclient') self.assertEqual(form.cleaned_data['date_of_birth'], datetime.date(1998, 2, 24)) def test_password_excluded(self): class UserChangeFormWithoutPassword(UserChangeForm): password = None class Meta: model = User exclude = ['password'] form = UserChangeFormWithoutPassword() self.assertNotIn('password', form.fields) def test_username_field_autocapitalize_none(self): form = UserChangeForm() self.assertEqual(form.fields['username'].widget.attrs.get('autocapitalize'), 'none') @override_settings(TEMPLATES=AUTH_TEMPLATES) class PasswordResetFormTest(TestDataMixin, TestCase): @classmethod def setUpClass(cls): super().setUpClass() # This cleanup is necessary because contrib.sites cache # makes tests interfere with each other, see #11505 Site.objects.clear_cache() def create_dummy_user(self): """ Create a user and return a tuple (user_object, username, email). 
""" username = 'jsmith' email = '[email protected]' user = User.objects.create_user(username, email, 'test123') return (user, username, email) def test_invalid_email(self): data = {'email': 'not valid'} form = PasswordResetForm(data) self.assertFalse(form.is_valid()) self.assertEqual(form['email'].errors, [_('Enter a valid email address.')]) def test_user_email_unicode_collision(self): User.objects.create_user('mike123', '[email protected]', 'test123') User.objects.create_user('mike456', 'mı[email protected]', 'test123') data = {'email': 'mı[email protected]'} form = PasswordResetForm(data) self.assertTrue(form.is_valid()) form.save() self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].to, ['mı[email protected]']) def test_user_email_domain_unicode_collision(self): User.objects.create_user('mike123', '[email protected]', 'test123') User.objects.create_user('mike456', 'mike@ıxample.org', 'test123') data = {'email': 'mike@ıxample.org'} form = PasswordResetForm(data) self.assertTrue(form.is_valid()) form.save() self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].to, ['mike@ıxample.org']) def test_user_email_unicode_collision_nonexistent(self): User.objects.create_user('mike123', '[email protected]', 'test123') data = {'email': 'mı[email protected]'} form = PasswordResetForm(data) self.assertTrue(form.is_valid()) form.save() self.assertEqual(len(mail.outbox), 0) def test_user_email_domain_unicode_collision_nonexistent(self): User.objects.create_user('mike123', '[email protected]', 'test123') data = {'email': 'mike@ıxample.org'} form = PasswordResetForm(data) self.assertTrue(form.is_valid()) form.save() self.assertEqual(len(mail.outbox), 0) def test_nonexistent_email(self): """ Test nonexistent email address. This should not fail because it would expose information about registered users. """ data = {'email': '[email protected]'} form = PasswordResetForm(data) self.assertTrue(form.is_valid()) self.assertEqual(len(mail.outbox), 0) def test_cleaned_data(self): (user, username, email) = self.create_dummy_user() data = {'email': email} form = PasswordResetForm(data) self.assertTrue(form.is_valid()) form.save(domain_override='example.com') self.assertEqual(form.cleaned_data['email'], email) self.assertEqual(len(mail.outbox), 1) def test_custom_email_subject(self): data = {'email': '[email protected]'} form = PasswordResetForm(data) self.assertTrue(form.is_valid()) # Since we're not providing a request object, we must provide a # domain_override to prevent the save operation from failing in the # potential case where contrib.sites is not installed. Refs #16412. 
form.save(domain_override='example.com') self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].subject, 'Custom password reset on example.com') def test_custom_email_constructor(self): data = {'email': '[email protected]'} class CustomEmailPasswordResetForm(PasswordResetForm): def send_mail(self, subject_template_name, email_template_name, context, from_email, to_email, html_email_template_name=None): EmailMultiAlternatives( "Forgot your password?", "Sorry to hear you forgot your password.", None, [to_email], ['[email protected]'], headers={'Reply-To': '[email protected]'}, alternatives=[ ("Really sorry to hear you forgot your password.", "text/html") ], ).send() form = CustomEmailPasswordResetForm(data) self.assertTrue(form.is_valid()) # Since we're not providing a request object, we must provide a # domain_override to prevent the save operation from failing in the # potential case where contrib.sites is not installed. Refs #16412. form.save(domain_override='example.com') self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].subject, 'Forgot your password?') self.assertEqual(mail.outbox[0].bcc, ['[email protected]']) self.assertEqual(mail.outbox[0].content_subtype, "plain") def test_preserve_username_case(self): """ Preserve the case of the user name (before the @ in the email address) when creating a user (#5605). """ user = User.objects.create_user('forms_test2', '[email protected]', 'test') self.assertEqual(user.email, '[email protected]') user = User.objects.create_user('forms_test3', 'tesT', 'test') self.assertEqual(user.email, 'tesT') def test_inactive_user(self): """ Inactive user cannot receive password reset email. """ (user, username, email) = self.create_dummy_user() user.is_active = False user.save() form = PasswordResetForm({'email': email}) self.assertTrue(form.is_valid()) form.save() self.assertEqual(len(mail.outbox), 0) def test_unusable_password(self): user = User.objects.create_user('testuser', '[email protected]', 'test') data = {"email": "[email protected]"} form = PasswordResetForm(data) self.assertTrue(form.is_valid()) user.set_unusable_password() user.save() form = PasswordResetForm(data) # The form itself is valid, but no email is sent self.assertTrue(form.is_valid()) form.save() self.assertEqual(len(mail.outbox), 0) def test_save_plaintext_email(self): """ Test the PasswordResetForm.save() method with no html_email_template_name parameter passed in. Test to ensure original behavior is unchanged after the parameter was added. """ (user, username, email) = self.create_dummy_user() form = PasswordResetForm({"email": email}) self.assertTrue(form.is_valid()) form.save() self.assertEqual(len(mail.outbox), 1) message = mail.outbox[0].message() self.assertFalse(message.is_multipart()) self.assertEqual(message.get_content_type(), 'text/plain') self.assertEqual(message.get('subject'), 'Custom password reset on example.com') self.assertEqual(len(mail.outbox[0].alternatives), 0) self.assertEqual(message.get_all('to'), [email]) self.assertTrue(re.match(r'^http://example.com/reset/[\w+/-]', message.get_payload())) def test_save_html_email_template_name(self): """ Test the PasswordResetForm.save() method with html_email_template_name parameter specified. Test to ensure that a multipart email is sent with both text/plain and text/html parts. 
""" (user, username, email) = self.create_dummy_user() form = PasswordResetForm({"email": email}) self.assertTrue(form.is_valid()) form.save(html_email_template_name='registration/html_password_reset_email.html') self.assertEqual(len(mail.outbox), 1) self.assertEqual(len(mail.outbox[0].alternatives), 1) message = mail.outbox[0].message() self.assertEqual(message.get('subject'), 'Custom password reset on example.com') self.assertEqual(len(message.get_payload()), 2) self.assertTrue(message.is_multipart()) self.assertEqual(message.get_payload(0).get_content_type(), 'text/plain') self.assertEqual(message.get_payload(1).get_content_type(), 'text/html') self.assertEqual(message.get_all('to'), [email]) self.assertTrue(re.match(r'^http://example.com/reset/[\w/-]+', message.get_payload(0).get_payload())) self.assertTrue(re.match( r'^<html><a href="http://example.com/reset/[\w/-]+/">Link</a></html>$', message.get_payload(1).get_payload() )) @override_settings(AUTH_USER_MODEL='auth_tests.CustomEmailField') def test_custom_email_field(self): email = '[email protected]' CustomEmailField.objects.create_user('test name', 'test password', email) form = PasswordResetForm({'email': email}) self.assertTrue(form.is_valid()) form.save() self.assertEqual(form.cleaned_data['email'], email) self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].to, [email]) def test_html_autocomplete_attributes(self): form = PasswordResetForm() self.assertEqual(form.fields['email'].widget.attrs['autocomplete'], 'email') class ReadOnlyPasswordHashTest(SimpleTestCase): def test_bug_19349_render_with_none_value(self): # Rendering the widget with value set to None # mustn't raise an exception. widget = ReadOnlyPasswordHashWidget() html = widget.render(name='password', value=None, attrs={}) self.assertIn(_("No password set."), html) @override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.PBKDF2PasswordHasher']) def test_render(self): widget = ReadOnlyPasswordHashWidget() value = 'pbkdf2_sha256$100000$a6Pucb1qSFcD$WmCkn9Hqidj48NVe5x0FEM6A9YiOqQcl/83m2Z5udm0=' self.assertHTMLEqual( widget.render('name', value, {'id': 'id_password'}), """ <div id="id_password"> <strong>algorithm</strong>: pbkdf2_sha256 <strong>iterations</strong>: 100000 <strong>salt</strong>: a6Pucb****** <strong>hash</strong>: WmCkn9************************************** </div> """ ) def test_readonly_field_has_changed(self): field = ReadOnlyPasswordHashField() self.assertIs(field.disabled, True) self.assertFalse(field.has_changed('aaa', 'bbb')) def test_label(self): """ ReadOnlyPasswordHashWidget doesn't contain a for attribute in the <label> because it doesn't have any labelable elements. 
""" class TestForm(forms.Form): hash_field = ReadOnlyPasswordHashField() bound_field = TestForm()['hash_field'] self.assertIsNone(bound_field.field.widget.id_for_label('id')) self.assertEqual(bound_field.label_tag(), '<label>Hash field:</label>') class AdminPasswordChangeFormTest(TestDataMixin, TestCase): @mock.patch('django.contrib.auth.password_validation.password_changed') def test_success(self, password_changed): user = User.objects.get(username='testclient') data = { 'password1': 'test123', 'password2': 'test123', } form = AdminPasswordChangeForm(user, data) self.assertTrue(form.is_valid()) form.save(commit=False) self.assertEqual(password_changed.call_count, 0) form.save() self.assertEqual(password_changed.call_count, 1) def test_password_whitespace_not_stripped(self): user = User.objects.get(username='testclient') data = { 'password1': ' pass ', 'password2': ' pass ', } form = AdminPasswordChangeForm(user, data) self.assertTrue(form.is_valid()) self.assertEqual(form.cleaned_data['password1'], data['password1']) self.assertEqual(form.cleaned_data['password2'], data['password2']) def test_non_matching_passwords(self): user = User.objects.get(username='testclient') data = {'password1': 'password1', 'password2': 'password2'} form = AdminPasswordChangeForm(user, data) self.assertEqual(form.errors['password2'], [form.error_messages['password_mismatch']]) def test_missing_passwords(self): user = User.objects.get(username='testclient') data = {'password1': '', 'password2': ''} form = AdminPasswordChangeForm(user, data) required_error = [Field.default_error_messages['required']] self.assertEqual(form.errors['password1'], required_error) self.assertEqual(form.errors['password2'], required_error) def test_one_password(self): user = User.objects.get(username='testclient') form1 = AdminPasswordChangeForm(user, {'password1': '', 'password2': 'test'}) required_error = [Field.default_error_messages['required']] self.assertEqual(form1.errors['password1'], required_error) self.assertNotIn('password2', form1.errors) form2 = AdminPasswordChangeForm(user, {'password1': 'test', 'password2': ''}) self.assertEqual(form2.errors['password2'], required_error) self.assertNotIn('password1', form2.errors) def test_html_autocomplete_attributes(self): user = User.objects.get(username='testclient') form = AdminPasswordChangeForm(user) tests = ( ('password1', 'new-password'), ('password2', 'new-password'), ) for field_name, autocomplete in tests: with self.subTest(field_name=field_name, autocomplete=autocomplete): self.assertEqual(form.fields[field_name].widget.attrs['autocomplete'], autocomplete)
from unittest import mock, skipUnless

from django.conf.global_settings import PASSWORD_HASHERS
from django.contrib.auth.hashers import (
    UNUSABLE_PASSWORD_PREFIX, UNUSABLE_PASSWORD_SUFFIX_LENGTH,
    BasePasswordHasher, BCryptPasswordHasher, BCryptSHA256PasswordHasher,
    MD5PasswordHasher, PBKDF2PasswordHasher, PBKDF2SHA1PasswordHasher,
    ScryptPasswordHasher, SHA1PasswordHasher, check_password, get_hasher,
    identify_hasher, is_password_usable, make_password,
)
from django.test import SimpleTestCase
from django.test.utils import override_settings

try:
    import crypt
except ImportError:
    crypt = None
else:
    # On some platforms (e.g. OpenBSD), crypt.crypt() always returns None.
    if crypt.crypt('') is None:
        crypt = None

try:
    import bcrypt
except ImportError:
    bcrypt = None

try:
    import argon2
except ImportError:
    argon2 = None


class PBKDF2SingleIterationHasher(PBKDF2PasswordHasher):
    iterations = 1


@override_settings(PASSWORD_HASHERS=PASSWORD_HASHERS)
class TestUtilsHashPass(SimpleTestCase):

    def test_simple(self):
        encoded = make_password('lètmein')
        self.assertTrue(encoded.startswith('pbkdf2_sha256$'))
        self.assertTrue(is_password_usable(encoded))
        self.assertTrue(check_password('lètmein', encoded))
        self.assertFalse(check_password('lètmeinz', encoded))
        # Blank passwords
        blank_encoded = make_password('')
        self.assertTrue(blank_encoded.startswith('pbkdf2_sha256$'))
        self.assertTrue(is_password_usable(blank_encoded))
        self.assertTrue(check_password('', blank_encoded))
        self.assertFalse(check_password(' ', blank_encoded))

    def test_bytes(self):
        encoded = make_password(b'bytes_password')
        self.assertTrue(encoded.startswith('pbkdf2_sha256$'))
        self.assertIs(is_password_usable(encoded), True)
        self.assertIs(check_password(b'bytes_password', encoded), True)

    def test_invalid_password(self):
        msg = 'Password must be a string or bytes, got int.'
        with self.assertRaisesMessage(TypeError, msg):
            make_password(1)

    def test_pbkdf2(self):
        encoded = make_password('lètmein', 'seasalt', 'pbkdf2_sha256')
        self.assertEqual(encoded, 'pbkdf2_sha256$320000$seasalt$Toj2II2rBvFiGQcPmUml1Nlni2UtvyRWwz/jz4q6q/4=')
        self.assertTrue(is_password_usable(encoded))
        self.assertTrue(check_password('lètmein', encoded))
        self.assertFalse(check_password('lètmeinz', encoded))
        self.assertEqual(identify_hasher(encoded).algorithm, "pbkdf2_sha256")
        # Blank passwords
        blank_encoded = make_password('', 'seasalt', 'pbkdf2_sha256')
        self.assertTrue(blank_encoded.startswith('pbkdf2_sha256$'))
        self.assertTrue(is_password_usable(blank_encoded))
        self.assertTrue(check_password('', blank_encoded))
        self.assertFalse(check_password(' ', blank_encoded))
        # Salt entropy check.
hasher = get_hasher('pbkdf2_sha256') encoded_weak_salt = make_password('lètmein', 'iodizedsalt', 'pbkdf2_sha256') encoded_strong_salt = make_password('lètmein', hasher.salt(), 'pbkdf2_sha256') self.assertIs(hasher.must_update(encoded_weak_salt), True) self.assertIs(hasher.must_update(encoded_strong_salt), False) @override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher']) def test_sha1(self): encoded = make_password('lètmein', 'seasalt', 'sha1') self.assertEqual(encoded, 'sha1$seasalt$cff36ea83f5706ce9aa7454e63e431fc726b2dc8') self.assertTrue(is_password_usable(encoded)) self.assertTrue(check_password('lètmein', encoded)) self.assertFalse(check_password('lètmeinz', encoded)) self.assertEqual(identify_hasher(encoded).algorithm, "sha1") # Blank passwords blank_encoded = make_password('', 'seasalt', 'sha1') self.assertTrue(blank_encoded.startswith('sha1$')) self.assertTrue(is_password_usable(blank_encoded)) self.assertTrue(check_password('', blank_encoded)) self.assertFalse(check_password(' ', blank_encoded)) # Salt entropy check. hasher = get_hasher('sha1') encoded_weak_salt = make_password('lètmein', 'iodizedsalt', 'sha1') encoded_strong_salt = make_password('lètmein', hasher.salt(), 'sha1') self.assertIs(hasher.must_update(encoded_weak_salt), True) self.assertIs(hasher.must_update(encoded_strong_salt), False) @override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.MD5PasswordHasher']) def test_md5(self): encoded = make_password('lètmein', 'seasalt', 'md5') self.assertEqual(encoded, 'md5$seasalt$3f86d0d3d465b7b458c231bf3555c0e3') self.assertTrue(is_password_usable(encoded)) self.assertTrue(check_password('lètmein', encoded)) self.assertFalse(check_password('lètmeinz', encoded)) self.assertEqual(identify_hasher(encoded).algorithm, "md5") # Blank passwords blank_encoded = make_password('', 'seasalt', 'md5') self.assertTrue(blank_encoded.startswith('md5$')) self.assertTrue(is_password_usable(blank_encoded)) self.assertTrue(check_password('', blank_encoded)) self.assertFalse(check_password(' ', blank_encoded)) # Salt entropy check. 
hasher = get_hasher('md5') encoded_weak_salt = make_password('lètmein', 'iodizedsalt', 'md5') encoded_strong_salt = make_password('lètmein', hasher.salt(), 'md5') self.assertIs(hasher.must_update(encoded_weak_salt), True) self.assertIs(hasher.must_update(encoded_strong_salt), False) @override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.UnsaltedMD5PasswordHasher']) def test_unsalted_md5(self): encoded = make_password('lètmein', '', 'unsalted_md5') self.assertEqual(encoded, '88a434c88cca4e900f7874cd98123f43') self.assertTrue(is_password_usable(encoded)) self.assertTrue(check_password('lètmein', encoded)) self.assertFalse(check_password('lètmeinz', encoded)) self.assertEqual(identify_hasher(encoded).algorithm, "unsalted_md5") # Alternate unsalted syntax alt_encoded = "md5$$%s" % encoded self.assertTrue(is_password_usable(alt_encoded)) self.assertTrue(check_password('lètmein', alt_encoded)) self.assertFalse(check_password('lètmeinz', alt_encoded)) # Blank passwords blank_encoded = make_password('', '', 'unsalted_md5') self.assertTrue(is_password_usable(blank_encoded)) self.assertTrue(check_password('', blank_encoded)) self.assertFalse(check_password(' ', blank_encoded)) @override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.UnsaltedSHA1PasswordHasher']) def test_unsalted_sha1(self): encoded = make_password('lètmein', '', 'unsalted_sha1') self.assertEqual(encoded, 'sha1$$6d138ca3ae545631b3abd71a4f076ce759c5700b') self.assertTrue(is_password_usable(encoded)) self.assertTrue(check_password('lètmein', encoded)) self.assertFalse(check_password('lètmeinz', encoded)) self.assertEqual(identify_hasher(encoded).algorithm, "unsalted_sha1") # Raw SHA1 isn't acceptable alt_encoded = encoded[6:] self.assertFalse(check_password('lètmein', alt_encoded)) # Blank passwords blank_encoded = make_password('', '', 'unsalted_sha1') self.assertTrue(blank_encoded.startswith('sha1$')) self.assertTrue(is_password_usable(blank_encoded)) self.assertTrue(check_password('', blank_encoded)) self.assertFalse(check_password(' ', blank_encoded)) @skipUnless(crypt, "no crypt module to generate password.") @override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.CryptPasswordHasher']) def test_crypt(self): encoded = make_password('lètmei', 'ab', 'crypt') self.assertEqual(encoded, 'crypt$$ab1Hv2Lg7ltQo') self.assertTrue(is_password_usable(encoded)) self.assertTrue(check_password('lètmei', encoded)) self.assertFalse(check_password('lètmeiz', encoded)) self.assertEqual(identify_hasher(encoded).algorithm, "crypt") # Blank passwords blank_encoded = make_password('', 'ab', 'crypt') self.assertTrue(blank_encoded.startswith('crypt$')) self.assertTrue(is_password_usable(blank_encoded)) self.assertTrue(check_password('', blank_encoded)) self.assertFalse(check_password(' ', blank_encoded)) @skipUnless(bcrypt, "bcrypt not installed") def test_bcrypt_sha256(self): encoded = make_password('lètmein', hasher='bcrypt_sha256') self.assertTrue(is_password_usable(encoded)) self.assertTrue(encoded.startswith('bcrypt_sha256$')) self.assertTrue(check_password('lètmein', encoded)) self.assertFalse(check_password('lètmeinz', encoded)) self.assertEqual(identify_hasher(encoded).algorithm, "bcrypt_sha256") # password truncation no longer works password = ( 'VSK0UYV6FFQVZ0KG88DYN9WADAADZO1CTSIVDJUNZSUML6IBX7LN7ZS3R5' 'JGB3RGZ7VI7G7DJQ9NI8BQFSRPTG6UWTTVESA5ZPUN' ) encoded = make_password(password, hasher='bcrypt_sha256') self.assertTrue(check_password(password, encoded)) self.assertFalse(check_password(password[:72], encoded)) 
# Blank passwords blank_encoded = make_password('', hasher='bcrypt_sha256') self.assertTrue(blank_encoded.startswith('bcrypt_sha256$')) self.assertTrue(is_password_usable(blank_encoded)) self.assertTrue(check_password('', blank_encoded)) self.assertFalse(check_password(' ', blank_encoded)) @skipUnless(bcrypt, "bcrypt not installed") @override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.BCryptPasswordHasher']) def test_bcrypt(self): encoded = make_password('lètmein', hasher='bcrypt') self.assertTrue(is_password_usable(encoded)) self.assertTrue(encoded.startswith('bcrypt$')) self.assertTrue(check_password('lètmein', encoded)) self.assertFalse(check_password('lètmeinz', encoded)) self.assertEqual(identify_hasher(encoded).algorithm, "bcrypt") # Blank passwords blank_encoded = make_password('', hasher='bcrypt') self.assertTrue(blank_encoded.startswith('bcrypt$')) self.assertTrue(is_password_usable(blank_encoded)) self.assertTrue(check_password('', blank_encoded)) self.assertFalse(check_password(' ', blank_encoded)) @skipUnless(bcrypt, "bcrypt not installed") @override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.BCryptPasswordHasher']) def test_bcrypt_upgrade(self): hasher = get_hasher('bcrypt') self.assertEqual('bcrypt', hasher.algorithm) self.assertNotEqual(hasher.rounds, 4) old_rounds = hasher.rounds try: # Generate a password with 4 rounds. hasher.rounds = 4 encoded = make_password('letmein', hasher='bcrypt') rounds = hasher.safe_summary(encoded)['work factor'] self.assertEqual(rounds, 4) state = {'upgraded': False} def setter(password): state['upgraded'] = True # No upgrade is triggered. self.assertTrue(check_password('letmein', encoded, setter, 'bcrypt')) self.assertFalse(state['upgraded']) # Revert to the old rounds count and ... hasher.rounds = old_rounds # ... check if the password would get updated to the new count. self.assertTrue(check_password('letmein', encoded, setter, 'bcrypt')) self.assertTrue(state['upgraded']) finally: hasher.rounds = old_rounds @skipUnless(bcrypt, "bcrypt not installed") @override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.BCryptPasswordHasher']) def test_bcrypt_harden_runtime(self): hasher = get_hasher('bcrypt') self.assertEqual('bcrypt', hasher.algorithm) with mock.patch.object(hasher, 'rounds', 4): encoded = make_password('letmein', hasher='bcrypt') with mock.patch.object(hasher, 'rounds', 6), \ mock.patch.object(hasher, 'encode', side_effect=hasher.encode): hasher.harden_runtime('wrong_password', encoded) # Increasing rounds from 4 to 6 means an increase of 4 in workload, # therefore hardening should run 3 times to make the timing the # same (the original encode() call already ran once). 
        self.assertEqual(hasher.encode.call_count, 3)

        # Get the original salt (includes the original workload factor)
        algorithm, data = encoded.split('$', 1)
        expected_call = (('wrong_password', data[:29].encode()),)
        self.assertEqual(hasher.encode.call_args_list, [expected_call] * 3)

    def test_unusable(self):
        encoded = make_password(None)
        self.assertEqual(len(encoded), len(UNUSABLE_PASSWORD_PREFIX) + UNUSABLE_PASSWORD_SUFFIX_LENGTH)
        self.assertFalse(is_password_usable(encoded))
        self.assertFalse(check_password(None, encoded))
        self.assertFalse(check_password(encoded, encoded))
        self.assertFalse(check_password(UNUSABLE_PASSWORD_PREFIX, encoded))
        self.assertFalse(check_password('', encoded))
        self.assertFalse(check_password('lètmein', encoded))
        self.assertFalse(check_password('lètmeinz', encoded))
        with self.assertRaisesMessage(ValueError, 'Unknown password hashing algorithm'):
            identify_hasher(encoded)
        # Assert that the unusable passwords actually contain a random part.
        # This might fail one day due to a hash collision.
        self.assertNotEqual(encoded, make_password(None), "Random password collision?")

    def test_unspecified_password(self):
        """
        Makes sure specifying no plain password with a valid encoded password
        returns `False`.
        """
        self.assertFalse(check_password(None, make_password('lètmein')))

    def test_bad_algorithm(self):
        msg = (
            "Unknown password hashing algorithm '%s'. Did you specify it in "
            "the PASSWORD_HASHERS setting?"
        )
        with self.assertRaisesMessage(ValueError, msg % 'lolcat'):
            make_password('lètmein', hasher='lolcat')
        with self.assertRaisesMessage(ValueError, msg % 'lolcat'):
            identify_hasher('lolcat$salt$hash')

    def test_is_password_usable(self):
        passwords = ('lètmein_badencoded', '', None)
        for password in passwords:
            with self.subTest(password=password):
                self.assertIs(is_password_usable(password), True)

    def test_low_level_pbkdf2(self):
        hasher = PBKDF2PasswordHasher()
        encoded = hasher.encode('lètmein', 'seasalt2')
        self.assertEqual(encoded, 'pbkdf2_sha256$320000$seasalt2$BRr4pYNIQDsLFP+u4dzjs7pFuWJEin4lFMMoO9wBYvo=')
        self.assertTrue(hasher.verify('lètmein', encoded))

    def test_low_level_pbkdf2_sha1(self):
        hasher = PBKDF2SHA1PasswordHasher()
        encoded = hasher.encode('lètmein', 'seasalt2')
        self.assertEqual(encoded, 'pbkdf2_sha1$320000$seasalt2$sDOkTvzV93jPWTRVxFGh50Jefo0=')
        self.assertTrue(hasher.verify('lètmein', encoded))

    @skipUnless(bcrypt, 'bcrypt not installed')
    def test_bcrypt_salt_check(self):
        hasher = BCryptPasswordHasher()
        encoded = hasher.encode('lètmein', hasher.salt())
        self.assertIs(hasher.must_update(encoded), False)

    @skipUnless(bcrypt, 'bcrypt not installed')
    def test_bcryptsha256_salt_check(self):
        hasher = BCryptSHA256PasswordHasher()
        encoded = hasher.encode('lètmein', hasher.salt())
        self.assertIs(hasher.must_update(encoded), False)

    @override_settings(
        PASSWORD_HASHERS=[
            'django.contrib.auth.hashers.PBKDF2PasswordHasher',
            'django.contrib.auth.hashers.SHA1PasswordHasher',
            'django.contrib.auth.hashers.MD5PasswordHasher',
        ],
    )
    def test_upgrade(self):
        self.assertEqual('pbkdf2_sha256', get_hasher('default').algorithm)
        for algo in ('sha1', 'md5'):
            with self.subTest(algo=algo):
                encoded = make_password('lètmein', hasher=algo)
                state = {'upgraded': False}

                def setter(password):
                    state['upgraded'] = True
                self.assertTrue(check_password('lètmein', encoded, setter))
                self.assertTrue(state['upgraded'])

    def test_no_upgrade(self):
        encoded = make_password('lètmein')
        state = {'upgraded': False}

        def setter():
            state['upgraded'] = True
        self.assertFalse(check_password('WRONG', encoded, setter))
self.assertFalse(state['upgraded']) @override_settings( PASSWORD_HASHERS=[ 'django.contrib.auth.hashers.PBKDF2PasswordHasher', 'django.contrib.auth.hashers.SHA1PasswordHasher', 'django.contrib.auth.hashers.MD5PasswordHasher', ], ) def test_no_upgrade_on_incorrect_pass(self): self.assertEqual('pbkdf2_sha256', get_hasher('default').algorithm) for algo in ('sha1', 'md5'): with self.subTest(algo=algo): encoded = make_password('lètmein', hasher=algo) state = {'upgraded': False} def setter(): state['upgraded'] = True self.assertFalse(check_password('WRONG', encoded, setter)) self.assertFalse(state['upgraded']) def test_pbkdf2_upgrade(self): hasher = get_hasher('default') self.assertEqual('pbkdf2_sha256', hasher.algorithm) self.assertNotEqual(hasher.iterations, 1) old_iterations = hasher.iterations try: # Generate a password with 1 iteration. hasher.iterations = 1 encoded = make_password('letmein') algo, iterations, salt, hash = encoded.split('$', 3) self.assertEqual(iterations, '1') state = {'upgraded': False} def setter(password): state['upgraded'] = True # No upgrade is triggered self.assertTrue(check_password('letmein', encoded, setter)) self.assertFalse(state['upgraded']) # Revert to the old iteration count and ... hasher.iterations = old_iterations # ... check if the password would get updated to the new iteration count. self.assertTrue(check_password('letmein', encoded, setter)) self.assertTrue(state['upgraded']) finally: hasher.iterations = old_iterations def test_pbkdf2_harden_runtime(self): hasher = get_hasher('default') self.assertEqual('pbkdf2_sha256', hasher.algorithm) with mock.patch.object(hasher, 'iterations', 1): encoded = make_password('letmein') with mock.patch.object(hasher, 'iterations', 6), \ mock.patch.object(hasher, 'encode', side_effect=hasher.encode): hasher.harden_runtime('wrong_password', encoded) # Encode should get called once ... self.assertEqual(hasher.encode.call_count, 1) # ... with the original salt and 5 iterations. algorithm, iterations, salt, hash = encoded.split('$', 3) expected_call = (('wrong_password', salt, 5),) self.assertEqual(hasher.encode.call_args, expected_call) def test_pbkdf2_upgrade_new_hasher(self): hasher = get_hasher('default') self.assertEqual('pbkdf2_sha256', hasher.algorithm) self.assertNotEqual(hasher.iterations, 1) state = {'upgraded': False} def setter(password): state['upgraded'] = True with self.settings(PASSWORD_HASHERS=[ 'auth_tests.test_hashers.PBKDF2SingleIterationHasher']): encoded = make_password('letmein') algo, iterations, salt, hash = encoded.split('$', 3) self.assertEqual(iterations, '1') # No upgrade is triggered self.assertTrue(check_password('letmein', encoded, setter)) self.assertFalse(state['upgraded']) # Revert to the old iteration count and check if the password would get # updated to the new iteration count. 
with self.settings(PASSWORD_HASHERS=[ 'django.contrib.auth.hashers.PBKDF2PasswordHasher', 'auth_tests.test_hashers.PBKDF2SingleIterationHasher']): self.assertTrue(check_password('letmein', encoded, setter)) self.assertTrue(state['upgraded']) def test_check_password_calls_harden_runtime(self): hasher = get_hasher('default') encoded = make_password('letmein') with mock.patch.object(hasher, 'harden_runtime'), \ mock.patch.object(hasher, 'must_update', return_value=True): # Correct password supplied, no hardening needed check_password('letmein', encoded) self.assertEqual(hasher.harden_runtime.call_count, 0) # Wrong password supplied, hardening needed check_password('wrong_password', encoded) self.assertEqual(hasher.harden_runtime.call_count, 1) def test_encode_invalid_salt(self): hasher_classes = [ MD5PasswordHasher, PBKDF2PasswordHasher, PBKDF2SHA1PasswordHasher, ScryptPasswordHasher, SHA1PasswordHasher, ] msg = 'salt must be provided and cannot contain $.' for hasher_class in hasher_classes: hasher = hasher_class() for salt in [None, '', 'sea$salt']: with self.subTest(hasher_class.__name__, salt=salt): with self.assertRaisesMessage(ValueError, msg): hasher.encode('password', salt) def test_encode_password_required(self): hasher_classes = [ MD5PasswordHasher, PBKDF2PasswordHasher, PBKDF2SHA1PasswordHasher, ScryptPasswordHasher, SHA1PasswordHasher, ] msg = 'password must be provided.' for hasher_class in hasher_classes: hasher = hasher_class() with self.subTest(hasher_class.__name__): with self.assertRaisesMessage(TypeError, msg): hasher.encode(None, 'seasalt') class BasePasswordHasherTests(SimpleTestCase): not_implemented_msg = 'subclasses of BasePasswordHasher must provide %s() method' def setUp(self): self.hasher = BasePasswordHasher() def test_load_library_no_algorithm(self): msg = "Hasher 'BasePasswordHasher' doesn't specify a library attribute" with self.assertRaisesMessage(ValueError, msg): self.hasher._load_library() def test_load_library_importerror(self): PlainHasher = type('PlainHasher', (BasePasswordHasher,), {'algorithm': 'plain', 'library': 'plain'}) msg = "Couldn't load 'PlainHasher' algorithm library: No module named 'plain'" with self.assertRaisesMessage(ValueError, msg): PlainHasher()._load_library() def test_attributes(self): self.assertIsNone(self.hasher.algorithm) self.assertIsNone(self.hasher.library) def test_encode(self): msg = self.not_implemented_msg % 'an encode' with self.assertRaisesMessage(NotImplementedError, msg): self.hasher.encode('password', 'salt') def test_decode(self): msg = self.not_implemented_msg % 'a decode' with self.assertRaisesMessage(NotImplementedError, msg): self.hasher.decode('encoded') def test_harden_runtime(self): msg = 'subclasses of BasePasswordHasher should provide a harden_runtime() method' with self.assertWarnsMessage(Warning, msg): self.hasher.harden_runtime('password', 'encoded') def test_must_update(self): self.assertIs(self.hasher.must_update('encoded'), False) def test_safe_summary(self): msg = self.not_implemented_msg % 'a safe_summary' with self.assertRaisesMessage(NotImplementedError, msg): self.hasher.safe_summary('encoded') def test_verify(self): msg = self.not_implemented_msg % 'a verify' with self.assertRaisesMessage(NotImplementedError, msg): self.hasher.verify('password', 'encoded') @skipUnless(argon2, "argon2-cffi not installed") @override_settings(PASSWORD_HASHERS=PASSWORD_HASHERS) class TestUtilsHashPassArgon2(SimpleTestCase): def test_argon2(self): encoded = make_password('lètmein', hasher='argon2') 
self.assertTrue(is_password_usable(encoded)) self.assertTrue(encoded.startswith('argon2$argon2id$')) self.assertTrue(check_password('lètmein', encoded)) self.assertFalse(check_password('lètmeinz', encoded)) self.assertEqual(identify_hasher(encoded).algorithm, 'argon2') # Blank passwords blank_encoded = make_password('', hasher='argon2') self.assertTrue(blank_encoded.startswith('argon2$argon2id$')) self.assertTrue(is_password_usable(blank_encoded)) self.assertTrue(check_password('', blank_encoded)) self.assertFalse(check_password(' ', blank_encoded)) # Old hashes without version attribute encoded = ( 'argon2$argon2i$m=8,t=1,p=1$c29tZXNhbHQ$gwQOXSNhxiOxPOA0+PY10P9QFO' '4NAYysnqRt1GSQLE55m+2GYDt9FEjPMHhP2Cuf0nOEXXMocVrsJAtNSsKyfg' ) self.assertTrue(check_password('secret', encoded)) self.assertFalse(check_password('wrong', encoded)) # Old hashes with version attribute. encoded = ( 'argon2$argon2i$v=19$m=8,t=1,p=1$c2FsdHNhbHQ$YC9+jJCrQhs5R6db7LlN8Q' ) self.assertIs(check_password('secret', encoded), True) self.assertIs(check_password('wrong', encoded), False) # Salt entropy check. hasher = get_hasher('argon2') encoded_weak_salt = make_password('lètmein', 'iodizedsalt', 'argon2') encoded_strong_salt = make_password('lètmein', hasher.salt(), 'argon2') self.assertIs(hasher.must_update(encoded_weak_salt), True) self.assertIs(hasher.must_update(encoded_strong_salt), False) def test_argon2_decode(self): salt = 'abcdefghijk' encoded = make_password('lètmein', salt=salt, hasher='argon2') hasher = get_hasher('argon2') decoded = hasher.decode(encoded) self.assertEqual(decoded['memory_cost'], hasher.memory_cost) self.assertEqual(decoded['parallelism'], hasher.parallelism) self.assertEqual(decoded['salt'], salt) self.assertEqual(decoded['time_cost'], hasher.time_cost) def test_argon2_upgrade(self): self._test_argon2_upgrade('time_cost', 'time cost', 1) self._test_argon2_upgrade('memory_cost', 'memory cost', 64) self._test_argon2_upgrade('parallelism', 'parallelism', 1) def test_argon2_version_upgrade(self): hasher = get_hasher('argon2') state = {'upgraded': False} encoded = ( 'argon2$argon2id$v=19$m=102400,t=2,p=8$Y041dExhNkljRUUy$TMa6A8fPJh' 'CAUXRhJXCXdw' ) def setter(password): state['upgraded'] = True old_m = hasher.memory_cost old_t = hasher.time_cost old_p = hasher.parallelism try: hasher.memory_cost = 8 hasher.time_cost = 1 hasher.parallelism = 1 self.assertTrue(check_password('secret', encoded, setter, 'argon2')) self.assertTrue(state['upgraded']) finally: hasher.memory_cost = old_m hasher.time_cost = old_t hasher.parallelism = old_p def _test_argon2_upgrade(self, attr, summary_key, new_value): hasher = get_hasher('argon2') self.assertEqual('argon2', hasher.algorithm) self.assertNotEqual(getattr(hasher, attr), new_value) old_value = getattr(hasher, attr) try: # Generate hash with attr set to 1 setattr(hasher, attr, new_value) encoded = make_password('letmein', hasher='argon2') attr_value = hasher.safe_summary(encoded)[summary_key] self.assertEqual(attr_value, new_value) state = {'upgraded': False} def setter(password): state['upgraded'] = True # No upgrade is triggered. self.assertTrue(check_password('letmein', encoded, setter, 'argon2')) self.assertFalse(state['upgraded']) # Revert to the old rounds count and ... setattr(hasher, attr, old_value) # ... check if the password would get updated to the new count. 
            self.assertTrue(check_password('letmein', encoded, setter, 'argon2'))
            self.assertTrue(state['upgraded'])
        finally:
            setattr(hasher, attr, old_value)


@override_settings(PASSWORD_HASHERS=PASSWORD_HASHERS)
class TestUtilsHashPassScrypt(SimpleTestCase):

    def test_scrypt(self):
        encoded = make_password('lètmein', 'seasalt', 'scrypt')
        self.assertEqual(
            encoded,
            'scrypt$16384$seasalt$8$1$Qj3+9PPyRjSJIebHnG81TMjsqtaIGxNQG/aEB/NY'
            'afTJ7tibgfYz71m0ldQESkXFRkdVCBhhY8mx7rQwite/Pw=='
        )
        self.assertIs(is_password_usable(encoded), True)
        self.assertIs(check_password('lètmein', encoded), True)
        self.assertIs(check_password('lètmeinz', encoded), False)
        self.assertEqual(identify_hasher(encoded).algorithm, "scrypt")
        # Blank passwords.
        blank_encoded = make_password('', 'seasalt', 'scrypt')
        self.assertIs(blank_encoded.startswith('scrypt$'), True)
        self.assertIs(is_password_usable(blank_encoded), True)
        self.assertIs(check_password('', blank_encoded), True)
        self.assertIs(check_password(' ', blank_encoded), False)

    def test_scrypt_decode(self):
        encoded = make_password('lètmein', 'seasalt', 'scrypt')
        hasher = get_hasher('scrypt')
        decoded = hasher.decode(encoded)
        tests = [
            ('block_size', hasher.block_size),
            ('parallelism', hasher.parallelism),
            ('salt', 'seasalt'),
            ('work_factor', hasher.work_factor),
        ]
        for key, expected in tests:
            with self.subTest(key=key):
                self.assertEqual(decoded[key], expected)

    def _test_scrypt_upgrade(self, attr, summary_key, new_value):
        hasher = get_hasher('scrypt')
        self.assertEqual(hasher.algorithm, 'scrypt')
        self.assertNotEqual(getattr(hasher, attr), new_value)
        old_value = getattr(hasher, attr)
        try:
            # Generate hash with attr set to the new value.
            setattr(hasher, attr, new_value)
            encoded = make_password('lètmein', 'seasalt', 'scrypt')
            attr_value = hasher.safe_summary(encoded)[summary_key]
            self.assertEqual(attr_value, new_value)

            state = {'upgraded': False}

            def setter(password):
                state['upgraded'] = True

            # No update is triggered.
            self.assertIs(check_password('lètmein', encoded, setter, 'scrypt'), True)
            self.assertIs(state['upgraded'], False)
            # Revert to the old value.
            setattr(hasher, attr, old_value)
            # Password is updated.
            self.assertIs(check_password('lètmein', encoded, setter, 'scrypt'), True)
            self.assertIs(state['upgraded'], True)
        finally:
            setattr(hasher, attr, old_value)

    def test_scrypt_upgrade(self):
        tests = [
            ('work_factor', 'work factor', 2 ** 11),
            ('block_size', 'block size', 10),
            ('parallelism', 'parallelism', 2),
        ]
        for attr, summary_key, new_value in tests:
            with self.subTest(attr=attr):
                self._test_scrypt_upgrade(attr, summary_key, new_value)
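
# ----------------------------------------------------------------------------
# Illustrative sketch, not part of the test suite above: the re-hash flow that
# the *_upgrade tests exercise. check_password() calls the optional `setter`
# callback whenever the stored hash was created with weaker parameters than
# the currently preferred hasher, so callers can transparently upgrade hashes
# on a successful login. `StoredCredential` is a hypothetical stand-in for a
# user model, and a configured Django settings module is assumed.
from django.contrib.auth.hashers import check_password, make_password


class StoredCredential:
    """Hypothetical holder for an encoded password."""

    def __init__(self, raw_password):
        self.password = make_password(raw_password)

    def authenticate(self, raw_password):
        def setter(raw):
            # Called only when the stored hash should be upgraded; re-encode
            # with the current default hasher and keep the new value.
            self.password = make_password(raw)
        return check_password(raw_password, self.password, setter)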
import os

from django.core.exceptions import SuspiciousFileOperation
from django.core.files.base import ContentFile
from django.core.files.storage import FileSystemStorage, Storage
from django.db.models import FileField
from django.test import SimpleTestCase


class AWSS3Storage(Storage):
    """
    Simulate an AWS S3 storage which uses Unix-like paths and allows any
    characters in file names; there are no real folders, only keys.
    """
    prefix = 'mys3folder/'

    def _save(self, name, content):
        """
        This method is important to test that Storage.save() doesn't replace
        '\' with '/' (whereas FileSystemStorage.save() does).
        """
        return name

    def get_valid_name(self, name):
        return name

    def get_available_name(self, name, max_length=None):
        return name

    def generate_filename(self, filename):
        """
        This is the method that's important to override when using S3 so that
        os.path() isn't called, which would break S3 keys.
        """
        return self.prefix + self.get_valid_name(filename)


class GenerateFilenameStorageTests(SimpleTestCase):
    def test_storage_dangerous_paths(self):
        candidates = [
            ('/tmp/..', '..'),
            ('/tmp/.', '.'),
            ('', ''),
        ]
        s = FileSystemStorage()
        msg = "Could not derive file name from '%s'"
        for file_name, base_name in candidates:
            with self.subTest(file_name=file_name):
                with self.assertRaisesMessage(SuspiciousFileOperation, msg % base_name):
                    s.get_available_name(file_name)
                with self.assertRaisesMessage(SuspiciousFileOperation, msg % base_name):
                    s.generate_filename(file_name)

    def test_storage_dangerous_paths_dir_name(self):
        file_name = '/tmp/../path'
        s = FileSystemStorage()
        msg = "Detected path traversal attempt in '/tmp/..'"
        with self.assertRaisesMessage(SuspiciousFileOperation, msg):
            s.get_available_name(file_name)
        with self.assertRaisesMessage(SuspiciousFileOperation, msg):
            s.generate_filename(file_name)

    def test_filefield_dangerous_filename(self):
        candidates = [
            ('..', 'some/folder/..'),
            ('.', 'some/folder/.'),
            ('', 'some/folder/'),
            ('???', '???'),
            ('$.$.$', '$.$.$'),
        ]
        f = FileField(upload_to='some/folder/')
        for file_name, msg_file_name in candidates:
            msg = f"Could not derive file name from '{msg_file_name}'"
            with self.subTest(file_name=file_name):
                with self.assertRaisesMessage(SuspiciousFileOperation, msg):
                    f.generate_filename(None, file_name)

    def test_filefield_dangerous_filename_dot_segments(self):
        f = FileField(upload_to='some/folder/')
        msg = "Detected path traversal attempt in 'some/folder/../path'"
        with self.assertRaisesMessage(SuspiciousFileOperation, msg):
            f.generate_filename(None, '../path')

    def test_filefield_generate_filename_absolute_path(self):
        f = FileField(upload_to='some/folder/')
        candidates = [
            '/tmp/path',
            '/tmp/../path',
        ]
        for file_name in candidates:
            msg = f"Detected path traversal attempt in '{file_name}'"
            with self.subTest(file_name=file_name):
                with self.assertRaisesMessage(SuspiciousFileOperation, msg):
                    f.generate_filename(None, file_name)

    def test_filefield_generate_filename(self):
        f = FileField(upload_to='some/folder/')
        self.assertEqual(
            f.generate_filename(None, 'test with space.txt'),
            os.path.normpath('some/folder/test_with_space.txt')
        )

    def test_filefield_generate_filename_with_upload_to(self):
        def upload_to(instance, filename):
            return 'some/folder/' + filename

        f = FileField(upload_to=upload_to)
        self.assertEqual(
            f.generate_filename(None, 'test with space.txt'),
            os.path.normpath('some/folder/test_with_space.txt')
        )

    def test_filefield_generate_filename_upload_to_overrides_dangerous_filename(self):
        def upload_to(instance, filename):
            return 'test.txt'
        f = FileField(upload_to=upload_to)
        candidates = [
            '/tmp/.',
            '/tmp/..',
            '/tmp/../path',
            '/tmp/path',
            'some/folder/',
            'some/folder/.',
            'some/folder/..',
            'some/folder/???',
            'some/folder/$.$.$',
            'some/../test.txt',
            '',
        ]
        for file_name in candidates:
            with self.subTest(file_name=file_name):
                self.assertEqual(f.generate_filename(None, file_name), 'test.txt')

    def test_filefield_generate_filename_upload_to_absolute_path(self):
        def upload_to(instance, filename):
            return '/tmp/' + filename

        f = FileField(upload_to=upload_to)
        candidates = [
            'path',
            '../path',
            '???',
            '$.$.$',
        ]
        for file_name in candidates:
            msg = f"Detected path traversal attempt in '/tmp/{file_name}'"
            with self.subTest(file_name=file_name):
                with self.assertRaisesMessage(SuspiciousFileOperation, msg):
                    f.generate_filename(None, file_name)

    def test_filefield_generate_filename_upload_to_dangerous_filename(self):
        def upload_to(instance, filename):
            return '/tmp/' + filename

        f = FileField(upload_to=upload_to)
        candidates = ['..', '.', '']
        for file_name in candidates:
            msg = f"Could not derive file name from '/tmp/{file_name}'"
            with self.subTest(file_name=file_name):
                with self.assertRaisesMessage(SuspiciousFileOperation, msg):
                    f.generate_filename(None, file_name)

    def test_filefield_awss3_storage(self):
        """
        Simulate a FileField with an S3 storage which uses keys rather than
        folders and names. FileField and Storage shouldn't have any os.path()
        calls that break the key.
        """
        storage = AWSS3Storage()
        folder = 'not/a/folder/'

        f = FileField(upload_to=folder, storage=storage)
        key = 'my-file-key\\with odd characters'
        data = ContentFile('test')
        expected_key = AWSS3Storage.prefix + folder + key

        # Simulate call to f.save()
        result_key = f.generate_filename(None, key)
        self.assertEqual(result_key, expected_key)
        result_key = storage.save(result_key, data)
        self.assertEqual(result_key, expected_key)

        # Repeat test with a callable.
        def upload_to(instance, filename):
            # Return a non-normalized path on purpose.
            return folder + filename

        f = FileField(upload_to=upload_to, storage=storage)

        # Simulate call to f.save()
        result_key = f.generate_filename(None, key)
        self.assertEqual(result_key, expected_key)
        result_key = storage.save(result_key, data)
        self.assertEqual(result_key, expected_key)
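
# ----------------------------------------------------------------------------
# Illustrative sketch, not part of the tests above: the generate_filename()
# chain these tests exercise. FileField first applies upload_to (a string
# prefix or a callable), then hands the result to the storage's
# generate_filename(), which is where dangerous names are rejected with
# SuspiciousFileOperation. The 'reports/' prefix and '/tmp/demo_media'
# location are assumptions made for the example; FileSystemStorage and
# FileField are already imported at the top of this module.
def demo_upload_to(instance, filename):
    # A callable upload_to returns the full relative name; FileField still
    # validates the result for path traversal before saving.
    return 'reports/' + filename


def demo_generate_filename():
    storage = FileSystemStorage(location='/tmp/demo_media')
    field = FileField(upload_to=demo_upload_to, storage=storage)
    # Mirrors the tests' usage: no model instance is needed. On Unix-style
    # paths this returns 'reports/summary.txt'.
    return field.generate_filename(None, 'summary.txt')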
import os
import shutil
import sys
import tempfile
import threading
import time
import unittest
from datetime import datetime, timedelta
from io import StringIO
from pathlib import Path
from urllib.request import urlopen

from django.core.cache import cache
from django.core.exceptions import SuspiciousFileOperation
from django.core.files.base import ContentFile, File
from django.core.files.storage import (
    FileSystemStorage, Storage as BaseStorage, default_storage,
    get_storage_class,
)
from django.core.files.uploadedfile import (
    InMemoryUploadedFile, SimpleUploadedFile, TemporaryUploadedFile,
)
from django.db.models import FileField
from django.db.models.fields.files import FileDescriptor
from django.test import (
    LiveServerTestCase, SimpleTestCase, TestCase, override_settings,
)
from django.test.utils import requires_tz_support
from django.urls import NoReverseMatch, reverse_lazy
from django.utils import timezone
from django.utils._os import symlinks_supported

from .models import (
    Storage, callable_storage, temp_storage, temp_storage_location,
)

FILE_SUFFIX_REGEX = '[A-Za-z0-9]{7}'


class GetStorageClassTests(SimpleTestCase):

    def test_get_filesystem_storage(self):
        """
        get_storage_class returns the class for a storage backend name/path.
        """
        self.assertEqual(
            get_storage_class('django.core.files.storage.FileSystemStorage'),
            FileSystemStorage)

    def test_get_invalid_storage_module(self):
        """
        get_storage_class raises an error if the requested import doesn't exist.
        """
        with self.assertRaisesMessage(ImportError, "No module named 'storage'"):
            get_storage_class('storage.NonexistentStorage')

    def test_get_nonexistent_storage_class(self):
        """
        get_storage_class raises an error if the requested class doesn't exist.
        """
        with self.assertRaises(ImportError):
            get_storage_class('django.core.files.storage.NonexistentStorage')

    def test_get_nonexistent_storage_module(self):
        """
        get_storage_class raises an error if the requested module doesn't exist.
        """
        with self.assertRaisesMessage(ImportError, "No module named 'django.core.files.nonexistent_storage'"):
            get_storage_class('django.core.files.nonexistent_storage.NonexistentStorage')


class FileSystemStorageTests(unittest.TestCase):

    def test_deconstruction(self):
        path, args, kwargs = temp_storage.deconstruct()
        self.assertEqual(path, "django.core.files.storage.FileSystemStorage")
        self.assertEqual(args, ())
        self.assertEqual(kwargs, {'location': temp_storage_location})

        kwargs_orig = {
            'location': temp_storage_location,
            'base_url': 'http://myfiles.example.com/'
        }
        storage = FileSystemStorage(**kwargs_orig)
        path, args, kwargs = storage.deconstruct()
        self.assertEqual(kwargs, kwargs_orig)

    def test_lazy_base_url_init(self):
        """
        FileSystemStorage.__init__() shouldn't evaluate base_url.
        """
        storage = FileSystemStorage(base_url=reverse_lazy('app:url'))
        with self.assertRaises(NoReverseMatch):
            storage.url(storage.base_url)


class FileStorageTests(SimpleTestCase):
    storage_class = FileSystemStorage

    def setUp(self):
        self.temp_dir = tempfile.mkdtemp()
        self.storage = self.storage_class(location=self.temp_dir, base_url='/test_media_url/')
        # Set up a second temporary directory which is ensured to have a mixed
        # case name.
        self.temp_dir2 = tempfile.mkdtemp(suffix='aBc')

    def tearDown(self):
        shutil.rmtree(self.temp_dir)
        shutil.rmtree(self.temp_dir2)

    def test_empty_location(self):
        """
        An empty location falls back to the current working directory.
        """
        storage = self.storage_class(location='')
        self.assertEqual(storage.base_location, '')
        self.assertEqual(storage.location, os.getcwd())

    def test_file_access_options(self):
        """
        Standard file access options are available, and work as expected.
        """
        self.assertFalse(self.storage.exists('storage_test'))
        f = self.storage.open('storage_test', 'w')
        f.write('storage contents')
        f.close()
        self.assertTrue(self.storage.exists('storage_test'))

        f = self.storage.open('storage_test', 'r')
        self.assertEqual(f.read(), 'storage contents')
        f.close()

        self.storage.delete('storage_test')
        self.assertFalse(self.storage.exists('storage_test'))

    def _test_file_time_getter(self, getter):
        # Check for correct behavior under both USE_TZ=True and USE_TZ=False.
        # The tests are similar since they both set up a situation where the
        # system time zone, Django's TIME_ZONE, and UTC are distinct.
        self._test_file_time_getter_tz_handling_on(getter)
        self._test_file_time_getter_tz_handling_off(getter)

    @override_settings(USE_TZ=True, TIME_ZONE='Africa/Algiers')
    def _test_file_time_getter_tz_handling_on(self, getter):
        # Django's TZ (and hence the system TZ) is set to Africa/Algiers which
        # is UTC+1 and has no DST change. We can set the Django TZ to something
        # else so that UTC, Django's TIME_ZONE, and the system timezone are all
        # different.
        now_in_algiers = timezone.make_aware(datetime.now())

        with timezone.override(timezone.get_fixed_timezone(-300)):
            # At this point the system TZ is +1 and the Django TZ
            # is -5. The following will be aware in UTC.
            now = timezone.now()
            self.assertFalse(self.storage.exists('test.file.tz.on'))

            f = ContentFile('custom contents')
            f_name = self.storage.save('test.file.tz.on', f)
            self.addCleanup(self.storage.delete, f_name)
            dt = getter(f_name)
            # dt should be aware, in UTC
            self.assertTrue(timezone.is_aware(dt))
            self.assertEqual(now.tzname(), dt.tzname())

            # The three timezones are indeed distinct.
            naive_now = datetime.now()
            algiers_offset = now_in_algiers.tzinfo.utcoffset(naive_now)
            django_offset = timezone.get_current_timezone().utcoffset(naive_now)
            utc_offset = timezone.utc.utcoffset(naive_now)
            self.assertGreater(algiers_offset, utc_offset)
            self.assertLess(django_offset, utc_offset)

            # dt and now should be the same effective time.
            self.assertLess(abs(dt - now), timedelta(seconds=2))

    @override_settings(USE_TZ=False, TIME_ZONE='Africa/Algiers')
    def _test_file_time_getter_tz_handling_off(self, getter):
        # Django's TZ (and hence the system TZ) is set to Africa/Algiers which
        # is UTC+1 and has no DST change. We can set the Django TZ to something
        # else so that UTC, Django's TIME_ZONE, and the system timezone are all
        # different.
        now_in_algiers = timezone.make_aware(datetime.now())

        with timezone.override(timezone.get_fixed_timezone(-300)):
            # At this point the system TZ is +1 and the Django TZ
            # is -5.
            self.assertFalse(self.storage.exists('test.file.tz.off'))

            f = ContentFile('custom contents')
            f_name = self.storage.save('test.file.tz.off', f)
            self.addCleanup(self.storage.delete, f_name)
            dt = getter(f_name)
            # dt should be naive, in system (+1) TZ
            self.assertTrue(timezone.is_naive(dt))

            # The three timezones are indeed distinct.
naive_now = datetime.now() algiers_offset = now_in_algiers.tzinfo.utcoffset(naive_now) django_offset = timezone.get_current_timezone().utcoffset(naive_now) utc_offset = timezone.utc.utcoffset(naive_now) self.assertGreater(algiers_offset, utc_offset) self.assertLess(django_offset, utc_offset) # dt and naive_now should be the same effective time. self.assertLess(abs(dt - naive_now), timedelta(seconds=2)) # If we convert dt to an aware object using the Algiers # timezone then it should be the same effective time to # now_in_algiers. _dt = timezone.make_aware(dt, now_in_algiers.tzinfo) self.assertLess(abs(_dt - now_in_algiers), timedelta(seconds=2)) def test_file_get_accessed_time(self): """ File storage returns a Datetime object for the last accessed time of a file. """ self.assertFalse(self.storage.exists('test.file')) f = ContentFile('custom contents') f_name = self.storage.save('test.file', f) self.addCleanup(self.storage.delete, f_name) atime = self.storage.get_accessed_time(f_name) self.assertEqual(atime, datetime.fromtimestamp(os.path.getatime(self.storage.path(f_name)))) self.assertLess(timezone.now() - self.storage.get_accessed_time(f_name), timedelta(seconds=2)) @requires_tz_support def test_file_get_accessed_time_timezone(self): self._test_file_time_getter(self.storage.get_accessed_time) def test_file_get_created_time(self): """ File storage returns a datetime for the creation time of a file. """ self.assertFalse(self.storage.exists('test.file')) f = ContentFile('custom contents') f_name = self.storage.save('test.file', f) self.addCleanup(self.storage.delete, f_name) ctime = self.storage.get_created_time(f_name) self.assertEqual(ctime, datetime.fromtimestamp(os.path.getctime(self.storage.path(f_name)))) self.assertLess(timezone.now() - self.storage.get_created_time(f_name), timedelta(seconds=2)) @requires_tz_support def test_file_get_created_time_timezone(self): self._test_file_time_getter(self.storage.get_created_time) def test_file_get_modified_time(self): """ File storage returns a datetime for the last modified time of a file. """ self.assertFalse(self.storage.exists('test.file')) f = ContentFile('custom contents') f_name = self.storage.save('test.file', f) self.addCleanup(self.storage.delete, f_name) mtime = self.storage.get_modified_time(f_name) self.assertEqual(mtime, datetime.fromtimestamp(os.path.getmtime(self.storage.path(f_name)))) self.assertLess(timezone.now() - self.storage.get_modified_time(f_name), timedelta(seconds=2)) @requires_tz_support def test_file_get_modified_time_timezone(self): self._test_file_time_getter(self.storage.get_modified_time) def test_file_save_without_name(self): """ File storage extracts the filename from the content object if no name is given explicitly. """ self.assertFalse(self.storage.exists('test.file')) f = ContentFile('custom contents') f.name = 'test.file' storage_f_name = self.storage.save(None, f) self.assertEqual(storage_f_name, f.name) self.assertTrue(os.path.exists(os.path.join(self.temp_dir, f.name))) self.storage.delete(storage_f_name) def test_file_save_with_path(self): """ Saving a pathname should create intermediate directories as necessary. 
""" self.assertFalse(self.storage.exists('path/to')) self.storage.save('path/to/test.file', ContentFile('file saved with path')) self.assertTrue(self.storage.exists('path/to')) with self.storage.open('path/to/test.file') as f: self.assertEqual(f.read(), b'file saved with path') self.assertTrue(os.path.exists( os.path.join(self.temp_dir, 'path', 'to', 'test.file'))) self.storage.delete('path/to/test.file') @unittest.skipUnless(symlinks_supported(), 'Must be able to symlink to run this test.') def test_file_save_broken_symlink(self): """A new path is created on save when a broken symlink is supplied.""" nonexistent_file_path = os.path.join(self.temp_dir, 'nonexistent.txt') broken_symlink_path = os.path.join(self.temp_dir, 'symlink.txt') os.symlink(nonexistent_file_path, broken_symlink_path) f = ContentFile('some content') f_name = self.storage.save(broken_symlink_path, f) self.assertIs(os.path.exists(os.path.join(self.temp_dir, f_name)), True) def test_save_doesnt_close(self): with TemporaryUploadedFile('test', 'text/plain', 1, 'utf8') as file: file.write(b'1') file.seek(0) self.assertFalse(file.closed) self.storage.save('path/to/test.file', file) self.assertFalse(file.closed) self.assertFalse(file.file.closed) file = InMemoryUploadedFile(StringIO('1'), '', 'test', 'text/plain', 1, 'utf8') with file: self.assertFalse(file.closed) self.storage.save('path/to/test.file', file) self.assertFalse(file.closed) self.assertFalse(file.file.closed) def test_file_path(self): """ File storage returns the full path of a file """ self.assertFalse(self.storage.exists('test.file')) f = ContentFile('custom contents') f_name = self.storage.save('test.file', f) self.assertEqual(self.storage.path(f_name), os.path.join(self.temp_dir, f_name)) self.storage.delete(f_name) def test_file_url(self): """ File storage returns a url to access a given file from the web. """ self.assertEqual(self.storage.url('test.file'), self.storage.base_url + 'test.file') # should encode special chars except ~!*()' # like encodeURIComponent() JavaScript function do self.assertEqual( self.storage.url(r"~!*()'@#$%^&*abc`+ =.file"), "/test_media_url/~!*()'%40%23%24%25%5E%26*abc%60%2B%20%3D.file" ) self.assertEqual(self.storage.url("ab\0c"), "/test_media_url/ab%00c") # should translate os path separator(s) to the url path separator self.assertEqual(self.storage.url("""a/b\\c.file"""), "/test_media_url/a/b/c.file") # #25905: remove leading slashes from file names to prevent unsafe url output self.assertEqual(self.storage.url("/evil.com"), "/test_media_url/evil.com") self.assertEqual(self.storage.url(r"\evil.com"), "/test_media_url/evil.com") self.assertEqual(self.storage.url("///evil.com"), "/test_media_url/evil.com") self.assertEqual(self.storage.url(r"\\\evil.com"), "/test_media_url/evil.com") self.assertEqual(self.storage.url(None), "/test_media_url/") def test_base_url(self): """ File storage returns a url even when its base_url is unset or modified. """ self.storage.base_url = None with self.assertRaises(ValueError): self.storage.url('test.file') # #22717: missing ending slash in base_url should be auto-corrected storage = self.storage_class(location=self.temp_dir, base_url='/no_ending_slash') self.assertEqual( storage.url('test.file'), '%s%s' % (storage.base_url, 'test.file') ) def test_listdir(self): """ File storage returns a tuple containing directories and files. 
""" self.assertFalse(self.storage.exists('storage_test_1')) self.assertFalse(self.storage.exists('storage_test_2')) self.assertFalse(self.storage.exists('storage_dir_1')) self.storage.save('storage_test_1', ContentFile('custom content')) self.storage.save('storage_test_2', ContentFile('custom content')) os.mkdir(os.path.join(self.temp_dir, 'storage_dir_1')) self.addCleanup(self.storage.delete, 'storage_test_1') self.addCleanup(self.storage.delete, 'storage_test_2') for directory in ('', Path('')): with self.subTest(directory=directory): dirs, files = self.storage.listdir(directory) self.assertEqual(set(dirs), {'storage_dir_1'}) self.assertEqual(set(files), {'storage_test_1', 'storage_test_2'}) def test_file_storage_prevents_directory_traversal(self): """ File storage prevents directory traversal (files can only be accessed if they're below the storage location). """ with self.assertRaises(SuspiciousFileOperation): self.storage.exists('..') with self.assertRaises(SuspiciousFileOperation): self.storage.exists('/etc/passwd') def test_file_storage_preserves_filename_case(self): """The storage backend should preserve case of filenames.""" # Create a storage backend associated with the mixed case name # directory. other_temp_storage = self.storage_class(location=self.temp_dir2) # Ask that storage backend to store a file with a mixed case filename. mixed_case = 'CaSe_SeNsItIvE' file = other_temp_storage.open(mixed_case, 'w') file.write('storage contents') file.close() self.assertEqual(os.path.join(self.temp_dir2, mixed_case), other_temp_storage.path(mixed_case)) other_temp_storage.delete(mixed_case) def test_makedirs_race_handling(self): """ File storage should be robust against directory creation race conditions. """ real_makedirs = os.makedirs # Monkey-patch os.makedirs, to simulate a normal call, a raced call, # and an error. def fake_makedirs(path, mode=0o777, exist_ok=False): if path == os.path.join(self.temp_dir, 'normal'): real_makedirs(path, mode, exist_ok) elif path == os.path.join(self.temp_dir, 'raced'): real_makedirs(path, mode, exist_ok) if not exist_ok: raise FileExistsError() elif path == os.path.join(self.temp_dir, 'error'): raise PermissionError() else: self.fail('unexpected argument %r' % path) try: os.makedirs = fake_makedirs self.storage.save('normal/test.file', ContentFile('saved normally')) with self.storage.open('normal/test.file') as f: self.assertEqual(f.read(), b'saved normally') self.storage.save('raced/test.file', ContentFile('saved with race')) with self.storage.open('raced/test.file') as f: self.assertEqual(f.read(), b'saved with race') # Exceptions aside from FileExistsError are raised. with self.assertRaises(PermissionError): self.storage.save('error/test.file', ContentFile('not saved')) finally: os.makedirs = real_makedirs def test_remove_race_handling(self): """ File storage should be robust against file removal race conditions. """ real_remove = os.remove # Monkey-patch os.remove, to simulate a normal call, a raced call, # and an error. 
        def fake_remove(path):
            if path == os.path.join(self.temp_dir, 'normal.file'):
                real_remove(path)
            elif path == os.path.join(self.temp_dir, 'raced.file'):
                real_remove(path)
                raise FileNotFoundError()
            elif path == os.path.join(self.temp_dir, 'error.file'):
                raise PermissionError()
            else:
                self.fail('unexpected argument %r' % path)

        try:
            os.remove = fake_remove

            self.storage.save('normal.file', ContentFile('delete normally'))
            self.storage.delete('normal.file')
            self.assertFalse(self.storage.exists('normal.file'))

            self.storage.save('raced.file', ContentFile('delete with race'))
            self.storage.delete('raced.file')
            self.assertFalse(self.storage.exists('raced.file'))

            # Exceptions aside from FileNotFoundError are raised.
            self.storage.save('error.file', ContentFile('delete with error'))
            with self.assertRaises(PermissionError):
                self.storage.delete('error.file')
        finally:
            os.remove = real_remove

    def test_file_chunks_error(self):
        """
        Test behavior when file.chunks() raises an error.
        """
        f1 = ContentFile('chunks fails')

        def failing_chunks():
            raise OSError
        f1.chunks = failing_chunks
        with self.assertRaises(OSError):
            self.storage.save('error.file', f1)

    def test_delete_no_name(self):
        """
        Calling delete with an empty name should not try to remove the base
        storage directory, but fail loudly (#20660).
        """
        msg = 'The name must be given to delete().'
        with self.assertRaisesMessage(ValueError, msg):
            self.storage.delete(None)
        with self.assertRaisesMessage(ValueError, msg):
            self.storage.delete('')

    def test_delete_deletes_directories(self):
        tmp_dir = tempfile.mkdtemp(dir=self.storage.location)
        self.storage.delete(tmp_dir)
        self.assertFalse(os.path.exists(tmp_dir))

    @override_settings(
        MEDIA_ROOT='media_root',
        MEDIA_URL='media_url/',
        FILE_UPLOAD_PERMISSIONS=0o777,
        FILE_UPLOAD_DIRECTORY_PERMISSIONS=0o777,
    )
    def test_setting_changed(self):
        """
        Properties using settings values as defaults should be updated on
        referenced settings change while specified values should be unchanged.
        """
        storage = self.storage_class(
            location='explicit_location',
            base_url='explicit_base_url/',
            file_permissions_mode=0o666,
            directory_permissions_mode=0o666,
        )
        defaults_storage = self.storage_class()
        settings = {
            'MEDIA_ROOT': 'overridden_media_root',
            'MEDIA_URL': '/overridden_media_url/',
            'FILE_UPLOAD_PERMISSIONS': 0o333,
            'FILE_UPLOAD_DIRECTORY_PERMISSIONS': 0o333,
        }
        with self.settings(**settings):
            self.assertEqual(storage.base_location, 'explicit_location')
            self.assertIn('explicit_location', storage.location)
            self.assertEqual(storage.base_url, 'explicit_base_url/')
            self.assertEqual(storage.file_permissions_mode, 0o666)
            self.assertEqual(storage.directory_permissions_mode, 0o666)
            self.assertEqual(defaults_storage.base_location, settings['MEDIA_ROOT'])
            self.assertIn(settings['MEDIA_ROOT'], defaults_storage.location)
            self.assertEqual(defaults_storage.base_url, settings['MEDIA_URL'])
            self.assertEqual(defaults_storage.file_permissions_mode, settings['FILE_UPLOAD_PERMISSIONS'])
            self.assertEqual(
                defaults_storage.directory_permissions_mode, settings['FILE_UPLOAD_DIRECTORY_PERMISSIONS']
            )

    def test_file_methods_pathlib_path(self):
        p = Path('test.file')
        self.assertFalse(self.storage.exists(p))
        f = ContentFile('custom contents')
        f_name = self.storage.save(p, f)
        # Storage basic methods.
        self.assertEqual(self.storage.path(p), os.path.join(self.temp_dir, p))
        self.assertEqual(self.storage.size(p), 15)
        self.assertEqual(self.storage.url(p), self.storage.base_url + f_name)
        with self.storage.open(p) as f:
            self.assertEqual(f.read(), b'custom contents')
        self.addCleanup(self.storage.delete, p)


class CustomStorage(FileSystemStorage):
    def get_available_name(self, name, max_length=None):
        """
        Append numbers to duplicate files rather than underscores, like Trac.
        """
        basename, *ext = os.path.splitext(name)
        number = 2
        while self.exists(name):
            name = ''.join([basename, '.', str(number)] + ext)
            number += 1

        return name


class CustomStorageTests(FileStorageTests):
    storage_class = CustomStorage

    def test_custom_get_available_name(self):
        first = self.storage.save('custom_storage', ContentFile('custom contents'))
        self.assertEqual(first, 'custom_storage')
        second = self.storage.save('custom_storage', ContentFile('more contents'))
        self.assertEqual(second, 'custom_storage.2')
        self.storage.delete(first)
        self.storage.delete(second)


class OverwritingStorage(FileSystemStorage):
    """
    Overwrite existing files instead of appending a suffix to generate an
    unused name.
    """
    # Mask out O_EXCL so os.open() doesn't raise OSError if the file exists.
    OS_OPEN_FLAGS = FileSystemStorage.OS_OPEN_FLAGS & ~os.O_EXCL

    def get_available_name(self, name, max_length=None):
        """Override the effort to find an unused name."""
        return name


class OverwritingStorageTests(FileStorageTests):
    storage_class = OverwritingStorage

    def test_save_overwrite_behavior(self):
        """Saving to the same file name twice overwrites the first file."""
        name = 'test.file'
        self.assertFalse(self.storage.exists(name))
        content_1 = b'content one'
        content_2 = b'second content'
        f_1 = ContentFile(content_1)
        f_2 = ContentFile(content_2)
        stored_name_1 = self.storage.save(name, f_1)
        try:
            self.assertEqual(stored_name_1, name)
            self.assertTrue(self.storage.exists(name))
            self.assertTrue(os.path.exists(os.path.join(self.temp_dir, name)))
            with self.storage.open(name) as fp:
                self.assertEqual(fp.read(), content_1)
            stored_name_2 = self.storage.save(name, f_2)
            self.assertEqual(stored_name_2, name)
            self.assertTrue(self.storage.exists(name))
            self.assertTrue(os.path.exists(os.path.join(self.temp_dir, name)))
            with self.storage.open(name) as fp:
                self.assertEqual(fp.read(), content_2)
        finally:
            self.storage.delete(name)


class DiscardingFalseContentStorage(FileSystemStorage):
    def _save(self, name, content):
        if content:
            return super()._save(name, content)
        return ''


class DiscardingFalseContentStorageTests(FileStorageTests):
    storage_class = DiscardingFalseContentStorage

    def test_custom_storage_discarding_empty_content(self):
        """
        When Storage.save() wraps a file-like object in File, it should include
        the name argument so that bool(file) evaluates to True (#26495).
        """
        output = StringIO('content')
        self.storage.save('tests/stringio', output)
        self.assertTrue(self.storage.exists('tests/stringio'))

        with self.storage.open('tests/stringio') as f:
            self.assertEqual(f.read(), b'content')


class FileFieldStorageTests(TestCase):
    def tearDown(self):
        shutil.rmtree(temp_storage_location)

    def _storage_max_filename_length(self, storage):
        """
        Query filesystem for maximum filename length (e.g. AUFS has 242).
""" dir_to_test = storage.location while not os.path.exists(dir_to_test): dir_to_test = os.path.dirname(dir_to_test) try: return os.pathconf(dir_to_test, 'PC_NAME_MAX') except Exception: return 255 # Should be safe on most backends def test_files(self): self.assertIsInstance(Storage.normal, FileDescriptor) # An object without a file has limited functionality. obj1 = Storage() self.assertEqual(obj1.normal.name, "") with self.assertRaises(ValueError): obj1.normal.size # Saving a file enables full functionality. obj1.normal.save("django_test.txt", ContentFile("content")) self.assertEqual(obj1.normal.name, "tests/django_test.txt") self.assertEqual(obj1.normal.size, 7) self.assertEqual(obj1.normal.read(), b"content") obj1.normal.close() # File objects can be assigned to FileField attributes, but shouldn't # get committed until the model it's attached to is saved. obj1.normal = SimpleUploadedFile("assignment.txt", b"content") dirs, files = temp_storage.listdir("tests") self.assertEqual(dirs, []) self.assertNotIn("assignment.txt", files) obj1.save() dirs, files = temp_storage.listdir("tests") self.assertEqual(sorted(files), ["assignment.txt", "django_test.txt"]) # Save another file with the same name. obj2 = Storage() obj2.normal.save("django_test.txt", ContentFile("more content")) obj2_name = obj2.normal.name self.assertRegex(obj2_name, "tests/django_test_%s.txt" % FILE_SUFFIX_REGEX) self.assertEqual(obj2.normal.size, 12) obj2.normal.close() # Deleting an object does not delete the file it uses. obj2.delete() obj2.normal.save("django_test.txt", ContentFile("more content")) self.assertNotEqual(obj2_name, obj2.normal.name) self.assertRegex(obj2.normal.name, "tests/django_test_%s.txt" % FILE_SUFFIX_REGEX) obj2.normal.close() def test_filefield_read(self): # Files can be read in a little at a time, if necessary. obj = Storage.objects.create( normal=SimpleUploadedFile("assignment.txt", b"content")) obj.normal.open() self.assertEqual(obj.normal.read(3), b"con") self.assertEqual(obj.normal.read(), b"tent") self.assertEqual(list(obj.normal.chunks(chunk_size=2)), [b"co", b"nt", b"en", b"t"]) obj.normal.close() def test_filefield_write(self): # Files can be written to. obj = Storage.objects.create(normal=SimpleUploadedFile('rewritten.txt', b'content')) with obj.normal as normal: normal.open('wb') normal.write(b'updated') obj.refresh_from_db() self.assertEqual(obj.normal.read(), b'updated') obj.normal.close() def test_filefield_reopen(self): obj = Storage.objects.create(normal=SimpleUploadedFile('reopen.txt', b'content')) with obj.normal as normal: normal.open() obj.normal.open() obj.normal.file.seek(0) obj.normal.close() def test_duplicate_filename(self): # Multiple files with the same name get _(7 random chars) appended to them. objs = [Storage() for i in range(2)] for o in objs: o.normal.save("multiple_files.txt", ContentFile("Same Content")) try: names = [o.normal.name for o in objs] self.assertEqual(names[0], "tests/multiple_files.txt") self.assertRegex(names[1], "tests/multiple_files_%s.txt" % FILE_SUFFIX_REGEX) finally: for o in objs: o.delete() def test_file_truncation(self): # Given the max_length is limited, when multiple files get uploaded # under the same name, then the filename get truncated in order to fit # in _(7 random chars). When most of the max_length is taken by # dirname + extension and there are not enough characters in the # filename to truncate, an exception should be raised. 
        objs = [Storage() for i in range(2)]
        filename = 'filename.ext'
        for o in objs:
            o.limited_length.save(filename, ContentFile('Same Content'))
        try:
            # Testing truncation.
            names = [o.limited_length.name for o in objs]
            self.assertEqual(names[0], 'tests/%s' % filename)
            self.assertRegex(names[1], 'tests/fi_%s.ext' % FILE_SUFFIX_REGEX)

            # Testing exception is raised when filename is too short to truncate.
            filename = 'short.longext'
            objs[0].limited_length.save(filename, ContentFile('Same Content'))
            with self.assertRaisesMessage(SuspiciousFileOperation, 'Storage can not find an available filename'):
                objs[1].limited_length.save(filename, ContentFile('Same Content'))
        finally:
            for o in objs:
                o.delete()

    @unittest.skipIf(
        sys.platform == 'win32',
        "Windows supports at most 260 characters in a path.",
    )
    def test_extended_length_storage(self):
        # Testing FileField with max_length > 255. Most systems have a
        # filename length limitation of 255; the path takes extra chars.
        filename = (self._storage_max_filename_length(temp_storage) - 4) * 'a'  # 4 chars for extension.
        obj = Storage()
        obj.extended_length.save('%s.txt' % filename, ContentFile('Same Content'))
        self.assertEqual(obj.extended_length.name, 'tests/%s.txt' % filename)
        self.assertEqual(obj.extended_length.read(), b'Same Content')
        obj.extended_length.close()

    def test_filefield_default(self):
        # Default values allow an object to access a single file.
        temp_storage.save('tests/default.txt', ContentFile('default content'))
        obj = Storage.objects.create()
        self.assertEqual(obj.default.name, "tests/default.txt")
        self.assertEqual(obj.default.read(), b"default content")
        obj.default.close()

        # But it shouldn't be deleted, even if there are no more objects using
        # it.
        obj.delete()
        obj = Storage()
        self.assertEqual(obj.default.read(), b"default content")
        obj.default.close()

    def test_empty_upload_to(self):
        # upload_to can be empty, meaning files are not saved to a subdirectory.
        obj = Storage()
        obj.empty.save('django_test.txt', ContentFile('more content'))
        self.assertEqual(obj.empty.name, "django_test.txt")
        self.assertEqual(obj.empty.read(), b"more content")
        obj.empty.close()

    def test_pathlib_upload_to(self):
        obj = Storage()
        obj.pathlib_callable.save('some_file1.txt', ContentFile('some content'))
        self.assertEqual(obj.pathlib_callable.name, 'bar/some_file1.txt')
        obj.pathlib_direct.save('some_file2.txt', ContentFile('some content'))
        self.assertEqual(obj.pathlib_direct.name, 'bar/some_file2.txt')
        obj.pathlib_callable.close()
        obj.pathlib_direct.close()

    def test_random_upload_to(self):
        # Verify the fix for #5655, making sure the directory is only
        # determined once.
        obj = Storage()
        obj.random.save("random_file", ContentFile("random content"))
        self.assertTrue(obj.random.name.endswith("/random_file"))
        obj.random.close()

    def test_custom_valid_name_callable_upload_to(self):
        """
        Storage.get_valid_name() should be called when upload_to is a
        callable.
""" obj = Storage() obj.custom_valid_name.save("random_file", ContentFile("random content")) # CustomValidNameStorage.get_valid_name() appends '_valid' to the name self.assertTrue(obj.custom_valid_name.name.endswith("/random_file_valid")) obj.custom_valid_name.close() def test_filefield_pickling(self): # Push an object into the cache to make sure it pickles properly obj = Storage() obj.normal.save("django_test.txt", ContentFile("more content")) obj.normal.close() cache.set("obj", obj) self.assertEqual(cache.get("obj").normal.name, "tests/django_test.txt") def test_file_object(self): # Create sample file temp_storage.save('tests/example.txt', ContentFile('some content')) # Load it as Python file object with open(temp_storage.path('tests/example.txt')) as file_obj: # Save it using storage and read its content temp_storage.save('tests/file_obj', file_obj) self.assertTrue(temp_storage.exists('tests/file_obj')) with temp_storage.open('tests/file_obj') as f: self.assertEqual(f.read(), b'some content') def test_stringio(self): # Test passing StringIO instance as content argument to save output = StringIO() output.write('content') output.seek(0) # Save it and read written file temp_storage.save('tests/stringio', output) self.assertTrue(temp_storage.exists('tests/stringio')) with temp_storage.open('tests/stringio') as f: self.assertEqual(f.read(), b'content') class FieldCallableFileStorageTests(SimpleTestCase): def setUp(self): self.temp_storage_location = tempfile.mkdtemp(suffix='filefield_callable_storage') def tearDown(self): shutil.rmtree(self.temp_storage_location) def test_callable_base_class_error_raises(self): class NotStorage: pass msg = 'FileField.storage must be a subclass/instance of django.core.files.storage.Storage' for invalid_type in (NotStorage, str, list, set, tuple): with self.subTest(invalid_type=invalid_type): with self.assertRaisesMessage(TypeError, msg): FileField(storage=invalid_type) def test_file_field_storage_none_uses_default_storage(self): self.assertEqual(FileField().storage, default_storage) def test_callable_function_storage_file_field(self): storage = FileSystemStorage(location=self.temp_storage_location) def get_storage(): return storage obj = FileField(storage=get_storage) self.assertEqual(obj.storage, storage) self.assertEqual(obj.storage.location, storage.location) def test_callable_class_storage_file_field(self): class GetStorage(FileSystemStorage): pass obj = FileField(storage=GetStorage) self.assertIsInstance(obj.storage, BaseStorage) def test_callable_storage_file_field_in_model(self): obj = Storage() self.assertEqual(obj.storage_callable.storage, temp_storage) self.assertEqual(obj.storage_callable.storage.location, temp_storage_location) self.assertIsInstance(obj.storage_callable_class.storage, BaseStorage) def test_deconstruction(self): """ Deconstructing gives the original callable, not the evaluated value. """ obj = Storage() *_, kwargs = obj._meta.get_field('storage_callable').deconstruct() storage = kwargs['storage'] self.assertIs(storage, callable_storage) # Tests for a race condition on file saving (#4948). # This is written in such a way that it'll always pass on platforms # without threading. 
class SlowFile(ContentFile): def chunks(self): time.sleep(1) return super().chunks() class FileSaveRaceConditionTest(SimpleTestCase): def setUp(self): self.storage_dir = tempfile.mkdtemp() self.storage = FileSystemStorage(self.storage_dir) self.thread = threading.Thread(target=self.save_file, args=['conflict']) def tearDown(self): shutil.rmtree(self.storage_dir) def save_file(self, name): name = self.storage.save(name, SlowFile(b"Data")) def test_race_condition(self): self.thread.start() self.save_file('conflict') self.thread.join() files = sorted(os.listdir(self.storage_dir)) self.assertEqual(files[0], 'conflict') self.assertRegex(files[1], 'conflict_%s' % FILE_SUFFIX_REGEX) @unittest.skipIf(sys.platform == 'win32', "Windows only partially supports umasks and chmod.") class FileStoragePermissions(unittest.TestCase): def setUp(self): self.umask = 0o027 self.old_umask = os.umask(self.umask) self.storage_dir = tempfile.mkdtemp() def tearDown(self): shutil.rmtree(self.storage_dir) os.umask(self.old_umask) @override_settings(FILE_UPLOAD_PERMISSIONS=0o654) def test_file_upload_permissions(self): self.storage = FileSystemStorage(self.storage_dir) name = self.storage.save("the_file", ContentFile("data")) actual_mode = os.stat(self.storage.path(name))[0] & 0o777 self.assertEqual(actual_mode, 0o654) @override_settings(FILE_UPLOAD_PERMISSIONS=None) def test_file_upload_default_permissions(self): self.storage = FileSystemStorage(self.storage_dir) fname = self.storage.save("some_file", ContentFile("data")) mode = os.stat(self.storage.path(fname))[0] & 0o777 self.assertEqual(mode, 0o666 & ~self.umask) @override_settings(FILE_UPLOAD_DIRECTORY_PERMISSIONS=0o765) def test_file_upload_directory_permissions(self): self.storage = FileSystemStorage(self.storage_dir) name = self.storage.save('the_directory/subdir/the_file', ContentFile('data')) file_path = Path(self.storage.path(name)) self.assertEqual(file_path.parent.stat().st_mode & 0o777, 0o765) self.assertEqual(file_path.parent.parent.stat().st_mode & 0o777, 0o765) @override_settings(FILE_UPLOAD_DIRECTORY_PERMISSIONS=None) def test_file_upload_directory_default_permissions(self): self.storage = FileSystemStorage(self.storage_dir) name = self.storage.save('the_directory/subdir/the_file', ContentFile('data')) file_path = Path(self.storage.path(name)) expected_mode = 0o777 & ~self.umask self.assertEqual(file_path.parent.stat().st_mode & 0o777, expected_mode) self.assertEqual(file_path.parent.parent.stat().st_mode & 0o777, expected_mode) class FileStoragePathParsing(SimpleTestCase): def setUp(self): self.storage_dir = tempfile.mkdtemp() self.storage = FileSystemStorage(self.storage_dir) def tearDown(self): shutil.rmtree(self.storage_dir) def test_directory_with_dot(self): """Regression test for #9610. If the directory name contains a dot and the file name doesn't, make sure we still mangle the file name instead of the directory name. """ self.storage.save('dotted.path/test', ContentFile("1")) self.storage.save('dotted.path/test', ContentFile("2")) files = sorted(os.listdir(os.path.join(self.storage_dir, 'dotted.path'))) self.assertFalse(os.path.exists(os.path.join(self.storage_dir, 'dotted_.path'))) self.assertEqual(files[0], 'test') self.assertRegex(files[1], 'test_%s' % FILE_SUFFIX_REGEX) def test_first_character_dot(self): """ File names with a dot as their first character don't have an extension, and the underscore should get added to the end. 
""" self.storage.save('dotted.path/.test', ContentFile("1")) self.storage.save('dotted.path/.test', ContentFile("2")) files = sorted(os.listdir(os.path.join(self.storage_dir, 'dotted.path'))) self.assertFalse(os.path.exists(os.path.join(self.storage_dir, 'dotted_.path'))) self.assertEqual(files[0], '.test') self.assertRegex(files[1], '.test_%s' % FILE_SUFFIX_REGEX) class ContentFileStorageTestCase(unittest.TestCase): def setUp(self): self.storage_dir = tempfile.mkdtemp() self.storage = FileSystemStorage(self.storage_dir) def tearDown(self): shutil.rmtree(self.storage_dir) def test_content_saving(self): """ ContentFile can be saved correctly with the filesystem storage, if it was initialized with either bytes or unicode content. """ self.storage.save('bytes.txt', ContentFile(b"content")) self.storage.save('unicode.txt', ContentFile("español")) @override_settings(ROOT_URLCONF='file_storage.urls') class FileLikeObjectTestCase(LiveServerTestCase): """ Test file-like objects (#15644). """ available_apps = [] def setUp(self): self.temp_dir = tempfile.mkdtemp() self.storage = FileSystemStorage(location=self.temp_dir) def tearDown(self): shutil.rmtree(self.temp_dir) def test_urllib_request_urlopen(self): """ Test the File storage API with a file-like object coming from urllib.request.urlopen(). """ file_like_object = urlopen(self.live_server_url + '/') f = File(file_like_object) stored_filename = self.storage.save("remote_file.html", f) remote_file = urlopen(self.live_server_url + '/') with self.storage.open(stored_filename) as stored_file: self.assertEqual(stored_file.read(), remote_file.read())
import string import uuid from django.core.exceptions import ImproperlyConfigured from django.test import SimpleTestCase from django.test.utils import override_settings from django.urls import NoReverseMatch, Resolver404, path, resolve, reverse from django.views import View from .converters import DynamicConverter from .views import empty_view included_kwargs = {'base': b'hello', 'value': b'world'} converter_test_data = ( # ('url', ('url_name', 'app_name', {kwargs})), # aGVsbG8= is 'hello' encoded in base64. ('/base64/aGVsbG8=/', ('base64', '', {'value': b'hello'})), ('/base64/aGVsbG8=/subpatterns/d29ybGQ=/', ('subpattern-base64', '', included_kwargs)), ('/base64/aGVsbG8=/namespaced/d29ybGQ=/', ('subpattern-base64', 'namespaced-base64', included_kwargs)), ) @override_settings(ROOT_URLCONF='urlpatterns.path_urls') class SimplifiedURLTests(SimpleTestCase): def test_path_lookup_without_parameters(self): match = resolve('/articles/2003/') self.assertEqual(match.url_name, 'articles-2003') self.assertEqual(match.args, ()) self.assertEqual(match.kwargs, {}) self.assertEqual(match.route, 'articles/2003/') def test_path_lookup_with_typed_parameters(self): match = resolve('/articles/2015/') self.assertEqual(match.url_name, 'articles-year') self.assertEqual(match.args, ()) self.assertEqual(match.kwargs, {'year': 2015}) self.assertEqual(match.route, 'articles/<int:year>/') def test_path_lookup_with_multiple_parameters(self): match = resolve('/articles/2015/04/12/') self.assertEqual(match.url_name, 'articles-year-month-day') self.assertEqual(match.args, ()) self.assertEqual(match.kwargs, {'year': 2015, 'month': 4, 'day': 12}) self.assertEqual(match.route, 'articles/<int:year>/<int:month>/<int:day>/') def test_two_variable_at_start_of_path_pattern(self): match = resolve('/en/foo/') self.assertEqual(match.url_name, 'lang-and-path') self.assertEqual(match.kwargs, {'lang': 'en', 'url': 'foo'}) self.assertEqual(match.route, '<lang>/<path:url>/') def test_re_path(self): match = resolve('/regex/1/') self.assertEqual(match.url_name, 'regex') self.assertEqual(match.kwargs, {'pk': '1'}) self.assertEqual(match.route, '^regex/(?P<pk>[0-9]+)/$') def test_re_path_with_optional_parameter(self): for url, kwargs in ( ('/regex_optional/1/2/', {'arg1': '1', 'arg2': '2'}), ('/regex_optional/1/', {'arg1': '1'}), ): with self.subTest(url=url): match = resolve(url) self.assertEqual(match.url_name, 'regex_optional') self.assertEqual(match.kwargs, kwargs) self.assertEqual( match.route, r'^regex_optional/(?P<arg1>\d+)/(?:(?P<arg2>\d+)/)?', ) def test_re_path_with_missing_optional_parameter(self): match = resolve('/regex_only_optional/') self.assertEqual(match.url_name, 'regex_only_optional') self.assertEqual(match.kwargs, {}) self.assertEqual(match.args, ()) self.assertEqual( match.route, r'^regex_only_optional/(?:(?P<arg1>\d+)/)?', ) def test_path_lookup_with_inclusion(self): match = resolve('/included_urls/extra/something/') self.assertEqual(match.url_name, 'inner-extra') self.assertEqual(match.route, 'included_urls/extra/<extra>/') def test_path_lookup_with_empty_string_inclusion(self): match = resolve('/more/99/') self.assertEqual(match.url_name, 'inner-more') self.assertEqual(match.route, r'^more/(?P<extra>\w+)/$') def test_path_lookup_with_double_inclusion(self): match = resolve('/included_urls/more/some_value/') self.assertEqual(match.url_name, 'inner-more') self.assertEqual(match.route, r'included_urls/more/(?P<extra>\w+)/$') def test_path_reverse_without_parameter(self): url = reverse('articles-2003') 
self.assertEqual(url, '/articles/2003/') def test_path_reverse_with_parameter(self): url = reverse('articles-year-month-day', kwargs={'year': 2015, 'month': 4, 'day': 12}) self.assertEqual(url, '/articles/2015/4/12/') @override_settings(ROOT_URLCONF='urlpatterns.path_base64_urls') def test_converter_resolve(self): for url, (url_name, app_name, kwargs) in converter_test_data: with self.subTest(url=url): match = resolve(url) self.assertEqual(match.url_name, url_name) self.assertEqual(match.app_name, app_name) self.assertEqual(match.kwargs, kwargs) @override_settings(ROOT_URLCONF='urlpatterns.path_base64_urls') def test_converter_reverse(self): for expected, (url_name, app_name, kwargs) in converter_test_data: if app_name: url_name = '%s:%s' % (app_name, url_name) with self.subTest(url=url_name): url = reverse(url_name, kwargs=kwargs) self.assertEqual(url, expected) @override_settings(ROOT_URLCONF='urlpatterns.path_base64_urls') def test_converter_reverse_with_second_layer_instance_namespace(self): kwargs = included_kwargs.copy() kwargs['last_value'] = b'world' url = reverse('instance-ns-base64:subsubpattern-base64', kwargs=kwargs) self.assertEqual(url, '/base64/aGVsbG8=/subpatterns/d29ybGQ=/d29ybGQ=/') def test_path_inclusion_is_matchable(self): match = resolve('/included_urls/extra/something/') self.assertEqual(match.url_name, 'inner-extra') self.assertEqual(match.kwargs, {'extra': 'something'}) def test_path_inclusion_is_reversible(self): url = reverse('inner-extra', kwargs={'extra': 'something'}) self.assertEqual(url, '/included_urls/extra/something/') def test_invalid_converter(self): msg = "URL route 'foo/<nonexistent:var>/' uses invalid converter 'nonexistent'." with self.assertRaisesMessage(ImproperlyConfigured, msg): path('foo/<nonexistent:var>/', empty_view) def test_invalid_view(self): msg = 'view must be a callable or a list/tuple in the case of include().' with self.assertRaisesMessage(TypeError, msg): path('articles/', 'invalid_view') def test_invalid_view_instance(self): class EmptyCBV(View): pass msg = 'view must be a callable, pass EmptyCBV.as_view(), not EmptyCBV().' with self.assertRaisesMessage(TypeError, msg): path('foo', EmptyCBV()) def test_whitespace_in_route(self): msg = ( "URL route 'space/<int:num>/extra/<str:%stest>' cannot contain " "whitespace in angle brackets <…>" ) for whitespace in string.whitespace: with self.subTest(repr(whitespace)): with self.assertRaisesMessage(ImproperlyConfigured, msg % whitespace): path('space/<int:num>/extra/<str:%stest>' % whitespace, empty_view) # Whitespaces are valid in paths. p = path('space%s/<int:num>/' % string.whitespace, empty_view) match = p.resolve('space%s/1/' % string.whitespace) self.assertEqual(match.kwargs, {'num': 1}) @override_settings(ROOT_URLCONF='urlpatterns.converter_urls') class ConverterTests(SimpleTestCase): def test_matching_urls(self): def no_converter(x): return x test_data = ( ('int', {'0', '1', '01', 1234567890}, int), ('str', {'abcxyz'}, no_converter), ('path', {'allows.ANY*characters'}, no_converter), ('slug', {'abcxyz-ABCXYZ_01234567890'}, no_converter), ('uuid', {'39da9369-838e-4750-91a5-f7805cd82839'}, uuid.UUID), ) for url_name, url_suffixes, converter in test_data: for url_suffix in url_suffixes: url = '/%s/%s/' % (url_name, url_suffix) with self.subTest(url=url): match = resolve(url) self.assertEqual(match.url_name, url_name) self.assertEqual(match.kwargs, {url_name: converter(url_suffix)}) # reverse() works with string parameters. 
string_kwargs = {url_name: url_suffix} self.assertEqual(reverse(url_name, kwargs=string_kwargs), url) # reverse() also works with native types (int, UUID, etc.). if converter is not no_converter: # The converted value might be different for int (a # leading zero is lost in the conversion). converted_value = match.kwargs[url_name] converted_url = '/%s/%s/' % (url_name, converted_value) self.assertEqual(reverse(url_name, kwargs={url_name: converted_value}), converted_url) def test_nonmatching_urls(self): test_data = ( ('int', {'-1', 'letters'}), ('str', {'', '/'}), ('path', {''}), ('slug', {'', 'stars*notallowed'}), ('uuid', { '', '9da9369-838e-4750-91a5-f7805cd82839', '39da9369-838-4750-91a5-f7805cd82839', '39da9369-838e-475-91a5-f7805cd82839', '39da9369-838e-4750-91a-f7805cd82839', '39da9369-838e-4750-91a5-f7805cd8283', }), ) for url_name, url_suffixes in test_data: for url_suffix in url_suffixes: url = '/%s/%s/' % (url_name, url_suffix) with self.subTest(url=url), self.assertRaises(Resolver404): resolve(url) @override_settings(ROOT_URLCONF='urlpatterns.path_same_name_urls') class SameNameTests(SimpleTestCase): def test_matching_urls_same_name(self): @DynamicConverter.register_to_url def requires_tiny_int(value): if value > 5: raise ValueError return value tests = [ ('number_of_args', [ ([], {}, '0/'), ([1], {}, '1/1/'), ]), ('kwargs_names', [ ([], {'a': 1}, 'a/1/'), ([], {'b': 1}, 'b/1/'), ]), ('converter', [ (['a/b'], {}, 'path/a/b/'), (['a b'], {}, 'str/a%20b/'), (['a-b'], {}, 'slug/a-b/'), (['2'], {}, 'int/2/'), ( ['39da9369-838e-4750-91a5-f7805cd82839'], {}, 'uuid/39da9369-838e-4750-91a5-f7805cd82839/' ), ]), ('regex', [ (['ABC'], {}, 'uppercase/ABC/'), (['abc'], {}, 'lowercase/abc/'), ]), ('converter_to_url', [ ([6], {}, 'int/6/'), ([1], {}, 'tiny_int/1/'), ]), ] for url_name, cases in tests: for args, kwargs, url_suffix in cases: expected_url = '/%s/%s' % (url_name, url_suffix) with self.subTest(url=expected_url): self.assertEqual( reverse(url_name, args=args, kwargs=kwargs), expected_url, ) class ParameterRestrictionTests(SimpleTestCase): def test_integer_parameter_name_causes_exception(self): msg = ( "URL route 'hello/<int:1>/' uses parameter name '1' which isn't " "a valid Python identifier." ) with self.assertRaisesMessage(ImproperlyConfigured, msg): path(r'hello/<int:1>/', lambda r: None) def test_non_identifier_parameter_name_causes_exception(self): msg = ( "URL route 'b/<int:book.id>/' uses parameter name 'book.id' which " "isn't a valid Python identifier." ) with self.assertRaisesMessage(ImproperlyConfigured, msg): path(r'b/<int:book.id>/', lambda r: None) def test_allows_non_ascii_but_valid_identifiers(self): # \u0394 is "GREEK CAPITAL LETTER DELTA", a valid identifier. 
p = path('hello/<str:\u0394>/', lambda r: None) match = p.resolve('hello/1/') self.assertEqual(match.kwargs, {'\u0394': '1'}) @override_settings(ROOT_URLCONF='urlpatterns.path_dynamic_urls') class ConversionExceptionTests(SimpleTestCase): """How are errors in Converter.to_python() and to_url() handled?""" def test_resolve_value_error_means_no_match(self): @DynamicConverter.register_to_python def raises_value_error(value): raise ValueError() with self.assertRaises(Resolver404): resolve('/dynamic/abc/') def test_resolve_type_error_propagates(self): @DynamicConverter.register_to_python def raises_type_error(value): raise TypeError('This type error propagates.') with self.assertRaisesMessage(TypeError, 'This type error propagates.'): resolve('/dynamic/abc/') def test_reverse_value_error_means_no_match(self): @DynamicConverter.register_to_url def raises_value_error(value): raise ValueError with self.assertRaises(NoReverseMatch): reverse('dynamic', kwargs={'value': object()}) def test_reverse_type_error_propagates(self): @DynamicConverter.register_to_url def raises_type_error(value): raise TypeError('This type error propagates.') with self.assertRaisesMessage(TypeError, 'This type error propagates.'): reverse('dynamic', kwargs={'value': object()})
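
# Illustrative sketch, not part of the test module above: the '/base64/...'
# routes in converter_test_data rely on a custom path converter. A converter
# is a class with a `regex` attribute plus to_python()/to_url() hooks,
# registered under a type name. The class below is an assumption about what
# such a converter could look like, not the actual urlpatterns.converters
# module:

import base64

from django.urls import register_converter


class Base64Converter:
    regex = r'[a-zA-Z0-9+/]*={0,2}'

    def to_python(self, value):
        # Raising ValueError from to_python() means "no match" (see
        # ConversionExceptionTests above); a TypeError propagates instead.
        return base64.b64decode(value)

    def to_url(self, value):
        # The tests pass bytes values (e.g. b'hello'), so encode and return
        # the base64 text that appears in the URL.
        return base64.b64encode(value).decode('ascii')


register_converter(Base64Converter, 'base64')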
import gc
import sys
import weakref
from types import TracebackType

from django.dispatch import Signal, receiver
from django.test import SimpleTestCase
from django.test.utils import override_settings

if hasattr(sys, 'pypy_version_info'):
    def garbage_collect():
        # Collecting weakreferences can take two collections on PyPy.
        gc.collect()
        gc.collect()
else:
    def garbage_collect():
        gc.collect()


def receiver_1_arg(val, **kwargs):
    return val


class Callable:
    def __call__(self, val, **kwargs):
        return val

    def a(self, val, **kwargs):
        return val


a_signal = Signal()
b_signal = Signal()
c_signal = Signal()
d_signal = Signal(use_caching=True)


class DispatcherTests(SimpleTestCase):

    def assertTestIsClean(self, signal):
        """Assert that everything has been cleaned up automatically."""
        # Note that dead weakref cleanup happens as a side effect of using
        # the signal's receivers through the signals API. So, first do a
        # call to an API method to force cleanup.
        self.assertFalse(signal.has_listeners())
        self.assertEqual(signal.receivers, [])

    @override_settings(DEBUG=True)
    def test_cannot_connect_no_kwargs(self):
        def receiver_no_kwargs(sender):
            pass

        msg = 'Signal receivers must accept keyword arguments (**kwargs).'
        with self.assertRaisesMessage(ValueError, msg):
            a_signal.connect(receiver_no_kwargs)
        self.assertTestIsClean(a_signal)

    @override_settings(DEBUG=True)
    def test_cannot_connect_non_callable(self):
        msg = 'Signal receivers must be callable.'
        with self.assertRaisesMessage(TypeError, msg):
            a_signal.connect(object())
        self.assertTestIsClean(a_signal)

    def test_send(self):
        a_signal.connect(receiver_1_arg, sender=self)
        result = a_signal.send(sender=self, val='test')
        self.assertEqual(result, [(receiver_1_arg, 'test')])
        a_signal.disconnect(receiver_1_arg, sender=self)
        self.assertTestIsClean(a_signal)

    def test_send_no_receivers(self):
        result = a_signal.send(sender=self, val='test')
        self.assertEqual(result, [])

    def test_send_connected_no_sender(self):
        a_signal.connect(receiver_1_arg)
        result = a_signal.send(sender=self, val='test')
        self.assertEqual(result, [(receiver_1_arg, 'test')])
        a_signal.disconnect(receiver_1_arg)
        self.assertTestIsClean(a_signal)

    def test_send_different_no_sender(self):
        a_signal.connect(receiver_1_arg, sender=object)
        result = a_signal.send(sender=self, val='test')
        self.assertEqual(result, [])
        a_signal.disconnect(receiver_1_arg, sender=object)
        self.assertTestIsClean(a_signal)

    def test_garbage_collected(self):
        a = Callable()
        a_signal.connect(a.a, sender=self)
        del a
        garbage_collect()
        result = a_signal.send(sender=self, val="test")
        self.assertEqual(result, [])
        self.assertTestIsClean(a_signal)

    def test_cached_garbaged_collected(self):
        """
        Make sure signal caching of sender receivers doesn't prevent garbage
        collection of senders.
        """
        class sender:
            pass
        wref = weakref.ref(sender)
        d_signal.connect(receiver_1_arg)
        d_signal.send(sender, val='garbage')
        del sender
        garbage_collect()
        try:
            self.assertIsNone(wref())
        finally:
            # Disconnect after reference check since it flushes the tested cache.
            d_signal.disconnect(receiver_1_arg)

    def test_multiple_registration(self):
        a = Callable()
        a_signal.connect(a)
        a_signal.connect(a)
        a_signal.connect(a)
        a_signal.connect(a)
        a_signal.connect(a)
        a_signal.connect(a)
        result = a_signal.send(sender=self, val="test")
        self.assertEqual(len(result), 1)
        self.assertEqual(len(a_signal.receivers), 1)
        del a
        del result
        garbage_collect()
        self.assertTestIsClean(a_signal)

    def test_uid_registration(self):
        def uid_based_receiver_1(**kwargs):
            pass

        def uid_based_receiver_2(**kwargs):
            pass

        a_signal.connect(uid_based_receiver_1, dispatch_uid="uid")
        a_signal.connect(uid_based_receiver_2, dispatch_uid="uid")
        self.assertEqual(len(a_signal.receivers), 1)
        a_signal.disconnect(dispatch_uid="uid")
        self.assertTestIsClean(a_signal)

    def test_send_robust_success(self):
        a_signal.connect(receiver_1_arg)
        result = a_signal.send_robust(sender=self, val='test')
        self.assertEqual(result, [(receiver_1_arg, 'test')])
        a_signal.disconnect(receiver_1_arg)
        self.assertTestIsClean(a_signal)

    def test_send_robust_no_receivers(self):
        result = a_signal.send_robust(sender=self, val='test')
        self.assertEqual(result, [])

    def test_send_robust_ignored_sender(self):
        a_signal.connect(receiver_1_arg)
        result = a_signal.send_robust(sender=self, val='test')
        self.assertEqual(result, [(receiver_1_arg, 'test')])
        a_signal.disconnect(receiver_1_arg)
        self.assertTestIsClean(a_signal)

    def test_send_robust_fail(self):
        def fails(val, **kwargs):
            raise ValueError('this')
        a_signal.connect(fails)
        try:
            with self.assertLogs('django.dispatch', 'ERROR') as cm:
                result = a_signal.send_robust(sender=self, val='test')
            err = result[0][1]
            self.assertIsInstance(err, ValueError)
            self.assertEqual(err.args, ('this',))
            self.assertIs(hasattr(err, '__traceback__'), True)
            self.assertIsInstance(err.__traceback__, TracebackType)

            log_record = cm.records[0]
            self.assertEqual(
                log_record.getMessage(),
                'Error calling '
                'DispatcherTests.test_send_robust_fail.<locals>.fails in '
                'Signal.send_robust() (this)',
            )
            self.assertIsNotNone(log_record.exc_info)
            _, exc_value, _ = log_record.exc_info
            self.assertIsInstance(exc_value, ValueError)
            self.assertEqual(str(exc_value), 'this')
        finally:
            a_signal.disconnect(fails)
        self.assertTestIsClean(a_signal)

    def test_disconnection(self):
        receiver_1 = Callable()
        receiver_2 = Callable()
        receiver_3 = Callable()
        a_signal.connect(receiver_1)
        a_signal.connect(receiver_2)
        a_signal.connect(receiver_3)
        a_signal.disconnect(receiver_1)
        del receiver_2
        garbage_collect()
        a_signal.disconnect(receiver_3)
        self.assertTestIsClean(a_signal)

    def test_values_returned_by_disconnection(self):
        receiver_1 = Callable()
        receiver_2 = Callable()
        a_signal.connect(receiver_1)
        receiver_1_disconnected = a_signal.disconnect(receiver_1)
        receiver_2_disconnected = a_signal.disconnect(receiver_2)
        self.assertTrue(receiver_1_disconnected)
        self.assertFalse(receiver_2_disconnected)
        self.assertTestIsClean(a_signal)

    def test_has_listeners(self):
        self.assertFalse(a_signal.has_listeners())
        self.assertFalse(a_signal.has_listeners(sender=object()))
        receiver_1 = Callable()
        a_signal.connect(receiver_1)
        self.assertTrue(a_signal.has_listeners())
        self.assertTrue(a_signal.has_listeners(sender=object()))
        a_signal.disconnect(receiver_1)
        self.assertFalse(a_signal.has_listeners())
        self.assertFalse(a_signal.has_listeners(sender=object()))


class ReceiverTestCase(SimpleTestCase):

    def test_receiver_single_signal(self):
        @receiver(a_signal)
        def f(val, **kwargs):
            self.state = val
        self.state = False
        a_signal.send(sender=self, val=True)
        self.assertTrue(self.state)

    def test_receiver_signal_list(self):
        @receiver([a_signal, b_signal, c_signal])
        def f(val, **kwargs):
            self.state.append(val)
        self.state = []
        a_signal.send(sender=self, val='a')
        c_signal.send(sender=self, val='c')
        b_signal.send(sender=self, val='b')
        self.assertIn('a', self.state)
        self.assertIn('b', self.state)
        self.assertIn('c', self.state)
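
# Illustrative sketch, not part of the test module above: the signal plumbing
# these tests exercise, in miniature. Receivers must accept **kwargs, send()
# returns a list of (receiver, response) pairs, and send_robust() returns a
# caught exception in place of the response rather than propagating it. The
# signal and receiver names here are made up for illustration:

from django.dispatch import Signal, receiver

demo_signal = Signal()


@receiver(demo_signal)
def log_finish(sender, **kwargs):
    # The connected receiver is called once per send() and its return value
    # is collected into the result list.
    return 'finished with %r' % kwargs.get('val')


responses = demo_signal.send(sender=None, val='test')
# responses == [(log_finish, "finished with 'test'")]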
import re from django.conf import settings from django.contrib.sessions.backends.cache import SessionStore from django.core.exceptions import ImproperlyConfigured from django.http import HttpRequest, HttpResponse, UnreadablePostError from django.middleware.csrf import ( CSRF_ALLOWED_CHARS, CSRF_SESSION_KEY, CSRF_TOKEN_LENGTH, REASON_BAD_ORIGIN, REASON_CSRF_TOKEN_MISSING, REASON_NO_CSRF_COOKIE, CsrfViewMiddleware, InvalidTokenFormat, RejectRequest, _does_token_match, _mask_cipher_secret, _sanitize_token, _unmask_cipher_token, get_token, rotate_token, ) from django.test import SimpleTestCase, override_settings from django.views.decorators.csrf import csrf_exempt, requires_csrf_token from .views import ( ensure_csrf_cookie_view, ensured_and_protected_view, non_token_view_using_request_processor, post_form_view, protected_view, sandwiched_rotate_token_view, token_view, ) # This is a test (unmasked) CSRF cookie / secret. TEST_SECRET = 'lcccccccX2kcccccccY2jcccccccssIC' # Two masked versions of TEST_SECRET for testing purposes. MASKED_TEST_SECRET1 = '1bcdefghij2bcdefghij3bcdefghij4bcdefghij5bcdefghij6bcdefghijABCD' MASKED_TEST_SECRET2 = '2JgchWvM1tpxT2lfz9aydoXW9yT1DN3NdLiejYxOOlzzV4nhBbYqmqZYbAV3V5Bf' class CsrfFunctionTests(SimpleTestCase): def test_unmask_cipher_token(self): cases = [ (TEST_SECRET, MASKED_TEST_SECRET1), (TEST_SECRET, MASKED_TEST_SECRET2), ( 32 * 'a', 'vFioG3XOLyGyGsPRFyB9iYUs341ufzIEvFioG3XOLyGyGsPRFyB9iYUs341ufzIE', ), (32 * 'a', 64 * 'a'), (32 * 'a', 64 * 'b'), (32 * 'b', 32 * 'a' + 32 * 'b'), (32 * 'b', 32 * 'b' + 32 * 'c'), (32 * 'c', 32 * 'a' + 32 * 'c'), ] for secret, masked_secret in cases: with self.subTest(masked_secret=masked_secret): actual = _unmask_cipher_token(masked_secret) self.assertEqual(actual, secret) # This method depends on _unmask_cipher_token() being correct. def assertMaskedSecretCorrect(self, masked_secret, secret): """Test that a string is a valid masked version of a secret.""" self.assertEqual(len(masked_secret), CSRF_TOKEN_LENGTH) self.assertTrue( set(masked_secret).issubset(set(CSRF_ALLOWED_CHARS)), msg=f'invalid characters in {masked_secret!r}', ) actual = _unmask_cipher_token(masked_secret) self.assertEqual(actual, secret) def test_mask_cipher_secret(self): cases = [ 32 * 'a', TEST_SECRET, 'da4SrUiHJYoJ0HYQ0vcgisoIuFOxx4ER', ] for secret in cases: with self.subTest(secret=secret): masked = _mask_cipher_secret(secret) self.assertMaskedSecretCorrect(masked, secret) def test_get_token_csrf_cookie_set(self): request = HttpRequest() request.META['CSRF_COOKIE'] = MASKED_TEST_SECRET1 self.assertNotIn('CSRF_COOKIE_NEEDS_UPDATE', request.META) token = get_token(request) self.assertNotEqual(token, MASKED_TEST_SECRET1) self.assertMaskedSecretCorrect(token, TEST_SECRET) # The existing cookie is preserved. 
self.assertEqual(request.META['CSRF_COOKIE'], MASKED_TEST_SECRET1) self.assertIs(request.META['CSRF_COOKIE_NEEDS_UPDATE'], True) def test_get_token_csrf_cookie_not_set(self): request = HttpRequest() self.assertNotIn('CSRF_COOKIE', request.META) self.assertNotIn('CSRF_COOKIE_NEEDS_UPDATE', request.META) token = get_token(request) cookie = request.META['CSRF_COOKIE'] self.assertEqual(len(cookie), CSRF_TOKEN_LENGTH) unmasked_cookie = _unmask_cipher_token(cookie) self.assertMaskedSecretCorrect(token, unmasked_cookie) self.assertIs(request.META['CSRF_COOKIE_NEEDS_UPDATE'], True) def test_rotate_token(self): request = HttpRequest() request.META['CSRF_COOKIE'] = MASKED_TEST_SECRET1 self.assertNotIn('CSRF_COOKIE_NEEDS_UPDATE', request.META) rotate_token(request) # The underlying secret was changed. cookie = request.META['CSRF_COOKIE'] self.assertEqual(len(cookie), CSRF_TOKEN_LENGTH) unmasked_cookie = _unmask_cipher_token(cookie) self.assertNotEqual(unmasked_cookie, TEST_SECRET) self.assertIs(request.META['CSRF_COOKIE_NEEDS_UPDATE'], True) def test_sanitize_token_masked(self): # Tokens of length CSRF_TOKEN_LENGTH are preserved. cases = [ (MASKED_TEST_SECRET1, MASKED_TEST_SECRET1), (64 * 'a', 64 * 'a'), ] for token, expected in cases: with self.subTest(token=token): actual = _sanitize_token(token) self.assertEqual(actual, expected) def test_sanitize_token_unmasked(self): # A token of length CSRF_SECRET_LENGTH is masked. actual = _sanitize_token(TEST_SECRET) self.assertMaskedSecretCorrect(actual, TEST_SECRET) def test_sanitize_token_invalid(self): cases = [ (64 * '*', 'has invalid characters'), (16 * 'a', 'has incorrect length'), ] for token, expected_message in cases: with self.subTest(token=token): with self.assertRaisesMessage(InvalidTokenFormat, expected_message): _sanitize_token(token) def test_does_token_match(self): cases = [ ((MASKED_TEST_SECRET1, MASKED_TEST_SECRET2), True), ((MASKED_TEST_SECRET1, 64 * 'a'), False), ] for (token1, token2), expected in cases: with self.subTest(token1=token1, token2=token2): actual = _does_token_match(token1, token2) self.assertIs(actual, expected) class TestingSessionStore(SessionStore): """ A version of SessionStore that stores what cookie values are passed to set_cookie() when CSRF_USE_SESSIONS=True. """ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) # This is a list of the cookie values passed to set_cookie() over # the course of the request-response. self._cookies_set = [] def __setitem__(self, key, value): super().__setitem__(key, value) self._cookies_set.append(value) class TestingHttpRequest(HttpRequest): """ A version of HttpRequest that lets one track and change some things more easily. """ def __init__(self): super().__init__() self.session = TestingSessionStore() def is_secure(self): return getattr(self, '_is_secure_override', False) class PostErrorRequest(TestingHttpRequest): """ TestingHttpRequest that can raise errors when accessing POST data. """ post_error = None def _get_post(self): if self.post_error is not None: raise self.post_error return self._post def _set_post(self, post): self._post = post POST = property(_get_post, _set_post) class CsrfViewMiddlewareTestMixin: """ Shared methods and tests for session-based and cookie-based tokens. 
""" _csrf_id_cookie = MASKED_TEST_SECRET1 _csrf_id_token = MASKED_TEST_SECRET2 def _set_csrf_cookie(self, req, cookie): raise NotImplementedError('This method must be implemented by a subclass.') def _read_csrf_cookie(self, req, resp): """ Return the CSRF cookie as a string, or False if no cookie is present. """ raise NotImplementedError('This method must be implemented by a subclass.') def _get_cookies_set(self, req, resp): """ Return a list of the cookie values passed to set_cookie() over the course of the request-response. """ raise NotImplementedError('This method must be implemented by a subclass.') def assertCookiesSet(self, req, resp, expected_secrets): """ Assert that set_cookie() was called with the given sequence of secrets. """ cookies_set = self._get_cookies_set(req, resp) secrets_set = [_unmask_cipher_token(cookie) for cookie in cookies_set] self.assertEqual(secrets_set, expected_secrets) def _get_request(self, method=None, cookie=None, request_class=None): if method is None: method = 'GET' if request_class is None: request_class = TestingHttpRequest req = request_class() req.method = method if cookie is not None: self._set_csrf_cookie(req, cookie) return req def _get_csrf_cookie_request( self, method=None, cookie=None, post_token=None, meta_token=None, token_header=None, request_class=None, ): """ The method argument defaults to "GET". The cookie argument defaults to this class's default test cookie. The post_token and meta_token arguments are included in the request's req.POST and req.META headers, respectively, when that argument is provided and non-None. The token_header argument is the header key to use for req.META, defaults to "HTTP_X_CSRFTOKEN". """ if cookie is None: cookie = self._csrf_id_cookie if token_header is None: token_header = 'HTTP_X_CSRFTOKEN' req = self._get_request( method=method, cookie=cookie, request_class=request_class, ) if post_token is not None: req.POST['csrfmiddlewaretoken'] = post_token if meta_token is not None: req.META[token_header] = meta_token return req def _get_POST_csrf_cookie_request( self, cookie=None, post_token=None, meta_token=None, token_header=None, request_class=None, ): return self._get_csrf_cookie_request( method='POST', cookie=cookie, post_token=post_token, meta_token=meta_token, token_header=token_header, request_class=request_class, ) def _get_POST_request_with_token(self, cookie=None, request_class=None): """The cookie argument defaults to this class's default test cookie.""" return self._get_POST_csrf_cookie_request( cookie=cookie, post_token=self._csrf_id_token, request_class=request_class, ) def _check_token_present(self, response, csrf_id=None): text = str(response.content, response.charset) match = re.search('name="csrfmiddlewaretoken" value="(.*?)"', text) csrf_token = csrf_id or self._csrf_id_token self.assertTrue( match and _does_token_match(csrf_token, match[1]), "Could not find csrfmiddlewaretoken to match %s" % csrf_token ) def test_process_response_get_token_not_used(self): """ If get_token() is not called, the view middleware does not add a cookie. """ # This is important to make pages cacheable. Pages which do call # get_token(), assuming they use the token, are not cacheable because # the token is specific to the user req = self._get_request() # non_token_view_using_request_processor does not call get_token(), but # does use the csrf request processor. By using this, we are testing # that the view processor is properly lazy and doesn't call get_token() # until needed. 
        mw = CsrfViewMiddleware(non_token_view_using_request_processor)
        mw.process_request(req)
        mw.process_view(req, non_token_view_using_request_processor, (), {})
        resp = mw(req)

        csrf_cookie = self._read_csrf_cookie(req, resp)
        self.assertIs(csrf_cookie, False)

    def _check_bad_or_missing_cookie(self, cookie, expected):
        """Passing None for cookie includes no cookie."""
        req = self._get_request(method='POST', cookie=cookie)
        mw = CsrfViewMiddleware(post_form_view)
        mw.process_request(req)
        with self.assertLogs('django.security.csrf', 'WARNING') as cm:
            resp = mw.process_view(req, post_form_view, (), {})
        self.assertEqual(403, resp.status_code)
        self.assertEqual(cm.records[0].getMessage(), 'Forbidden (%s): ' % expected)

    def test_no_csrf_cookie(self):
        """
        If no CSRF cookie is present, the middleware rejects the incoming
        request. This will stop login CSRF.
        """
        self._check_bad_or_missing_cookie(None, REASON_NO_CSRF_COOKIE)

    def _check_bad_or_missing_token(
        self, expected, post_token=None, meta_token=None, token_header=None,
    ):
        req = self._get_POST_csrf_cookie_request(
            post_token=post_token,
            meta_token=meta_token,
            token_header=token_header,
        )
        mw = CsrfViewMiddleware(post_form_view)
        mw.process_request(req)
        with self.assertLogs('django.security.csrf', 'WARNING') as cm:
            resp = mw.process_view(req, post_form_view, (), {})
        self.assertEqual(403, resp.status_code)
        self.assertEqual(cm.records[0].getMessage(), 'Forbidden (%s): ' % expected)

    def test_csrf_cookie_bad_or_missing_token(self):
        """
        If a CSRF cookie is present but the token is missing or invalid, the
        middleware rejects the incoming request.
        """
        cases = [
            (None, None, REASON_CSRF_TOKEN_MISSING),
            (16 * 'a', None, 'CSRF token from POST has incorrect length.'),
            (64 * '*', None, 'CSRF token from POST has invalid characters.'),
            (64 * 'a', None, 'CSRF token from POST incorrect.'),
            (
                None,
                16 * 'a',
                "CSRF token from the 'X-Csrftoken' HTTP header has incorrect length.",
            ),
            (
                None,
                64 * '*',
                "CSRF token from the 'X-Csrftoken' HTTP header has invalid characters.",
            ),
            (
                None,
                64 * 'a',
                "CSRF token from the 'X-Csrftoken' HTTP header incorrect.",
            ),
        ]
        for post_token, meta_token, expected in cases:
            with self.subTest(post_token=post_token, meta_token=meta_token):
                self._check_bad_or_missing_token(
                    expected,
                    post_token=post_token,
                    meta_token=meta_token,
                )

    @override_settings(CSRF_HEADER_NAME='HTTP_X_CSRFTOKEN_CUSTOMIZED')
    def test_csrf_cookie_bad_token_custom_header(self):
        """
        If a CSRF cookie is present and an invalid token is passed via a
        custom CSRF_HEADER_NAME, the middleware rejects the incoming request.
        """
        expected = (
            "CSRF token from the 'X-Csrftoken-Customized' HTTP header has "
            "incorrect length."
        )
        self._check_bad_or_missing_token(
            expected,
            meta_token=16 * 'a',
            token_header='HTTP_X_CSRFTOKEN_CUSTOMIZED',
        )

    def test_process_request_csrf_cookie_and_token(self):
        """
        If both a cookie and a token are present, the middleware lets it
        through.
""" req = self._get_POST_request_with_token() mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) resp = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(resp) def test_process_request_csrf_cookie_no_token_exempt_view(self): """ If a CSRF cookie is present and no token, but the csrf_exempt decorator has been applied to the view, the middleware lets it through """ req = self._get_POST_csrf_cookie_request() mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) resp = mw.process_view(req, csrf_exempt(post_form_view), (), {}) self.assertIsNone(resp) def test_csrf_token_in_header(self): """ The token may be passed in a header instead of in the form. """ req = self._get_POST_csrf_cookie_request(meta_token=self._csrf_id_token) mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) resp = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(resp) @override_settings(CSRF_HEADER_NAME='HTTP_X_CSRFTOKEN_CUSTOMIZED') def test_csrf_token_in_header_with_customized_name(self): """ settings.CSRF_HEADER_NAME can be used to customize the CSRF header name """ req = self._get_POST_csrf_cookie_request( meta_token=self._csrf_id_token, token_header='HTTP_X_CSRFTOKEN_CUSTOMIZED', ) mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) resp = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(resp) def test_put_and_delete_rejected(self): """ HTTP PUT and DELETE methods have protection """ req = self._get_request(method='PUT') mw = CsrfViewMiddleware(post_form_view) with self.assertLogs('django.security.csrf', 'WARNING') as cm: resp = mw.process_view(req, post_form_view, (), {}) self.assertEqual(403, resp.status_code) self.assertEqual(cm.records[0].getMessage(), 'Forbidden (%s): ' % REASON_NO_CSRF_COOKIE) req = self._get_request(method='DELETE') with self.assertLogs('django.security.csrf', 'WARNING') as cm: resp = mw.process_view(req, post_form_view, (), {}) self.assertEqual(403, resp.status_code) self.assertEqual(cm.records[0].getMessage(), 'Forbidden (%s): ' % REASON_NO_CSRF_COOKIE) def test_put_and_delete_allowed(self): """ HTTP PUT and DELETE can get through with X-CSRFToken and a cookie. """ req = self._get_csrf_cookie_request(method='PUT', meta_token=self._csrf_id_token) mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) resp = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(resp) req = self._get_csrf_cookie_request(method='DELETE', meta_token=self._csrf_id_token) mw.process_request(req) resp = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(resp) def test_rotate_token_triggers_second_reset(self): """ If rotate_token() is called after the token is reset in CsrfViewMiddleware's process_response() and before another call to the same process_response(), the cookie is reset a second time. """ req = self._get_POST_request_with_token() resp = sandwiched_rotate_token_view(req) self.assertContains(resp, 'OK') csrf_cookie = self._read_csrf_cookie(req, resp) actual_secret = _unmask_cipher_token(csrf_cookie) # set_cookie() was called a second time with a different secret. self.assertCookiesSet(req, resp, [TEST_SECRET, actual_secret]) self.assertNotEqual(actual_secret, TEST_SECRET) # Tests for the template tag method def test_token_node_no_csrf_cookie(self): """ CsrfTokenNode works when no CSRF cookie is set. 
""" req = self._get_request() resp = token_view(req) token = get_token(req) self.assertIsNotNone(token) self._check_token_present(resp, token) def test_token_node_empty_csrf_cookie(self): """ A new token is sent if the csrf_cookie is the empty string. """ req = self._get_request() self._set_csrf_cookie(req, '') mw = CsrfViewMiddleware(token_view) mw.process_view(req, token_view, (), {}) resp = token_view(req) token = get_token(req) self.assertIsNotNone(token) self._check_token_present(resp, token) def test_token_node_with_csrf_cookie(self): """ CsrfTokenNode works when a CSRF cookie is set. """ req = self._get_csrf_cookie_request() mw = CsrfViewMiddleware(token_view) mw.process_request(req) mw.process_view(req, token_view, (), {}) resp = token_view(req) self._check_token_present(resp) def test_get_token_for_exempt_view(self): """ get_token still works for a view decorated with 'csrf_exempt'. """ req = self._get_csrf_cookie_request() mw = CsrfViewMiddleware(token_view) mw.process_request(req) mw.process_view(req, csrf_exempt(token_view), (), {}) resp = token_view(req) self._check_token_present(resp) def test_get_token_for_requires_csrf_token_view(self): """ get_token() works for a view decorated solely with requires_csrf_token. """ req = self._get_csrf_cookie_request() resp = requires_csrf_token(token_view)(req) self._check_token_present(resp) def test_token_node_with_new_csrf_cookie(self): """ CsrfTokenNode works when a CSRF cookie is created by the middleware (when one was not already present) """ req = self._get_request() mw = CsrfViewMiddleware(token_view) mw.process_view(req, token_view, (), {}) resp = mw(req) csrf_cookie = self._read_csrf_cookie(req, resp) self._check_token_present(resp, csrf_id=csrf_cookie) def test_cookie_not_reset_on_accepted_request(self): """ The csrf token used in posts is changed on every request (although stays equivalent). The csrf cookie should not change on accepted requests. If it appears in the response, it should keep its value. 
""" req = self._get_POST_request_with_token() mw = CsrfViewMiddleware(token_view) mw.process_request(req) mw.process_view(req, token_view, (), {}) resp = mw(req) csrf_cookie = self._read_csrf_cookie(req, resp) self.assertEqual( csrf_cookie, self._csrf_id_cookie, 'CSRF cookie was changed on an accepted request', ) @override_settings(DEBUG=True, ALLOWED_HOSTS=['www.example.com']) def test_https_bad_referer(self): """ A POST HTTPS request with a bad referer is rejected """ req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_REFERER'] = 'https://www.evil.org/somepage' req.META['SERVER_PORT'] = '443' mw = CsrfViewMiddleware(post_form_view) response = mw.process_view(req, post_form_view, (), {}) self.assertContains( response, 'Referer checking failed - https://www.evil.org/somepage does not ' 'match any trusted origins.', status_code=403, ) def _check_referer_rejects(self, mw, req): with self.assertRaises(RejectRequest): mw._check_referer(req) @override_settings(DEBUG=True) def test_https_no_referer(self): """A POST HTTPS request with a missing referer is rejected.""" req = self._get_POST_request_with_token() req._is_secure_override = True mw = CsrfViewMiddleware(post_form_view) self._check_referer_rejects(mw, req) response = mw.process_view(req, post_form_view, (), {}) self.assertContains( response, 'Referer checking failed - no Referer.', status_code=403, ) def test_https_malformed_host(self): """ CsrfViewMiddleware generates a 403 response if it receives an HTTPS request with a bad host. """ req = self._get_request(method='POST') req._is_secure_override = True req.META['HTTP_HOST'] = '@malformed' req.META['HTTP_REFERER'] = 'https://www.evil.org/somepage' req.META['SERVER_PORT'] = '443' mw = CsrfViewMiddleware(token_view) expected = ( 'Referer checking failed - https://www.evil.org/somepage does not ' 'match any trusted origins.' ) with self.assertRaisesMessage(RejectRequest, expected): mw._check_referer(req) response = mw.process_view(req, token_view, (), {}) self.assertEqual(response.status_code, 403) def test_origin_malformed_host(self): req = self._get_request(method='POST') req._is_secure_override = True req.META['HTTP_HOST'] = '@malformed' req.META['HTTP_ORIGIN'] = 'https://www.evil.org' mw = CsrfViewMiddleware(token_view) self._check_referer_rejects(mw, req) response = mw.process_view(req, token_view, (), {}) self.assertEqual(response.status_code, 403) @override_settings(DEBUG=True) def test_https_malformed_referer(self): """ A POST HTTPS request with a bad referer is rejected. """ malformed_referer_msg = 'Referer checking failed - Referer is malformed.' 
req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_REFERER'] = 'http://http://www.example.com/' mw = CsrfViewMiddleware(post_form_view) self._check_referer_rejects(mw, req) response = mw.process_view(req, post_form_view, (), {}) self.assertContains( response, 'Referer checking failed - Referer is insecure while host is secure.', status_code=403, ) # Empty req.META['HTTP_REFERER'] = '' self._check_referer_rejects(mw, req) response = mw.process_view(req, post_form_view, (), {}) self.assertContains(response, malformed_referer_msg, status_code=403) # Non-ASCII req.META['HTTP_REFERER'] = 'ØBöIß' self._check_referer_rejects(mw, req) response = mw.process_view(req, post_form_view, (), {}) self.assertContains(response, malformed_referer_msg, status_code=403) # missing scheme # >>> urlparse('//example.com/') # ParseResult(scheme='', netloc='example.com', path='/', params='', query='', fragment='') req.META['HTTP_REFERER'] = '//example.com/' self._check_referer_rejects(mw, req) response = mw.process_view(req, post_form_view, (), {}) self.assertContains(response, malformed_referer_msg, status_code=403) # missing netloc # >>> urlparse('https://') # ParseResult(scheme='https', netloc='', path='', params='', query='', fragment='') req.META['HTTP_REFERER'] = 'https://' self._check_referer_rejects(mw, req) response = mw.process_view(req, post_form_view, (), {}) self.assertContains(response, malformed_referer_msg, status_code=403) # Invalid URL # >>> urlparse('https://[') # ValueError: Invalid IPv6 URL req.META['HTTP_REFERER'] = 'https://[' self._check_referer_rejects(mw, req) response = mw.process_view(req, post_form_view, (), {}) self.assertContains(response, malformed_referer_msg, status_code=403) @override_settings(ALLOWED_HOSTS=['www.example.com']) def test_https_good_referer(self): """ A POST HTTPS request with a good referer is accepted. """ req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_REFERER'] = 'https://www.example.com/somepage' mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) resp = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(resp) @override_settings(ALLOWED_HOSTS=['www.example.com']) def test_https_good_referer_2(self): """ A POST HTTPS request with a good referer is accepted where the referer contains no trailing slash. """ # See ticket #15617 req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_REFERER'] = 'https://www.example.com' mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) resp = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(resp) def _test_https_good_referer_behind_proxy(self): req = self._get_POST_request_with_token() req._is_secure_override = True req.META.update({ 'HTTP_HOST': '10.0.0.2', 'HTTP_REFERER': 'https://www.example.com/somepage', 'SERVER_PORT': '8080', 'HTTP_X_FORWARDED_HOST': 'www.example.com', 'HTTP_X_FORWARDED_PORT': '443', }) mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) resp = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(resp) @override_settings(CSRF_TRUSTED_ORIGINS=['https://dashboard.example.com']) def test_https_good_referer_malformed_host(self): """ A POST HTTPS request is accepted if it receives a good referer with a bad host. 
""" req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_HOST'] = '@malformed' req.META['HTTP_REFERER'] = 'https://dashboard.example.com/somepage' mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) resp = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(resp) @override_settings(ALLOWED_HOSTS=['www.example.com'], CSRF_TRUSTED_ORIGINS=['https://dashboard.example.com']) def test_https_csrf_trusted_origin_allowed(self): """ A POST HTTPS request with a referer added to the CSRF_TRUSTED_ORIGINS setting is accepted. """ req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_REFERER'] = 'https://dashboard.example.com' mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) resp = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(resp) @override_settings(ALLOWED_HOSTS=['www.example.com'], CSRF_TRUSTED_ORIGINS=['https://*.example.com']) def test_https_csrf_wildcard_trusted_origin_allowed(self): """ A POST HTTPS request with a referer that matches a CSRF_TRUSTED_ORIGINS wildcard is accepted. """ req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_REFERER'] = 'https://dashboard.example.com' mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) response = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(response) def _test_https_good_referer_matches_cookie_domain(self): req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_REFERER'] = 'https://foo.example.com/' req.META['SERVER_PORT'] = '443' mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) response = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(response) def _test_https_good_referer_matches_cookie_domain_with_different_port(self): req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_REFERER'] = 'https://foo.example.com:4443/' req.META['SERVER_PORT'] = '4443' mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) response = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(response) def test_ensures_csrf_cookie_no_logging(self): """ ensure_csrf_cookie() doesn't log warnings (#19436). """ with self.assertNoLogs('django.request', 'WARNING'): req = self._get_request() ensure_csrf_cookie_view(req) def test_reading_post_data_raises_unreadable_post_error(self): """ An UnreadablePostError raised while reading the POST data should be handled by the middleware. """ req = self._get_POST_request_with_token() mw = CsrfViewMiddleware(post_form_view) mw.process_request(req) resp = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(resp) req = self._get_POST_request_with_token(request_class=PostErrorRequest) req.post_error = UnreadablePostError('Error reading input data.') mw.process_request(req) with self.assertLogs('django.security.csrf', 'WARNING') as cm: resp = mw.process_view(req, post_form_view, (), {}) self.assertEqual(resp.status_code, 403) self.assertEqual( cm.records[0].getMessage(), 'Forbidden (%s): ' % REASON_CSRF_TOKEN_MISSING, ) def test_reading_post_data_raises_os_error(self): """ An OSError raised while reading the POST data should not be handled by the middleware. 
""" mw = CsrfViewMiddleware(post_form_view) req = self._get_POST_request_with_token(request_class=PostErrorRequest) req.post_error = OSError('Deleted directories/Missing permissions.') mw.process_request(req) with self.assertRaises(OSError): mw.process_view(req, post_form_view, (), {}) @override_settings(ALLOWED_HOSTS=['www.example.com']) def test_bad_origin_bad_domain(self): """A request with a bad origin is rejected.""" req = self._get_POST_request_with_token() req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_ORIGIN'] = 'https://www.evil.org' mw = CsrfViewMiddleware(post_form_view) self._check_referer_rejects(mw, req) self.assertIs(mw._origin_verified(req), False) with self.assertLogs('django.security.csrf', 'WARNING') as cm: response = mw.process_view(req, post_form_view, (), {}) self.assertEqual(response.status_code, 403) msg = REASON_BAD_ORIGIN % req.META['HTTP_ORIGIN'] self.assertEqual(cm.records[0].getMessage(), 'Forbidden (%s): ' % msg) @override_settings(ALLOWED_HOSTS=['www.example.com']) def test_bad_origin_null_origin(self): """A request with a null origin is rejected.""" req = self._get_POST_request_with_token() req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_ORIGIN'] = 'null' mw = CsrfViewMiddleware(post_form_view) self._check_referer_rejects(mw, req) self.assertIs(mw._origin_verified(req), False) with self.assertLogs('django.security.csrf', 'WARNING') as cm: response = mw.process_view(req, post_form_view, (), {}) self.assertEqual(response.status_code, 403) msg = REASON_BAD_ORIGIN % req.META['HTTP_ORIGIN'] self.assertEqual(cm.records[0].getMessage(), 'Forbidden (%s): ' % msg) @override_settings(ALLOWED_HOSTS=['www.example.com']) def test_bad_origin_bad_protocol(self): """A request with an origin with wrong protocol is rejected.""" req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_ORIGIN'] = 'http://example.com' mw = CsrfViewMiddleware(post_form_view) self._check_referer_rejects(mw, req) self.assertIs(mw._origin_verified(req), False) with self.assertLogs('django.security.csrf', 'WARNING') as cm: response = mw.process_view(req, post_form_view, (), {}) self.assertEqual(response.status_code, 403) msg = REASON_BAD_ORIGIN % req.META['HTTP_ORIGIN'] self.assertEqual(cm.records[0].getMessage(), 'Forbidden (%s): ' % msg) @override_settings( ALLOWED_HOSTS=['www.example.com'], CSRF_TRUSTED_ORIGINS=[ 'http://no-match.com', 'https://*.example.com', 'http://*.no-match.com', 'http://*.no-match-2.com', ], ) def test_bad_origin_csrf_trusted_origin_bad_protocol(self): """ A request with an origin with the wrong protocol compared to CSRF_TRUSTED_ORIGINS is rejected. 
""" req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_ORIGIN'] = 'http://foo.example.com' mw = CsrfViewMiddleware(post_form_view) self._check_referer_rejects(mw, req) self.assertIs(mw._origin_verified(req), False) with self.assertLogs('django.security.csrf', 'WARNING') as cm: response = mw.process_view(req, post_form_view, (), {}) self.assertEqual(response.status_code, 403) msg = REASON_BAD_ORIGIN % req.META['HTTP_ORIGIN'] self.assertEqual(cm.records[0].getMessage(), 'Forbidden (%s): ' % msg) self.assertEqual(mw.allowed_origins_exact, {'http://no-match.com'}) self.assertEqual(mw.allowed_origin_subdomains, { 'https': ['.example.com'], 'http': ['.no-match.com', '.no-match-2.com'], }) @override_settings(ALLOWED_HOSTS=['www.example.com']) def test_bad_origin_cannot_be_parsed(self): """ A POST request with an origin that can't be parsed by urlparse() is rejected. """ req = self._get_POST_request_with_token() req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_ORIGIN'] = 'https://[' mw = CsrfViewMiddleware(post_form_view) self._check_referer_rejects(mw, req) self.assertIs(mw._origin_verified(req), False) with self.assertLogs('django.security.csrf', 'WARNING') as cm: response = mw.process_view(req, post_form_view, (), {}) self.assertEqual(response.status_code, 403) msg = REASON_BAD_ORIGIN % req.META['HTTP_ORIGIN'] self.assertEqual(cm.records[0].getMessage(), 'Forbidden (%s): ' % msg) @override_settings(ALLOWED_HOSTS=['www.example.com']) def test_good_origin_insecure(self): """A POST HTTP request with a good origin is accepted.""" req = self._get_POST_request_with_token() req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_ORIGIN'] = 'http://www.example.com' mw = CsrfViewMiddleware(post_form_view) self.assertIs(mw._origin_verified(req), True) response = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(response) @override_settings(ALLOWED_HOSTS=['www.example.com']) def test_good_origin_secure(self): """A POST HTTPS request with a good origin is accepted.""" req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_ORIGIN'] = 'https://www.example.com' mw = CsrfViewMiddleware(post_form_view) self.assertIs(mw._origin_verified(req), True) response = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(response) @override_settings(ALLOWED_HOSTS=['www.example.com'], CSRF_TRUSTED_ORIGINS=['https://dashboard.example.com']) def test_good_origin_csrf_trusted_origin_allowed(self): """ A POST request with an origin added to the CSRF_TRUSTED_ORIGINS setting is accepted. """ req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_ORIGIN'] = 'https://dashboard.example.com' mw = CsrfViewMiddleware(post_form_view) self.assertIs(mw._origin_verified(req), True) resp = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(resp) self.assertEqual(mw.allowed_origins_exact, {'https://dashboard.example.com'}) self.assertEqual(mw.allowed_origin_subdomains, {}) @override_settings(ALLOWED_HOSTS=['www.example.com'], CSRF_TRUSTED_ORIGINS=['https://*.example.com']) def test_good_origin_wildcard_csrf_trusted_origin_allowed(self): """ A POST request with an origin that matches a CSRF_TRUSTED_ORIGINS wildcard is accepted. 
""" req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_ORIGIN'] = 'https://foo.example.com' mw = CsrfViewMiddleware(post_form_view) self.assertIs(mw._origin_verified(req), True) response = mw.process_view(req, post_form_view, (), {}) self.assertIsNone(response) self.assertEqual(mw.allowed_origins_exact, set()) self.assertEqual(mw.allowed_origin_subdomains, {'https': ['.example.com']}) class CsrfViewMiddlewareTests(CsrfViewMiddlewareTestMixin, SimpleTestCase): def _set_csrf_cookie(self, req, cookie): req.COOKIES[settings.CSRF_COOKIE_NAME] = cookie def _read_csrf_cookie(self, req, resp): """ Return the CSRF cookie as a string, or False if no cookie is present. """ if settings.CSRF_COOKIE_NAME not in resp.cookies: return False csrf_cookie = resp.cookies[settings.CSRF_COOKIE_NAME] return csrf_cookie.value def _get_cookies_set(self, req, resp): return resp._cookies_set def test_ensures_csrf_cookie_no_middleware(self): """ The ensure_csrf_cookie() decorator works without middleware. """ req = self._get_request() resp = ensure_csrf_cookie_view(req) csrf_cookie = self._read_csrf_cookie(req, resp) self.assertTrue(csrf_cookie) self.assertIn('Cookie', resp.get('Vary', '')) def test_ensures_csrf_cookie_with_middleware(self): """ The ensure_csrf_cookie() decorator works with the CsrfViewMiddleware enabled. """ req = self._get_request() mw = CsrfViewMiddleware(ensure_csrf_cookie_view) mw.process_view(req, ensure_csrf_cookie_view, (), {}) resp = mw(req) csrf_cookie = self._read_csrf_cookie(req, resp) self.assertTrue(csrf_cookie) self.assertIn('Cookie', resp.get('Vary', '')) def test_csrf_cookie_age(self): """ CSRF cookie age can be set using settings.CSRF_COOKIE_AGE. """ req = self._get_request() MAX_AGE = 123 with self.settings(CSRF_COOKIE_NAME='csrfcookie', CSRF_COOKIE_DOMAIN='.example.com', CSRF_COOKIE_AGE=MAX_AGE, CSRF_COOKIE_PATH='/test/', CSRF_COOKIE_SECURE=True, CSRF_COOKIE_HTTPONLY=True): # token_view calls get_token() indirectly mw = CsrfViewMiddleware(token_view) mw.process_view(req, token_view, (), {}) resp = mw(req) max_age = resp.cookies.get('csrfcookie').get('max-age') self.assertEqual(max_age, MAX_AGE) def test_csrf_cookie_age_none(self): """ CSRF cookie age does not have max age set and therefore uses session-based cookies. """ req = self._get_request() MAX_AGE = None with self.settings(CSRF_COOKIE_NAME='csrfcookie', CSRF_COOKIE_DOMAIN='.example.com', CSRF_COOKIE_AGE=MAX_AGE, CSRF_COOKIE_PATH='/test/', CSRF_COOKIE_SECURE=True, CSRF_COOKIE_HTTPONLY=True): # token_view calls get_token() indirectly mw = CsrfViewMiddleware(token_view) mw.process_view(req, token_view, (), {}) resp = mw(req) max_age = resp.cookies.get('csrfcookie').get('max-age') self.assertEqual(max_age, '') def test_csrf_cookie_samesite(self): req = self._get_request() with self.settings(CSRF_COOKIE_NAME='csrfcookie', CSRF_COOKIE_SAMESITE='Strict'): mw = CsrfViewMiddleware(token_view) mw.process_view(req, token_view, (), {}) resp = mw(req) self.assertEqual(resp.cookies['csrfcookie']['samesite'], 'Strict') def test_bad_csrf_cookie_characters(self): """ If the CSRF cookie has invalid characters in a POST request, the middleware rejects the incoming request. """ self._check_bad_or_missing_cookie(64 * '*', 'CSRF cookie has invalid characters.') def test_bad_csrf_cookie_length(self): """ If the CSRF cookie has an incorrect length in a POST request, the middleware rejects the incoming request. 
""" self._check_bad_or_missing_cookie(16 * 'a', 'CSRF cookie has incorrect length.') def test_process_view_token_too_long(self): """ If the token is longer than expected, it is ignored and a new token is created. """ req = self._get_request() self._set_csrf_cookie(req, 'x' * 100000) mw = CsrfViewMiddleware(token_view) mw.process_view(req, token_view, (), {}) resp = mw(req) csrf_cookie = self._read_csrf_cookie(req, resp) self.assertEqual(len(csrf_cookie), CSRF_TOKEN_LENGTH) def test_process_view_token_invalid_chars(self): """ If the token contains non-alphanumeric characters, it is ignored and a new token is created. """ token = ('!@#' + self._csrf_id_token)[:CSRF_TOKEN_LENGTH] req = self._get_request() self._set_csrf_cookie(req, token) mw = CsrfViewMiddleware(token_view) mw.process_view(req, token_view, (), {}) resp = mw(req) csrf_cookie = self._read_csrf_cookie(req, resp) self.assertEqual(len(csrf_cookie), CSRF_TOKEN_LENGTH) self.assertNotEqual(csrf_cookie, token) def test_masked_unmasked_combinations(self): """ All combinations are allowed of (1) masked and unmasked cookies, (2) masked and unmasked tokens, and (3) tokens provided via POST and the X-CSRFToken header. """ cases = [ (TEST_SECRET, TEST_SECRET, None), (TEST_SECRET, MASKED_TEST_SECRET2, None), (TEST_SECRET, None, TEST_SECRET), (TEST_SECRET, None, MASKED_TEST_SECRET2), (MASKED_TEST_SECRET1, TEST_SECRET, None), (MASKED_TEST_SECRET1, MASKED_TEST_SECRET2, None), (MASKED_TEST_SECRET1, None, TEST_SECRET), (MASKED_TEST_SECRET1, None, MASKED_TEST_SECRET2), ] for args in cases: with self.subTest(args=args): cookie, post_token, meta_token = args req = self._get_POST_csrf_cookie_request( cookie=cookie, post_token=post_token, meta_token=meta_token, ) mw = CsrfViewMiddleware(token_view) mw.process_request(req) resp = mw.process_view(req, token_view, (), {}) self.assertIsNone(resp) def test_cookie_reset_only_once(self): """ A CSRF cookie that needs to be reset is reset only once when the view is decorated with both ensure_csrf_cookie and csrf_protect. """ # Pass an unmasked cookie to trigger a cookie reset. req = self._get_POST_request_with_token(cookie=TEST_SECRET) resp = ensured_and_protected_view(req) self.assertContains(resp, 'OK') csrf_cookie = self._read_csrf_cookie(req, resp) actual_secret = _unmask_cipher_token(csrf_cookie) self.assertEqual(actual_secret, TEST_SECRET) # set_cookie() was called only once and with the expected secret. self.assertCookiesSet(req, resp, [TEST_SECRET]) def test_invalid_cookie_replaced_on_GET(self): """ A CSRF cookie with the wrong format is replaced during a GET request. """ req = self._get_request(cookie='badvalue') resp = protected_view(req) self.assertContains(resp, 'OK') csrf_cookie = self._read_csrf_cookie(req, resp) self.assertTrue(csrf_cookie, msg='No CSRF cookie was sent.') self.assertEqual(len(csrf_cookie), CSRF_TOKEN_LENGTH) def test_bare_secret_accepted_and_replaced(self): """ The csrf token is reset from a bare secret. 
""" req = self._get_POST_request_with_token(cookie=TEST_SECRET) mw = CsrfViewMiddleware(token_view) mw.process_request(req) resp = mw.process_view(req, token_view, (), {}) self.assertIsNone(resp) resp = mw(req) csrf_cookie = self._read_csrf_cookie(req, resp) self.assertEqual(len(csrf_cookie), CSRF_TOKEN_LENGTH) self._check_token_present(resp, csrf_id=csrf_cookie) @override_settings(ALLOWED_HOSTS=['www.example.com'], CSRF_COOKIE_DOMAIN='.example.com', USE_X_FORWARDED_PORT=True) def test_https_good_referer_behind_proxy(self): """ A POST HTTPS request is accepted when USE_X_FORWARDED_PORT=True. """ self._test_https_good_referer_behind_proxy() @override_settings(ALLOWED_HOSTS=['www.example.com'], CSRF_COOKIE_DOMAIN='.example.com') def test_https_good_referer_matches_cookie_domain(self): """ A POST HTTPS request with a good referer should be accepted from a subdomain that's allowed by CSRF_COOKIE_DOMAIN. """ self._test_https_good_referer_matches_cookie_domain() @override_settings(ALLOWED_HOSTS=['www.example.com'], CSRF_COOKIE_DOMAIN='.example.com') def test_https_good_referer_matches_cookie_domain_with_different_port(self): """ A POST HTTPS request with a good referer should be accepted from a subdomain that's allowed by CSRF_COOKIE_DOMAIN and a non-443 port. """ self._test_https_good_referer_matches_cookie_domain_with_different_port() @override_settings(CSRF_COOKIE_DOMAIN='.example.com', DEBUG=True) def test_https_reject_insecure_referer(self): """ A POST HTTPS request from an insecure referer should be rejected. """ req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_REFERER'] = 'http://example.com/' req.META['SERVER_PORT'] = '443' mw = CsrfViewMiddleware(post_form_view) self._check_referer_rejects(mw, req) response = mw.process_view(req, post_form_view, (), {}) self.assertContains( response, 'Referer checking failed - Referer is insecure while host is secure.', status_code=403, ) @override_settings(CSRF_USE_SESSIONS=True, CSRF_COOKIE_DOMAIN=None) class CsrfViewMiddlewareUseSessionsTests(CsrfViewMiddlewareTestMixin, SimpleTestCase): """ CSRF tests with CSRF_USE_SESSIONS=True. """ def _set_csrf_cookie(self, req, cookie): req.session[CSRF_SESSION_KEY] = cookie def _read_csrf_cookie(self, req, resp=None): """ Return the CSRF cookie as a string, or False if no cookie is present. """ if CSRF_SESSION_KEY not in req.session: return False return req.session[CSRF_SESSION_KEY] def _get_cookies_set(self, req, resp): return req.session._cookies_set def test_no_session_on_request(self): msg = ( 'CSRF_USE_SESSIONS is enabled, but request.session is not set. ' 'SessionMiddleware must appear before CsrfViewMiddleware in MIDDLEWARE.' ) with self.assertRaisesMessage(ImproperlyConfigured, msg): mw = CsrfViewMiddleware(lambda req: HttpResponse()) mw.process_request(HttpRequest()) def test_masked_unmasked_combinations(self): """ Masked and unmasked tokens are allowed both as POST and as the X-CSRFToken header. """ cases = [ # Bare secrets are not allowed when CSRF_USE_SESSIONS=True. 
(MASKED_TEST_SECRET1, TEST_SECRET, None), (MASKED_TEST_SECRET1, MASKED_TEST_SECRET2, None), (MASKED_TEST_SECRET1, None, TEST_SECRET), (MASKED_TEST_SECRET1, None, MASKED_TEST_SECRET2), ] for args in cases: with self.subTest(args=args): cookie, post_token, meta_token = args req = self._get_POST_csrf_cookie_request( cookie=cookie, post_token=post_token, meta_token=meta_token, ) mw = CsrfViewMiddleware(token_view) mw.process_request(req) resp = mw.process_view(req, token_view, (), {}) self.assertIsNone(resp) def test_process_response_get_token_used(self): """The ensure_csrf_cookie() decorator works without middleware.""" req = self._get_request() ensure_csrf_cookie_view(req) csrf_cookie = self._read_csrf_cookie(req) self.assertTrue(csrf_cookie) def test_session_modify(self): """The session isn't saved if the CSRF cookie is unchanged.""" req = self._get_request() mw = CsrfViewMiddleware(ensure_csrf_cookie_view) mw.process_view(req, ensure_csrf_cookie_view, (), {}) mw(req) csrf_cookie = self._read_csrf_cookie(req) self.assertTrue(csrf_cookie) req.session.modified = False mw.process_view(req, ensure_csrf_cookie_view, (), {}) mw(req) self.assertFalse(req.session.modified) def test_ensures_csrf_cookie_with_middleware(self): """ The ensure_csrf_cookie() decorator works with the CsrfViewMiddleware enabled. """ req = self._get_request() mw = CsrfViewMiddleware(ensure_csrf_cookie_view) mw.process_view(req, ensure_csrf_cookie_view, (), {}) mw(req) csrf_cookie = self._read_csrf_cookie(req) self.assertTrue(csrf_cookie) @override_settings( ALLOWED_HOSTS=['www.example.com'], SESSION_COOKIE_DOMAIN='.example.com', USE_X_FORWARDED_PORT=True, DEBUG=True, ) def test_https_good_referer_behind_proxy(self): """ A POST HTTPS request is accepted when USE_X_FORWARDED_PORT=True. """ self._test_https_good_referer_behind_proxy() @override_settings(ALLOWED_HOSTS=['www.example.com'], SESSION_COOKIE_DOMAIN='.example.com') def test_https_good_referer_matches_cookie_domain(self): """ A POST HTTPS request with a good referer should be accepted from a subdomain that's allowed by SESSION_COOKIE_DOMAIN. """ self._test_https_good_referer_matches_cookie_domain() @override_settings(ALLOWED_HOSTS=['www.example.com'], SESSION_COOKIE_DOMAIN='.example.com') def test_https_good_referer_matches_cookie_domain_with_different_port(self): """ A POST HTTPS request with a good referer should be accepted from a subdomain that's allowed by SESSION_COOKIE_DOMAIN and a non-443 port. """ self._test_https_good_referer_matches_cookie_domain_with_different_port() @override_settings(SESSION_COOKIE_DOMAIN='.example.com', DEBUG=True) def test_https_reject_insecure_referer(self): """ A POST HTTPS request from an insecure referer should be rejected. """ req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_REFERER'] = 'http://example.com/' req.META['SERVER_PORT'] = '443' mw = CsrfViewMiddleware(post_form_view) response = mw.process_view(req, post_form_view, (), {}) self.assertContains( response, 'Referer checking failed - Referer is insecure while host is secure.', status_code=403, ) @override_settings(ROOT_URLCONF='csrf_tests.csrf_token_error_handler_urls', DEBUG=False) class CsrfInErrorHandlingViewsTests(SimpleTestCase): def test_csrf_token_on_404_stays_constant(self): response = self.client.get('/does not exist/') # The error handler returns status code 599. 
self.assertEqual(response.status_code, 599) token1 = response.content response = self.client.get('/does not exist/') self.assertEqual(response.status_code, 599) token2 = response.content self.assertTrue(_does_token_match(token1.decode('ascii'), token2.decode('ascii')))
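# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the suite above: the masked/unmasked token
# scheme the combination tests exercise. This is a from-scratch
# re-implementation assuming a 32-character secret, a 64-character masked
# token, and an alphanumeric alphabet; the demo_* names are hypothetical and
# are not Django APIs.
import secrets
import string

DEMO_ALLOWED_CHARS = string.ascii_letters + string.digits
DEMO_SECRET_LENGTH = 32


def demo_mask_secret(secret):
    # Prepend a random mask and store a Vigenere-style cipher of the secret,
    # so every masking of the same secret yields a different token.
    mask = ''.join(secrets.choice(DEMO_ALLOWED_CHARS) for _ in range(DEMO_SECRET_LENGTH))
    chars = DEMO_ALLOWED_CHARS
    cipher = ''.join(
        chars[(chars.index(s) + chars.index(m)) % len(chars)]
        for s, m in zip(secret, mask)
    )
    return mask + cipher


def demo_unmask_token(token):
    # Reverse the cipher by subtracting the mask indexes, wrapping via modulo.
    mask, cipher = token[:DEMO_SECRET_LENGTH], token[DEMO_SECRET_LENGTH:]
    chars = DEMO_ALLOWED_CHARS
    return ''.join(
        chars[(chars.index(c) - chars.index(m)) % len(chars)]
        for c, m in zip(cipher, mask)
    )


# Any masking of a secret unmasks back to the same value, which is why the
# tests accept a bare TEST_SECRET alongside MASKED_TEST_SECRET1/2.
_demo_secret = ''.join(secrets.choice(DEMO_ALLOWED_CHARS) for _ in range(DEMO_SECRET_LENGTH))
assert demo_unmask_token(demo_mask_secret(_demo_secret)) == _demo_secret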
16e1fea7f4e0a190de045ae68a30db827e0f3329710d7dc53018250543e7f623
from django.http import HttpResponse
from django.middleware.csrf import get_token, rotate_token
from django.template import Context, RequestContext, Template
from django.template.context_processors import csrf
from django.utils.decorators import decorator_from_middleware
from django.utils.deprecation import MiddlewareMixin
from django.views.decorators.csrf import csrf_protect, ensure_csrf_cookie


class TestingHttpResponse(HttpResponse):
    """
    A version of HttpResponse that stores what cookie values are passed to
    set_cookie() when CSRF_USE_SESSIONS=False.
    """
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # This is a list of the cookie values passed to set_cookie() over
        # the course of the request-response.
        self._cookies_set = []

    def set_cookie(self, key, value, **kwargs):
        super().set_cookie(key, value, **kwargs)
        self._cookies_set.append(value)


class _CsrfCookieRotator(MiddlewareMixin):

    def process_response(self, request, response):
        rotate_token(request)
        return response


csrf_rotating_token = decorator_from_middleware(_CsrfCookieRotator)


@csrf_protect
def protected_view(request):
    return HttpResponse('OK')


@ensure_csrf_cookie
def ensure_csrf_cookie_view(request):
    return HttpResponse('OK')


@csrf_protect
@ensure_csrf_cookie
def ensured_and_protected_view(request):
    return TestingHttpResponse('OK')


@csrf_protect
@csrf_rotating_token
@ensure_csrf_cookie
def sandwiched_rotate_token_view(request):
    """
    This is a view that calls rotate_token() in process_response() between
    two calls to CsrfViewMiddleware.process_response().
    """
    return TestingHttpResponse('OK')


def post_form_view(request):
    """Return a POST form (without a token)."""
    return HttpResponse(content="""
<html><body><h1>\u00a1Unicode!<form method="post"><input type="text"></form></body></html>
""")


def token_view(request):
    context = RequestContext(request, processors=[csrf])
    template = Template('{% csrf_token %}')
    return HttpResponse(template.render(context))


def non_token_view_using_request_processor(request):
    """Use the csrf view processor instead of the token."""
    context = RequestContext(request, processors=[csrf])
    template = Template('')
    return HttpResponse(template.render(context))


def csrf_token_error_handler(request, **kwargs):
    """This error handler accesses the CSRF token."""
    template = Template(get_token(request))
    return HttpResponse(template.render(Context()), status=599)
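# Illustrative only: decorator_from_middleware() turns _CsrfCookieRotator into
# a per-view decorator, which is how csrf_rotating_token ends up sandwiched
# between the two CsrfViewMiddleware.process_response() calls in
# sandwiched_rotate_token_view above. The view below is a hypothetical
# example, not referenced by the tests.
@csrf_rotating_token
def rotating_only_view(request):
    # rotate_token() runs in process_response() after this body returns.
    return HttpResponse('OK')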
5017382cdc1307d52c0012d037acde7ff20d22f371fcfc4a5ff126de8825ea44
from django.http import HttpRequest
from django.middleware.csrf import _does_token_match as equivalent_tokens
from django.template.context_processors import csrf
from django.test import SimpleTestCase


class TestContextProcessor(SimpleTestCase):

    def test_force_token_to_string(self):
        request = HttpRequest()
        test_token = '1bcdefghij2bcdefghij3bcdefghij4bcdefghij5bcdefghij6bcdefghijABCD'
        request.META['CSRF_COOKIE'] = test_token
        token = csrf(request).get('csrf_token')
        self.assertTrue(equivalent_tokens(str(token), test_token))
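# Sketch, not part of the test above: csrf() hands the template a lazily
# evaluated token so it isn't generated unless something renders it, which is
# why the assertion coerces the value with str() before comparing.
# SimpleLazyObject is real Django API; the wrapped value below is made up.
from django.utils.functional import SimpleLazyObject

lazy_token = SimpleLazyObject(lambda: 'example-token-value')
assert str(lazy_token) == 'example-token-value'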
52352f3a11b6fbee9da8f3be64147a6cf55e99116be7b1ab37d72448dd06d708
import datetime import math import re from decimal import Decimal from django.core.exceptions import FieldError from django.db import connection from django.db.models import ( Avg, Case, Count, DateField, DateTimeField, DecimalField, DurationField, Exists, F, FloatField, IntegerField, Max, Min, OuterRef, Q, StdDev, Subquery, Sum, TimeField, Value, Variance, When, ) from django.db.models.expressions import Func, RawSQL from django.db.models.functions import ( Cast, Coalesce, Greatest, Now, Pi, TruncDate, TruncHour, ) from django.test import TestCase from django.test.testcases import skipUnlessDBFeature from django.test.utils import Approximate, CaptureQueriesContext from django.utils import timezone from .models import Author, Book, Publisher, Store class NowUTC(Now): template = 'CURRENT_TIMESTAMP' output_field = DateTimeField() def as_mysql(self, compiler, connection, **extra_context): return self.as_sql(compiler, connection, template='UTC_TIMESTAMP', **extra_context) def as_oracle(self, compiler, connection, **extra_context): return self.as_sql(compiler, connection, template="CURRENT_TIMESTAMP AT TIME ZONE 'UTC'", **extra_context) def as_postgresql(self, compiler, connection, **extra_context): return self.as_sql(compiler, connection, template="STATEMENT_TIMESTAMP() AT TIME ZONE 'UTC'", **extra_context) class AggregateTestCase(TestCase): @classmethod def setUpTestData(cls): cls.a1 = Author.objects.create(name='Adrian Holovaty', age=34) cls.a2 = Author.objects.create(name='Jacob Kaplan-Moss', age=35) cls.a3 = Author.objects.create(name='Brad Dayley', age=45) cls.a4 = Author.objects.create(name='James Bennett', age=29) cls.a5 = Author.objects.create(name='Jeffrey Forcier', age=37) cls.a6 = Author.objects.create(name='Paul Bissex', age=29) cls.a7 = Author.objects.create(name='Wesley J. 
Chun', age=25) cls.a8 = Author.objects.create(name='Peter Norvig', age=57) cls.a9 = Author.objects.create(name='Stuart Russell', age=46) cls.a1.friends.add(cls.a2, cls.a4) cls.a2.friends.add(cls.a1, cls.a7) cls.a4.friends.add(cls.a1) cls.a5.friends.add(cls.a6, cls.a7) cls.a6.friends.add(cls.a5, cls.a7) cls.a7.friends.add(cls.a2, cls.a5, cls.a6) cls.a8.friends.add(cls.a9) cls.a9.friends.add(cls.a8) cls.p1 = Publisher.objects.create(name='Apress', num_awards=3, duration=datetime.timedelta(days=1)) cls.p2 = Publisher.objects.create(name='Sams', num_awards=1, duration=datetime.timedelta(days=2)) cls.p3 = Publisher.objects.create(name='Prentice Hall', num_awards=7) cls.p4 = Publisher.objects.create(name='Morgan Kaufmann', num_awards=9) cls.p5 = Publisher.objects.create(name="Jonno's House of Books", num_awards=0) cls.b1 = Book.objects.create( isbn='159059725', name='The Definitive Guide to Django: Web Development Done Right', pages=447, rating=4.5, price=Decimal('30.00'), contact=cls.a1, publisher=cls.p1, pubdate=datetime.date(2007, 12, 6) ) cls.b2 = Book.objects.create( isbn='067232959', name='Sams Teach Yourself Django in 24 Hours', pages=528, rating=3.0, price=Decimal('23.09'), contact=cls.a3, publisher=cls.p2, pubdate=datetime.date(2008, 3, 3) ) cls.b3 = Book.objects.create( isbn='159059996', name='Practical Django Projects', pages=300, rating=4.0, price=Decimal('29.69'), contact=cls.a4, publisher=cls.p1, pubdate=datetime.date(2008, 6, 23) ) cls.b4 = Book.objects.create( isbn='013235613', name='Python Web Development with Django', pages=350, rating=4.0, price=Decimal('29.69'), contact=cls.a5, publisher=cls.p3, pubdate=datetime.date(2008, 11, 3) ) cls.b5 = Book.objects.create( isbn='013790395', name='Artificial Intelligence: A Modern Approach', pages=1132, rating=4.0, price=Decimal('82.80'), contact=cls.a8, publisher=cls.p3, pubdate=datetime.date(1995, 1, 15) ) cls.b6 = Book.objects.create( isbn='155860191', name='Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp', pages=946, rating=5.0, price=Decimal('75.00'), contact=cls.a8, publisher=cls.p4, pubdate=datetime.date(1991, 10, 15) ) cls.b1.authors.add(cls.a1, cls.a2) cls.b2.authors.add(cls.a3) cls.b3.authors.add(cls.a4) cls.b4.authors.add(cls.a5, cls.a6, cls.a7) cls.b5.authors.add(cls.a8, cls.a9) cls.b6.authors.add(cls.a8) s1 = Store.objects.create( name='Amazon.com', original_opening=datetime.datetime(1994, 4, 23, 9, 17, 42), friday_night_closing=datetime.time(23, 59, 59) ) s2 = Store.objects.create( name='Books.com', original_opening=datetime.datetime(2001, 3, 15, 11, 23, 37), friday_night_closing=datetime.time(23, 59, 59) ) s3 = Store.objects.create( name="Mamma and Pappa's Books", original_opening=datetime.datetime(1945, 4, 25, 16, 24, 14), friday_night_closing=datetime.time(21, 30) ) s1.books.add(cls.b1, cls.b2, cls.b3, cls.b4, cls.b5, cls.b6) s2.books.add(cls.b1, cls.b3, cls.b5, cls.b6) s3.books.add(cls.b3, cls.b4, cls.b6) def test_empty_aggregate(self): self.assertEqual(Author.objects.all().aggregate(), {}) def test_aggregate_in_order_by(self): msg = ( 'Using an aggregate in order_by() without also including it in ' 'annotate() is not allowed: Avg(F(book__rating)' ) with self.assertRaisesMessage(FieldError, msg): Author.objects.values('age').order_by(Avg('book__rating')) def test_single_aggregate(self): vals = Author.objects.aggregate(Avg("age")) self.assertEqual(vals, {"age__avg": Approximate(37.4, places=1)}) def test_multiple_aggregates(self): vals = Author.objects.aggregate(Sum("age"), Avg("age")) 
self.assertEqual(vals, {"age__sum": 337, "age__avg": Approximate(37.4, places=1)}) def test_filter_aggregate(self): vals = Author.objects.filter(age__gt=29).aggregate(Sum("age")) self.assertEqual(vals, {'age__sum': 254}) def test_related_aggregate(self): vals = Author.objects.aggregate(Avg("friends__age")) self.assertEqual(vals, {'friends__age__avg': Approximate(34.07, places=2)}) vals = Book.objects.filter(rating__lt=4.5).aggregate(Avg("authors__age")) self.assertEqual(vals, {'authors__age__avg': Approximate(38.2857, places=2)}) vals = Author.objects.all().filter(name__contains="a").aggregate(Avg("book__rating")) self.assertEqual(vals, {'book__rating__avg': 4.0}) vals = Book.objects.aggregate(Sum("publisher__num_awards")) self.assertEqual(vals, {'publisher__num_awards__sum': 30}) vals = Publisher.objects.aggregate(Sum("book__price")) self.assertEqual(vals, {'book__price__sum': Decimal('270.27')}) def test_aggregate_multi_join(self): vals = Store.objects.aggregate(Max("books__authors__age")) self.assertEqual(vals, {'books__authors__age__max': 57}) vals = Author.objects.aggregate(Min("book__publisher__num_awards")) self.assertEqual(vals, {'book__publisher__num_awards__min': 1}) def test_aggregate_alias(self): vals = Store.objects.filter(name="Amazon.com").aggregate(amazon_mean=Avg("books__rating")) self.assertEqual(vals, {'amazon_mean': Approximate(4.08, places=2)}) def test_aggregate_transform(self): vals = Store.objects.aggregate(min_month=Min('original_opening__month')) self.assertEqual(vals, {'min_month': 3}) def test_aggregate_join_transform(self): vals = Publisher.objects.aggregate(min_year=Min('book__pubdate__year')) self.assertEqual(vals, {'min_year': 1991}) def test_annotate_basic(self): self.assertQuerysetEqual( Book.objects.annotate().order_by('pk'), [ "The Definitive Guide to Django: Web Development Done Right", "Sams Teach Yourself Django in 24 Hours", "Practical Django Projects", "Python Web Development with Django", "Artificial Intelligence: A Modern Approach", "Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp" ], lambda b: b.name ) books = Book.objects.annotate(mean_age=Avg("authors__age")) b = books.get(pk=self.b1.pk) self.assertEqual( b.name, 'The Definitive Guide to Django: Web Development Done Right' ) self.assertEqual(b.mean_age, 34.5) def test_annotate_defer(self): qs = Book.objects.annotate( page_sum=Sum("pages")).defer('name').filter(pk=self.b1.pk) rows = [ (self.b1.id, "159059725", 447, "The Definitive Guide to Django: Web Development Done Right") ] self.assertQuerysetEqual( qs.order_by('pk'), rows, lambda r: (r.id, r.isbn, r.page_sum, r.name) ) def test_annotate_defer_select_related(self): qs = Book.objects.select_related('contact').annotate( page_sum=Sum("pages")).defer('name').filter(pk=self.b1.pk) rows = [ (self.b1.id, "159059725", 447, "Adrian Holovaty", "The Definitive Guide to Django: Web Development Done Right") ] self.assertQuerysetEqual( qs.order_by('pk'), rows, lambda r: (r.id, r.isbn, r.page_sum, r.contact.name, r.name) ) def test_annotate_m2m(self): books = Book.objects.filter(rating__lt=4.5).annotate(Avg("authors__age")).order_by("name") self.assertQuerysetEqual( books, [ ('Artificial Intelligence: A Modern Approach', 51.5), ('Practical Django Projects', 29.0), ('Python Web Development with Django', Approximate(30.3, places=1)), ('Sams Teach Yourself Django in 24 Hours', 45.0) ], lambda b: (b.name, b.authors__age__avg), ) books = Book.objects.annotate(num_authors=Count("authors")).order_by("name") self.assertQuerysetEqual( 
books, [ ('Artificial Intelligence: A Modern Approach', 2), ('Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp', 1), ('Practical Django Projects', 1), ('Python Web Development with Django', 3), ('Sams Teach Yourself Django in 24 Hours', 1), ('The Definitive Guide to Django: Web Development Done Right', 2) ], lambda b: (b.name, b.num_authors) ) def test_backwards_m2m_annotate(self): authors = Author.objects.filter(name__contains="a").annotate(Avg("book__rating")).order_by("name") self.assertQuerysetEqual( authors, [ ('Adrian Holovaty', 4.5), ('Brad Dayley', 3.0), ('Jacob Kaplan-Moss', 4.5), ('James Bennett', 4.0), ('Paul Bissex', 4.0), ('Stuart Russell', 4.0) ], lambda a: (a.name, a.book__rating__avg) ) authors = Author.objects.annotate(num_books=Count("book")).order_by("name") self.assertQuerysetEqual( authors, [ ('Adrian Holovaty', 1), ('Brad Dayley', 1), ('Jacob Kaplan-Moss', 1), ('James Bennett', 1), ('Jeffrey Forcier', 1), ('Paul Bissex', 1), ('Peter Norvig', 2), ('Stuart Russell', 1), ('Wesley J. Chun', 1) ], lambda a: (a.name, a.num_books) ) def test_reverse_fkey_annotate(self): books = Book.objects.annotate(Sum("publisher__num_awards")).order_by("name") self.assertQuerysetEqual( books, [ ('Artificial Intelligence: A Modern Approach', 7), ('Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp', 9), ('Practical Django Projects', 3), ('Python Web Development with Django', 7), ('Sams Teach Yourself Django in 24 Hours', 1), ('The Definitive Guide to Django: Web Development Done Right', 3) ], lambda b: (b.name, b.publisher__num_awards__sum) ) publishers = Publisher.objects.annotate(Sum("book__price")).order_by("name") self.assertQuerysetEqual( publishers, [ ('Apress', Decimal("59.69")), ("Jonno's House of Books", None), ('Morgan Kaufmann', Decimal("75.00")), ('Prentice Hall', Decimal("112.49")), ('Sams', Decimal("23.09")) ], lambda p: (p.name, p.book__price__sum) ) def test_annotate_values(self): books = list(Book.objects.filter(pk=self.b1.pk).annotate(mean_age=Avg("authors__age")).values()) self.assertEqual( books, [ { "contact_id": self.a1.id, "id": self.b1.id, "isbn": "159059725", "mean_age": 34.5, "name": "The Definitive Guide to Django: Web Development Done Right", "pages": 447, "price": Approximate(Decimal("30")), "pubdate": datetime.date(2007, 12, 6), "publisher_id": self.p1.id, "rating": 4.5, } ] ) books = ( Book.objects .filter(pk=self.b1.pk) .annotate(mean_age=Avg('authors__age')) .values('pk', 'isbn', 'mean_age') ) self.assertEqual( list(books), [ { "pk": self.b1.pk, "isbn": "159059725", "mean_age": 34.5, } ] ) books = Book.objects.filter(pk=self.b1.pk).annotate(mean_age=Avg("authors__age")).values("name") self.assertEqual( list(books), [{'name': 'The Definitive Guide to Django: Web Development Done Right'}], ) books = Book.objects.filter(pk=self.b1.pk).values().annotate(mean_age=Avg('authors__age')) self.assertEqual( list(books), [ { "contact_id": self.a1.id, "id": self.b1.id, "isbn": "159059725", "mean_age": 34.5, "name": "The Definitive Guide to Django: Web Development Done Right", "pages": 447, "price": Approximate(Decimal("30")), "pubdate": datetime.date(2007, 12, 6), "publisher_id": self.p1.id, "rating": 4.5, } ] ) books = ( Book.objects .values("rating") .annotate(n_authors=Count("authors__id"), mean_age=Avg("authors__age")) .order_by("rating") ) self.assertEqual( list(books), [ { "rating": 3.0, "n_authors": 1, "mean_age": 45.0, }, { "rating": 4.0, "n_authors": 6, "mean_age": Approximate(37.16, places=1) }, { 
"rating": 4.5, "n_authors": 2, "mean_age": 34.5, }, { "rating": 5.0, "n_authors": 1, "mean_age": 57.0, } ] ) authors = Author.objects.annotate(Avg("friends__age")).order_by("name") self.assertQuerysetEqual( authors, [ ('Adrian Holovaty', 32.0), ('Brad Dayley', None), ('Jacob Kaplan-Moss', 29.5), ('James Bennett', 34.0), ('Jeffrey Forcier', 27.0), ('Paul Bissex', 31.0), ('Peter Norvig', 46.0), ('Stuart Russell', 57.0), ('Wesley J. Chun', Approximate(33.66, places=1)) ], lambda a: (a.name, a.friends__age__avg) ) def test_count(self): vals = Book.objects.aggregate(Count("rating")) self.assertEqual(vals, {"rating__count": 6}) def test_count_star(self): with self.assertNumQueries(1) as ctx: Book.objects.aggregate(n=Count("*")) sql = ctx.captured_queries[0]['sql'] self.assertIn('SELECT COUNT(*) ', sql) def test_count_distinct_expression(self): aggs = Book.objects.aggregate( distinct_ratings=Count(Case(When(pages__gt=300, then='rating')), distinct=True), ) self.assertEqual(aggs['distinct_ratings'], 4) def test_distinct_on_aggregate(self): for aggregate, expected_result in ( (Avg, 4.125), (Count, 4), (Sum, 16.5), ): with self.subTest(aggregate=aggregate.__name__): books = Book.objects.aggregate(ratings=aggregate('rating', distinct=True)) self.assertEqual(books['ratings'], expected_result) def test_non_grouped_annotation_not_in_group_by(self): """ An annotation not included in values() before an aggregate should be excluded from the group by clause. """ qs = ( Book.objects.annotate(xprice=F('price')).filter(rating=4.0).values('rating') .annotate(count=Count('publisher_id', distinct=True)).values('count', 'rating').order_by('count') ) self.assertEqual(list(qs), [{'rating': 4.0, 'count': 2}]) def test_grouped_annotation_in_group_by(self): """ An annotation included in values() before an aggregate should be included in the group by clause. """ qs = ( Book.objects.annotate(xprice=F('price')).filter(rating=4.0).values('rating', 'xprice') .annotate(count=Count('publisher_id', distinct=True)).values('count', 'rating').order_by('count') ) self.assertEqual( list(qs), [ {'rating': 4.0, 'count': 1}, {'rating': 4.0, 'count': 2}, ] ) def test_fkey_aggregate(self): explicit = list(Author.objects.annotate(Count('book__id'))) implicit = list(Author.objects.annotate(Count('book'))) self.assertCountEqual(explicit, implicit) def test_annotate_ordering(self): books = Book.objects.values('rating').annotate(oldest=Max('authors__age')).order_by('oldest', 'rating') self.assertEqual( list(books), [ {'rating': 4.5, 'oldest': 35}, {'rating': 3.0, 'oldest': 45}, {'rating': 4.0, 'oldest': 57}, {'rating': 5.0, 'oldest': 57}, ] ) books = Book.objects.values("rating").annotate(oldest=Max("authors__age")).order_by("-oldest", "-rating") self.assertEqual( list(books), [ {'rating': 5.0, 'oldest': 57}, {'rating': 4.0, 'oldest': 57}, {'rating': 3.0, 'oldest': 45}, {'rating': 4.5, 'oldest': 35}, ] ) def test_aggregate_annotation(self): vals = Book.objects.annotate(num_authors=Count("authors__id")).aggregate(Avg("num_authors")) self.assertEqual(vals, {"num_authors__avg": Approximate(1.66, places=1)}) def test_avg_duration_field(self): # Explicit `output_field`. self.assertEqual( Publisher.objects.aggregate(Avg('duration', output_field=DurationField())), {'duration__avg': datetime.timedelta(days=1, hours=12)} ) # Implicit `output_field`. 
self.assertEqual( Publisher.objects.aggregate(Avg('duration')), {'duration__avg': datetime.timedelta(days=1, hours=12)} ) def test_sum_duration_field(self): self.assertEqual( Publisher.objects.aggregate(Sum('duration', output_field=DurationField())), {'duration__sum': datetime.timedelta(days=3)} ) def test_sum_distinct_aggregate(self): """ Sum on a distinct() QuerySet should aggregate only the distinct items. """ authors = Author.objects.filter(book__in=[self.b5, self.b6]) self.assertEqual(authors.count(), 3) distinct_authors = authors.distinct() self.assertEqual(distinct_authors.count(), 2) # Selected author ages are 57 and 46 age_sum = distinct_authors.aggregate(Sum('age')) self.assertEqual(age_sum['age__sum'], 103) def test_filtering(self): p = Publisher.objects.create(name='Expensive Publisher', num_awards=0) Book.objects.create( name='ExpensiveBook1', pages=1, isbn='111', rating=3.5, price=Decimal("1000"), publisher=p, contact_id=self.a1.id, pubdate=datetime.date(2008, 12, 1) ) Book.objects.create( name='ExpensiveBook2', pages=1, isbn='222', rating=4.0, price=Decimal("1000"), publisher=p, contact_id=self.a1.id, pubdate=datetime.date(2008, 12, 2) ) Book.objects.create( name='ExpensiveBook3', pages=1, isbn='333', rating=4.5, price=Decimal("35"), publisher=p, contact_id=self.a1.id, pubdate=datetime.date(2008, 12, 3) ) publishers = Publisher.objects.annotate(num_books=Count("book__id")).filter(num_books__gt=1).order_by("pk") self.assertQuerysetEqual( publishers, ['Apress', 'Prentice Hall', 'Expensive Publisher'], lambda p: p.name, ) publishers = Publisher.objects.filter(book__price__lt=Decimal("40.0")).order_by("pk") self.assertQuerysetEqual( publishers, [ "Apress", "Apress", "Sams", "Prentice Hall", "Expensive Publisher", ], lambda p: p.name ) publishers = ( Publisher.objects .annotate(num_books=Count("book__id")) .filter(num_books__gt=1, book__price__lt=Decimal("40.0")) .order_by("pk") ) self.assertQuerysetEqual( publishers, ['Apress', 'Prentice Hall', 'Expensive Publisher'], lambda p: p.name, ) publishers = ( Publisher.objects .filter(book__price__lt=Decimal("40.0")) .annotate(num_books=Count("book__id")) .filter(num_books__gt=1) .order_by("pk") ) self.assertQuerysetEqual(publishers, ['Apress'], lambda p: p.name) publishers = Publisher.objects.annotate(num_books=Count("book")).filter(num_books__range=[1, 3]).order_by("pk") self.assertQuerysetEqual( publishers, [ "Apress", "Sams", "Prentice Hall", "Morgan Kaufmann", "Expensive Publisher", ], lambda p: p.name ) publishers = Publisher.objects.annotate(num_books=Count("book")).filter(num_books__range=[1, 2]).order_by("pk") self.assertQuerysetEqual( publishers, ['Apress', 'Sams', 'Prentice Hall', 'Morgan Kaufmann'], lambda p: p.name ) publishers = Publisher.objects.annotate(num_books=Count("book")).filter(num_books__in=[1, 3]).order_by("pk") self.assertQuerysetEqual( publishers, ['Sams', 'Morgan Kaufmann', 'Expensive Publisher'], lambda p: p.name, ) publishers = Publisher.objects.annotate(num_books=Count("book")).filter(num_books__isnull=True) self.assertEqual(len(publishers), 0) def test_annotation(self): vals = Author.objects.filter(pk=self.a1.pk).aggregate(Count("friends__id")) self.assertEqual(vals, {"friends__id__count": 2}) books = Book.objects.annotate(num_authors=Count("authors__name")).filter(num_authors__exact=2).order_by("pk") self.assertQuerysetEqual( books, [ "The Definitive Guide to Django: Web Development Done Right", "Artificial Intelligence: A Modern Approach", ], lambda b: b.name ) authors = ( Author.objects 
.annotate(num_friends=Count("friends__id", distinct=True)) .filter(num_friends=0) .order_by("pk") ) self.assertQuerysetEqual(authors, ['Brad Dayley'], lambda a: a.name) publishers = Publisher.objects.annotate(num_books=Count("book__id")).filter(num_books__gt=1).order_by("pk") self.assertQuerysetEqual(publishers, ['Apress', 'Prentice Hall'], lambda p: p.name) publishers = ( Publisher.objects .filter(book__price__lt=Decimal("40.0")) .annotate(num_books=Count("book__id")) .filter(num_books__gt=1) ) self.assertQuerysetEqual(publishers, ['Apress'], lambda p: p.name) books = ( Book.objects .annotate(num_authors=Count("authors__id")) .filter(authors__name__contains="Norvig", num_authors__gt=1) ) self.assertQuerysetEqual( books, ['Artificial Intelligence: A Modern Approach'], lambda b: b.name ) def test_more_aggregation(self): a = Author.objects.get(name__contains='Norvig') b = Book.objects.get(name__contains='Done Right') b.authors.add(a) b.save() vals = ( Book.objects .annotate(num_authors=Count("authors__id")) .filter(authors__name__contains="Norvig", num_authors__gt=1) .aggregate(Avg("rating")) ) self.assertEqual(vals, {"rating__avg": 4.25}) def test_even_more_aggregate(self): publishers = Publisher.objects.annotate( earliest_book=Min("book__pubdate"), ).exclude(earliest_book=None).order_by("earliest_book").values( 'earliest_book', 'num_awards', 'id', 'name', ) self.assertEqual( list(publishers), [ { 'earliest_book': datetime.date(1991, 10, 15), 'num_awards': 9, 'id': self.p4.id, 'name': 'Morgan Kaufmann' }, { 'earliest_book': datetime.date(1995, 1, 15), 'num_awards': 7, 'id': self.p3.id, 'name': 'Prentice Hall' }, { 'earliest_book': datetime.date(2007, 12, 6), 'num_awards': 3, 'id': self.p1.id, 'name': 'Apress' }, { 'earliest_book': datetime.date(2008, 3, 3), 'num_awards': 1, 'id': self.p2.id, 'name': 'Sams' } ] ) vals = Store.objects.aggregate(Max("friday_night_closing"), Min("original_opening")) self.assertEqual( vals, { "friday_night_closing__max": datetime.time(23, 59, 59), "original_opening__min": datetime.datetime(1945, 4, 25, 16, 24, 14), } ) def test_annotate_values_list(self): books = ( Book.objects .filter(pk=self.b1.pk) .annotate(mean_age=Avg("authors__age")) .values_list("pk", "isbn", "mean_age") ) self.assertEqual(list(books), [(self.b1.id, '159059725', 34.5)]) books = Book.objects.filter(pk=self.b1.pk).annotate(mean_age=Avg("authors__age")).values_list("isbn") self.assertEqual(list(books), [('159059725',)]) books = Book.objects.filter(pk=self.b1.pk).annotate(mean_age=Avg("authors__age")).values_list("mean_age") self.assertEqual(list(books), [(34.5,)]) books = ( Book.objects .filter(pk=self.b1.pk) .annotate(mean_age=Avg("authors__age")) .values_list("mean_age", flat=True) ) self.assertEqual(list(books), [34.5]) books = Book.objects.values_list("price").annotate(count=Count("price")).order_by("-count", "price") self.assertEqual( list(books), [ (Decimal("29.69"), 2), (Decimal('23.09'), 1), (Decimal('30'), 1), (Decimal('75'), 1), (Decimal('82.8'), 1), ] ) def test_dates_with_aggregation(self): """ .dates() returns a distinct set of dates when applied to a QuerySet with aggregation. Refs #18056. Previously, .dates() would return distinct (date_kind, aggregation) sets, in this case (year, num_authors), so 2008 would be returned twice because there are books from 2008 with a different number of authors. 
""" dates = Book.objects.annotate(num_authors=Count("authors")).dates('pubdate', 'year') self.assertSequenceEqual( dates, [ datetime.date(1991, 1, 1), datetime.date(1995, 1, 1), datetime.date(2007, 1, 1), datetime.date(2008, 1, 1), ], ) def test_values_aggregation(self): # Refs #20782 max_rating = Book.objects.values('rating').aggregate(max_rating=Max('rating')) self.assertEqual(max_rating['max_rating'], 5) max_books_per_rating = Book.objects.values('rating').annotate( books_per_rating=Count('id') ).aggregate(Max('books_per_rating')) self.assertEqual( max_books_per_rating, {'books_per_rating__max': 3}) def test_ticket17424(self): """ Doing exclude() on a foreign model after annotate() doesn't crash. """ all_books = list(Book.objects.values_list('pk', flat=True).order_by('pk')) annotated_books = Book.objects.order_by('pk').annotate(one=Count("id")) # The value doesn't matter, we just need any negative # constraint on a related model that's a noop. excluded_books = annotated_books.exclude(publisher__name="__UNLIKELY_VALUE__") # Try to generate query tree str(excluded_books.query) self.assertQuerysetEqual(excluded_books, all_books, lambda x: x.pk) # Check internal state self.assertIsNone(annotated_books.query.alias_map["aggregation_book"].join_type) self.assertIsNone(excluded_books.query.alias_map["aggregation_book"].join_type) def test_ticket12886(self): """ Aggregation over sliced queryset works correctly. """ qs = Book.objects.all().order_by('-rating')[0:3] vals = qs.aggregate(average_top3_rating=Avg('rating'))['average_top3_rating'] self.assertAlmostEqual(vals, 4.5, places=2) def test_ticket11881(self): """ Subqueries do not needlessly contain ORDER BY, SELECT FOR UPDATE or select_related() stuff. """ qs = Book.objects.all().select_for_update().order_by( 'pk').select_related('publisher').annotate(max_pk=Max('pk')) with CaptureQueriesContext(connection) as captured_queries: qs.aggregate(avg_pk=Avg('max_pk')) self.assertEqual(len(captured_queries), 1) qstr = captured_queries[0]['sql'].lower() self.assertNotIn('for update', qstr) forced_ordering = connection.ops.force_no_ordering() if forced_ordering: # If the backend needs to force an ordering we make sure it's # the only "ORDER BY" clause present in the query. 
self.assertEqual( re.findall(r'order by (\w+)', qstr), [', '.join(f[1][0] for f in forced_ordering).lower()] ) else: self.assertNotIn('order by', qstr) self.assertEqual(qstr.count(' join '), 0) def test_decimal_max_digits_has_no_effect(self): Book.objects.all().delete() a1 = Author.objects.first() p1 = Publisher.objects.first() thedate = timezone.now() for i in range(10): Book.objects.create( isbn="abcde{}".format(i), name="none", pages=10, rating=4.0, price=9999.98, contact=a1, publisher=p1, pubdate=thedate) book = Book.objects.aggregate(price_sum=Sum('price')) self.assertEqual(book['price_sum'], Decimal("99999.80")) def test_nonaggregate_aggregation_throws(self): with self.assertRaisesMessage(TypeError, 'fail is not an aggregate expression'): Book.objects.aggregate(fail=F('price')) def test_nonfield_annotation(self): book = Book.objects.annotate(val=Max(Value(2))).first() self.assertEqual(book.val, 2) book = Book.objects.annotate(val=Max(Value(2), output_field=IntegerField())).first() self.assertEqual(book.val, 2) book = Book.objects.annotate(val=Max(2, output_field=IntegerField())).first() self.assertEqual(book.val, 2) def test_annotation_expressions(self): authors = Author.objects.annotate(combined_ages=Sum(F('age') + F('friends__age'))).order_by('name') authors2 = Author.objects.annotate(combined_ages=Sum('age') + Sum('friends__age')).order_by('name') for qs in (authors, authors2): self.assertQuerysetEqual( qs, [ ('Adrian Holovaty', 132), ('Brad Dayley', None), ('Jacob Kaplan-Moss', 129), ('James Bennett', 63), ('Jeffrey Forcier', 128), ('Paul Bissex', 120), ('Peter Norvig', 103), ('Stuart Russell', 103), ('Wesley J. Chun', 176) ], lambda a: (a.name, a.combined_ages) ) def test_aggregation_expressions(self): a1 = Author.objects.aggregate(av_age=Sum('age') / Count('*')) a2 = Author.objects.aggregate(av_age=Sum('age') / Count('age')) a3 = Author.objects.aggregate(av_age=Avg('age')) self.assertEqual(a1, {'av_age': 37}) self.assertEqual(a2, {'av_age': 37}) self.assertEqual(a3, {'av_age': Approximate(37.4, places=1)}) def test_avg_decimal_field(self): v = Book.objects.filter(rating=4).aggregate(avg_price=(Avg('price')))['avg_price'] self.assertIsInstance(v, Decimal) self.assertEqual(v, Approximate(Decimal('47.39'), places=2)) def test_order_of_precedence(self): p1 = Book.objects.filter(rating=4).aggregate(avg_price=(Avg('price') + 2) * 3) self.assertEqual(p1, {'avg_price': Approximate(Decimal('148.18'), places=2)}) p2 = Book.objects.filter(rating=4).aggregate(avg_price=Avg('price') + 2 * 3) self.assertEqual(p2, {'avg_price': Approximate(Decimal('53.39'), places=2)}) def test_combine_different_types(self): msg = ( 'Expression contains mixed types: FloatField, DecimalField. ' 'You must set output_field.' 
) qs = Book.objects.annotate(sums=Sum('rating') + Sum('pages') + Sum('price')) with self.assertRaisesMessage(FieldError, msg): qs.first() with self.assertRaisesMessage(FieldError, msg): qs.first() b1 = Book.objects.annotate(sums=Sum(F('rating') + F('pages') + F('price'), output_field=IntegerField())).get(pk=self.b4.pk) self.assertEqual(b1.sums, 383) b2 = Book.objects.annotate(sums=Sum(F('rating') + F('pages') + F('price'), output_field=FloatField())).get(pk=self.b4.pk) self.assertEqual(b2.sums, 383.69) b3 = Book.objects.annotate(sums=Sum(F('rating') + F('pages') + F('price'), output_field=DecimalField())).get(pk=self.b4.pk) self.assertEqual(b3.sums, Approximate(Decimal("383.69"), places=2)) def test_complex_aggregations_require_kwarg(self): with self.assertRaisesMessage(TypeError, 'Complex annotations require an alias'): Author.objects.annotate(Sum(F('age') + F('friends__age'))) with self.assertRaisesMessage(TypeError, 'Complex aggregates require an alias'): Author.objects.aggregate(Sum('age') / Count('age')) with self.assertRaisesMessage(TypeError, 'Complex aggregates require an alias'): Author.objects.aggregate(Sum(1)) def test_aggregate_over_complex_annotation(self): qs = Author.objects.annotate( combined_ages=Sum(F('age') + F('friends__age'))) age = qs.aggregate(max_combined_age=Max('combined_ages')) self.assertEqual(age['max_combined_age'], 176) age = qs.aggregate(max_combined_age_doubled=Max('combined_ages') * 2) self.assertEqual(age['max_combined_age_doubled'], 176 * 2) age = qs.aggregate( max_combined_age_doubled=Max('combined_ages') + Max('combined_ages')) self.assertEqual(age['max_combined_age_doubled'], 176 * 2) age = qs.aggregate( max_combined_age_doubled=Max('combined_ages') + Max('combined_ages'), sum_combined_age=Sum('combined_ages')) self.assertEqual(age['max_combined_age_doubled'], 176 * 2) self.assertEqual(age['sum_combined_age'], 954) age = qs.aggregate( max_combined_age_doubled=Max('combined_ages') + Max('combined_ages'), sum_combined_age_doubled=Sum('combined_ages') + Sum('combined_ages')) self.assertEqual(age['max_combined_age_doubled'], 176 * 2) self.assertEqual(age['sum_combined_age_doubled'], 954 * 2) def test_values_annotation_with_expression(self): # ensure the F() is promoted to the group by clause qs = Author.objects.values('name').annotate(another_age=Sum('age') + F('age')) a = qs.get(name="Adrian Holovaty") self.assertEqual(a['another_age'], 68) qs = qs.annotate(friend_count=Count('friends')) a = qs.get(name="Adrian Holovaty") self.assertEqual(a['friend_count'], 2) qs = qs.annotate(combined_age=Sum('age') + F('friends__age')).filter( name="Adrian Holovaty").order_by('-combined_age') self.assertEqual( list(qs), [ { "name": 'Adrian Holovaty', "another_age": 68, "friend_count": 1, "combined_age": 69 }, { "name": 'Adrian Holovaty', "another_age": 68, "friend_count": 1, "combined_age": 63 } ] ) vals = qs.values('name', 'combined_age') self.assertEqual( list(vals), [ {'name': 'Adrian Holovaty', 'combined_age': 69}, {'name': 'Adrian Holovaty', 'combined_age': 63}, ] ) def test_annotate_values_aggregate(self): alias_age = Author.objects.annotate( age_alias=F('age') ).values( 'age_alias', ).aggregate(sum_age=Sum('age_alias')) age = Author.objects.values('age').aggregate(sum_age=Sum('age')) self.assertEqual(alias_age['sum_age'], age['sum_age']) def test_annotate_over_annotate(self): author = Author.objects.annotate( age_alias=F('age') ).annotate( sum_age=Sum('age_alias') ).get(name="Adrian Holovaty") other_author = Author.objects.annotate( sum_age=Sum('age') 
).get(name="Adrian Holovaty") self.assertEqual(author.sum_age, other_author.sum_age) def test_aggregate_over_aggregate(self): msg = "Cannot compute Avg('age'): 'age' is an aggregate" with self.assertRaisesMessage(FieldError, msg): Author.objects.annotate( age_alias=F('age'), ).aggregate( age=Sum(F('age')), avg_age=Avg(F('age')), ) def test_annotated_aggregate_over_annotated_aggregate(self): with self.assertRaisesMessage(FieldError, "Cannot compute Sum('id__max'): 'id__max' is an aggregate"): Book.objects.annotate(Max('id')).annotate(Sum('id__max')) class MyMax(Max): def as_sql(self, compiler, connection): self.set_source_expressions(self.get_source_expressions()[0:1]) return super().as_sql(compiler, connection) with self.assertRaisesMessage(FieldError, "Cannot compute Max('id__max'): 'id__max' is an aggregate"): Book.objects.annotate(Max('id')).annotate(my_max=MyMax('id__max', 'price')) def test_multi_arg_aggregate(self): class MyMax(Max): output_field = DecimalField() def as_sql(self, compiler, connection): copy = self.copy() copy.set_source_expressions(copy.get_source_expressions()[0:1]) return super(MyMax, copy).as_sql(compiler, connection) with self.assertRaisesMessage(TypeError, 'Complex aggregates require an alias'): Book.objects.aggregate(MyMax('pages', 'price')) with self.assertRaisesMessage(TypeError, 'Complex annotations require an alias'): Book.objects.annotate(MyMax('pages', 'price')) Book.objects.aggregate(max_field=MyMax('pages', 'price')) def test_add_implementation(self): class MySum(Sum): pass # test completely changing how the output is rendered def lower_case_function_override(self, compiler, connection): sql, params = compiler.compile(self.source_expressions[0]) substitutions = {'function': self.function.lower(), 'expressions': sql, 'distinct': ''} substitutions.update(self.extra) return self.template % substitutions, params setattr(MySum, 'as_' + connection.vendor, lower_case_function_override) qs = Book.objects.annotate( sums=MySum(F('rating') + F('pages') + F('price'), output_field=IntegerField()) ) self.assertEqual(str(qs.query).count('sum('), 1) b1 = qs.get(pk=self.b4.pk) self.assertEqual(b1.sums, 383) # test changing the dict and delegating def lower_case_function_super(self, compiler, connection): self.extra['function'] = self.function.lower() return super(MySum, self).as_sql(compiler, connection) setattr(MySum, 'as_' + connection.vendor, lower_case_function_super) qs = Book.objects.annotate( sums=MySum(F('rating') + F('pages') + F('price'), output_field=IntegerField()) ) self.assertEqual(str(qs.query).count('sum('), 1) b1 = qs.get(pk=self.b4.pk) self.assertEqual(b1.sums, 383) # test overriding all parts of the template def be_evil(self, compiler, connection): substitutions = {'function': 'MAX', 'expressions': '2', 'distinct': ''} substitutions.update(self.extra) return self.template % substitutions, () setattr(MySum, 'as_' + connection.vendor, be_evil) qs = Book.objects.annotate( sums=MySum(F('rating') + F('pages') + F('price'), output_field=IntegerField()) ) self.assertEqual(str(qs.query).count('MAX('), 1) b1 = qs.get(pk=self.b4.pk) self.assertEqual(b1.sums, 2) def test_complex_values_aggregation(self): max_rating = Book.objects.values('rating').aggregate( double_max_rating=Max('rating') + Max('rating')) self.assertEqual(max_rating['double_max_rating'], 5 * 2) max_books_per_rating = Book.objects.values('rating').annotate( books_per_rating=Count('id') + 5 ).aggregate(Max('books_per_rating')) self.assertEqual( max_books_per_rating, {'books_per_rating__max': 3 + 
5}) def test_expression_on_aggregation(self): qs = Publisher.objects.annotate( price_or_median=Greatest(Avg('book__rating', output_field=DecimalField()), Avg('book__price')) ).filter(price_or_median__gte=F('num_awards')).order_by('num_awards') self.assertQuerysetEqual( qs, [1, 3, 7, 9], lambda v: v.num_awards) qs2 = Publisher.objects.annotate( rating_or_num_awards=Greatest(Avg('book__rating'), F('num_awards'), output_field=FloatField()) ).filter(rating_or_num_awards__gt=F('num_awards')).order_by('num_awards') self.assertQuerysetEqual( qs2, [1, 3], lambda v: v.num_awards) def test_arguments_must_be_expressions(self): msg = 'QuerySet.aggregate() received non-expression(s): %s.' with self.assertRaisesMessage(TypeError, msg % FloatField()): Book.objects.aggregate(FloatField()) with self.assertRaisesMessage(TypeError, msg % True): Book.objects.aggregate(is_book=True) with self.assertRaisesMessage(TypeError, msg % ', '.join([str(FloatField()), 'True'])): Book.objects.aggregate(FloatField(), Avg('price'), is_book=True) def test_aggregation_subquery_annotation(self): """Subquery annotations are excluded from the GROUP BY if they are not explicitly grouped against.""" latest_book_pubdate_qs = Book.objects.filter( publisher=OuterRef('pk') ).order_by('-pubdate').values('pubdate')[:1] publisher_qs = Publisher.objects.annotate( latest_book_pubdate=Subquery(latest_book_pubdate_qs), ).annotate(count=Count('book')) with self.assertNumQueries(1) as ctx: list(publisher_qs) self.assertEqual(ctx[0]['sql'].count('SELECT'), 2) # The GROUP BY should not be by alias either. self.assertEqual(ctx[0]['sql'].lower().count('latest_book_pubdate'), 1) def test_aggregation_subquery_annotation_exists(self): latest_book_pubdate_qs = Book.objects.filter( publisher=OuterRef('pk') ).order_by('-pubdate').values('pubdate')[:1] publisher_qs = Publisher.objects.annotate( latest_book_pubdate=Subquery(latest_book_pubdate_qs), count=Count('book'), ) self.assertTrue(publisher_qs.exists()) def test_aggregation_exists_annotation(self): published_books = Book.objects.filter(publisher=OuterRef('pk')) publisher_qs = Publisher.objects.annotate( published_book=Exists(published_books), count=Count('book'), ).values_list('name', flat=True) self.assertCountEqual(list(publisher_qs), [ 'Apress', 'Morgan Kaufmann', "Jonno's House of Books", 'Prentice Hall', 'Sams', ]) def test_aggregation_subquery_annotation_values(self): """ Subquery annotations and external aliases are excluded from the GROUP BY if they are not selected. 
""" books_qs = Book.objects.annotate( first_author_the_same_age=Subquery( Author.objects.filter( age=OuterRef('contact__friends__age'), ).order_by('age').values('id')[:1], ) ).filter( publisher=self.p1, first_author_the_same_age__isnull=False, ).annotate( min_age=Min('contact__friends__age'), ).values('name', 'min_age').order_by('name') self.assertEqual(list(books_qs), [ {'name': 'Practical Django Projects', 'min_age': 34}, { 'name': 'The Definitive Guide to Django: Web Development Done Right', 'min_age': 29, }, ]) def test_aggregation_subquery_annotation_values_collision(self): books_rating_qs = Book.objects.filter( publisher=OuterRef('pk'), price=Decimal('29.69'), ).values('rating') publisher_qs = Publisher.objects.filter( book__contact__age__gt=20, name=self.p1.name, ).annotate( rating=Subquery(books_rating_qs), contacts_count=Count('book__contact'), ).values('rating').annotate(total_count=Count('rating')) self.assertEqual(list(publisher_qs), [ {'rating': 4.0, 'total_count': 2}, ]) @skipUnlessDBFeature('supports_subqueries_in_group_by') def test_aggregation_subquery_annotation_multivalued(self): """ Subquery annotations must be included in the GROUP BY if they use potentially multivalued relations (contain the LOOKUP_SEP). """ subquery_qs = Author.objects.filter( pk=OuterRef('pk'), book__name=OuterRef('book__name'), ).values('pk') author_qs = Author.objects.annotate( subquery_id=Subquery(subquery_qs), ).annotate(count=Count('book')) self.assertEqual(author_qs.count(), Author.objects.count()) def test_aggregation_order_by_not_selected_annotation_values(self): result_asc = [ self.b4.pk, self.b3.pk, self.b1.pk, self.b2.pk, self.b5.pk, self.b6.pk, ] result_desc = result_asc[::-1] tests = [ ('min_related_age', result_asc), ('-min_related_age', result_desc), (F('min_related_age'), result_asc), (F('min_related_age').asc(), result_asc), (F('min_related_age').desc(), result_desc), ] for ordering, expected_result in tests: with self.subTest(ordering=ordering): books_qs = Book.objects.annotate( min_age=Min('authors__age'), ).annotate( min_related_age=Coalesce('min_age', 'contact__age'), ).order_by(ordering).values_list('pk', flat=True) self.assertEqual(list(books_qs), expected_result) @skipUnlessDBFeature('supports_subqueries_in_group_by') def test_group_by_subquery_annotation(self): """ Subquery annotations are included in the GROUP BY if they are grouped against. """ long_books_count_qs = Book.objects.filter( publisher=OuterRef('pk'), pages__gt=400, ).values( 'publisher' ).annotate(count=Count('pk')).values('count') long_books_count_breakdown = Publisher.objects.values_list( Subquery(long_books_count_qs, IntegerField()), ).annotate(total=Count('*')) self.assertEqual(dict(long_books_count_breakdown), {None: 1, 1: 4}) @skipUnlessDBFeature('supports_subqueries_in_group_by') def test_group_by_exists_annotation(self): """ Exists annotations are included in the GROUP BY if they are grouped against. 
""" long_books_qs = Book.objects.filter( publisher=OuterRef('pk'), pages__gt=800, ) has_long_books_breakdown = Publisher.objects.values_list( Exists(long_books_qs), ).annotate(total=Count('*')) self.assertEqual(dict(has_long_books_breakdown), {True: 2, False: 3}) @skipUnlessDBFeature('supports_subqueries_in_group_by') def test_aggregation_subquery_annotation_related_field(self): publisher = Publisher.objects.create(name=self.a9.name, num_awards=2) book = Book.objects.create( isbn='159059999', name='Test book.', pages=819, rating=2.5, price=Decimal('14.44'), contact=self.a9, publisher=publisher, pubdate=datetime.date(2019, 12, 6), ) book.authors.add(self.a5, self.a6, self.a7) books_qs = Book.objects.annotate( contact_publisher=Subquery( Publisher.objects.filter( pk=OuterRef('publisher'), name=OuterRef('contact__name'), ).values('name')[:1], ) ).filter( contact_publisher__isnull=False, ).annotate(count=Count('authors')) self.assertSequenceEqual(books_qs, [book]) # FIXME: GROUP BY doesn't need to include a subquery with # non-multivalued JOINs, see Col.possibly_multivalued (refs #31150): # with self.assertNumQueries(1) as ctx: # self.assertSequenceEqual(books_qs, [book]) # self.assertEqual(ctx[0]['sql'].count('SELECT'), 2) @skipUnlessDBFeature('supports_subqueries_in_group_by') def test_aggregation_nested_subquery_outerref(self): publisher_with_same_name = Publisher.objects.filter( id__in=Subquery( Publisher.objects.filter( name=OuterRef(OuterRef('publisher__name')), ).values('id'), ), ).values(publisher_count=Count('id'))[:1] books_breakdown = Book.objects.annotate( publisher_count=Subquery(publisher_with_same_name), authors_count=Count('authors'), ).values_list('publisher_count', flat=True) self.assertSequenceEqual(books_breakdown, [1] * 6) def test_aggregation_random_ordering(self): """Random() is not included in the GROUP BY when used for ordering.""" authors = Author.objects.annotate(contact_count=Count('book')).order_by('?') self.assertQuerysetEqual(authors, [ ('Adrian Holovaty', 1), ('Jacob Kaplan-Moss', 1), ('Brad Dayley', 1), ('James Bennett', 1), ('Jeffrey Forcier', 1), ('Paul Bissex', 1), ('Wesley J. Chun', 1), ('Stuart Russell', 1), ('Peter Norvig', 2), ], lambda a: (a.name, a.contact_count), ordered=False) def test_empty_result_optimization(self): with self.assertNumQueries(0): self.assertEqual( Publisher.objects.none().aggregate( sum_awards=Sum('num_awards'), books_count=Count('book'), ), { 'sum_awards': None, 'books_count': 0, } ) # Expression without empty_aggregate_value forces queries to be # executed even if they would return an empty result set. raw_books_count = Func('book', function='COUNT') raw_books_count.contains_aggregate = True with self.assertNumQueries(1): self.assertEqual( Publisher.objects.none().aggregate( sum_awards=Sum('num_awards'), books_count=raw_books_count, ), { 'sum_awards': None, 'books_count': 0, } ) def test_coalesced_empty_result_set(self): with self.assertNumQueries(0): self.assertEqual( Publisher.objects.none().aggregate( sum_awards=Coalesce(Sum('num_awards'), 0), )['sum_awards'], 0, ) # Multiple expressions. with self.assertNumQueries(0): self.assertEqual( Publisher.objects.none().aggregate( sum_awards=Coalesce(Sum('num_awards'), None, 0), )['sum_awards'], 0, ) # Nested coalesce. with self.assertNumQueries(0): self.assertEqual( Publisher.objects.none().aggregate( sum_awards=Coalesce(Coalesce(Sum('num_awards'), None), 0), )['sum_awards'], 0, ) # Expression coalesce. 
with self.assertNumQueries(1): self.assertIsInstance( Store.objects.none().aggregate( latest_opening=Coalesce( Max('original_opening'), RawSQL('CURRENT_TIMESTAMP', []), ), )['latest_opening'], datetime.datetime, ) def test_aggregation_default_unsupported_by_count(self): msg = 'Count does not allow default.' with self.assertRaisesMessage(TypeError, msg): Count('age', default=0) def test_aggregation_default_unset(self): for Aggregate in [Avg, Max, Min, StdDev, Sum, Variance]: with self.subTest(Aggregate): result = Author.objects.filter(age__gt=100).aggregate( value=Aggregate('age'), ) self.assertIsNone(result['value']) def test_aggregation_default_zero(self): for Aggregate in [Avg, Max, Min, StdDev, Sum, Variance]: with self.subTest(Aggregate): result = Author.objects.filter(age__gt=100).aggregate( value=Aggregate('age', default=0), ) self.assertEqual(result['value'], 0) def test_aggregation_default_integer(self): for Aggregate in [Avg, Max, Min, StdDev, Sum, Variance]: with self.subTest(Aggregate): result = Author.objects.filter(age__gt=100).aggregate( value=Aggregate('age', default=21), ) self.assertEqual(result['value'], 21) def test_aggregation_default_expression(self): for Aggregate in [Avg, Max, Min, StdDev, Sum, Variance]: with self.subTest(Aggregate): result = Author.objects.filter(age__gt=100).aggregate( value=Aggregate('age', default=Value(5) * Value(7)), ) self.assertEqual(result['value'], 35) def test_aggregation_default_group_by(self): qs = Publisher.objects.values('name').annotate( books=Count('book'), pages=Sum('book__pages', default=0), ).filter(books=0) self.assertSequenceEqual( qs, [{'name': "Jonno's House of Books", 'books': 0, 'pages': 0}], ) def test_aggregation_default_compound_expression(self): # Scale rating to a percentage; default to 50% if no books published. formula = Avg('book__rating', default=2.5) * 20.0 queryset = Publisher.objects.annotate(rating=formula).order_by('name') self.assertSequenceEqual(queryset.values('name', 'rating'), [ {'name': 'Apress', 'rating': 85.0}, {'name': "Jonno's House of Books", 'rating': 50.0}, {'name': 'Morgan Kaufmann', 'rating': 100.0}, {'name': 'Prentice Hall', 'rating': 80.0}, {'name': 'Sams', 'rating': 60.0}, ]) def test_aggregation_default_using_time_from_python(self): expr = Min( 'store__friday_night_closing', filter=~Q(store__name='Amazon.com'), default=datetime.time(17), ) if connection.vendor == 'mysql': # Workaround for #30224 for MySQL 8.0+ & MariaDB. 
expr.default = Cast(expr.default, TimeField()) queryset = Book.objects.annotate(oldest_store_opening=expr).order_by('isbn') self.assertSequenceEqual(queryset.values('isbn', 'oldest_store_opening'), [ {'isbn': '013235613', 'oldest_store_opening': datetime.time(21, 30)}, {'isbn': '013790395', 'oldest_store_opening': datetime.time(23, 59, 59)}, {'isbn': '067232959', 'oldest_store_opening': datetime.time(17)}, {'isbn': '155860191', 'oldest_store_opening': datetime.time(21, 30)}, {'isbn': '159059725', 'oldest_store_opening': datetime.time(23, 59, 59)}, {'isbn': '159059996', 'oldest_store_opening': datetime.time(21, 30)}, ]) def test_aggregation_default_using_time_from_database(self): now = timezone.now().astimezone(timezone.utc) expr = Min( 'store__friday_night_closing', filter=~Q(store__name='Amazon.com'), default=TruncHour(NowUTC(), output_field=TimeField()), ) queryset = Book.objects.annotate(oldest_store_opening=expr).order_by('isbn') self.assertSequenceEqual(queryset.values('isbn', 'oldest_store_opening'), [ {'isbn': '013235613', 'oldest_store_opening': datetime.time(21, 30)}, {'isbn': '013790395', 'oldest_store_opening': datetime.time(23, 59, 59)}, {'isbn': '067232959', 'oldest_store_opening': datetime.time(now.hour)}, {'isbn': '155860191', 'oldest_store_opening': datetime.time(21, 30)}, {'isbn': '159059725', 'oldest_store_opening': datetime.time(23, 59, 59)}, {'isbn': '159059996', 'oldest_store_opening': datetime.time(21, 30)}, ]) def test_aggregation_default_using_date_from_python(self): expr = Min('book__pubdate', default=datetime.date(1970, 1, 1)) if connection.vendor == 'mysql': # Workaround for #30224 for MySQL 5.7+ & MariaDB. expr.default = Cast(expr.default, DateField()) queryset = Publisher.objects.annotate(earliest_pubdate=expr).order_by('name') self.assertSequenceEqual(queryset.values('name', 'earliest_pubdate'), [ {'name': 'Apress', 'earliest_pubdate': datetime.date(2007, 12, 6)}, {'name': "Jonno's House of Books", 'earliest_pubdate': datetime.date(1970, 1, 1)}, {'name': 'Morgan Kaufmann', 'earliest_pubdate': datetime.date(1991, 10, 15)}, {'name': 'Prentice Hall', 'earliest_pubdate': datetime.date(1995, 1, 15)}, {'name': 'Sams', 'earliest_pubdate': datetime.date(2008, 3, 3)}, ]) def test_aggregation_default_using_date_from_database(self): now = timezone.now().astimezone(timezone.utc) expr = Min('book__pubdate', default=TruncDate(NowUTC())) queryset = Publisher.objects.annotate(earliest_pubdate=expr).order_by('name') self.assertSequenceEqual(queryset.values('name', 'earliest_pubdate'), [ {'name': 'Apress', 'earliest_pubdate': datetime.date(2007, 12, 6)}, {'name': "Jonno's House of Books", 'earliest_pubdate': now.date()}, {'name': 'Morgan Kaufmann', 'earliest_pubdate': datetime.date(1991, 10, 15)}, {'name': 'Prentice Hall', 'earliest_pubdate': datetime.date(1995, 1, 15)}, {'name': 'Sams', 'earliest_pubdate': datetime.date(2008, 3, 3)}, ]) def test_aggregation_default_using_datetime_from_python(self): expr = Min( 'store__original_opening', filter=~Q(store__name='Amazon.com'), default=datetime.datetime(1970, 1, 1), ) if connection.vendor == 'mysql': # Workaround for #30224 for MySQL 8.0+ & MariaDB. 
            expr.default = Cast(expr.default, DateTimeField())
        queryset = Book.objects.annotate(oldest_store_opening=expr).order_by('isbn')
        self.assertSequenceEqual(queryset.values('isbn', 'oldest_store_opening'), [
            {'isbn': '013235613', 'oldest_store_opening': datetime.datetime(1945, 4, 25, 16, 24, 14)},
            {'isbn': '013790395', 'oldest_store_opening': datetime.datetime(2001, 3, 15, 11, 23, 37)},
            {'isbn': '067232959', 'oldest_store_opening': datetime.datetime(1970, 1, 1)},
            {'isbn': '155860191', 'oldest_store_opening': datetime.datetime(1945, 4, 25, 16, 24, 14)},
            {'isbn': '159059725', 'oldest_store_opening': datetime.datetime(2001, 3, 15, 11, 23, 37)},
            {'isbn': '159059996', 'oldest_store_opening': datetime.datetime(1945, 4, 25, 16, 24, 14)},
        ])

    def test_aggregation_default_using_datetime_from_database(self):
        now = timezone.now().astimezone(timezone.utc)
        expr = Min(
            'store__original_opening',
            filter=~Q(store__name='Amazon.com'),
            default=TruncHour(NowUTC(), output_field=DateTimeField()),
        )
        queryset = Book.objects.annotate(oldest_store_opening=expr).order_by('isbn')
        self.assertSequenceEqual(queryset.values('isbn', 'oldest_store_opening'), [
            {'isbn': '013235613', 'oldest_store_opening': datetime.datetime(1945, 4, 25, 16, 24, 14)},
            {'isbn': '013790395', 'oldest_store_opening': datetime.datetime(2001, 3, 15, 11, 23, 37)},
            {'isbn': '067232959', 'oldest_store_opening': now.replace(minute=0, second=0, microsecond=0, tzinfo=None)},
            {'isbn': '155860191', 'oldest_store_opening': datetime.datetime(1945, 4, 25, 16, 24, 14)},
            {'isbn': '159059725', 'oldest_store_opening': datetime.datetime(2001, 3, 15, 11, 23, 37)},
            {'isbn': '159059996', 'oldest_store_opening': datetime.datetime(1945, 4, 25, 16, 24, 14)},
        ])

    def test_aggregation_default_using_duration_from_python(self):
        result = Publisher.objects.filter(num_awards__gt=3).aggregate(
            value=Sum('duration', default=datetime.timedelta(0)),
        )
        self.assertEqual(result['value'], datetime.timedelta(0))

    def test_aggregation_default_using_duration_from_database(self):
        result = Publisher.objects.filter(num_awards__gt=3).aggregate(
            value=Sum('duration', default=Now() - Now()),
        )
        self.assertEqual(result['value'], datetime.timedelta(0))

    def test_aggregation_default_using_decimal_from_python(self):
        result = Book.objects.filter(rating__lt=3.0).aggregate(
            value=Sum('price', default=Decimal('0.00')),
        )
        self.assertEqual(result['value'], Decimal('0.00'))

    def test_aggregation_default_using_decimal_from_database(self):
        result = Book.objects.filter(rating__lt=3.0).aggregate(
            value=Sum('price', default=Pi()),
        )
        self.assertAlmostEqual(result['value'], Decimal.from_float(math.pi), places=6)

    def test_aggregation_default_passed_another_aggregate(self):
        result = Book.objects.aggregate(
            value=Sum('price', filter=Q(rating__lt=3.0), default=Avg('pages') / 10.0),
        )
        self.assertAlmostEqual(result['value'], Decimal('61.72'), places=2)
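
# Illustrative sketch (an aside, not part of the original suite): Django
# implements the `default` argument exercised above with Coalesce() under
# the hood, so a hand-rolled equivalent, assuming the same Book model,
# would be:
#
#     from decimal import Decimal
#
#     from django.db.models import Sum, Value
#     from django.db.models.functions import Coalesce
#
#     Book.objects.aggregate(
#         # Same result as Sum('price', default=Decimal('0.00')).
#         value=Coalesce(Sum('price'), Value(Decimal('0.00'))),
#     )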
from django.test import TestCase

from .models import Article, Author, Comment, Forum, Post, SystemInfo


class NullFkOrderingTests(TestCase):

    def test_ordering_across_null_fk(self):
        """
        Regression test for #7512: ordering across nullable foreign keys
        shouldn't exclude results.
        """
        author_1 = Author.objects.create(name='Tom Jones')
        author_2 = Author.objects.create(name='Bob Smith')
        Article.objects.create(title='No author on this article')
        Article.objects.create(author=author_1, title='This article written by Tom Jones')
        Article.objects.create(author=author_2, title='This article written by Bob Smith')

        # We can't compare results directly (since different databases sort
        # NULLs to different ends of the ordering), but we can check that all
        # results are returned.
        self.assertEqual(len(list(Article.objects.all())), 3)

        s = SystemInfo.objects.create(system_name='System Info')
        f = Forum.objects.create(system_info=s, forum_name='First forum')
        p = Post.objects.create(forum=f, title='First Post')
        Comment.objects.create(post=p, comment_text='My first comment')
        Comment.objects.create(comment_text='My second comment')
        s2 = SystemInfo.objects.create(system_name='More System Info')
        f2 = Forum.objects.create(system_info=s2, forum_name='Second forum')
        p2 = Post.objects.create(forum=f2, title='Second Post')
        Comment.objects.create(comment_text='Another first comment')
        Comment.objects.create(post=p2, comment_text='Another second comment')

        # We have to test this carefully. Some databases sort NULL values
        # before everything else, some sort them afterward. So we extract the
        # ordered list and check its length. Before the fix, this list was
        # too short (some values were omitted).
        self.assertEqual(len(list(Comment.objects.all())), 4)
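
# Illustrative sketch (an aside, not part of the original suite): the
# assertions above only count rows because NULL placement is
# backend-specific. If a deterministic order were needed, one option,
# assuming the same Article model, is an explicit nulls_last ordering:
#
#     from django.db.models import F
#
#     list(Article.objects.order_by(F('author__name').asc(nulls_last=True)))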
from django.contrib import admin
from django.contrib.admin import sites
from django.test import SimpleTestCase, override_settings

from .sites import CustomAdminSite


@override_settings(INSTALLED_APPS=[
    'admin_default_site.apps.MyCustomAdminConfig',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
])
class CustomAdminSiteTests(SimpleTestCase):

    def setUp(self):
        # Reset admin.site since it may have already been instantiated by
        # another test app.
        self._old_site = admin.site
        admin.site = sites.site = sites.DefaultAdminSite()

    def tearDown(self):
        admin.site = sites.site = self._old_site

    def test_use_custom_admin_site(self):
        self.assertEqual(admin.site.__class__.__name__, 'CustomAdminSite')


class DefaultAdminSiteTests(SimpleTestCase):

    def test_use_default_admin_site(self):
        self.assertEqual(admin.site.__class__.__name__, 'AdminSite')

    def test_repr(self):
        self.assertEqual(str(admin.site), "AdminSite(name='admin')")
        self.assertEqual(repr(admin.site), "AdminSite(name='admin')")


class AdminSiteTests(SimpleTestCase):

    def test_repr(self):
        admin_site = CustomAdminSite(name='other')
        self.assertEqual(repr(admin_site), "CustomAdminSite(name='other')")