# django/db/backends/base/features.py
from django.db import ProgrammingError
from django.utils.functional import cached_property
class BaseDatabaseFeatures:
# An optional tuple indicating the minimum supported database version.
minimum_database_version = None
gis_enabled = False
# Oracle can't group by LOB (large object) data types.
allows_group_by_lob = True
allows_group_by_selected_pks = False
allows_group_by_select_index = True
empty_fetchmany_value = []
update_can_self_select = True
# Does the backend support self-reference subqueries in the DELETE
# statement?
delete_can_self_reference_subquery = True
# Does the backend distinguish between '' and None?
interprets_empty_strings_as_nulls = False
# Does the backend allow inserting duplicate NULL rows in a nullable
# unique field? All core backends implement this correctly, but other
# databases such as SQL Server do not.
supports_nullable_unique_constraints = True
# Does the backend allow inserting duplicate rows when a unique_together
# constraint exists and some fields are nullable but not all of them?
supports_partially_nullable_unique_constraints = True
# Does the backend support initially deferrable unique constraints?
supports_deferrable_unique_constraints = False
can_use_chunked_reads = True
can_return_columns_from_insert = False
can_return_rows_from_bulk_insert = False
has_bulk_insert = True
uses_savepoints = True
can_release_savepoints = False
    # If True, the column type of a foreign key must exactly match the type
    # of the field it references (e.g. don't use a plain integer foreign key
    # to reference a positive integer primary key).
related_fields_match_type = False
allow_sliced_subqueries_with_in = True
has_select_for_update = False
has_select_for_update_nowait = False
has_select_for_update_skip_locked = False
has_select_for_update_of = False
has_select_for_no_key_update = False
# Does the database's SELECT FOR UPDATE OF syntax require a column rather
# than a table?
select_for_update_of_column = False
    # Does the default test database allow multiple connections? A False
    # value usually indicates an in-memory test database.
test_db_allows_multiple_connections = True
# Can an object be saved without an explicit primary key?
supports_unspecified_pk = False
    # Can a fixture contain forward references? I.e., are FK constraints
    # checked at the end of the transaction, or at the end of each save
    # operation?
supports_forward_references = True
# Does the backend truncate names properly when they are too long?
truncates_names = False
# Is there a REAL datatype in addition to floats/doubles?
has_real_datatype = False
supports_subqueries_in_group_by = True
# Does the backend ignore unnecessary ORDER BY clauses in subqueries?
ignores_unnecessary_order_by_in_subqueries = True
# Is there a true datatype for uuid?
has_native_uuid_field = False
# Is there a true datatype for timedeltas?
has_native_duration_field = False
    # Does the database driver support same-type temporal data subtraction
    # by returning the type used to store a duration field?
supports_temporal_subtraction = False
# Does the __regex lookup support backreferencing and grouping?
supports_regex_backreferencing = True
# Can date/datetime lookups be performed using a string?
supports_date_lookup_using_string = True
# Can datetimes with timezones be used?
supports_timezones = True
# Does the database have a copy of the zoneinfo database?
has_zoneinfo_database = True
# When performing a GROUP BY, is an ORDER BY NULL required
# to remove any ordering?
requires_explicit_null_ordering_when_grouping = False
# Does the backend order NULL values as largest or smallest?
nulls_order_largest = False
# Does the backend support NULLS FIRST and NULLS LAST in ORDER BY?
supports_order_by_nulls_modifier = True
    # Does the backend order NULLS FIRST by default?
order_by_nulls_first = False
# The database's limit on the number of query parameters.
max_query_params = None
# Can an object have an autoincrement primary key of 0?
allows_auto_pk_0 = True
    # Do we need to NULL a ForeignKey out, or can the constraint check be
    # deferred?
can_defer_constraint_checks = False
# Does the backend support tablespaces? Default to False because it isn't
# in the SQL standard.
supports_tablespaces = False
# Does the backend reset sequences between tests?
supports_sequence_reset = True
# Can the backend introspect the default value of a column?
can_introspect_default = True
# Confirm support for introspected foreign keys
# Every database can do this reliably, except MySQL,
# which can't do it for MyISAM tables
can_introspect_foreign_keys = True
    # Map fields that some backends may not be able to differentiate to the
    # fields they're introspected as.
introspected_field_types = {
"AutoField": "AutoField",
"BigAutoField": "BigAutoField",
"BigIntegerField": "BigIntegerField",
"BinaryField": "BinaryField",
"BooleanField": "BooleanField",
"CharField": "CharField",
"DurationField": "DurationField",
"GenericIPAddressField": "GenericIPAddressField",
"IntegerField": "IntegerField",
"PositiveBigIntegerField": "PositiveBigIntegerField",
"PositiveIntegerField": "PositiveIntegerField",
"PositiveSmallIntegerField": "PositiveSmallIntegerField",
"SmallAutoField": "SmallAutoField",
"SmallIntegerField": "SmallIntegerField",
"TimeField": "TimeField",
}
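    # Backends override entries when their introspection can't tell two
    # types apart; e.g. a backend whose small autoincrement columns look
    # like plain ones might map "SmallAutoField" to "AutoField"
    # (illustrative override).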
# Can the backend introspect the column order (ASC/DESC) for indexes?
supports_index_column_ordering = True
# Does the backend support introspection of materialized views?
can_introspect_materialized_views = False
# Support for the DISTINCT ON clause
can_distinct_on_fields = False
# Does the backend prevent running SQL queries in broken transactions?
atomic_transactions = True
# Can we roll back DDL in a transaction?
can_rollback_ddl = False
schema_editor_uses_clientside_param_binding = False
    # Does it support operations that require renaming references in a
    # transaction?
supports_atomic_references_rename = True
# Can we issue more than one ALTER COLUMN clause in an ALTER TABLE?
supports_combined_alters = False
# Does it support foreign keys?
supports_foreign_keys = True
# Can it create foreign key constraints inline when adding columns?
can_create_inline_fk = True
# Can an index be renamed?
can_rename_index = False
# Does it automatically index foreign keys?
indexes_foreign_keys = True
# Does it support CHECK constraints?
supports_column_check_constraints = True
supports_table_check_constraints = True
# Does the backend support introspection of CHECK constraints?
can_introspect_check_constraints = True
# Does the backend support 'pyformat' style ("... %(name)s ...", {'name': value})
# parameter passing? Note this can be provided by the backend even if not
# supported by the Python driver
supports_paramstyle_pyformat = True
# Does the backend require literal defaults, rather than parameterized ones?
requires_literal_defaults = False
# Does the backend support functions in defaults?
supports_expression_defaults = True
# Does the backend support the DEFAULT keyword in insert queries?
supports_default_keyword_in_insert = True
# Does the backend support the DEFAULT keyword in bulk insert queries?
supports_default_keyword_in_bulk_insert = True
# Does the backend require a connection reset after each material schema change?
connection_persists_old_columns = False
# What kind of error does the backend throw when accessing closed cursor?
closed_cursor_error_class = ProgrammingError
# Does 'a' LIKE 'A' match?
has_case_insensitive_like = False
# Suffix for backends that don't support "SELECT xxx;" queries.
bare_select_suffix = ""
# If NULL is implied on columns without needing to be explicitly specified
implied_column_null = False
# Does the backend support "select for update" queries with limit (and offset)?
supports_select_for_update_with_limit = True
# Does the backend ignore null expressions in GREATEST and LEAST queries unless
# every expression is null?
greatest_least_ignores_nulls = False
# Can the backend clone databases for parallel test execution?
# Defaults to False to allow third-party backends to opt-in.
can_clone_databases = False
# Does the backend consider table names with different casing to
# be equal?
ignores_table_name_case = False
# Place FOR UPDATE right after FROM clause. Used on MSSQL.
for_update_after_from = False
# Combinatorial flags
supports_select_union = True
supports_select_intersection = True
supports_select_difference = True
supports_slicing_ordering_in_compound = False
supports_parentheses_in_compound = True
requires_compound_order_by_subquery = False
# Does the database support SQL 2003 FILTER (WHERE ...) in aggregate
# expressions?
supports_aggregate_filter_clause = False
# Does the backend support indexing a TextField?
supports_index_on_text_field = True
# Does the backend support window expressions (expression OVER (...))?
supports_over_clause = False
supports_frame_range_fixed_distance = False
only_supports_unbounded_with_preceding_and_following = False
# Does the backend support CAST with precision?
supports_cast_with_precision = True
    # How many decimal places of seconds does the database return when
    # casting a value to a type with a time component?
time_cast_precision = 6
# SQL to create a procedure for use by the Django test suite. The
# functionality of the procedure isn't important.
create_test_procedure_without_params_sql = None
create_test_procedure_with_int_param_sql = None
# SQL to create a table with a composite primary key for use by the Django
# test suite.
create_test_table_with_composite_primary_key = None
# Does the backend support keyword parameters for cursor.callproc()?
supports_callproc_kwargs = False
# What formats does the backend EXPLAIN syntax support?
supported_explain_formats = set()
# Does the backend support the default parameter in lead() and lag()?
supports_default_in_lead_lag = True
# Does the backend support ignoring constraint or uniqueness errors during
# INSERT?
supports_ignore_conflicts = True
# Does the backend support updating rows on constraint or uniqueness errors
# during INSERT?
supports_update_conflicts = False
supports_update_conflicts_with_target = False
# Does this backend require casting the results of CASE expressions used
# in UPDATE statements to ensure the expression has the correct type?
requires_casted_case_in_updates = False
# Does the backend support partial indexes (CREATE INDEX ... WHERE ...)?
supports_partial_indexes = True
supports_functions_in_partial_indexes = True
# Does the backend support covering indexes (CREATE INDEX ... INCLUDE ...)?
supports_covering_indexes = False
# Does the backend support indexes on expressions?
supports_expression_indexes = True
# Does the backend treat COLLATE as an indexed expression?
collate_as_index_expression = False
# Does the database allow more than one constraint or index on the same
# field(s)?
allows_multiple_constraints_on_same_fields = True
# Does the backend support boolean expressions in SELECT and GROUP BY
# clauses?
supports_boolean_expr_in_select_clause = True
# Does the backend support comparing boolean expressions in WHERE clauses?
# Eg: WHERE (price > 0) IS NOT NULL
supports_comparing_boolean_expr = True
# Does the backend support JSONField?
supports_json_field = True
# Can the backend introspect a JSONField?
can_introspect_json_field = True
# Does the backend support primitives in JSONField?
supports_primitives_in_json_field = True
# Is there a true datatype for JSON?
has_native_json_field = False
# Does the backend use PostgreSQL-style JSON operators like '->'?
has_json_operators = False
# Does the backend support __contains and __contained_by lookups for
# a JSONField?
supports_json_field_contains = True
# Does value__d__contains={'f': 'g'} (without a list around the dict) match
# {'d': [{'f': 'g'}]}?
json_key_contains_list_matching_requires_list = False
# Does the backend support JSONObject() database function?
has_json_object_function = True
# Does the backend support column collations?
supports_collation_on_charfield = True
supports_collation_on_textfield = True
# Does the backend support non-deterministic collations?
supports_non_deterministic_collations = True
# Does the backend support column and table comments?
supports_comments = False
# Does the backend support column comments in ADD COLUMN statements?
supports_comments_inline = False
# Does the backend support the logical XOR operator?
supports_logical_xor = False
# Set to (exception, message) if null characters in text are disallowed.
prohibits_null_characters_in_text_exception = None
# Does the backend support unlimited character columns?
supports_unlimited_charfield = False
# Collation names for use by the Django test suite.
test_collations = {
"ci": None, # Case-insensitive.
"cs": None, # Case-sensitive.
"non_default": None, # Non-default.
"swedish_ci": None, # Swedish case-insensitive.
}
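    # Backends replace the None values with real collation names; e.g.
    # (illustratively) an ICU collation such as "sv-x-icu" could back
    # "swedish_ci" on PostgreSQL.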
# SQL template override for tests.aggregation.tests.NowUTC
test_now_utc_template = None
# SQL to create a model instance using the database defaults.
insert_test_table_with_defaults = None
# A set of dotted paths to tests in Django's test suite that are expected
# to fail on this database.
django_test_expected_failures = set()
# A map of reasons to sets of dotted paths to tests in Django's test suite
# that should be skipped for this database.
django_test_skips = {}
def __init__(self, connection):
self.connection = connection
@cached_property
def supports_explaining_query_execution(self):
"""Does this backend support explaining query execution?"""
return self.connection.ops.explain_prefix is not None
@cached_property
def supports_transactions(self):
"""Confirm support for transactions."""
with self.connection.cursor() as cursor:
cursor.execute("CREATE TABLE ROLLBACK_TEST (X INT)")
self.connection.set_autocommit(False)
cursor.execute("INSERT INTO ROLLBACK_TEST (X) VALUES (8)")
self.connection.rollback()
self.connection.set_autocommit(True)
cursor.execute("SELECT COUNT(X) FROM ROLLBACK_TEST")
(count,) = cursor.fetchone()
cursor.execute("DROP TABLE ROLLBACK_TEST")
return count == 0
def allows_group_by_selected_pks_on_model(self, model):
if not self.allows_group_by_selected_pks:
return False
return model._meta.managed
# django/db/backends/base/schema.py
import logging
import operator
from datetime import datetime
from django.conf import settings
from django.db.backends.ddl_references import (
Columns,
Expressions,
ForeignKeyName,
IndexName,
Statement,
Table,
)
from django.db.backends.utils import names_digest, split_identifier, truncate_name
from django.db.models import NOT_PROVIDED, Deferrable, Index
from django.db.models.sql import Query
from django.db.transaction import TransactionManagementError, atomic
from django.utils import timezone
logger = logging.getLogger("django.db.backends.schema")
def _is_relevant_relation(relation, altered_field):
"""
When altering the given field, must constraints on its model from the given
relation be temporarily dropped?
"""
field = relation.field
if field.many_to_many:
# M2M reverse field
return False
if altered_field.primary_key and field.to_fields == [None]:
# Foreign key constraint on the primary key, which is being altered.
return True
# Is the constraint targeting the field being altered?
return altered_field.name in field.to_fields
def _all_related_fields(model):
# Related fields must be returned in a deterministic order.
return sorted(
model._meta._get_fields(
forward=False,
reverse=True,
include_hidden=True,
include_parents=False,
),
key=operator.attrgetter("name"),
)
def _related_non_m2m_objects(old_field, new_field):
# Filter out m2m objects from reverse relations.
# Return (old_relation, new_relation) tuples.
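    # zip() pairs the two streams positionally, which relies on
    # _all_related_fields() yielding each model's relations in the same
    # name-sorted order. The trailing recursion follows chains of relations,
    # e.g. an altered primary key whose referencing field is itself the
    # target of further foreign keys.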
related_fields = zip(
(
obj
for obj in _all_related_fields(old_field.model)
if _is_relevant_relation(obj, old_field)
),
(
obj
for obj in _all_related_fields(new_field.model)
if _is_relevant_relation(obj, new_field)
),
)
for old_rel, new_rel in related_fields:
yield old_rel, new_rel
yield from _related_non_m2m_objects(
old_rel.remote_field,
new_rel.remote_field,
)
class BaseDatabaseSchemaEditor:
"""
This class and its subclasses are responsible for emitting schema-changing
statements to the databases - model creation/removal/alteration, field
renaming, index fiddling, and so on.
"""
# Overrideable SQL templates
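    # These templates are filled in with Python %-interpolation; the values
    # are already-quoted identifiers or SQL fragments, not query parameters.
    # E.g. sql_rename_table % {"old_table": '"a"', "new_table": '"b"'}
    # yields 'ALTER TABLE "a" RENAME TO "b"'.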
sql_create_table = "CREATE TABLE %(table)s (%(definition)s)"
sql_rename_table = "ALTER TABLE %(old_table)s RENAME TO %(new_table)s"
sql_retablespace_table = "ALTER TABLE %(table)s SET TABLESPACE %(new_tablespace)s"
sql_delete_table = "DROP TABLE %(table)s CASCADE"
sql_create_column = "ALTER TABLE %(table)s ADD COLUMN %(column)s %(definition)s"
sql_alter_column = "ALTER TABLE %(table)s %(changes)s"
sql_alter_column_type = "ALTER COLUMN %(column)s TYPE %(type)s%(collation)s"
sql_alter_column_null = "ALTER COLUMN %(column)s DROP NOT NULL"
sql_alter_column_not_null = "ALTER COLUMN %(column)s SET NOT NULL"
sql_alter_column_default = "ALTER COLUMN %(column)s SET DEFAULT %(default)s"
sql_alter_column_no_default = "ALTER COLUMN %(column)s DROP DEFAULT"
sql_alter_column_no_default_null = sql_alter_column_no_default
sql_delete_column = "ALTER TABLE %(table)s DROP COLUMN %(column)s CASCADE"
sql_rename_column = (
"ALTER TABLE %(table)s RENAME COLUMN %(old_column)s TO %(new_column)s"
)
sql_update_with_default = (
"UPDATE %(table)s SET %(column)s = %(default)s WHERE %(column)s IS NULL"
)
sql_unique_constraint = "UNIQUE (%(columns)s)%(deferrable)s"
sql_check_constraint = "CHECK (%(check)s)"
sql_delete_constraint = "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s"
sql_constraint = "CONSTRAINT %(name)s %(constraint)s"
sql_create_check = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s CHECK (%(check)s)"
sql_delete_check = sql_delete_constraint
sql_create_unique = (
"ALTER TABLE %(table)s ADD CONSTRAINT %(name)s "
"UNIQUE (%(columns)s)%(deferrable)s"
)
sql_delete_unique = sql_delete_constraint
sql_create_fk = (
"ALTER TABLE %(table)s ADD CONSTRAINT %(name)s FOREIGN KEY (%(column)s) "
"REFERENCES %(to_table)s (%(to_column)s)%(deferrable)s"
)
sql_create_inline_fk = None
sql_create_column_inline_fk = None
sql_delete_fk = sql_delete_constraint
sql_create_index = (
"CREATE INDEX %(name)s ON %(table)s "
"(%(columns)s)%(include)s%(extra)s%(condition)s"
)
sql_create_unique_index = (
"CREATE UNIQUE INDEX %(name)s ON %(table)s "
"(%(columns)s)%(include)s%(condition)s"
)
sql_rename_index = "ALTER INDEX %(old_name)s RENAME TO %(new_name)s"
sql_delete_index = "DROP INDEX %(name)s"
sql_create_pk = (
"ALTER TABLE %(table)s ADD CONSTRAINT %(name)s PRIMARY KEY (%(columns)s)"
)
sql_delete_pk = sql_delete_constraint
sql_delete_procedure = "DROP PROCEDURE %(procedure)s"
sql_alter_table_comment = "COMMENT ON TABLE %(table)s IS %(comment)s"
sql_alter_column_comment = "COMMENT ON COLUMN %(table)s.%(column)s IS %(comment)s"
def __init__(self, connection, collect_sql=False, atomic=True):
self.connection = connection
self.collect_sql = collect_sql
if self.collect_sql:
self.collected_sql = []
self.atomic_migration = self.connection.features.can_rollback_ddl and atomic
# State-managing methods
def __enter__(self):
self.deferred_sql = []
if self.atomic_migration:
self.atomic = atomic(self.connection.alias)
self.atomic.__enter__()
return self
def __exit__(self, exc_type, exc_value, traceback):
if exc_type is None:
for sql in self.deferred_sql:
self.execute(sql)
if self.atomic_migration:
self.atomic.__exit__(exc_type, exc_value, traceback)
# Core utility functions
def execute(self, sql, params=()):
"""Execute the given SQL statement, with optional parameters."""
# Don't perform the transactional DDL check if SQL is being collected
# as it's not going to be executed anyway.
if (
not self.collect_sql
and self.connection.in_atomic_block
and not self.connection.features.can_rollback_ddl
):
raise TransactionManagementError(
"Executing DDL statements while in a transaction on databases "
"that can't perform a rollback is prohibited."
)
# Account for non-string statement objects.
sql = str(sql)
# Log the command we're running, then run it
logger.debug(
"%s; (params %r)", sql, params, extra={"params": params, "sql": sql}
)
if self.collect_sql:
ending = "" if sql.rstrip().endswith(";") else ";"
if params is not None:
self.collected_sql.append(
(sql % tuple(map(self.quote_value, params))) + ending
)
else:
self.collected_sql.append(sql + ending)
else:
with self.connection.cursor() as cursor:
cursor.execute(sql, params)
def quote_name(self, name):
return self.connection.ops.quote_name(name)
def table_sql(self, model):
"""Take a model and return its table definition."""
# Add any unique_togethers (always deferred, as some fields might be
# created afterward, like geometry fields with some backends).
for field_names in model._meta.unique_together:
fields = [model._meta.get_field(field) for field in field_names]
self.deferred_sql.append(self._create_unique_sql(model, fields))
# Create column SQL, add FK deferreds if needed.
column_sqls = []
params = []
for field in model._meta.local_fields:
# SQL.
definition, extra_params = self.column_sql(model, field)
if definition is None:
continue
# Check constraints can go on the column SQL here.
db_params = field.db_parameters(connection=self.connection)
if db_params["check"]:
definition += " " + self.sql_check_constraint % db_params
# Autoincrement SQL (for backends with inline variant).
col_type_suffix = field.db_type_suffix(connection=self.connection)
if col_type_suffix:
definition += " %s" % col_type_suffix
params.extend(extra_params)
# FK.
if field.remote_field and field.db_constraint:
to_table = field.remote_field.model._meta.db_table
to_column = field.remote_field.model._meta.get_field(
field.remote_field.field_name
).column
if self.sql_create_inline_fk:
definition += " " + self.sql_create_inline_fk % {
"to_table": self.quote_name(to_table),
"to_column": self.quote_name(to_column),
}
elif self.connection.features.supports_foreign_keys:
self.deferred_sql.append(
self._create_fk_sql(
model, field, "_fk_%(to_table)s_%(to_column)s"
)
)
# Add the SQL to our big list.
column_sqls.append(
"%s %s"
% (
self.quote_name(field.column),
definition,
)
)
# Autoincrement SQL (for backends with post table definition
# variant).
if field.get_internal_type() in (
"AutoField",
"BigAutoField",
"SmallAutoField",
):
autoinc_sql = self.connection.ops.autoinc_sql(
model._meta.db_table, field.column
)
if autoinc_sql:
self.deferred_sql.extend(autoinc_sql)
constraints = [
constraint.constraint_sql(model, self)
for constraint in model._meta.constraints
]
sql = self.sql_create_table % {
"table": self.quote_name(model._meta.db_table),
"definition": ", ".join(
str(constraint)
for constraint in (*column_sqls, *constraints)
if constraint
),
}
if model._meta.db_tablespace:
tablespace_sql = self.connection.ops.tablespace_sql(
model._meta.db_tablespace
)
if tablespace_sql:
sql += " " + tablespace_sql
return sql, params
# Field <-> database mapping functions
def _iter_column_sql(
self, column_db_type, params, model, field, field_db_params, include_default
):
yield column_db_type
if collation := field_db_params.get("collation"):
yield self._collate_sql(collation)
if self.connection.features.supports_comments_inline and field.db_comment:
yield self._comment_sql(field.db_comment)
# Work out nullability.
null = field.null
# Add database default.
if field.db_default is not NOT_PROVIDED:
default_sql, default_params = self.db_default_sql(field)
yield f"DEFAULT {default_sql}"
params.extend(default_params)
include_default = False
# Include a default value, if requested.
include_default = (
include_default
and not self.skip_default(field)
and
# Don't include a default value if it's a nullable field and the
# default cannot be dropped in the ALTER COLUMN statement (e.g.
# MySQL longtext and longblob).
not (null and self.skip_default_on_alter(field))
)
if include_default:
default_value = self.effective_default(field)
if default_value is not None:
column_default = "DEFAULT " + self._column_default_sql(field)
if self.connection.features.requires_literal_defaults:
# Some databases can't take defaults as a parameter (Oracle).
# If this is the case, the individual schema backend should
# implement prepare_default().
yield column_default % self.prepare_default(default_value)
else:
yield column_default
params.append(default_value)
# Oracle treats the empty string ('') as null, so coerce the null
# option whenever '' is a possible value.
if (
field.empty_strings_allowed
and not field.primary_key
and self.connection.features.interprets_empty_strings_as_nulls
):
null = True
if not null:
yield "NOT NULL"
elif not self.connection.features.implied_column_null:
yield "NULL"
if field.primary_key:
yield "PRIMARY KEY"
elif field.unique:
yield "UNIQUE"
# Optionally add the tablespace if it's an implicitly indexed column.
tablespace = field.db_tablespace or model._meta.db_tablespace
if (
tablespace
and self.connection.features.supports_tablespaces
and field.unique
):
yield self.connection.ops.tablespace_sql(tablespace, inline=True)
def column_sql(self, model, field, include_default=False):
"""
Return the column definition for a field. The field must already have
had set_attributes_from_name() called.
"""
# Get the column's type and use that as the basis of the SQL.
field_db_params = field.db_parameters(connection=self.connection)
column_db_type = field_db_params["type"]
# Check for fields that aren't actually columns (e.g. M2M).
if column_db_type is None:
return None, None
params = []
return (
" ".join(
# This appends to the params being returned.
self._iter_column_sql(
column_db_type,
params,
model,
field,
field_db_params,
include_default,
)
),
params,
)
def skip_default(self, field):
"""
Some backends don't accept default values for certain columns types
(i.e. MySQL longtext and longblob).
"""
return False
def skip_default_on_alter(self, field):
"""
Some backends don't accept default values for certain columns types
(i.e. MySQL longtext and longblob) in the ALTER COLUMN statement.
"""
return False
def prepare_default(self, value):
"""
        Only used by backends that have the requires_literal_defaults
        feature.
"""
raise NotImplementedError(
"subclasses of BaseDatabaseSchemaEditor for backends which have "
"requires_literal_defaults must provide a prepare_default() method"
)
def _column_default_sql(self, field):
"""
Return the SQL to use in a DEFAULT clause. The resulting string should
contain a '%s' placeholder for a default value.
"""
return "%s"
def db_default_sql(self, field):
"""Return the sql and params for the field's database default."""
from django.db.models.expressions import Value
sql = "%s" if isinstance(field.db_default, Value) else "(%s)"
query = Query(model=field.model)
compiler = query.get_compiler(connection=self.connection)
default_sql, params = compiler.compile(field.db_default)
if self.connection.features.requires_literal_defaults:
            # Some databases don't support parameterized defaults (Oracle,
# SQLite). If this is the case, the individual schema backend
# should implement prepare_default().
default_sql %= tuple(self.prepare_default(p) for p in params)
params = []
return sql % default_sql, params
@staticmethod
def _effective_default(field):
# This method allows testing its logic without a connection.
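        # E.g. a blank=True CharField falls back to "", a BinaryField to
        # b"", and auto_now/auto_now_add fields to the current date/time,
        # trimmed to the field's granularity below.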
if field.has_default():
default = field.get_default()
elif not field.null and field.blank and field.empty_strings_allowed:
if field.get_internal_type() == "BinaryField":
default = b""
else:
default = ""
elif getattr(field, "auto_now", False) or getattr(field, "auto_now_add", False):
internal_type = field.get_internal_type()
if internal_type == "DateTimeField":
default = timezone.now()
else:
default = datetime.now()
if internal_type == "DateField":
default = default.date()
elif internal_type == "TimeField":
default = default.time()
else:
default = None
return default
def effective_default(self, field):
"""Return a field's effective database default value."""
return field.get_db_prep_save(self._effective_default(field), self.connection)
def quote_value(self, value):
"""
Return a quoted version of the value so it's safe to use in an SQL
string. This is not safe against injection from user code; it is
intended only for use in making SQL scripts or preparing default values
for particularly tricky backends (defaults are not user-defined, though,
so this is safe).
"""
raise NotImplementedError()
# Actions
def create_model(self, model):
"""
Create a table and any accompanying indexes or unique constraints for
the given `model`.
"""
sql, params = self.table_sql(model)
        # Prevent using [] as params, in case a literal '%' is used in the
        # definition.
self.execute(sql, params or None)
if self.connection.features.supports_comments:
# Add table comment.
if model._meta.db_table_comment:
self.alter_db_table_comment(model, None, model._meta.db_table_comment)
# Add column comments.
if not self.connection.features.supports_comments_inline:
for field in model._meta.local_fields:
if field.db_comment:
field_db_params = field.db_parameters(
connection=self.connection
)
field_type = field_db_params["type"]
self.execute(
*self._alter_column_comment_sql(
model, field, field_type, field.db_comment
)
)
# Add any field index and index_together's (deferred as SQLite
# _remake_table needs it).
self.deferred_sql.extend(self._model_indexes_sql(model))
# Make M2M tables
for field in model._meta.local_many_to_many:
if field.remote_field.through._meta.auto_created:
self.create_model(field.remote_field.through)
def delete_model(self, model):
"""Delete a model from the database."""
# Handle auto-created intermediary models
for field in model._meta.local_many_to_many:
if field.remote_field.through._meta.auto_created:
self.delete_model(field.remote_field.through)
# Delete the table
self.execute(
self.sql_delete_table
% {
"table": self.quote_name(model._meta.db_table),
}
)
# Remove all deferred statements referencing the deleted table.
for sql in list(self.deferred_sql):
if isinstance(sql, Statement) and sql.references_table(
model._meta.db_table
):
self.deferred_sql.remove(sql)
def add_index(self, model, index):
"""Add an index on a model."""
if (
index.contains_expressions
and not self.connection.features.supports_expression_indexes
):
return None
# Index.create_sql returns interpolated SQL which makes params=None a
# necessity to avoid escaping attempts on execution.
self.execute(index.create_sql(model, self), params=None)
def remove_index(self, model, index):
"""Remove an index from a model."""
if (
index.contains_expressions
and not self.connection.features.supports_expression_indexes
):
return None
self.execute(index.remove_sql(model, self))
def rename_index(self, model, old_index, new_index):
if self.connection.features.can_rename_index:
self.execute(
self._rename_index_sql(model, old_index.name, new_index.name),
params=None,
)
else:
self.remove_index(model, old_index)
self.add_index(model, new_index)
def add_constraint(self, model, constraint):
"""Add a constraint to a model."""
sql = constraint.create_sql(model, self)
if sql:
# Constraint.create_sql returns interpolated SQL which makes
# params=None a necessity to avoid escaping attempts on execution.
self.execute(sql, params=None)
def remove_constraint(self, model, constraint):
"""Remove a constraint from a model."""
sql = constraint.remove_sql(model, self)
if sql:
self.execute(sql)
def alter_unique_together(self, model, old_unique_together, new_unique_together):
"""
Deal with a model changing its unique_together. The input
unique_togethers must be doubly-nested, not the single-nested
["foo", "bar"] format.
"""
olds = {tuple(fields) for fields in old_unique_together}
news = {tuple(fields) for fields in new_unique_together}
# Deleted uniques
for fields in olds.difference(news):
self._delete_composed_index(
model,
fields,
{"unique": True, "primary_key": False},
self.sql_delete_unique,
)
# Created uniques
for field_names in news.difference(olds):
fields = [model._meta.get_field(field) for field in field_names]
self.execute(self._create_unique_sql(model, fields))
def alter_index_together(self, model, old_index_together, new_index_together):
"""
Deal with a model changing its index_together. The input
index_togethers must be doubly-nested, not the single-nested
["foo", "bar"] format.
"""
olds = {tuple(fields) for fields in old_index_together}
news = {tuple(fields) for fields in new_index_together}
# Deleted indexes
for fields in olds.difference(news):
self._delete_composed_index(
model,
fields,
{"index": True, "unique": False},
self.sql_delete_index,
)
# Created indexes
for field_names in news.difference(olds):
fields = [model._meta.get_field(field) for field in field_names]
self.execute(self._create_index_sql(model, fields=fields, suffix="_idx"))
def _delete_composed_index(self, model, fields, constraint_kwargs, sql):
meta_constraint_names = {
constraint.name for constraint in model._meta.constraints
}
meta_index_names = {constraint.name for constraint in model._meta.indexes}
columns = [model._meta.get_field(field).column for field in fields]
constraint_names = self._constraint_names(
model,
columns,
exclude=meta_constraint_names | meta_index_names,
**constraint_kwargs,
)
if (
constraint_kwargs.get("unique") is True
and constraint_names
and self.connection.features.allows_multiple_constraints_on_same_fields
):
# Constraint matching the unique_together name.
default_name = str(
self._unique_constraint_name(model._meta.db_table, columns, quote=False)
)
if default_name in constraint_names:
constraint_names = [default_name]
if len(constraint_names) != 1:
raise ValueError(
"Found wrong number (%s) of constraints for %s(%s)"
% (
len(constraint_names),
model._meta.db_table,
", ".join(columns),
)
)
self.execute(self._delete_constraint_sql(sql, model, constraint_names[0]))
def alter_db_table(self, model, old_db_table, new_db_table):
"""Rename the table a model points to."""
if old_db_table == new_db_table or (
self.connection.features.ignores_table_name_case
and old_db_table.lower() == new_db_table.lower()
):
return
self.execute(
self.sql_rename_table
% {
"old_table": self.quote_name(old_db_table),
"new_table": self.quote_name(new_db_table),
}
)
# Rename all references to the old table name.
for sql in self.deferred_sql:
if isinstance(sql, Statement):
sql.rename_table_references(old_db_table, new_db_table)
def alter_db_table_comment(self, model, old_db_table_comment, new_db_table_comment):
self.execute(
self.sql_alter_table_comment
% {
"table": self.quote_name(model._meta.db_table),
"comment": self.quote_value(new_db_table_comment or ""),
}
)
def alter_db_tablespace(self, model, old_db_tablespace, new_db_tablespace):
"""Move a model's table between tablespaces."""
self.execute(
self.sql_retablespace_table
% {
"table": self.quote_name(model._meta.db_table),
"old_tablespace": self.quote_name(old_db_tablespace),
"new_tablespace": self.quote_name(new_db_tablespace),
}
)
def add_field(self, model, field):
"""
Create a field on a model. Usually involves adding a column, but may
involve adding a table instead (for M2M fields).
"""
# Special-case implicit M2M tables
if field.many_to_many and field.remote_field.through._meta.auto_created:
return self.create_model(field.remote_field.through)
# Get the column's definition
definition, params = self.column_sql(model, field, include_default=True)
# It might not actually have a column behind it
if definition is None:
return
if col_type_suffix := field.db_type_suffix(connection=self.connection):
definition += f" {col_type_suffix}"
# Check constraints can go on the column SQL here
db_params = field.db_parameters(connection=self.connection)
if db_params["check"]:
definition += " " + self.sql_check_constraint % db_params
if (
field.remote_field
and self.connection.features.supports_foreign_keys
and field.db_constraint
):
constraint_suffix = "_fk_%(to_table)s_%(to_column)s"
# Add FK constraint inline, if supported.
if self.sql_create_column_inline_fk:
to_table = field.remote_field.model._meta.db_table
to_column = field.remote_field.model._meta.get_field(
field.remote_field.field_name
).column
namespace, _ = split_identifier(model._meta.db_table)
definition += " " + self.sql_create_column_inline_fk % {
"name": self._fk_constraint_name(model, field, constraint_suffix),
"namespace": "%s." % self.quote_name(namespace)
if namespace
else "",
"column": self.quote_name(field.column),
"to_table": self.quote_name(to_table),
"to_column": self.quote_name(to_column),
"deferrable": self.connection.ops.deferrable_sql(),
}
# Otherwise, add FK constraints later.
else:
self.deferred_sql.append(
self._create_fk_sql(model, field, constraint_suffix)
)
# Build the SQL and run it
sql = self.sql_create_column % {
"table": self.quote_name(model._meta.db_table),
"column": self.quote_name(field.column),
"definition": definition,
}
self.execute(sql, params)
# Drop the default if we need to
# (Django usually does not use in-database defaults)
if (
not self.skip_default_on_alter(field)
and self.effective_default(field) is not None
):
changes_sql, params = self._alter_column_default_sql(
model, None, field, drop=True
)
sql = self.sql_alter_column % {
"table": self.quote_name(model._meta.db_table),
"changes": changes_sql,
}
self.execute(sql, params)
# Add field comment, if required.
if (
field.db_comment
and self.connection.features.supports_comments
and not self.connection.features.supports_comments_inline
):
field_type = db_params["type"]
self.execute(
*self._alter_column_comment_sql(
model, field, field_type, field.db_comment
)
)
# Add an index, if required
self.deferred_sql.extend(self._field_indexes_sql(model, field))
# Reset connection if required
if self.connection.features.connection_persists_old_columns:
self.connection.close()
def remove_field(self, model, field):
"""
Remove a field from a model. Usually involves deleting a column,
but for M2Ms may involve deleting a table.
"""
# Special-case implicit M2M tables
if field.many_to_many and field.remote_field.through._meta.auto_created:
return self.delete_model(field.remote_field.through)
# It might not actually have a column behind it
if field.db_parameters(connection=self.connection)["type"] is None:
return
        # Drop any FK constraints; MySQL requires explicit deletion.
if field.remote_field:
fk_names = self._constraint_names(model, [field.column], foreign_key=True)
for fk_name in fk_names:
self.execute(self._delete_fk_sql(model, fk_name))
# Delete the column
sql = self.sql_delete_column % {
"table": self.quote_name(model._meta.db_table),
"column": self.quote_name(field.column),
}
self.execute(sql)
# Reset connection if required
if self.connection.features.connection_persists_old_columns:
self.connection.close()
# Remove all deferred statements referencing the deleted column.
for sql in list(self.deferred_sql):
if isinstance(sql, Statement) and sql.references_column(
model._meta.db_table, field.column
):
self.deferred_sql.remove(sql)
def alter_field(self, model, old_field, new_field, strict=False):
"""
Allow a field's type, uniqueness, nullability, default, column,
constraints, etc. to be modified.
`old_field` is required to compute the necessary changes.
If `strict` is True, raise errors if the old column does not match
`old_field` precisely.
"""
if not self._field_should_be_altered(old_field, new_field):
return
# Ensure this field is even column-based
old_db_params = old_field.db_parameters(connection=self.connection)
old_type = old_db_params["type"]
new_db_params = new_field.db_parameters(connection=self.connection)
new_type = new_db_params["type"]
if (old_type is None and old_field.remote_field is None) or (
new_type is None and new_field.remote_field is None
):
raise ValueError(
"Cannot alter field %s into %s - they do not properly define "
"db_type (are you using a badly-written custom field?)"
% (old_field, new_field),
)
elif (
old_type is None
and new_type is None
and (
old_field.remote_field.through
and new_field.remote_field.through
and old_field.remote_field.through._meta.auto_created
and new_field.remote_field.through._meta.auto_created
)
):
return self._alter_many_to_many(model, old_field, new_field, strict)
elif (
old_type is None
and new_type is None
and (
old_field.remote_field.through
and new_field.remote_field.through
and not old_field.remote_field.through._meta.auto_created
and not new_field.remote_field.through._meta.auto_created
)
):
# Both sides have through models; this is a no-op.
return
elif old_type is None or new_type is None:
raise ValueError(
"Cannot alter field %s into %s - they are not compatible types "
"(you cannot alter to or from M2M fields, or add or remove "
"through= on M2M fields)" % (old_field, new_field)
)
self._alter_field(
model,
old_field,
new_field,
old_type,
new_type,
old_db_params,
new_db_params,
strict,
)
def _field_db_check(self, field, field_db_params):
        # Always check constraints with the same mocked column name to avoid
        # recreating constraints when the column is renamed.
check_constraints = self.connection.data_type_check_constraints
data = field.db_type_parameters(self.connection)
data["column"] = "__column_name__"
try:
return check_constraints[field.get_internal_type()] % data
except KeyError:
return None
def _alter_field(
self,
model,
old_field,
new_field,
old_type,
new_type,
old_db_params,
new_db_params,
strict=False,
):
"""Perform a "physical" (non-ManyToMany) field update."""
        # Drop any FK constraints; we'll remake them later.
fks_dropped = set()
if (
self.connection.features.supports_foreign_keys
and old_field.remote_field
and old_field.db_constraint
and self._field_should_be_altered(
old_field,
new_field,
ignore={"db_comment"},
)
):
fk_names = self._constraint_names(
model, [old_field.column], foreign_key=True
)
if strict and len(fk_names) != 1:
raise ValueError(
"Found wrong number (%s) of foreign key constraints for %s.%s"
% (
len(fk_names),
model._meta.db_table,
old_field.column,
)
)
for fk_name in fk_names:
fks_dropped.add((old_field.column,))
self.execute(self._delete_fk_sql(model, fk_name))
# Has unique been removed?
if old_field.unique and (
not new_field.unique or self._field_became_primary_key(old_field, new_field)
):
# Find the unique constraint for this field
meta_constraint_names = {
constraint.name for constraint in model._meta.constraints
}
constraint_names = self._constraint_names(
model,
[old_field.column],
unique=True,
primary_key=False,
exclude=meta_constraint_names,
)
if strict and len(constraint_names) != 1:
raise ValueError(
"Found wrong number (%s) of unique constraints for %s.%s"
% (
len(constraint_names),
model._meta.db_table,
old_field.column,
)
)
for constraint_name in constraint_names:
self.execute(self._delete_unique_sql(model, constraint_name))
# Drop incoming FK constraints if the field is a primary key or unique,
# which might be a to_field target, and things are going to change.
old_collation = old_db_params.get("collation")
new_collation = new_db_params.get("collation")
drop_foreign_keys = (
self.connection.features.supports_foreign_keys
and (
(old_field.primary_key and new_field.primary_key)
or (old_field.unique and new_field.unique)
)
and ((old_type != new_type) or (old_collation != new_collation))
)
if drop_foreign_keys:
            # '_meta.related_field' also contains M2M reverse fields; these
            # will be filtered out.
for _old_rel, new_rel in _related_non_m2m_objects(old_field, new_field):
rel_fk_names = self._constraint_names(
new_rel.related_model, [new_rel.field.column], foreign_key=True
)
for fk_name in rel_fk_names:
self.execute(self._delete_fk_sql(new_rel.related_model, fk_name))
# Removed an index? (no strict check, as multiple indexes are possible)
# Remove indexes if db_index switched to False or a unique constraint
# will now be used in lieu of an index. The following lines from the
# truth table show all True cases; the rest are False:
#
# old_field.db_index | old_field.unique | new_field.db_index | new_field.unique
# ------------------------------------------------------------------------------
# True | False | False | False
# True | False | False | True
# True | False | True | True
if (
old_field.db_index
and not old_field.unique
and (not new_field.db_index or new_field.unique)
):
# Find the index for this field
meta_index_names = {index.name for index in model._meta.indexes}
# Retrieve only BTREE indexes since this is what's created with
# db_index=True.
index_names = self._constraint_names(
model,
[old_field.column],
index=True,
type_=Index.suffix,
exclude=meta_index_names,
)
for index_name in index_names:
# The only way to check if an index was created with
# db_index=True or with Index(['field'], name='foo')
# is to look at its name (refs #28053).
self.execute(self._delete_index_sql(model, index_name))
# Change check constraints?
old_db_check = self._field_db_check(old_field, old_db_params)
new_db_check = self._field_db_check(new_field, new_db_params)
if old_db_check != new_db_check and old_db_check:
meta_constraint_names = {
constraint.name for constraint in model._meta.constraints
}
constraint_names = self._constraint_names(
model,
[old_field.column],
check=True,
exclude=meta_constraint_names,
)
if strict and len(constraint_names) != 1:
raise ValueError(
"Found wrong number (%s) of check constraints for %s.%s"
% (
len(constraint_names),
model._meta.db_table,
old_field.column,
)
)
for constraint_name in constraint_names:
self.execute(self._delete_check_sql(model, constraint_name))
# Have they renamed the column?
if old_field.column != new_field.column:
self.execute(
self._rename_field_sql(
model._meta.db_table, old_field, new_field, new_type
)
)
# Rename all references to the renamed column.
for sql in self.deferred_sql:
if isinstance(sql, Statement):
sql.rename_column_references(
model._meta.db_table, old_field.column, new_field.column
)
# Next, start accumulating actions to do
actions = []
null_actions = []
post_actions = []
# Type suffix change? (e.g. auto increment).
old_type_suffix = old_field.db_type_suffix(connection=self.connection)
new_type_suffix = new_field.db_type_suffix(connection=self.connection)
# Type, collation, or comment change?
if (
old_type != new_type
or old_type_suffix != new_type_suffix
or old_collation != new_collation
or (
self.connection.features.supports_comments
and old_field.db_comment != new_field.db_comment
)
):
fragment, other_actions = self._alter_column_type_sql(
model, old_field, new_field, new_type, old_collation, new_collation
)
actions.append(fragment)
post_actions.extend(other_actions)
if new_field.db_default is not NOT_PROVIDED:
if (
old_field.db_default is NOT_PROVIDED
or new_field.db_default != old_field.db_default
):
actions.append(
self._alter_column_database_default_sql(model, old_field, new_field)
)
elif old_field.db_default is not NOT_PROVIDED:
actions.append(
self._alter_column_database_default_sql(
model, old_field, new_field, drop=True
)
)
# When changing a column NULL constraint to NOT NULL with a given
# default value, we need to perform 4 steps:
# 1. Add a default for new incoming writes
# 2. Update existing NULL rows with new default
# 3. Replace NULL constraint with NOT NULL
# 4. Drop the default again.
# Default change?
needs_database_default = False
if (
old_field.null
and not new_field.null
and new_field.db_default is NOT_PROVIDED
):
old_default = self.effective_default(old_field)
new_default = self.effective_default(new_field)
if (
not self.skip_default_on_alter(new_field)
and old_default != new_default
and new_default is not None
):
needs_database_default = True
actions.append(
self._alter_column_default_sql(model, old_field, new_field)
)
# Nullability change?
if old_field.null != new_field.null:
fragment = self._alter_column_null_sql(model, old_field, new_field)
if fragment:
null_actions.append(fragment)
# Only if we have a default and there is a change from NULL to NOT NULL
four_way_default_alteration = (
new_field.has_default() or new_field.db_default is not NOT_PROVIDED
) and (old_field.null and not new_field.null)
if actions or null_actions:
if not four_way_default_alteration:
# If we don't have to do a 4-way default alteration we can
# directly run a (NOT) NULL alteration
actions += null_actions
# Combine actions together if we can (e.g. postgres)
if self.connection.features.supports_combined_alters and actions:
sql, params = tuple(zip(*actions))
actions = [(", ".join(sql), sum(params, []))]
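                # E.g. ('ALTER COLUMN "x" TYPE bigint', []) and
                # ('ALTER COLUMN "x" SET NOT NULL', []) collapse into one
                # comma-joined changes string run in a single ALTER TABLE.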
# Apply those actions
for sql, params in actions:
self.execute(
self.sql_alter_column
% {
"table": self.quote_name(model._meta.db_table),
"changes": sql,
},
params,
)
if four_way_default_alteration:
if new_field.db_default is NOT_PROVIDED:
default_sql = "%s"
params = [new_default]
else:
default_sql, params = self.db_default_sql(new_field)
# Update existing rows with default value
self.execute(
self.sql_update_with_default
% {
"table": self.quote_name(model._meta.db_table),
"column": self.quote_name(new_field.column),
"default": default_sql,
},
params,
)
            # Since we didn't run a NOT NULL change before, we need to do it
            # now.
for sql, params in null_actions:
self.execute(
self.sql_alter_column
% {
"table": self.quote_name(model._meta.db_table),
"changes": sql,
},
params,
)
if post_actions:
for sql, params in post_actions:
self.execute(sql, params)
# If primary_key changed to False, delete the primary key constraint.
if old_field.primary_key and not new_field.primary_key:
self._delete_primary_key(model, strict)
# Added a unique?
if self._unique_should_be_added(old_field, new_field):
self.execute(self._create_unique_sql(model, [new_field]))
# Added an index? Add an index if db_index switched to True or a unique
# constraint will no longer be used in lieu of an index. The following
# lines from the truth table show all True cases; the rest are False:
#
# old_field.db_index | old_field.unique | new_field.db_index | new_field.unique
# ------------------------------------------------------------------------------
# False | False | True | False
# False | True | True | False
# True | True | True | False
if (
(not old_field.db_index or old_field.unique)
and new_field.db_index
and not new_field.unique
):
self.execute(self._create_index_sql(model, fields=[new_field]))
# Type alteration on primary key? Then we need to alter the column
# referring to us.
rels_to_update = []
if drop_foreign_keys:
rels_to_update.extend(_related_non_m2m_objects(old_field, new_field))
# Changed to become primary key?
if self._field_became_primary_key(old_field, new_field):
# Make the new one
self.execute(self._create_primary_key_sql(model, new_field))
# Update all referencing columns
rels_to_update.extend(_related_non_m2m_objects(old_field, new_field))
# Handle our type alters on the other end of rels from the PK stuff above
for old_rel, new_rel in rels_to_update:
rel_db_params = new_rel.field.db_parameters(connection=self.connection)
rel_type = rel_db_params["type"]
rel_collation = rel_db_params.get("collation")
old_rel_db_params = old_rel.field.db_parameters(connection=self.connection)
old_rel_collation = old_rel_db_params.get("collation")
fragment, other_actions = self._alter_column_type_sql(
new_rel.related_model,
old_rel.field,
new_rel.field,
rel_type,
old_rel_collation,
rel_collation,
)
self.execute(
self.sql_alter_column
% {
"table": self.quote_name(new_rel.related_model._meta.db_table),
"changes": fragment[0],
},
fragment[1],
)
for sql, params in other_actions:
self.execute(sql, params)
# Does it have a foreign key?
if (
self.connection.features.supports_foreign_keys
and new_field.remote_field
and (
fks_dropped or not old_field.remote_field or not old_field.db_constraint
)
and new_field.db_constraint
):
self.execute(
self._create_fk_sql(model, new_field, "_fk_%(to_table)s_%(to_column)s")
)
# Rebuild FKs that pointed to us if we previously had to drop them
if drop_foreign_keys:
for _, rel in rels_to_update:
if rel.field.db_constraint:
self.execute(
self._create_fk_sql(rel.related_model, rel.field, "_fk")
)
# Does it have check constraints we need to add?
if old_db_check != new_db_check and new_db_check:
constraint_name = self._create_index_name(
model._meta.db_table, [new_field.column], suffix="_check"
)
self.execute(
self._create_check_sql(model, constraint_name, new_db_params["check"])
)
# Drop the default if we need to
# (Django usually does not use in-database defaults)
if needs_database_default:
changes_sql, params = self._alter_column_default_sql(
model, old_field, new_field, drop=True
)
sql = self.sql_alter_column % {
"table": self.quote_name(model._meta.db_table),
"changes": changes_sql,
}
self.execute(sql, params)
# Reset connection if required
if self.connection.features.connection_persists_old_columns:
self.connection.close()
def _alter_column_null_sql(self, model, old_field, new_field):
"""
Hook to specialize column null alteration.
Return a (sql, params) fragment to set a column to null or non-null
as required by new_field, or None if no changes are required.
"""
if (
self.connection.features.interprets_empty_strings_as_nulls
and new_field.empty_strings_allowed
):
# The field is nullable in the database anyway, leave it alone.
return
else:
new_db_params = new_field.db_parameters(connection=self.connection)
sql = (
self.sql_alter_column_null
if new_field.null
else self.sql_alter_column_not_null
)
return (
sql
% {
"column": self.quote_name(new_field.column),
"type": new_db_params["type"],
},
[],
)
def _alter_column_default_sql(self, model, old_field, new_field, drop=False):
"""
Hook to specialize column default alteration.
Return a (sql, params) fragment to add or drop (depending on the drop
argument) a default to new_field's column.
"""
new_default = self.effective_default(new_field)
default = self._column_default_sql(new_field)
params = [new_default]
if drop:
params = []
elif self.connection.features.requires_literal_defaults:
# Some databases (Oracle) can't take defaults as a parameter
# If this is the case, the SchemaEditor for that database should
# implement prepare_default().
default = self.prepare_default(new_default)
params = []
new_db_params = new_field.db_parameters(connection=self.connection)
if drop:
if new_field.null:
sql = self.sql_alter_column_no_default_null
else:
sql = self.sql_alter_column_no_default
else:
sql = self.sql_alter_column_default
return (
sql
% {
"column": self.quote_name(new_field.column),
"type": new_db_params["type"],
"default": default,
},
params,
)
def _alter_column_database_default_sql(
self, model, old_field, new_field, drop=False
):
"""
Hook to specialize column database default alteration.
Return a (sql, params) fragment to add or drop (depending on the drop
argument) a default to new_field's column.
"""
if drop:
sql = self.sql_alter_column_no_default
default_sql = ""
params = []
else:
sql = self.sql_alter_column_default
default_sql, params = self.db_default_sql(new_field)
new_db_params = new_field.db_parameters(connection=self.connection)
return (
sql
% {
"column": self.quote_name(new_field.column),
"type": new_db_params["type"],
"default": default_sql,
},
params,
)
def _alter_column_type_sql(
self, model, old_field, new_field, new_type, old_collation, new_collation
):
"""
Hook to specialize column type alteration for different backends,
for cases when a creation type is different to an alteration type
(e.g. SERIAL in PostgreSQL, PostGIS fields).
Return a two-tuple of: an SQL fragment of (sql, params) to insert into
an ALTER TABLE statement and a list of extra (sql, params) tuples to
run once the field is altered.
"""
other_actions = []
if collate_sql := self._collate_sql(
new_collation, old_collation, model._meta.db_table
):
collate_sql = f" {collate_sql}"
else:
collate_sql = ""
# Comment change?
comment_sql = ""
if self.connection.features.supports_comments and not new_field.many_to_many:
if old_field.db_comment != new_field.db_comment:
# PostgreSQL and Oracle can't execute 'ALTER COLUMN ...' and
# 'COMMENT ON ...' at the same time.
sql, params = self._alter_column_comment_sql(
model, new_field, new_type, new_field.db_comment
)
if sql:
other_actions.append((sql, params))
if new_field.db_comment:
comment_sql = self._comment_sql(new_field.db_comment)
return (
(
self.sql_alter_column_type
% {
"column": self.quote_name(new_field.column),
"type": new_type,
"collation": collate_sql,
"comment": comment_sql,
},
[],
),
other_actions,
)
def _alter_column_comment_sql(self, model, new_field, new_type, new_db_comment):
return (
self.sql_alter_column_comment
% {
"table": self.quote_name(model._meta.db_table),
"column": self.quote_name(new_field.column),
"comment": self._comment_sql(new_db_comment),
},
[],
)
def _comment_sql(self, comment):
return self.quote_value(comment or "")
def _alter_many_to_many(self, model, old_field, new_field, strict):
"""Alter M2Ms to repoint their to= endpoints."""
# Rename the through table
if (
old_field.remote_field.through._meta.db_table
!= new_field.remote_field.through._meta.db_table
):
self.alter_db_table(
old_field.remote_field.through,
old_field.remote_field.through._meta.db_table,
new_field.remote_field.through._meta.db_table,
)
# Repoint the FK to the other side
self.alter_field(
new_field.remote_field.through,
# The field that points to the target model is needed, so we can
# tell alter_field to change it - this is m2m_reverse_field_name()
# (as opposed to m2m_field_name(), which points to our model).
old_field.remote_field.through._meta.get_field(
old_field.m2m_reverse_field_name()
),
new_field.remote_field.through._meta.get_field(
new_field.m2m_reverse_field_name()
),
)
self.alter_field(
new_field.remote_field.through,
            # For self-referential models, the field must be altered from the
            # other end too.
old_field.remote_field.through._meta.get_field(old_field.m2m_field_name()),
new_field.remote_field.through._meta.get_field(new_field.m2m_field_name()),
)
def _create_index_name(self, table_name, column_names, suffix=""):
"""
Generate a unique name for an index/unique constraint.
The name is divided into 3 parts: the table name, the column names,
and a unique digest and suffix.
"""
_, table_name = split_identifier(table_name)
hash_suffix_part = "%s%s" % (
names_digest(table_name, *column_names, length=8),
suffix,
)
max_length = self.connection.ops.max_name_length() or 200
# If everything fits into max_length, use that name.
index_name = "%s_%s_%s" % (table_name, "_".join(column_names), hash_suffix_part)
if len(index_name) <= max_length:
return index_name
# Shorten a long suffix.
if len(hash_suffix_part) > max_length / 3:
hash_suffix_part = hash_suffix_part[: max_length // 3]
other_length = (max_length - len(hash_suffix_part)) // 2 - 1
index_name = "%s_%s_%s" % (
table_name[:other_length],
"_".join(column_names)[:other_length],
hash_suffix_part,
)
# Prepend D if needed to prevent the name from starting with an
# underscore or a number (not permitted on Oracle).
if index_name[0] == "_" or index_name[0].isdigit():
index_name = "D%s" % index_name[:-1]
return index_name
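    # Illustrative sketch (the 8-character digest below is hypothetical; the
    # real value comes from names_digest() over the table and column names):
    #
    #     editor._create_index_name("my_app_book", ["author_id"], suffix="_idx")
    #     # -> "my_app_book_author_id_a1b2c3d4_idx" when it fits within
    #     # max_name_length(); longer names are truncated around a shortened
    #     # digest, and "D" is prepended if the result would otherwise start
    #     # with "_" or a digit.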
def _get_index_tablespace_sql(self, model, fields, db_tablespace=None):
if db_tablespace is None:
if len(fields) == 1 and fields[0].db_tablespace:
db_tablespace = fields[0].db_tablespace
elif settings.DEFAULT_INDEX_TABLESPACE:
db_tablespace = settings.DEFAULT_INDEX_TABLESPACE
elif model._meta.db_tablespace:
db_tablespace = model._meta.db_tablespace
if db_tablespace is not None:
return " " + self.connection.ops.tablespace_sql(db_tablespace)
return ""
def _index_condition_sql(self, condition):
if condition:
return " WHERE " + condition
return ""
def _index_include_sql(self, model, columns):
if not columns or not self.connection.features.supports_covering_indexes:
return ""
return Statement(
" INCLUDE (%(columns)s)",
columns=Columns(model._meta.db_table, columns, self.quote_name),
)
def _create_index_sql(
self,
model,
*,
fields=None,
name=None,
suffix="",
using="",
db_tablespace=None,
col_suffixes=(),
sql=None,
opclasses=(),
condition=None,
include=None,
expressions=None,
):
"""
Return the SQL statement to create the index for one or several fields
or expressions. `sql` can be specified if the syntax differs from the
standard (GIS indexes, ...).
"""
fields = fields or []
expressions = expressions or []
compiler = Query(model, alias_cols=False).get_compiler(
connection=self.connection,
)
tablespace_sql = self._get_index_tablespace_sql(
model, fields, db_tablespace=db_tablespace
)
columns = [field.column for field in fields]
sql_create_index = sql or self.sql_create_index
table = model._meta.db_table
def create_index_name(*args, **kwargs):
nonlocal name
if name is None:
name = self._create_index_name(*args, **kwargs)
return self.quote_name(name)
return Statement(
sql_create_index,
table=Table(table, self.quote_name),
name=IndexName(table, columns, suffix, create_index_name),
using=using,
columns=(
self._index_columns(table, columns, col_suffixes, opclasses)
if columns
else Expressions(table, expressions, compiler, self.quote_value)
),
extra=tablespace_sql,
condition=self._index_condition_sql(condition),
include=self._index_include_sql(model, include),
)
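    # Illustrative sketch (hypothetical table name and digest; the index name
    # is generated lazily by create_index_name() when the Statement is first
    # rendered): a single-field index renders sql_create_index to roughly
    #
    #     CREATE INDEX "my_app_book_title_2dfb3c31" ON "my_app_book" ("title")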
def _delete_index_sql(self, model, name, sql=None):
return Statement(
sql or self.sql_delete_index,
table=Table(model._meta.db_table, self.quote_name),
name=self.quote_name(name),
)
def _rename_index_sql(self, model, old_name, new_name):
return Statement(
self.sql_rename_index,
table=Table(model._meta.db_table, self.quote_name),
old_name=self.quote_name(old_name),
new_name=self.quote_name(new_name),
)
def _index_columns(self, table, columns, col_suffixes, opclasses):
return Columns(table, columns, self.quote_name, col_suffixes=col_suffixes)
def _model_indexes_sql(self, model):
"""
Return a list of all index SQL statements (field indexes,
index_together, Meta.indexes) for the specified model.
"""
if not model._meta.managed or model._meta.proxy or model._meta.swapped:
return []
output = []
for field in model._meta.local_fields:
output.extend(self._field_indexes_sql(model, field))
# RemovedInDjango51Warning.
for field_names in model._meta.index_together:
fields = [model._meta.get_field(field) for field in field_names]
output.append(self._create_index_sql(model, fields=fields, suffix="_idx"))
for index in model._meta.indexes:
if (
not index.contains_expressions
or self.connection.features.supports_expression_indexes
):
output.append(index.create_sql(model, self))
return output
def _field_indexes_sql(self, model, field):
"""
Return a list of all index SQL statements for the specified field.
"""
output = []
if self._field_should_be_indexed(model, field):
output.append(self._create_index_sql(model, fields=[field]))
return output
def _field_should_be_altered(self, old_field, new_field, ignore=None):
ignore = ignore or set()
_, old_path, old_args, old_kwargs = old_field.deconstruct()
_, new_path, new_args, new_kwargs = new_field.deconstruct()
# Don't alter when:
# - changing only a field name
# - changing an attribute that doesn't affect the schema
# - changing an attribute in the provided set of ignored attributes
# - adding only a db_column and the column name is not changed
for attr in ignore.union(old_field.non_db_attrs):
old_kwargs.pop(attr, None)
for attr in ignore.union(new_field.non_db_attrs):
new_kwargs.pop(attr, None)
return self.quote_name(old_field.column) != self.quote_name(
new_field.column
) or (old_path, old_args, old_kwargs) != (new_path, new_args, new_kwargs)
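    # Illustrative sketch (hypothetical fields): renaming a field while its
    # column stays fixed requires no alteration, because the field name is
    # discarded from the deconstruct() output and the quoted column names
    # match:
    #
    #     old = models.IntegerField(db_column="age")  # field name "age"
    #     new = models.IntegerField(db_column="age")  # field name "years"
    #     editor._field_should_be_altered(old, new)   # -> False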
def _field_should_be_indexed(self, model, field):
return field.db_index and not field.unique
def _field_became_primary_key(self, old_field, new_field):
return not old_field.primary_key and new_field.primary_key
def _unique_should_be_added(self, old_field, new_field):
return (
not new_field.primary_key
and new_field.unique
and (not old_field.unique or old_field.primary_key)
)
def _rename_field_sql(self, table, old_field, new_field, new_type):
return self.sql_rename_column % {
"table": self.quote_name(table),
"old_column": self.quote_name(old_field.column),
"new_column": self.quote_name(new_field.column),
"type": new_type,
}
def _create_fk_sql(self, model, field, suffix):
table = Table(model._meta.db_table, self.quote_name)
name = self._fk_constraint_name(model, field, suffix)
column = Columns(model._meta.db_table, [field.column], self.quote_name)
to_table = Table(field.target_field.model._meta.db_table, self.quote_name)
to_column = Columns(
field.target_field.model._meta.db_table,
[field.target_field.column],
self.quote_name,
)
deferrable = self.connection.ops.deferrable_sql()
return Statement(
self.sql_create_fk,
table=table,
name=name,
column=column,
to_table=to_table,
to_column=to_column,
deferrable=deferrable,
)
def _fk_constraint_name(self, model, field, suffix):
def create_fk_name(*args, **kwargs):
return self.quote_name(self._create_index_name(*args, **kwargs))
return ForeignKeyName(
model._meta.db_table,
[field.column],
split_identifier(field.target_field.model._meta.db_table)[1],
[field.target_field.column],
suffix,
create_fk_name,
)
def _delete_fk_sql(self, model, name):
return self._delete_constraint_sql(self.sql_delete_fk, model, name)
def _deferrable_constraint_sql(self, deferrable):
if deferrable is None:
return ""
if deferrable == Deferrable.DEFERRED:
return " DEFERRABLE INITIALLY DEFERRED"
if deferrable == Deferrable.IMMEDIATE:
return " DEFERRABLE INITIALLY IMMEDIATE"
def _unique_sql(
self,
model,
fields,
name,
condition=None,
deferrable=None,
include=None,
opclasses=None,
expressions=None,
):
if (
deferrable
and not self.connection.features.supports_deferrable_unique_constraints
):
return None
if condition or include or opclasses or expressions:
# Databases support conditional, covering, and functional unique
# constraints via a unique index.
sql = self._create_unique_sql(
model,
fields,
name=name,
condition=condition,
include=include,
opclasses=opclasses,
expressions=expressions,
)
if sql:
self.deferred_sql.append(sql)
return None
constraint = self.sql_unique_constraint % {
"columns": ", ".join([self.quote_name(field.column) for field in fields]),
"deferrable": self._deferrable_constraint_sql(deferrable),
}
return self.sql_constraint % {
"name": self.quote_name(name),
"constraint": constraint,
}
def _create_unique_sql(
self,
model,
fields,
name=None,
condition=None,
deferrable=None,
include=None,
opclasses=None,
expressions=None,
):
if (
(
deferrable
and not self.connection.features.supports_deferrable_unique_constraints
)
or (condition and not self.connection.features.supports_partial_indexes)
or (include and not self.connection.features.supports_covering_indexes)
or (
expressions and not self.connection.features.supports_expression_indexes
)
):
return None
compiler = Query(model, alias_cols=False).get_compiler(
connection=self.connection
)
table = model._meta.db_table
columns = [field.column for field in fields]
if name is None:
name = self._unique_constraint_name(table, columns, quote=True)
else:
name = self.quote_name(name)
if condition or include or opclasses or expressions:
sql = self.sql_create_unique_index
else:
sql = self.sql_create_unique
if columns:
columns = self._index_columns(
table, columns, col_suffixes=(), opclasses=opclasses
)
else:
columns = Expressions(table, expressions, compiler, self.quote_value)
return Statement(
sql,
table=Table(table, self.quote_name),
name=name,
columns=columns,
condition=self._index_condition_sql(condition),
deferrable=self._deferrable_constraint_sql(deferrable),
include=self._index_include_sql(model, include),
)
def _unique_constraint_name(self, table, columns, quote=True):
if quote:
def create_unique_name(*args, **kwargs):
return self.quote_name(self._create_index_name(*args, **kwargs))
else:
create_unique_name = self._create_index_name
return IndexName(table, columns, "_uniq", create_unique_name)
def _delete_unique_sql(
self,
model,
name,
condition=None,
deferrable=None,
include=None,
opclasses=None,
expressions=None,
):
if (
(
deferrable
and not self.connection.features.supports_deferrable_unique_constraints
)
or (condition and not self.connection.features.supports_partial_indexes)
or (include and not self.connection.features.supports_covering_indexes)
or (
expressions and not self.connection.features.supports_expression_indexes
)
):
return None
if condition or include or opclasses or expressions:
sql = self.sql_delete_index
else:
sql = self.sql_delete_unique
return self._delete_constraint_sql(sql, model, name)
def _check_sql(self, name, check):
return self.sql_constraint % {
"name": self.quote_name(name),
"constraint": self.sql_check_constraint % {"check": check},
}
def _create_check_sql(self, model, name, check):
if not self.connection.features.supports_table_check_constraints:
return None
return Statement(
self.sql_create_check,
table=Table(model._meta.db_table, self.quote_name),
name=self.quote_name(name),
check=check,
)
def _delete_check_sql(self, model, name):
if not self.connection.features.supports_table_check_constraints:
return None
return self._delete_constraint_sql(self.sql_delete_check, model, name)
def _delete_constraint_sql(self, template, model, name):
return Statement(
template,
table=Table(model._meta.db_table, self.quote_name),
name=self.quote_name(name),
)
def _constraint_names(
self,
model,
column_names=None,
unique=None,
primary_key=None,
index=None,
foreign_key=None,
check=None,
type_=None,
exclude=None,
):
"""Return all constraint names matching the columns and conditions."""
if column_names is not None:
column_names = [
self.connection.introspection.identifier_converter(
truncate_name(name, self.connection.ops.max_name_length())
)
if self.connection.features.truncates_names
else self.connection.introspection.identifier_converter(name)
for name in column_names
]
with self.connection.cursor() as cursor:
constraints = self.connection.introspection.get_constraints(
cursor, model._meta.db_table
)
result = []
for name, infodict in constraints.items():
if column_names is None or column_names == infodict["columns"]:
if unique is not None and infodict["unique"] != unique:
continue
if primary_key is not None and infodict["primary_key"] != primary_key:
continue
if index is not None and infodict["index"] != index:
continue
if check is not None and infodict["check"] != check:
continue
if foreign_key is not None and not infodict["foreign_key"]:
continue
if type_ is not None and infodict["type"] != type_:
continue
if not exclude or name not in exclude:
result.append(name)
return result
def _delete_primary_key(self, model, strict=False):
constraint_names = self._constraint_names(model, primary_key=True)
if strict and len(constraint_names) != 1:
raise ValueError(
"Found wrong number (%s) of PK constraints for %s"
% (
len(constraint_names),
model._meta.db_table,
)
)
for constraint_name in constraint_names:
self.execute(self._delete_primary_key_sql(model, constraint_name))
def _create_primary_key_sql(self, model, field):
return Statement(
self.sql_create_pk,
table=Table(model._meta.db_table, self.quote_name),
name=self.quote_name(
self._create_index_name(
model._meta.db_table, [field.column], suffix="_pk"
)
),
columns=Columns(model._meta.db_table, [field.column], self.quote_name),
)
def _delete_primary_key_sql(self, model, name):
return self._delete_constraint_sql(self.sql_delete_pk, model, name)
def _collate_sql(self, collation, old_collation=None, table_name=None):
return "COLLATE " + self.quote_name(collation) if collation else ""
def remove_procedure(self, procedure_name, param_types=()):
sql = self.sql_delete_procedure % {
"procedure": self.quote_name(procedure_name),
"param_types": ",".join(param_types),
}
self.execute(sql)
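    # Illustrative sketch: with a template such as PostgreSQL's
    # "DROP FUNCTION %(procedure)s(%(param_types)s)",
    #
    #     editor.remove_procedure("test_procedure", ["INTEGER"])
    #
    # executes: DROP FUNCTION "test_procedure"(INTEGER)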
# django/db/backends/mysql/features.py
import operator
from django.db.backends.base.features import BaseDatabaseFeatures
from django.utils.functional import cached_property
class DatabaseFeatures(BaseDatabaseFeatures):
empty_fetchmany_value = ()
allows_group_by_selected_pks = True
related_fields_match_type = True
# MySQL doesn't support sliced subqueries with IN/ALL/ANY/SOME.
allow_sliced_subqueries_with_in = False
has_select_for_update = True
supports_forward_references = False
supports_regex_backreferencing = False
supports_date_lookup_using_string = False
supports_timezones = False
requires_explicit_null_ordering_when_grouping = True
atomic_transactions = False
can_clone_databases = True
supports_comments = True
supports_comments_inline = True
supports_temporal_subtraction = True
supports_slicing_ordering_in_compound = True
supports_index_on_text_field = False
supports_update_conflicts = True
delete_can_self_reference_subquery = False
create_test_procedure_without_params_sql = """
CREATE PROCEDURE test_procedure ()
BEGIN
DECLARE V_I INTEGER;
SET V_I = 1;
END;
"""
create_test_procedure_with_int_param_sql = """
CREATE PROCEDURE test_procedure (P_I INTEGER)
BEGIN
DECLARE V_I INTEGER;
SET V_I = P_I;
END;
"""
create_test_table_with_composite_primary_key = """
CREATE TABLE test_table_composite_pk (
column_1 INTEGER NOT NULL,
column_2 INTEGER NOT NULL,
PRIMARY KEY(column_1, column_2)
)
"""
    # Neither MySQL nor MariaDB supports partial indexes.
supports_partial_indexes = False
# COLLATE must be wrapped in parentheses because MySQL treats COLLATE as an
# indexed expression.
collate_as_index_expression = True
insert_test_table_with_defaults = "INSERT INTO {} () VALUES ()"
supports_order_by_nulls_modifier = False
order_by_nulls_first = True
supports_logical_xor = True
@cached_property
def minimum_database_version(self):
if self.connection.mysql_is_mariadb:
return (10, 4)
else:
return (8,)
@cached_property
def test_collations(self):
charset = "utf8"
if (
self.connection.mysql_is_mariadb
and self.connection.mysql_version >= (10, 6)
) or (
not self.connection.mysql_is_mariadb
and self.connection.mysql_version >= (8, 0, 30)
):
# utf8 is an alias for utf8mb3 in MariaDB 10.6+ and MySQL 8.0.30+.
charset = "utf8mb3"
return {
"ci": f"{charset}_general_ci",
"non_default": f"{charset}_esperanto_ci",
"swedish_ci": f"{charset}_swedish_ci",
}
test_now_utc_template = "UTC_TIMESTAMP(6)"
@cached_property
def django_test_skips(self):
skips = {
"This doesn't work on MySQL.": {
"db_functions.comparison.test_greatest.GreatestTests."
"test_coalesce_workaround",
"db_functions.comparison.test_least.LeastTests."
"test_coalesce_workaround",
},
"Running on MySQL requires utf8mb4 encoding (#18392).": {
"model_fields.test_textfield.TextFieldTests.test_emoji",
"model_fields.test_charfield.TestCharField.test_emoji",
},
"MySQL doesn't support functional indexes on a function that "
"returns JSON": {
"schema.tests.SchemaTests.test_func_index_json_key_transform",
},
"MySQL supports multiplying and dividing DurationFields by a "
"scalar value but it's not implemented (#25287).": {
"expressions.tests.FTimeDeltaTests.test_durationfield_multiply_divide",
},
"UPDATE ... ORDER BY syntax on MySQL/MariaDB does not support ordering by"
"related fields.": {
"update.tests.AdvancedTests."
"test_update_ordered_by_inline_m2m_annotation",
"update.tests.AdvancedTests.test_update_ordered_by_m2m_annotation",
"update.tests.AdvancedTests.test_update_ordered_by_m2m_annotation_desc",
},
}
if self.connection.mysql_is_mariadb and (
10,
4,
3,
) < self.connection.mysql_version < (10, 5, 2):
skips.update(
{
"https://jira.mariadb.org/browse/MDEV-19598": {
"schema.tests.SchemaTests."
"test_alter_not_unique_field_to_primary_key",
},
}
)
if self.connection.mysql_is_mariadb and (
10,
4,
12,
) < self.connection.mysql_version < (10, 5):
skips.update(
{
"https://jira.mariadb.org/browse/MDEV-22775": {
"schema.tests.SchemaTests."
"test_alter_pk_with_self_referential_field",
},
}
)
if not self.supports_explain_analyze:
skips.update(
{
"MariaDB and MySQL >= 8.0.18 specific.": {
"queries.test_explain.ExplainTests.test_mysql_analyze",
},
}
)
if "ONLY_FULL_GROUP_BY" in self.connection.sql_mode:
skips.update(
{
"GROUP BY cannot contain nonaggregated column when "
"ONLY_FULL_GROUP_BY mode is enabled on MySQL, see #34262.": {
"aggregation.tests.AggregateTestCase."
"test_group_by_nested_expression_with_params",
},
}
)
if self.connection.mysql_version < (8, 0, 31):
skips.update(
{
"Nesting of UNIONs at the right-hand side is not supported on "
"MySQL < 8.0.31": {
"queries.test_qs_combinators.QuerySetSetOperationTests."
"test_union_nested"
},
}
)
return skips
@cached_property
def _mysql_storage_engine(self):
"Internal method used in Django tests. Don't rely on this from your code"
return self.connection.mysql_server_data["default_storage_engine"]
@cached_property
def allows_auto_pk_0(self):
"""
Autoincrement primary key can be set to 0 if it doesn't generate new
autoincrement values.
"""
return "NO_AUTO_VALUE_ON_ZERO" in self.connection.sql_mode
@cached_property
def update_can_self_select(self):
return self.connection.mysql_is_mariadb and self.connection.mysql_version >= (
10,
3,
2,
)
@cached_property
def can_introspect_foreign_keys(self):
"Confirm support for introspected foreign keys"
return self._mysql_storage_engine != "MyISAM"
@cached_property
def introspected_field_types(self):
return {
**super().introspected_field_types,
"BinaryField": "TextField",
"BooleanField": "IntegerField",
"DurationField": "BigIntegerField",
"GenericIPAddressField": "CharField",
}
@cached_property
def can_return_columns_from_insert(self):
return self.connection.mysql_is_mariadb and self.connection.mysql_version >= (
10,
5,
0,
)
can_return_rows_from_bulk_insert = property(
operator.attrgetter("can_return_columns_from_insert")
)
@cached_property
def has_zoneinfo_database(self):
return self.connection.mysql_server_data["has_zoneinfo_database"]
@cached_property
def is_sql_auto_is_null_enabled(self):
return self.connection.mysql_server_data["sql_auto_is_null"]
@cached_property
def supports_over_clause(self):
if self.connection.mysql_is_mariadb:
return True
return self.connection.mysql_version >= (8, 0, 2)
supports_frame_range_fixed_distance = property(
operator.attrgetter("supports_over_clause")
)
@cached_property
def supports_column_check_constraints(self):
if self.connection.mysql_is_mariadb:
return True
return self.connection.mysql_version >= (8, 0, 16)
supports_table_check_constraints = property(
operator.attrgetter("supports_column_check_constraints")
)
@cached_property
def can_introspect_check_constraints(self):
if self.connection.mysql_is_mariadb:
return True
return self.connection.mysql_version >= (8, 0, 16)
@cached_property
def has_select_for_update_skip_locked(self):
if self.connection.mysql_is_mariadb:
return self.connection.mysql_version >= (10, 6)
return self.connection.mysql_version >= (8, 0, 1)
@cached_property
def has_select_for_update_nowait(self):
if self.connection.mysql_is_mariadb:
return True
return self.connection.mysql_version >= (8, 0, 1)
@cached_property
def has_select_for_update_of(self):
return (
not self.connection.mysql_is_mariadb
and self.connection.mysql_version >= (8, 0, 1)
)
@cached_property
def supports_explain_analyze(self):
return self.connection.mysql_is_mariadb or self.connection.mysql_version >= (
8,
0,
18,
)
@cached_property
def supported_explain_formats(self):
# Alias MySQL's TRADITIONAL to TEXT for consistency with other
# backends.
formats = {"JSON", "TEXT", "TRADITIONAL"}
if not self.connection.mysql_is_mariadb and self.connection.mysql_version >= (
8,
0,
16,
):
formats.add("TREE")
return formats
@cached_property
def supports_transactions(self):
"""
All storage engines except MyISAM support transactions.
"""
return self._mysql_storage_engine != "MyISAM"
@cached_property
def ignores_table_name_case(self):
return self.connection.mysql_server_data["lower_case_table_names"]
@cached_property
def supports_default_in_lead_lag(self):
# To be added in https://jira.mariadb.org/browse/MDEV-12981.
return not self.connection.mysql_is_mariadb
@cached_property
def can_introspect_json_field(self):
if self.connection.mysql_is_mariadb:
return self.can_introspect_check_constraints
return True
@cached_property
def supports_index_column_ordering(self):
if self._mysql_storage_engine != "InnoDB":
return False
if self.connection.mysql_is_mariadb:
return self.connection.mysql_version >= (10, 8)
return self.connection.mysql_version >= (8, 0, 1)
@cached_property
def supports_expression_indexes(self):
return (
not self.connection.mysql_is_mariadb
and self._mysql_storage_engine != "MyISAM"
and self.connection.mysql_version >= (8, 0, 13)
)
@cached_property
def supports_select_intersection(self):
is_mariadb = self.connection.mysql_is_mariadb
return is_mariadb or self.connection.mysql_version >= (8, 0, 31)
supports_select_difference = property(
operator.attrgetter("supports_select_intersection")
)
@cached_property
def can_rename_index(self):
if self.connection.mysql_is_mariadb:
return self.connection.mysql_version >= (10, 5, 2)
return True
@cached_property
def supports_expression_defaults(self):
if self.connection.mysql_is_mariadb:
return True
return self.connection.mysql_version >= (8, 0, 13)
# django/db/backends/mysql/schema.py
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from django.db.models import NOT_PROVIDED, F, UniqueConstraint
from django.db.models.constants import LOOKUP_SEP
class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
sql_rename_table = "RENAME TABLE %(old_table)s TO %(new_table)s"
sql_alter_column_null = "MODIFY %(column)s %(type)s NULL"
sql_alter_column_not_null = "MODIFY %(column)s %(type)s NOT NULL"
sql_alter_column_type = "MODIFY %(column)s %(type)s%(collation)s%(comment)s"
sql_alter_column_no_default_null = "ALTER COLUMN %(column)s SET DEFAULT NULL"
# No 'CASCADE' which works as a no-op in MySQL but is undocumented
sql_delete_column = "ALTER TABLE %(table)s DROP COLUMN %(column)s"
sql_delete_unique = "ALTER TABLE %(table)s DROP INDEX %(name)s"
sql_create_column_inline_fk = (
", ADD CONSTRAINT %(name)s FOREIGN KEY (%(column)s) "
"REFERENCES %(to_table)s(%(to_column)s)"
)
sql_delete_fk = "ALTER TABLE %(table)s DROP FOREIGN KEY %(name)s"
sql_delete_index = "DROP INDEX %(name)s ON %(table)s"
    sql_rename_index = (
        "ALTER TABLE %(table)s RENAME INDEX %(old_name)s TO %(new_name)s"
    )
sql_create_pk = (
"ALTER TABLE %(table)s ADD CONSTRAINT %(name)s PRIMARY KEY (%(columns)s)"
)
sql_delete_pk = "ALTER TABLE %(table)s DROP PRIMARY KEY"
sql_create_index = "CREATE INDEX %(name)s ON %(table)s (%(columns)s)%(extra)s"
sql_alter_table_comment = "ALTER TABLE %(table)s COMMENT = %(comment)s"
sql_alter_column_comment = None
@property
def sql_delete_check(self):
if self.connection.mysql_is_mariadb:
# The name of the column check constraint is the same as the field
            # name on MariaDB. Adding an IF EXISTS clause prevents migrations
            # from crashing. The constraint is removed during a "MODIFY"
            # column statement.
return "ALTER TABLE %(table)s DROP CONSTRAINT IF EXISTS %(name)s"
return "ALTER TABLE %(table)s DROP CHECK %(name)s"
@property
def sql_rename_column(self):
# MariaDB >= 10.5.2 and MySQL >= 8.0.4 support an
# "ALTER TABLE ... RENAME COLUMN" statement.
if self.connection.mysql_is_mariadb:
if self.connection.mysql_version >= (10, 5, 2):
return super().sql_rename_column
elif self.connection.mysql_version >= (8, 0, 4):
return super().sql_rename_column
return "ALTER TABLE %(table)s CHANGE %(old_column)s %(new_column)s %(type)s"
def quote_value(self, value):
self.connection.ensure_connection()
# MySQLdb escapes to string, PyMySQL to bytes.
quoted = self.connection.connection.escape(
value, self.connection.connection.encoders
)
if isinstance(value, str) and isinstance(quoted, bytes):
quoted = quoted.decode()
return quoted
def _is_limited_data_type(self, field):
db_type = field.db_type(self.connection)
return (
db_type is not None
and db_type.lower() in self.connection._limited_data_types
)
def skip_default(self, field):
if not self._supports_limited_data_type_defaults:
return self._is_limited_data_type(field)
return False
def skip_default_on_alter(self, field):
if self._is_limited_data_type(field) and not self.connection.mysql_is_mariadb:
# MySQL doesn't support defaults for BLOB and TEXT in the
# ALTER COLUMN statement.
return True
return False
@property
def _supports_limited_data_type_defaults(self):
# MariaDB and MySQL >= 8.0.13 support defaults for BLOB and TEXT.
if self.connection.mysql_is_mariadb:
return True
return self.connection.mysql_version >= (8, 0, 13)
def _column_default_sql(self, field):
if (
not self.connection.mysql_is_mariadb
and self._supports_limited_data_type_defaults
and self._is_limited_data_type(field)
):
# MySQL supports defaults for BLOB and TEXT columns only if the
# default value is written as an expression i.e. in parentheses.
return "(%s)"
return super()._column_default_sql(field)
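    # Illustrative sketch: for a TextField on MySQL >= 8.0.13 this returns
    # "(%s)", so the default value is emitted inside parentheses (e.g.
    # DEFAULT ('hello')); other fields keep the base class's plain "%s"
    # placeholder.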
def add_field(self, model, field):
super().add_field(model, field)
# Simulate the effect of a one-off default.
# field.default may be unhashable, so a set isn't used for "in" check.
if self.skip_default(field) and field.default not in (None, NOT_PROVIDED):
effective_default = self.effective_default(field)
self.execute(
"UPDATE %(table)s SET %(column)s = %%s"
% {
"table": self.quote_name(model._meta.db_table),
"column": self.quote_name(field.column),
},
[effective_default],
)
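    # Illustrative sketch (hypothetical names): adding a TextField with
    # default="x" on MySQL < 8.0.13 skips DEFAULT in the DDL (see
    # skip_default()) and instead backfills existing rows with
    #
    #     UPDATE `app_model` SET `body` = %s  -- params: ["x"]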
def remove_constraint(self, model, constraint):
if (
isinstance(constraint, UniqueConstraint)
and constraint.create_sql(model, self) is not None
):
self._create_missing_fk_index(
model,
fields=constraint.fields,
expressions=constraint.expressions,
)
super().remove_constraint(model, constraint)
def remove_index(self, model, index):
self._create_missing_fk_index(
model,
fields=[field_name for field_name, _ in index.fields_orders],
expressions=index.expressions,
)
super().remove_index(model, index)
def _field_should_be_indexed(self, model, field):
if not super()._field_should_be_indexed(model, field):
return False
storage = self.connection.introspection.get_storage_engine(
self.connection.cursor(), model._meta.db_table
)
# No need to create an index for ForeignKey fields except if
# db_constraint=False because the index from that constraint won't be
# created.
if (
storage == "InnoDB"
and field.get_internal_type() == "ForeignKey"
and field.db_constraint
):
return False
return not self._is_limited_data_type(field)
def _create_missing_fk_index(
self,
model,
*,
fields,
expressions=None,
):
"""
MySQL can remove an implicit FK index on a field when that field is
covered by another index like a unique_together. "covered" here means
that the more complex index has the FK field as its first field (see
https://bugs.mysql.com/bug.php?id=37910).
Manually create an implicit FK index to make it possible to remove the
composed index.
"""
first_field_name = None
if fields:
first_field_name = fields[0]
elif (
expressions
and self.connection.features.supports_expression_indexes
and isinstance(expressions[0], F)
and LOOKUP_SEP not in expressions[0].name
):
first_field_name = expressions[0].name
if not first_field_name:
return
first_field = model._meta.get_field(first_field_name)
if first_field.get_internal_type() == "ForeignKey":
column = self.connection.introspection.identifier_converter(
first_field.column
)
with self.connection.cursor() as cursor:
constraint_names = [
name
for name, infodict in self.connection.introspection.get_constraints(
cursor, model._meta.db_table
).items()
if infodict["index"] and infodict["columns"][0] == column
]
            # There are no other indexes that start with the FK field, only
# the index that is expected to be deleted.
if len(constraint_names) == 1:
self.execute(
self._create_index_sql(model, fields=[first_field], suffix="")
)
def _delete_composed_index(self, model, fields, *args):
self._create_missing_fk_index(model, fields=fields)
return super()._delete_composed_index(model, fields, *args)
def _set_field_new_type(self, field, new_type):
"""
        Keep the NULL and DEFAULT properties of the old field. If either has
        changed, it is handled separately.
"""
if field.db_default is not NOT_PROVIDED:
default_sql, params = self.db_default_sql(field)
default_sql %= tuple(self.quote_value(p) for p in params)
new_type += f" DEFAULT {default_sql}"
if field.null:
new_type += " NULL"
else:
new_type += " NOT NULL"
return new_type
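    # Illustrative sketch (hypothetical field): for a nullable IntegerField
    # with db_default=5, an incoming new_type of "integer" becomes roughly
    # "integer DEFAULT 5 NULL", preserving the DEFAULT and NULL clauses that
    # MODIFY would otherwise reset.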
def _alter_column_type_sql(
self, model, old_field, new_field, new_type, old_collation, new_collation
):
new_type = self._set_field_new_type(old_field, new_type)
return super()._alter_column_type_sql(
model, old_field, new_field, new_type, old_collation, new_collation
)
def _field_db_check(self, field, field_db_params):
if self.connection.mysql_is_mariadb and self.connection.mysql_version >= (
10,
5,
2,
):
return super()._field_db_check(field, field_db_params)
        # On MySQL and MariaDB < 10.5.2 (no support for
        # "ALTER TABLE ... RENAME COLUMN" statements), use check constraints
        # that reference the actual column name, as they require explicit
        # recreation when the column is renamed anyway.
return field_db_params["check"]
def _rename_field_sql(self, table, old_field, new_field, new_type):
new_type = self._set_field_new_type(old_field, new_type)
return super()._rename_field_sql(table, old_field, new_field, new_type)
def _alter_column_comment_sql(self, model, new_field, new_type, new_db_comment):
        # The comment is altered when altering the column type.
return "", []
def _comment_sql(self, comment):
comment_sql = super()._comment_sql(comment)
return f" COMMENT {comment_sql}"
def _alter_column_null_sql(self, model, old_field, new_field):
if new_field.db_default is NOT_PROVIDED:
return super()._alter_column_null_sql(model, old_field, new_field)
new_db_params = new_field.db_parameters(connection=self.connection)
type_sql = self._set_field_new_type(new_field, new_db_params["type"])
return (
"MODIFY %(column)s %(type)s"
% {
"column": self.quote_name(new_field.column),
"type": type_sql,
},
[],
)
# django/db/backends/postgresql/features.py
import operator
from django.db import DataError, InterfaceError
from django.db.backends.base.features import BaseDatabaseFeatures
from django.db.backends.postgresql.psycopg_any import is_psycopg3
from django.utils.functional import cached_property
class DatabaseFeatures(BaseDatabaseFeatures):
minimum_database_version = (12,)
allows_group_by_selected_pks = True
can_return_columns_from_insert = True
can_return_rows_from_bulk_insert = True
has_real_datatype = True
has_native_uuid_field = True
has_native_duration_field = True
has_native_json_field = True
can_defer_constraint_checks = True
has_select_for_update = True
has_select_for_update_nowait = True
has_select_for_update_of = True
has_select_for_update_skip_locked = True
has_select_for_no_key_update = True
can_release_savepoints = True
supports_comments = True
supports_tablespaces = True
supports_transactions = True
can_introspect_materialized_views = True
can_distinct_on_fields = True
can_rollback_ddl = True
schema_editor_uses_clientside_param_binding = True
supports_combined_alters = True
nulls_order_largest = True
closed_cursor_error_class = InterfaceError
greatest_least_ignores_nulls = True
can_clone_databases = True
supports_temporal_subtraction = True
supports_slicing_ordering_in_compound = True
create_test_procedure_without_params_sql = """
CREATE FUNCTION test_procedure () RETURNS void AS $$
DECLARE
V_I INTEGER;
BEGIN
V_I := 1;
END;
$$ LANGUAGE plpgsql;"""
create_test_procedure_with_int_param_sql = """
CREATE FUNCTION test_procedure (P_I INTEGER) RETURNS void AS $$
DECLARE
V_I INTEGER;
BEGIN
V_I := P_I;
END;
$$ LANGUAGE plpgsql;"""
create_test_table_with_composite_primary_key = """
CREATE TABLE test_table_composite_pk (
column_1 INTEGER NOT NULL,
column_2 INTEGER NOT NULL,
PRIMARY KEY(column_1, column_2)
)
"""
requires_casted_case_in_updates = True
supports_over_clause = True
only_supports_unbounded_with_preceding_and_following = True
supports_aggregate_filter_clause = True
supported_explain_formats = {"JSON", "TEXT", "XML", "YAML"}
supports_deferrable_unique_constraints = True
has_json_operators = True
json_key_contains_list_matching_requires_list = True
supports_update_conflicts = True
supports_update_conflicts_with_target = True
supports_covering_indexes = True
can_rename_index = True
test_collations = {
"non_default": "sv-x-icu",
"swedish_ci": "sv-x-icu",
}
test_now_utc_template = "STATEMENT_TIMESTAMP() AT TIME ZONE 'UTC'"
insert_test_table_with_defaults = "INSERT INTO {} DEFAULT VALUES"
django_test_skips = {
"opclasses are PostgreSQL only.": {
"indexes.tests.SchemaIndexesNotPostgreSQLTests."
"test_create_index_ignores_opclasses",
},
"PostgreSQL requires casting to text.": {
"lookup.tests.LookupTests.test_textfield_exact_null",
},
}
@cached_property
def django_test_expected_failures(self):
expected_failures = set()
if self.uses_server_side_binding:
expected_failures.update(
{
# Parameters passed to expressions in SELECT and GROUP BY
# clauses are not recognized as the same values when using
# server-side binding cursors (#34255).
"aggregation.tests.AggregateTestCase."
"test_group_by_nested_expression_with_params",
}
)
return expected_failures
@cached_property
def uses_server_side_binding(self):
options = self.connection.settings_dict["OPTIONS"]
return is_psycopg3 and options.get("server_side_binding") is True
@cached_property
def prohibits_null_characters_in_text_exception(self):
if is_psycopg3:
return DataError, "PostgreSQL text fields cannot contain NUL (0x00) bytes"
else:
return ValueError, "A string literal cannot contain NUL (0x00) characters."
@cached_property
def introspected_field_types(self):
return {
**super().introspected_field_types,
"PositiveBigIntegerField": "BigIntegerField",
"PositiveIntegerField": "IntegerField",
"PositiveSmallIntegerField": "SmallIntegerField",
}
@cached_property
def is_postgresql_13(self):
return self.connection.pg_version >= 130000
@cached_property
def is_postgresql_14(self):
return self.connection.pg_version >= 140000
has_bit_xor = property(operator.attrgetter("is_postgresql_14"))
supports_covering_spgist_indexes = property(operator.attrgetter("is_postgresql_14"))
supports_unlimited_charfield = True
# django/db/backends/postgresql/schema.py
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from django.db.backends.ddl_references import IndexColumns
from django.db.backends.postgresql.psycopg_any import sql
from django.db.backends.utils import strip_quotes
class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
# Setting all constraints to IMMEDIATE to allow changing data in the same
# transaction.
sql_update_with_default = (
"UPDATE %(table)s SET %(column)s = %(default)s WHERE %(column)s IS NULL"
"; SET CONSTRAINTS ALL IMMEDIATE"
)
sql_alter_sequence_type = "ALTER SEQUENCE IF EXISTS %(sequence)s AS %(type)s"
sql_delete_sequence = "DROP SEQUENCE IF EXISTS %(sequence)s CASCADE"
sql_create_index = (
"CREATE INDEX %(name)s ON %(table)s%(using)s "
"(%(columns)s)%(include)s%(extra)s%(condition)s"
)
sql_create_index_concurrently = (
"CREATE INDEX CONCURRENTLY %(name)s ON %(table)s%(using)s "
"(%(columns)s)%(include)s%(extra)s%(condition)s"
)
sql_delete_index = "DROP INDEX IF EXISTS %(name)s"
sql_delete_index_concurrently = "DROP INDEX CONCURRENTLY IF EXISTS %(name)s"
# Setting the constraint to IMMEDIATE to allow changing data in the same
# transaction.
sql_create_column_inline_fk = (
"CONSTRAINT %(name)s REFERENCES %(to_table)s(%(to_column)s)%(deferrable)s"
"; SET CONSTRAINTS %(namespace)s%(name)s IMMEDIATE"
)
# Setting the constraint to IMMEDIATE runs any deferred checks to allow
# dropping it in the same transaction.
sql_delete_fk = (
"SET CONSTRAINTS %(name)s IMMEDIATE; "
"ALTER TABLE %(table)s DROP CONSTRAINT %(name)s"
)
sql_delete_procedure = "DROP FUNCTION %(procedure)s(%(param_types)s)"
def execute(self, sql, params=()):
# Merge the query client-side, as PostgreSQL won't do it server-side.
if params is None:
return super().execute(sql, params)
sql = self.connection.ops.compose_sql(str(sql), params)
# Don't let the superclass touch anything.
return super().execute(sql, None)
sql_add_identity = (
"ALTER TABLE %(table)s ALTER COLUMN %(column)s ADD "
"GENERATED BY DEFAULT AS IDENTITY"
)
    sql_drop_identity = (
"ALTER TABLE %(table)s ALTER COLUMN %(column)s DROP IDENTITY IF EXISTS"
)
def quote_value(self, value):
return sql.quote(value, self.connection.connection)
def _field_indexes_sql(self, model, field):
output = super()._field_indexes_sql(model, field)
like_index_statement = self._create_like_index_sql(model, field)
if like_index_statement is not None:
output.append(like_index_statement)
return output
def _field_data_type(self, field):
if field.is_relation:
return field.rel_db_type(self.connection)
return self.connection.data_types.get(
field.get_internal_type(),
field.db_type(self.connection),
)
def _field_base_data_types(self, field):
# Yield base data types for array fields.
if field.base_field.get_internal_type() == "ArrayField":
yield from self._field_base_data_types(field.base_field)
else:
yield self._field_data_type(field.base_field)
def _create_like_index_sql(self, model, field):
"""
Return the statement to create an index with varchar operator pattern
when the column type is 'varchar' or 'text', otherwise return None.
"""
db_type = field.db_type(connection=self.connection)
if db_type is not None and (field.db_index or field.unique):
# Fields with database column types of `varchar` and `text` need
# a second index that specifies their operator class, which is
# needed when performing correct LIKE queries outside the
# C locale. See #12234.
#
# The same doesn't apply to array fields such as varchar[size]
# and text[size], so skip them.
if "[" in db_type:
return None
            # Non-deterministic collations on PostgreSQL don't support indexes
# for operator classes varchar_pattern_ops/text_pattern_ops.
if getattr(field, "db_collation", None) or (
field.is_relation and getattr(field.target_field, "db_collation", None)
):
return None
if db_type.startswith("varchar"):
return self._create_index_sql(
model,
fields=[field],
suffix="_like",
opclasses=["varchar_pattern_ops"],
)
elif db_type.startswith("text"):
return self._create_index_sql(
model,
fields=[field],
suffix="_like",
opclasses=["text_pattern_ops"],
)
return None
def _using_sql(self, new_field, old_field):
using_sql = " USING %(column)s::%(type)s"
new_internal_type = new_field.get_internal_type()
old_internal_type = old_field.get_internal_type()
if new_internal_type == "ArrayField" and new_internal_type == old_internal_type:
# Compare base data types for array fields.
if list(self._field_base_data_types(old_field)) != list(
self._field_base_data_types(new_field)
):
return using_sql
elif self._field_data_type(old_field) != self._field_data_type(new_field):
return using_sql
return ""
def _get_sequence_name(self, table, column):
with self.connection.cursor() as cursor:
for sequence in self.connection.introspection.get_sequences(cursor, table):
if sequence["column"] == column:
return sequence["name"]
return None
def _alter_column_type_sql(
self, model, old_field, new_field, new_type, old_collation, new_collation
):
# Drop indexes on varchar/text/citext columns that are changing to a
# different type.
old_db_params = old_field.db_parameters(connection=self.connection)
old_type = old_db_params["type"]
if (old_field.db_index or old_field.unique) and (
(old_type.startswith("varchar") and not new_type.startswith("varchar"))
or (old_type.startswith("text") and not new_type.startswith("text"))
or (old_type.startswith("citext") and not new_type.startswith("citext"))
):
index_name = self._create_index_name(
model._meta.db_table, [old_field.column], suffix="_like"
)
self.execute(self._delete_index_sql(model, index_name))
self.sql_alter_column_type = (
"ALTER COLUMN %(column)s TYPE %(type)s%(collation)s"
)
# Cast when data type changed.
if using_sql := self._using_sql(new_field, old_field):
self.sql_alter_column_type += using_sql
new_internal_type = new_field.get_internal_type()
old_internal_type = old_field.get_internal_type()
# Make ALTER TYPE with IDENTITY make sense.
table = strip_quotes(model._meta.db_table)
auto_field_types = {
"AutoField",
"BigAutoField",
"SmallAutoField",
}
old_is_auto = old_internal_type in auto_field_types
new_is_auto = new_internal_type in auto_field_types
if new_is_auto and not old_is_auto:
column = strip_quotes(new_field.column)
return (
(
self.sql_alter_column_type
% {
"column": self.quote_name(column),
"type": new_type,
"collation": "",
},
[],
),
[
(
self.sql_add_identity
% {
"table": self.quote_name(table),
"column": self.quote_name(column),
},
[],
),
],
)
elif old_is_auto and not new_is_auto:
# Drop IDENTITY if exists (pre-Django 4.1 serial columns don't have
# it).
self.execute(
                self.sql_drop_identity
% {
"table": self.quote_name(table),
"column": self.quote_name(strip_quotes(new_field.column)),
}
)
column = strip_quotes(new_field.column)
fragment, _ = super()._alter_column_type_sql(
model, old_field, new_field, new_type, old_collation, new_collation
)
# Drop the sequence if exists (Django 4.1+ identity columns don't
# have it).
other_actions = []
if sequence_name := self._get_sequence_name(table, column):
other_actions = [
(
self.sql_delete_sequence
% {
"sequence": self.quote_name(sequence_name),
},
[],
)
]
return fragment, other_actions
elif new_is_auto and old_is_auto and old_internal_type != new_internal_type:
fragment, _ = super()._alter_column_type_sql(
model, old_field, new_field, new_type, old_collation, new_collation
)
column = strip_quotes(new_field.column)
db_types = {
"AutoField": "integer",
"BigAutoField": "bigint",
"SmallAutoField": "smallint",
}
# Alter the sequence type if exists (Django 4.1+ identity columns
# don't have it).
other_actions = []
if sequence_name := self._get_sequence_name(table, column):
other_actions = [
(
self.sql_alter_sequence_type
% {
"sequence": self.quote_name(sequence_name),
"type": db_types[new_internal_type],
},
[],
),
]
return fragment, other_actions
else:
return super()._alter_column_type_sql(
model, old_field, new_field, new_type, old_collation, new_collation
)
def _alter_column_collation_sql(
self, model, new_field, new_type, new_collation, old_field
):
sql = self.sql_alter_column_collate
# Cast when data type changed.
if using_sql := self._using_sql(new_field, old_field):
sql += using_sql
return (
sql
% {
"column": self.quote_name(new_field.column),
"type": new_type,
"collation": " " + self._collate_sql(new_collation)
if new_collation
else "",
},
[],
)
def _alter_field(
self,
model,
old_field,
new_field,
old_type,
new_type,
old_db_params,
new_db_params,
strict=False,
):
super()._alter_field(
model,
old_field,
new_field,
old_type,
new_type,
old_db_params,
new_db_params,
strict,
)
# Added an index? Create any PostgreSQL-specific indexes.
if (not (old_field.db_index or old_field.unique) and new_field.db_index) or (
not old_field.unique and new_field.unique
):
like_index_statement = self._create_like_index_sql(model, new_field)
if like_index_statement is not None:
self.execute(like_index_statement)
# Removed an index? Drop any PostgreSQL-specific indexes.
if old_field.unique and not (new_field.db_index or new_field.unique):
index_to_remove = self._create_index_name(
model._meta.db_table, [old_field.column], suffix="_like"
)
self.execute(self._delete_index_sql(model, index_to_remove))
def _index_columns(self, table, columns, col_suffixes, opclasses):
if opclasses:
return IndexColumns(
table,
columns,
self.quote_name,
col_suffixes=col_suffixes,
opclasses=opclasses,
)
return super()._index_columns(table, columns, col_suffixes, opclasses)
def add_index(self, model, index, concurrently=False):
self.execute(
index.create_sql(model, self, concurrently=concurrently), params=None
)
def remove_index(self, model, index, concurrently=False):
self.execute(index.remove_sql(model, self, concurrently=concurrently))
def _delete_index_sql(self, model, name, sql=None, concurrently=False):
sql = (
self.sql_delete_index_concurrently
if concurrently
else self.sql_delete_index
)
return super()._delete_index_sql(model, name, sql)
def _create_index_sql(
self,
model,
*,
fields=None,
name=None,
suffix="",
using="",
db_tablespace=None,
col_suffixes=(),
sql=None,
opclasses=(),
condition=None,
concurrently=False,
include=None,
expressions=None,
):
sql = sql or (
self.sql_create_index
if not concurrently
else self.sql_create_index_concurrently
)
return super()._create_index_sql(
model,
fields=fields,
name=name,
suffix=suffix,
using=using,
db_tablespace=db_tablespace,
col_suffixes=col_suffixes,
sql=sql,
opclasses=opclasses,
condition=condition,
include=include,
expressions=expressions,
)
# django/db/backends/sqlite3/features.py
import operator
from django.db import transaction
from django.db.backends.base.features import BaseDatabaseFeatures
from django.db.utils import OperationalError
from django.utils.functional import cached_property
from .base import Database
class DatabaseFeatures(BaseDatabaseFeatures):
minimum_database_version = (3, 21)
test_db_allows_multiple_connections = False
supports_unspecified_pk = True
supports_timezones = False
max_query_params = 999
supports_transactions = True
atomic_transactions = False
can_rollback_ddl = True
can_create_inline_fk = False
requires_literal_defaults = True
can_clone_databases = True
supports_temporal_subtraction = True
ignores_table_name_case = True
supports_cast_with_precision = False
time_cast_precision = 3
can_release_savepoints = True
has_case_insensitive_like = True
# Is "ALTER TABLE ... RENAME COLUMN" supported?
can_alter_table_rename_column = Database.sqlite_version_info >= (3, 25, 0)
# Is "ALTER TABLE ... DROP COLUMN" supported?
can_alter_table_drop_column = Database.sqlite_version_info >= (3, 35, 5)
supports_parentheses_in_compound = False
can_defer_constraint_checks = True
supports_over_clause = Database.sqlite_version_info >= (3, 25, 0)
supports_frame_range_fixed_distance = Database.sqlite_version_info >= (3, 28, 0)
supports_aggregate_filter_clause = Database.sqlite_version_info >= (3, 30, 1)
supports_order_by_nulls_modifier = Database.sqlite_version_info >= (3, 30, 0)
# NULLS LAST/FIRST emulation on < 3.30 requires subquery wrapping.
requires_compound_order_by_subquery = Database.sqlite_version_info < (3, 30)
order_by_nulls_first = True
supports_json_field_contains = False
supports_update_conflicts = Database.sqlite_version_info >= (3, 24, 0)
supports_update_conflicts_with_target = supports_update_conflicts
test_collations = {
"ci": "nocase",
"cs": "binary",
"non_default": "nocase",
}
django_test_expected_failures = {
# The django_format_dtdelta() function doesn't properly handle mixed
# Date/DateTime fields and timedeltas.
"expressions.tests.FTimeDeltaTests.test_mixed_comparisons1",
}
create_test_table_with_composite_primary_key = """
CREATE TABLE test_table_composite_pk (
column_1 INTEGER NOT NULL,
column_2 INTEGER NOT NULL,
PRIMARY KEY(column_1, column_2)
)
"""
insert_test_table_with_defaults = 'INSERT INTO {} ("null") VALUES (1)'
supports_default_keyword_in_insert = False
@cached_property
def django_test_skips(self):
skips = {
"SQLite stores values rounded to 15 significant digits.": {
"model_fields.test_decimalfield.DecimalFieldTests."
"test_fetch_from_db_without_float_rounding",
},
"SQLite naively remakes the table on field alteration.": {
"schema.tests.SchemaTests.test_unique_no_unnecessary_fk_drops",
"schema.tests.SchemaTests.test_unique_and_reverse_m2m",
"schema.tests.SchemaTests."
"test_alter_field_default_doesnt_perform_queries",
"schema.tests.SchemaTests."
"test_rename_column_renames_deferred_sql_references",
},
"SQLite doesn't support negative precision for ROUND().": {
"db_functions.math.test_round.RoundTests."
"test_null_with_negative_precision",
"db_functions.math.test_round.RoundTests."
"test_decimal_with_negative_precision",
"db_functions.math.test_round.RoundTests."
"test_float_with_negative_precision",
"db_functions.math.test_round.RoundTests."
"test_integer_with_negative_precision",
},
}
if Database.sqlite_version_info < (3, 27):
skips.update(
{
"Nondeterministic failure on SQLite < 3.27.": {
"expressions_window.tests.WindowFunctionTests."
"test_subquery_row_range_rank",
},
}
)
if self.connection.is_in_memory_db():
skips.update(
{
"the sqlite backend's close() method is a no-op when using an "
"in-memory database": {
"servers.test_liveserverthread.LiveServerThreadTest."
"test_closes_connections",
"servers.tests.LiveServerTestCloseConnectionTest."
"test_closes_connections",
},
"For SQLite in-memory tests, closing the connection destroys"
"the database.": {
"test_utils.tests.AssertNumQueriesUponConnectionTests."
"test_ignores_connection_configuration_queries",
},
}
)
else:
skips.update(
{
"Only connections to in-memory SQLite databases are passed to the "
"server thread.": {
"servers.tests.LiveServerInMemoryDatabaseLockTest."
"test_in_memory_database_lock",
},
"multiprocessing's start method is checked only for in-memory "
"SQLite databases": {
"backends.sqlite.test_creation.TestDbSignatureTests."
"test_get_test_db_clone_settings_not_supported",
},
}
)
return skips
@cached_property
def supports_atomic_references_rename(self):
return Database.sqlite_version_info >= (3, 26, 0)
@cached_property
def introspected_field_types(self):
return {
**super().introspected_field_types,
"BigAutoField": "AutoField",
"DurationField": "BigIntegerField",
"GenericIPAddressField": "CharField",
"SmallAutoField": "AutoField",
}
@cached_property
def supports_json_field(self):
with self.connection.cursor() as cursor:
try:
with transaction.atomic(self.connection.alias):
cursor.execute('SELECT JSON(\'{"a": "b"}\')')
except OperationalError:
return False
return True
can_introspect_json_field = property(operator.attrgetter("supports_json_field"))
has_json_object_function = property(operator.attrgetter("supports_json_field"))
@cached_property
def can_return_columns_from_insert(self):
return Database.sqlite_version_info >= (3, 35)
can_return_rows_from_bulk_insert = property(
operator.attrgetter("can_return_columns_from_insert")
)
# django/db/backends/sqlite3/schema.py
import copy
from decimal import Decimal
from django.apps.registry import Apps
from django.db import NotSupportedError
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from django.db.backends.ddl_references import Statement
from django.db.backends.utils import strip_quotes
from django.db.models import NOT_PROVIDED, UniqueConstraint
from django.db.transaction import atomic
class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
sql_delete_table = "DROP TABLE %(table)s"
sql_create_fk = None
sql_create_inline_fk = (
"REFERENCES %(to_table)s (%(to_column)s) DEFERRABLE INITIALLY DEFERRED"
)
sql_create_column_inline_fk = sql_create_inline_fk
sql_delete_column = "ALTER TABLE %(table)s DROP COLUMN %(column)s"
sql_create_unique = "CREATE UNIQUE INDEX %(name)s ON %(table)s (%(columns)s)"
sql_delete_unique = "DROP INDEX %(name)s"
def __enter__(self):
        # Some SQLite schema alterations need foreign key constraints to be
        # disabled. Enforce it here for the duration of the schema editing.
if not self.connection.disable_constraint_checking():
raise NotSupportedError(
"SQLite schema editor cannot be used while foreign key "
"constraint checks are enabled. Make sure to disable them "
"before entering a transaction.atomic() context because "
"SQLite does not support disabling them in the middle of "
"a multi-statement transaction."
)
return super().__enter__()
def __exit__(self, exc_type, exc_value, traceback):
self.connection.check_constraints()
super().__exit__(exc_type, exc_value, traceback)
self.connection.enable_constraint_checking()
def quote_value(self, value):
# The backend "mostly works" without this function and there are use
# cases for compiling Python without the sqlite3 libraries (e.g.
# security hardening).
try:
import sqlite3
value = sqlite3.adapt(value)
except ImportError:
pass
except sqlite3.ProgrammingError:
pass
# Manual emulation of SQLite parameter quoting
if isinstance(value, bool):
return str(int(value))
elif isinstance(value, (Decimal, float, int)):
return str(value)
elif isinstance(value, str):
return "'%s'" % value.replace("'", "''")
elif value is None:
return "NULL"
elif isinstance(value, (bytes, bytearray, memoryview)):
# Bytes are only allowed for BLOB fields, encoded as string
# literals containing hexadecimal data and preceded by a single "X"
# character.
return "X'%s'" % value.hex()
else:
raise ValueError(
"Cannot quote parameter value %r of type %s" % (value, type(value))
)
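    # Illustrative examples of the manual quoting above:
    #
    #     editor.quote_value(True)            # -> "1"
    #     editor.quote_value(Decimal("1.5"))  # -> "1.5"
    #     editor.quote_value("O'Brien")       # -> "'O''Brien'"
    #     editor.quote_value(b"\x1f")         # -> "X'1f'"
    #     editor.quote_value(None)            # -> "NULL"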
def prepare_default(self, value):
return self.quote_value(value)
def _is_referenced_by_fk_constraint(
self, table_name, column_name=None, ignore_self=False
):
"""
Return whether or not the provided table name is referenced by another
one. If `column_name` is specified, only references pointing to that
column are considered. If `ignore_self` is True, self-referential
constraints are ignored.
"""
with self.connection.cursor() as cursor:
for other_table in self.connection.introspection.get_table_list(cursor):
if ignore_self and other_table.name == table_name:
continue
relations = self.connection.introspection.get_relations(
cursor, other_table.name
)
for constraint_column, constraint_table in relations.values():
if constraint_table == table_name and (
column_name is None or constraint_column == column_name
):
return True
return False
def alter_db_table(
self, model, old_db_table, new_db_table, disable_constraints=True
):
if (
not self.connection.features.supports_atomic_references_rename
and disable_constraints
and self._is_referenced_by_fk_constraint(old_db_table)
):
if self.connection.in_atomic_block:
raise NotSupportedError(
(
"Renaming the %r table while in a transaction is not "
"supported on SQLite < 3.26 because it would break referential "
"integrity. Try adding `atomic = False` to the Migration class."
)
% old_db_table
)
self.connection.enable_constraint_checking()
super().alter_db_table(model, old_db_table, new_db_table)
self.connection.disable_constraint_checking()
else:
super().alter_db_table(model, old_db_table, new_db_table)
def alter_field(self, model, old_field, new_field, strict=False):
if not self._field_should_be_altered(old_field, new_field):
return
old_field_name = old_field.name
table_name = model._meta.db_table
_, old_column_name = old_field.get_attname_column()
if (
new_field.name != old_field_name
and not self.connection.features.supports_atomic_references_rename
and self._is_referenced_by_fk_constraint(
table_name, old_column_name, ignore_self=True
)
):
if self.connection.in_atomic_block:
raise NotSupportedError(
(
"Renaming the %r.%r column while in a transaction is not "
"supported on SQLite < 3.26 because it would break referential "
"integrity. Try adding `atomic = False` to the Migration class."
)
% (model._meta.db_table, old_field_name)
)
with atomic(self.connection.alias):
super().alter_field(model, old_field, new_field, strict=strict)
# Follow SQLite's documented procedure for performing changes
# that don't affect the on-disk content.
# https://sqlite.org/lang_altertable.html#otheralter
with self.connection.cursor() as cursor:
schema_version = cursor.execute("PRAGMA schema_version").fetchone()[
0
]
cursor.execute("PRAGMA writable_schema = 1")
references_template = ' REFERENCES "%s" ("%%s") ' % table_name
new_column_name = new_field.get_attname_column()[1]
search = references_template % old_column_name
replacement = references_template % new_column_name
cursor.execute(
"UPDATE sqlite_master SET sql = replace(sql, %s, %s)",
(search, replacement),
)
cursor.execute("PRAGMA schema_version = %d" % (schema_version + 1))
cursor.execute("PRAGMA writable_schema = 0")
# The integrity check will raise an exception and rollback
# the transaction if the sqlite_master updates corrupt the
# database.
cursor.execute("PRAGMA integrity_check")
# Perform a VACUUM to refresh the database representation from
# the sqlite_master table.
with self.connection.cursor() as cursor:
cursor.execute("VACUUM")
else:
super().alter_field(model, old_field, new_field, strict=strict)
def _remake_table(
self, model, create_field=None, delete_field=None, alter_fields=None
):
"""
Shortcut to transform a model from old_model into new_model
This follows the correct procedure to perform non-rename or column
addition operations based on SQLite's documentation
https://www.sqlite.org/lang_altertable.html#caution
The essential steps are:
1. Create a table with the updated definition called "new__app_model"
2. Copy the data from the existing "app_model" table to the new table
3. Drop the "app_model" table
4. Rename the "new__app_model" table to "app_model"
5. Restore any index of the previous "app_model" table.
"""
# Self-referential fields must be recreated rather than copied from
# the old model to ensure their remote_field.field_name doesn't refer
# to an altered field.
def is_self_referential(f):
return f.is_relation and f.remote_field.model is model
# Work out the new fields dict / mapping
body = {
f.name: f.clone() if is_self_referential(f) else f
for f in model._meta.local_concrete_fields
}
# Since mapping might mix column names and default values,
# its values must already be quoted.
mapping = {
f.column: self.quote_name(f.column)
for f in model._meta.local_concrete_fields
}
# This maps field names (not columns) for things like unique_together
rename_mapping = {}
# If any of the new or altered fields is introducing a new PK,
# remove the old one
restore_pk_field = None
alter_fields = alter_fields or []
if getattr(create_field, "primary_key", False) or any(
getattr(new_field, "primary_key", False) for _, new_field in alter_fields
):
for name, field in list(body.items()):
if field.primary_key and not any(
# Do not remove the old primary key when an altered field
# that introduces a primary key is the same field.
name == new_field.name
for _, new_field in alter_fields
):
field.primary_key = False
restore_pk_field = field
if field.auto_created:
del body[name]
del mapping[field.column]
# Add in any created fields
if create_field:
body[create_field.name] = create_field
# Choose a default and insert it into the copy map
if (
create_field.db_default is NOT_PROVIDED
and not create_field.many_to_many
and create_field.concrete
):
mapping[create_field.column] = self.prepare_default(
self.effective_default(create_field)
)
# Add in any altered fields
for alter_field in alter_fields:
old_field, new_field = alter_field
body.pop(old_field.name, None)
mapping.pop(old_field.column, None)
body[new_field.name] = new_field
if old_field.null and not new_field.null:
if new_field.db_default is NOT_PROVIDED:
default = self.prepare_default(self.effective_default(new_field))
else:
default, _ = self.db_default_sql(new_field)
case_sql = "coalesce(%(col)s, %(default)s)" % {
"col": self.quote_name(old_field.column),
"default": default,
}
mapping[new_field.column] = case_sql
else:
mapping[new_field.column] = self.quote_name(old_field.column)
rename_mapping[old_field.name] = new_field.name
# Remove any deleted fields
if delete_field:
del body[delete_field.name]
del mapping[delete_field.column]
# Remove any implicit M2M tables
if (
delete_field.many_to_many
and delete_field.remote_field.through._meta.auto_created
):
return self.delete_model(delete_field.remote_field.through)
# Work inside a new app registry
apps = Apps()
# Work out the new value of unique_together, taking renames into
# account
unique_together = [
[rename_mapping.get(n, n) for n in unique]
for unique in model._meta.unique_together
]
# RemovedInDjango51Warning.
# Work out the new value for index_together, taking renames into
# account
index_together = [
[rename_mapping.get(n, n) for n in index]
for index in model._meta.index_together
]
indexes = model._meta.indexes
if delete_field:
indexes = [
index for index in indexes if delete_field.name not in index.fields
]
constraints = list(model._meta.constraints)
# Provide isolated instances of the fields to the new model body so
# that the existing model's internals aren't interfered with when
# the dummy model is constructed.
body_copy = copy.deepcopy(body)
# Construct a new model with the new fields to allow self referential
# primary key to resolve to. This model won't ever be materialized as a
# table and solely exists for foreign key reference resolution purposes.
# This wouldn't be required if the schema editor was operating on model
# states instead of rendered models.
meta_contents = {
"app_label": model._meta.app_label,
"db_table": model._meta.db_table,
"unique_together": unique_together,
"index_together": index_together, # RemovedInDjango51Warning.
"indexes": indexes,
"constraints": constraints,
"apps": apps,
}
meta = type("Meta", (), meta_contents)
body_copy["Meta"] = meta
body_copy["__module__"] = model.__module__
type(model._meta.object_name, model.__bases__, body_copy)
# Construct a model with a renamed table name.
body_copy = copy.deepcopy(body)
meta_contents = {
"app_label": model._meta.app_label,
"db_table": "new__%s" % strip_quotes(model._meta.db_table),
"unique_together": unique_together,
"index_together": index_together, # RemovedInDjango51Warning.
"indexes": indexes,
"constraints": constraints,
"apps": apps,
}
meta = type("Meta", (), meta_contents)
body_copy["Meta"] = meta
body_copy["__module__"] = model.__module__
new_model = type("New%s" % model._meta.object_name, model.__bases__, body_copy)
# Create a new table with the updated schema.
self.create_model(new_model)
# Copy data from the old table into the new table
self.execute(
"INSERT INTO %s (%s) SELECT %s FROM %s"
% (
self.quote_name(new_model._meta.db_table),
", ".join(self.quote_name(x) for x in mapping),
", ".join(mapping.values()),
self.quote_name(model._meta.db_table),
)
)
# Delete the old table to make way for the new
self.delete_model(model, handle_autom2m=False)
# Rename the new table to take the place of the old
self.alter_db_table(
new_model,
new_model._meta.db_table,
model._meta.db_table,
disable_constraints=False,
)
# Run deferred SQL on correct table
for sql in self.deferred_sql:
self.execute(sql)
self.deferred_sql = []
# Fix any PK-removed field
if restore_pk_field:
restore_pk_field.primary_key = True
def delete_model(self, model, handle_autom2m=True):
if handle_autom2m:
super().delete_model(model)
else:
# Delete the table (and only that)
self.execute(
self.sql_delete_table
% {
"table": self.quote_name(model._meta.db_table),
}
)
# Remove all deferred statements referencing the deleted table.
for sql in list(self.deferred_sql):
if isinstance(sql, Statement) and sql.references_table(
model._meta.db_table
):
self.deferred_sql.remove(sql)
def add_field(self, model, field):
"""Create a field on a model."""
from django.db.models.expressions import Value
# Special-case implicit M2M tables.
if field.many_to_many and field.remote_field.through._meta.auto_created:
self.create_model(field.remote_field.through)
elif (
# Primary keys and unique fields are not supported in ALTER TABLE
# ADD COLUMN.
field.primary_key
or field.unique
or not field.null
# Fields with default values cannot be handled by the ALTER TABLE ADD
# COLUMN statement because DROP DEFAULT is not supported in
# ALTER TABLE.
or self.effective_default(field) is not None
# Fields with non-constant defaults cannot be handled by the ALTER
# TABLE ADD COLUMN statement.
or (
field.db_default is not NOT_PROVIDED
and not isinstance(field.db_default, Value)
)
):
self._remake_table(model, create_field=field)
else:
super().add_field(model, field)
def remove_field(self, model, field):
"""
Remove a field from a model. Usually involves deleting a column,
but for M2Ms may involve deleting a table.
"""
# M2M fields are a special case
if field.many_to_many:
# For implicit M2M tables, delete the auto-created table
if field.remote_field.through._meta.auto_created:
self.delete_model(field.remote_field.through)
# For explicit "through" M2M fields, do nothing
elif (
self.connection.features.can_alter_table_drop_column
# Primary keys, unique fields, indexed fields, and foreign keys are
# not supported in ALTER TABLE DROP COLUMN.
and not field.primary_key
and not field.unique
and not field.db_index
and not (field.remote_field and field.db_constraint)
):
super().remove_field(model, field)
# For everything else, remake.
else:
# It might not actually have a column behind it
if field.db_parameters(connection=self.connection)["type"] is None:
return
self._remake_table(model, delete_field=field)
def _alter_field(
self,
model,
old_field,
new_field,
old_type,
new_type,
old_db_params,
new_db_params,
strict=False,
):
"""Perform a "physical" (non-ManyToMany) field update."""
# Use "ALTER TABLE ... RENAME COLUMN" if only the column name
# changed and there aren't any constraints.
if (
self.connection.features.can_alter_table_rename_column
and old_field.column != new_field.column
and self.column_sql(model, old_field) == self.column_sql(model, new_field)
and not (
old_field.remote_field
and old_field.db_constraint
or new_field.remote_field
and new_field.db_constraint
)
):
return self.execute(
self._rename_field_sql(
model._meta.db_table, old_field, new_field, new_type
)
)
# Alter by remaking table
self._remake_table(model, alter_fields=[(old_field, new_field)])
# Rebuild tables with FKs pointing to this field.
old_collation = old_db_params.get("collation")
new_collation = new_db_params.get("collation")
if new_field.unique and (
old_type != new_type or old_collation != new_collation
):
related_models = set()
opts = new_field.model._meta
for remote_field in opts.related_objects:
# Ignore self-relationship since the table was already rebuilt.
if remote_field.related_model == model:
continue
if not remote_field.many_to_many:
if remote_field.field_name == new_field.name:
related_models.add(remote_field.related_model)
elif new_field.primary_key and remote_field.through._meta.auto_created:
related_models.add(remote_field.through)
if new_field.primary_key:
for many_to_many in opts.many_to_many:
# Ignore self-relationship since the table was already rebuilt.
if many_to_many.related_model == model:
continue
if many_to_many.remote_field.through._meta.auto_created:
related_models.add(many_to_many.remote_field.through)
for related_model in related_models:
self._remake_table(related_model)
def _alter_many_to_many(self, model, old_field, new_field, strict):
"""Alter M2Ms to repoint their to= endpoints."""
if (
old_field.remote_field.through._meta.db_table
== new_field.remote_field.through._meta.db_table
):
# The field name didn't change, but some options did, so we have to
# propagate this altering.
self._remake_table(
old_field.remote_field.through,
alter_fields=[
(
# The field that points to the target model is needed,
# so that table can be remade with the new m2m field -
# this is m2m_reverse_field_name().
old_field.remote_field.through._meta.get_field(
old_field.m2m_reverse_field_name()
),
new_field.remote_field.through._meta.get_field(
new_field.m2m_reverse_field_name()
),
),
(
# The field that points to the model itself is needed,
# so that table can be remade with the new self field -
# this is m2m_field_name().
old_field.remote_field.through._meta.get_field(
old_field.m2m_field_name()
),
new_field.remote_field.through._meta.get_field(
new_field.m2m_field_name()
),
),
],
)
return
# Make a new through table
self.create_model(new_field.remote_field.through)
# Copy the data across
self.execute(
"INSERT INTO %s (%s) SELECT %s FROM %s"
% (
self.quote_name(new_field.remote_field.through._meta.db_table),
", ".join(
[
"id",
new_field.m2m_column_name(),
new_field.m2m_reverse_name(),
]
),
", ".join(
[
"id",
old_field.m2m_column_name(),
old_field.m2m_reverse_name(),
]
),
self.quote_name(old_field.remote_field.through._meta.db_table),
)
)
# Delete the old through table
self.delete_model(old_field.remote_field.through)
def add_constraint(self, model, constraint):
if isinstance(constraint, UniqueConstraint) and (
constraint.condition
or constraint.contains_expressions
or constraint.include
or constraint.deferrable
):
super().add_constraint(model, constraint)
else:
self._remake_table(model)
def remove_constraint(self, model, constraint):
if isinstance(constraint, UniqueConstraint) and (
constraint.condition
or constraint.contains_expressions
or constraint.include
or constraint.deferrable
):
super().remove_constraint(model, constraint)
else:
self._remake_table(model)
def _collate_sql(self, collation):
return "COLLATE " + collation
|
bba258e4d91d94c93a594138617b4888c0a277b92a37feb49bbf134bd8d6e1cc | import subprocess
from django.core.management.base import BaseCommand, CommandError
from django.db import DEFAULT_DB_ALIAS, connections
class Command(BaseCommand):
help = (
"Runs the command-line client for specified database, or the "
"default database if none is provided."
)
requires_system_checks = []
def add_arguments(self, parser):
parser.add_argument(
"--database",
default=DEFAULT_DB_ALIAS,
help=(
"Nominates a database onto which to open a shell. Defaults to the "
'"default" database.'
),
)
parameters = parser.add_argument_group("parameters", prefix_chars="--")
parameters.add_argument("parameters", nargs="*")
def handle(self, **options):
connection = connections[options["database"]]
try:
connection.client.runshell(options["parameters"])
except FileNotFoundError:
# Note that we're assuming the FileNotFoundError relates to the
# command missing. It could be raised for some other reason, in
# which case this error message would be inaccurate. Still, this
# message catches the common case.
raise CommandError(
"You appear not to have the %r program installed or on your path."
% connection.client.executable_name
)
except subprocess.CalledProcessError as e:
raise CommandError(
'"%s" returned non-zero exit status %s.'
% (
" ".join(map(str, e.cmd)),
e.returncode,
),
returncode=e.returncode,
)
|
56fd3c0f8b173e78512be068add28247033d364867077cb50ecdcf356812c3f4 | import os
import sys
import warnings
from itertools import takewhile
from django.apps import apps
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError, no_translations
from django.core.management.utils import run_formatters
from django.db import DEFAULT_DB_ALIAS, OperationalError, connections, router
from django.db.migrations import Migration
from django.db.migrations.autodetector import MigrationAutodetector
from django.db.migrations.loader import MigrationLoader
from django.db.migrations.migration import SwappableTuple
from django.db.migrations.optimizer import MigrationOptimizer
from django.db.migrations.questioner import (
InteractiveMigrationQuestioner,
MigrationQuestioner,
NonInteractiveMigrationQuestioner,
)
from django.db.migrations.state import ProjectState
from django.db.migrations.utils import get_migration_name_timestamp
from django.db.migrations.writer import MigrationWriter
class Command(BaseCommand):
help = "Creates new migration(s) for apps."
def add_arguments(self, parser):
parser.add_argument(
"args",
metavar="app_label",
nargs="*",
help="Specify the app label(s) to create migrations for.",
)
parser.add_argument(
"--dry-run",
action="store_true",
help="Just show what migrations would be made; don't actually write them.",
)
parser.add_argument(
"--merge",
action="store_true",
help="Enable fixing of migration conflicts.",
)
parser.add_argument(
"--empty",
action="store_true",
help="Create an empty migration.",
)
parser.add_argument(
"--noinput",
"--no-input",
action="store_false",
dest="interactive",
help="Tells Django to NOT prompt the user for input of any kind.",
)
parser.add_argument(
"-n",
"--name",
help="Use this name for migration file(s).",
)
parser.add_argument(
"--no-header",
action="store_false",
dest="include_header",
help="Do not add header comments to new migration file(s).",
)
parser.add_argument(
"--check",
action="store_true",
dest="check_changes",
help=(
"Exit with a non-zero status if model changes are missing migrations "
"and don't actually write them."
),
)
parser.add_argument(
"--scriptable",
action="store_true",
dest="scriptable",
help=(
"Divert log output and input prompts to stderr, writing only "
"paths of generated migration files to stdout."
),
)
parser.add_argument(
"--update",
action="store_true",
dest="update",
help=(
"Merge model changes into the latest migration and optimize the "
"resulting operations."
),
)
@property
def log_output(self):
return self.stderr if self.scriptable else self.stdout
def log(self, msg):
self.log_output.write(msg)
@no_translations
def handle(self, *app_labels, **options):
self.written_files = []
self.verbosity = options["verbosity"]
self.interactive = options["interactive"]
self.dry_run = options["dry_run"]
self.merge = options["merge"]
self.empty = options["empty"]
self.migration_name = options["name"]
if self.migration_name and not self.migration_name.isidentifier():
raise CommandError("The migration name must be a valid Python identifier.")
self.include_header = options["include_header"]
check_changes = options["check_changes"]
self.scriptable = options["scriptable"]
self.update = options["update"]
# If logs and prompts are diverted to stderr, remove the ERROR style.
if self.scriptable:
self.stderr.style_func = None
# Make sure the app they asked for exists
app_labels = set(app_labels)
has_bad_labels = False
for app_label in app_labels:
try:
apps.get_app_config(app_label)
except LookupError as err:
self.stderr.write(str(err))
has_bad_labels = True
if has_bad_labels:
sys.exit(2)
# Load the current graph state. Pass in None for the connection so
# the loader doesn't try to resolve replaced migrations from DB.
loader = MigrationLoader(None, ignore_no_migrations=True)
# Raise an error if any migrations are applied before their dependencies.
consistency_check_labels = {config.label for config in apps.get_app_configs()}
# Non-default databases are only checked if database routers are used.
aliases_to_check = (
connections if settings.DATABASE_ROUTERS else [DEFAULT_DB_ALIAS]
)
for alias in sorted(aliases_to_check):
connection = connections[alias]
if connection.settings_dict["ENGINE"] != "django.db.backends.dummy" and any(
# At least one model must be migrated to the database.
router.allow_migrate(
connection.alias, app_label, model_name=model._meta.object_name
)
for app_label in consistency_check_labels
for model in apps.get_app_config(app_label).get_models()
):
try:
loader.check_consistent_history(connection)
except OperationalError as error:
warnings.warn(
"Got an error checking a consistent migration history "
"performed for database connection '%s': %s" % (alias, error),
RuntimeWarning,
)
# Before anything else, see if there's conflicting apps and drop out
# hard if there are any and they don't want to merge
conflicts = loader.detect_conflicts()
# If app_labels is specified, filter out conflicting migrations for
# unspecified apps.
if app_labels:
conflicts = {
app_label: conflict
for app_label, conflict in conflicts.items()
if app_label in app_labels
}
if conflicts and not self.merge:
name_str = "; ".join(
"%s in %s" % (", ".join(names), app) for app, names in conflicts.items()
)
raise CommandError(
"Conflicting migrations detected; multiple leaf nodes in the "
"migration graph: (%s).\nTo fix them run "
"'python manage.py makemigrations --merge'" % name_str
)
# If they want to merge and there's nothing to merge, then politely exit
if self.merge and not conflicts:
self.log("No conflicts detected to merge.")
return
# If they want to merge and there is something to merge, then
# divert into the merge code
if self.merge and conflicts:
return self.handle_merge(loader, conflicts)
if self.interactive:
questioner = InteractiveMigrationQuestioner(
specified_apps=app_labels,
dry_run=self.dry_run,
prompt_output=self.log_output,
)
else:
questioner = NonInteractiveMigrationQuestioner(
specified_apps=app_labels,
dry_run=self.dry_run,
verbosity=self.verbosity,
log=self.log,
)
# Set up autodetector
autodetector = MigrationAutodetector(
loader.project_state(),
ProjectState.from_apps(apps),
questioner,
)
# If they want to make an empty migration, make one for each app
if self.empty:
if not app_labels:
raise CommandError(
"You must supply at least one app label when using --empty."
)
# Make a fake changes() result we can pass to arrange_for_graph
changes = {app: [Migration("custom", app)] for app in app_labels}
changes = autodetector.arrange_for_graph(
changes=changes,
graph=loader.graph,
migration_name=self.migration_name,
)
self.write_migration_files(changes)
return
# Detect changes
changes = autodetector.changes(
graph=loader.graph,
trim_to_apps=app_labels or None,
convert_apps=app_labels or None,
migration_name=self.migration_name,
)
if not changes:
# No changes? Tell them.
if self.verbosity >= 1:
if app_labels:
if len(app_labels) == 1:
self.log("No changes detected in app '%s'" % app_labels.pop())
else:
self.log(
"No changes detected in apps '%s'"
% ("', '".join(app_labels))
)
else:
self.log("No changes detected")
else:
if check_changes:
sys.exit(1)
if self.update:
self.write_to_last_migration_files(changes)
else:
self.write_migration_files(changes)
def write_to_last_migration_files(self, changes):
loader = MigrationLoader(connections[DEFAULT_DB_ALIAS])
new_changes = {}
update_previous_migration_paths = {}
for app_label, app_migrations in changes.items():
# Find last migration.
leaf_migration_nodes = loader.graph.leaf_nodes(app=app_label)
if len(leaf_migration_nodes) == 0:
raise CommandError(
f"App {app_label} has no migration, cannot update last migration."
)
leaf_migration_node = leaf_migration_nodes[0]
# Multiple leaf nodes have already been checked earlier in command.
leaf_migration = loader.graph.nodes[leaf_migration_node]
# The updated migration cannot be a squash migration, a dependency
# of another migration, or an already applied migration.
if leaf_migration.replaces:
raise CommandError(
f"Cannot update squash migration '{leaf_migration}'."
)
if leaf_migration_node in loader.applied_migrations:
raise CommandError(
f"Cannot update applied migration '{leaf_migration}'."
)
depending_migrations = [
migration
for migration in loader.disk_migrations.values()
if leaf_migration_node in migration.dependencies
]
if depending_migrations:
formatted_migrations = ", ".join(
[f"'{migration}'" for migration in depending_migrations]
)
raise CommandError(
f"Cannot update migration '{leaf_migration}' that migrations "
f"{formatted_migrations} depend on."
)
# Build new migration.
for migration in app_migrations:
leaf_migration.operations.extend(migration.operations)
for dependency in migration.dependencies:
if isinstance(dependency, SwappableTuple):
if settings.AUTH_USER_MODEL == dependency.setting:
leaf_migration.dependencies.append(
("__setting__", "AUTH_USER_MODEL")
)
else:
leaf_migration.dependencies.append(dependency)
elif dependency[0] != migration.app_label:
leaf_migration.dependencies.append(dependency)
# Optimize migration.
optimizer = MigrationOptimizer()
leaf_migration.operations = optimizer.optimize(
leaf_migration.operations, app_label
)
# Update name.
previous_migration_path = MigrationWriter(leaf_migration).path
name_fragment = self.migration_name or leaf_migration.suggest_name()
suggested_name = leaf_migration.name[:4] + f"_{name_fragment}"
if leaf_migration.name == suggested_name:
new_name = leaf_migration.name + "_updated"
else:
new_name = suggested_name
leaf_migration.name = new_name
# Register overridden migration.
new_changes[app_label] = [leaf_migration]
update_previous_migration_paths[app_label] = previous_migration_path
self.write_migration_files(new_changes, update_previous_migration_paths)
def write_migration_files(self, changes, update_previous_migration_paths=None):
"""
Take a changes dict and write them out as migration files.
"""
directory_created = {}
for app_label, app_migrations in changes.items():
if self.verbosity >= 1:
self.log(self.style.MIGRATE_HEADING("Migrations for '%s':" % app_label))
for migration in app_migrations:
# Describe the migration
writer = MigrationWriter(migration, self.include_header)
if self.verbosity >= 1:
# Display a relative path if it's below the current working
# directory, or an absolute path otherwise.
migration_string = self.get_relative_path(writer.path)
self.log(" %s\n" % self.style.MIGRATE_LABEL(migration_string))
for operation in migration.operations:
self.log(" - %s" % operation.describe())
if self.scriptable:
self.stdout.write(migration_string)
if not self.dry_run:
# Write the migrations file to the disk.
migrations_directory = os.path.dirname(writer.path)
if not directory_created.get(app_label):
os.makedirs(migrations_directory, exist_ok=True)
init_path = os.path.join(migrations_directory, "__init__.py")
if not os.path.isfile(init_path):
open(init_path, "w").close()
# We just do this once per app
directory_created[app_label] = True
migration_string = writer.as_string()
with open(writer.path, "w", encoding="utf-8") as fh:
fh.write(migration_string)
self.written_files.append(writer.path)
if update_previous_migration_paths:
prev_path = update_previous_migration_paths[app_label]
rel_prev_path = self.get_relative_path(prev_path)
if writer.needs_manual_porting:
migration_path = self.get_relative_path(writer.path)
self.log(
self.style.WARNING(
f"Updated migration {migration_path} requires "
f"manual porting.\n"
f"Previous migration {rel_prev_path} was kept and "
f"must be deleted after porting functions manually."
)
)
else:
os.remove(prev_path)
self.log(f"Deleted {rel_prev_path}")
elif self.verbosity == 3:
# Alternatively, makemigrations --dry-run --verbosity 3
# will log the migrations rather than saving the file to
# the disk.
self.log(
self.style.MIGRATE_HEADING(
"Full migrations file '%s':" % writer.filename
)
)
self.log(writer.as_string())
run_formatters(self.written_files)
@staticmethod
def get_relative_path(path):
try:
migration_string = os.path.relpath(path)
except ValueError:
migration_string = path
if migration_string.startswith(".."):
migration_string = path
return migration_string
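# Illustrative behavior (paths assumed): with the current working directory
# at "/repo", "/repo/app/migrations/0002_x.py" becomes
# "app/migrations/0002_x.py", while a path outside the working directory
# (relpath starts with "..") or on another drive (ValueError) falls back to
# the original absolute path.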
def handle_merge(self, loader, conflicts):
"""
Handles merging together conflicted migrations interactively,
if it's safe; otherwise, advises on how to fix it.
"""
if self.interactive:
questioner = InteractiveMigrationQuestioner(prompt_output=self.log_output)
else:
questioner = MigrationQuestioner(defaults={"ask_merge": True})
for app_label, migration_names in conflicts.items():
# Grab out the migrations in question, and work out their
# common ancestor.
merge_migrations = []
for migration_name in migration_names:
migration = loader.get_migration(app_label, migration_name)
migration.ancestry = [
mig
for mig in loader.graph.forwards_plan((app_label, migration_name))
if mig[0] == migration.app_label
]
merge_migrations.append(migration)
def all_items_equal(seq):
return all(item == seq[0] for item in seq[1:])
merge_migrations_generations = zip(*(m.ancestry for m in merge_migrations))
common_ancestor_count = sum(
1
for common_ancestor_generation in takewhile(
all_items_equal, merge_migrations_generations
)
)
if not common_ancestor_count:
raise ValueError(
"Could not find common ancestor of %s" % migration_names
)
# Now work out the operations along each divergent branch
for migration in merge_migrations:
migration.branch = migration.ancestry[common_ancestor_count:]
migrations_ops = (
loader.get_migration(node_app, node_name).operations
for node_app, node_name in migration.branch
)
migration.merged_operations = sum(migrations_ops, [])
# In future, this could use some of the Optimizer code
# (can_optimize_through) to automatically see if they're
# mergeable. For now, we always just prompt the user.
if self.verbosity > 0:
self.log(self.style.MIGRATE_HEADING("Merging %s" % app_label))
for migration in merge_migrations:
self.log(self.style.MIGRATE_LABEL(" Branch %s" % migration.name))
for operation in migration.merged_operations:
self.log(" - %s" % operation.describe())
if questioner.ask_merge(app_label):
# If they still want to merge it, then write out an empty
# file depending on the migrations needing merging.
numbers = [
MigrationAutodetector.parse_number(migration.name)
for migration in merge_migrations
]
try:
biggest_number = max(x for x in numbers if x is not None)
except ValueError:
biggest_number = 1
subclass = type(
"Migration",
(Migration,),
{
"dependencies": [
(app_label, migration.name)
for migration in merge_migrations
],
},
)
parts = ["%04i" % (biggest_number + 1)]
if self.migration_name:
parts.append(self.migration_name)
else:
parts.append("merge")
leaf_names = "_".join(
sorted(migration.name for migration in merge_migrations)
)
if len(leaf_names) > 47:
parts.append(get_migration_name_timestamp())
else:
parts.append(leaf_names)
migration_name = "_".join(parts)
new_migration = subclass(migration_name, app_label)
writer = MigrationWriter(new_migration, self.include_header)
if not self.dry_run:
# Write the merge migrations file to the disk
with open(writer.path, "w", encoding="utf-8") as fh:
fh.write(writer.as_string())
run_formatters([writer.path])
if self.verbosity > 0:
self.log("\nCreated new merge migration %s" % writer.path)
if self.scriptable:
self.stdout.write(writer.path)
elif self.verbosity == 3:
# Alternatively, makemigrations --merge --dry-run --verbosity 3
# will log the merge migrations rather than saving the file
# to the disk.
self.log(
self.style.MIGRATE_HEADING(
"Full merge migrations file '%s':" % writer.filename
)
)
self.log(writer.as_string())
|
3b9bea4d781a4853325e2a6ed26c0d323b7e025dba78e7693615680326fda6ee | import base64
import binascii
import functools
import hashlib
import importlib
import math
import warnings
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.signals import setting_changed
from django.dispatch import receiver
from django.utils.crypto import (
RANDOM_STRING_CHARS,
constant_time_compare,
get_random_string,
pbkdf2,
)
from django.utils.deprecation import RemovedInDjango51Warning
from django.utils.module_loading import import_string
from django.utils.translation import gettext_noop as _
UNUSABLE_PASSWORD_PREFIX = "!" # This will never be a valid encoded hash
UNUSABLE_PASSWORD_SUFFIX_LENGTH = (
40 # number of random chars to add after UNUSABLE_PASSWORD_PREFIX
)
def is_password_usable(encoded):
"""
Return True if this password wasn't generated by
User.set_unusable_password(), i.e. make_password(None).
"""
return encoded is None or not encoded.startswith(UNUSABLE_PASSWORD_PREFIX)
def verify_password(password, encoded, preferred="default"):
"""
Return two booleans. The first is whether the raw password matches the
three-part encoded digest, and the second is whether the password
should be regenerated.
"""
if password is None or not is_password_usable(encoded):
return False, False
preferred = get_hasher(preferred)
try:
hasher = identify_hasher(encoded)
except ValueError:
# encoded is gibberish or uses a hasher that's no longer installed.
return False, False
hasher_changed = hasher.algorithm != preferred.algorithm
must_update = hasher_changed or preferred.must_update(encoded)
is_correct = hasher.verify(password, encoded)
# If the hasher didn't change (we don't protect against enumeration if it
# does) and the password should get updated, try to close the timing gap
# between the work factor of the current encoded password and the default
# work factor.
if not is_correct and not hasher_changed and must_update:
hasher.harden_runtime(password, encoded)
return is_correct, must_update
def check_password(password, encoded, setter=None, preferred="default"):
"""
Return a boolean of whether the raw password matches the three-part
encoded digest.
If setter is specified, it'll be called when you need to regenerate the
password.
"""
is_correct, must_update = verify_password(password, encoded, preferred=preferred)
if setter and is_correct and must_update:
setter(password)
return is_correct
async def acheck_password(password, encoded, setter=None, preferred="default"):
"""See check_password()."""
is_correct, must_update = verify_password(password, encoded, preferred=preferred)
if setter and is_correct and must_update:
await setter(password)
return is_correct
def make_password(password, salt=None, hasher="default"):
"""
Turn a plain-text password into a hash for database storage.
Same as encode() but generates a new random salt. If password is None,
return a concatenation of UNUSABLE_PASSWORD_PREFIX and a random string,
which disallows logins. The additional random string reduces the chances
of gaining access to staff or superuser accounts. See ticket #20079 for
more info.
"""
if password is None:
return UNUSABLE_PASSWORD_PREFIX + get_random_string(
UNUSABLE_PASSWORD_SUFFIX_LENGTH
)
if not isinstance(password, (bytes, str)):
raise TypeError(
"Password must be a string or bytes, got %s." % type(password).__qualname__
)
hasher = get_hasher(hasher)
salt = salt or hasher.salt()
return hasher.encode(password, salt)
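# Illustrative results (output shapes assumed for the default PBKDF2 hasher
# configured below):
#   make_password("secret") -> "pbkdf2_sha256$720000$<salt>$<base64 hash>"
#   make_password(None)     -> "!" followed by 40 random characters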
@functools.lru_cache
def get_hashers():
hashers = []
for hasher_path in settings.PASSWORD_HASHERS:
hasher_cls = import_string(hasher_path)
hasher = hasher_cls()
if not getattr(hasher, "algorithm"):
raise ImproperlyConfigured(
"hasher doesn't specify an algorithm name: %s" % hasher_path
)
hashers.append(hasher)
return hashers
@functools.lru_cache
def get_hashers_by_algorithm():
return {hasher.algorithm: hasher for hasher in get_hashers()}
@receiver(setting_changed)
def reset_hashers(*, setting, **kwargs):
if setting == "PASSWORD_HASHERS":
get_hashers.cache_clear()
get_hashers_by_algorithm.cache_clear()
def get_hasher(algorithm="default"):
"""
Return an instance of a loaded password hasher.
If algorithm is 'default', return the default hasher. Lazily import hashers
specified in the project's settings file if needed.
"""
if hasattr(algorithm, "algorithm"):
return algorithm
elif algorithm == "default":
return get_hashers()[0]
else:
hashers = get_hashers_by_algorithm()
try:
return hashers[algorithm]
except KeyError:
raise ValueError(
"Unknown password hashing algorithm '%s'. "
"Did you specify it in the PASSWORD_HASHERS "
"setting?" % algorithm
)
def identify_hasher(encoded):
"""
Return an instance of a loaded password hasher.
Identify hasher algorithm by examining encoded hash, and call
get_hasher() to return hasher. Raise ValueError if
algorithm cannot be identified, or if hasher is not loaded.
"""
# Ancient versions of Django created plain MD5 passwords and accepted
# MD5 passwords with an empty salt.
if (len(encoded) == 32 and "$" not in encoded) or (
len(encoded) == 37 and encoded.startswith("md5$$")
):
algorithm = "unsalted_md5"
# Ancient versions of Django accepted SHA1 passwords with an empty salt.
elif len(encoded) == 46 and encoded.startswith("sha1$$"):
algorithm = "unsalted_sha1"
else:
algorithm = encoded.split("$", 1)[0]
return get_hasher(algorithm)
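# Illustrative identification (encoded values abbreviated/assumed):
#   identify_hasher("pbkdf2_sha256$720000$salt$hash") -> PBKDF2 hasher
#   identify_hasher("md5$$" + 32 hex chars)           -> unsalted_md5 hasher
#   identify_hasher(32 hex chars with no "$")         -> unsalted_md5 hasher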
def mask_hash(hash, show=6, char="*"):
"""
Return the given hash, with only the first ``show`` characters shown. The
rest are masked with ``char`` for security reasons.
"""
masked = hash[:show]
masked += char * len(hash[show:])
return masked
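# Illustrative usage:
#   >>> mask_hash("0123456789abcdef", show=6)
#   '012345**********'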
def must_update_salt(salt, expected_entropy):
# Each character in the salt provides log_2(len(alphabet)) bits of entropy.
return len(salt) * math.log2(len(RANDOM_STRING_CHARS)) < expected_entropy
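# Illustrative arithmetic, assuming the default 62-character alphabet:
# each salt character carries log2(62) ~= 5.95 bits, so a legacy 12-char
# salt (~71 bits) falls short of a 128-bit target, while the 22-char salt
# produced by BasePasswordHasher.salt() (~131 bits) does not.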
class BasePasswordHasher:
"""
Abstract base class for password hashers
When creating your own hasher, you need to override algorithm,
verify(), encode() and safe_summary().
PasswordHasher objects are immutable.
"""
algorithm = None
library = None
salt_entropy = 128
def _load_library(self):
if self.library is not None:
if isinstance(self.library, (tuple, list)):
name, mod_path = self.library
else:
mod_path = self.library
try:
module = importlib.import_module(mod_path)
except ImportError as e:
raise ValueError(
"Couldn't load %r algorithm library: %s"
% (self.__class__.__name__, e)
)
return module
raise ValueError(
"Hasher %r doesn't specify a library attribute" % self.__class__.__name__
)
def salt(self):
"""
Generate a cryptographically secure nonce salt in ASCII with an entropy
of at least `salt_entropy` bits.
"""
# Each character in the salt provides
# log_2(len(alphabet)) bits of entropy.
char_count = math.ceil(self.salt_entropy / math.log2(len(RANDOM_STRING_CHARS)))
return get_random_string(char_count, allowed_chars=RANDOM_STRING_CHARS)
def verify(self, password, encoded):
"""Check if the given password is correct."""
raise NotImplementedError(
"subclasses of BasePasswordHasher must provide a verify() method"
)
def _check_encode_args(self, password, salt):
if password is None:
raise TypeError("password must be provided.")
if not salt or "$" in salt:
raise ValueError("salt must be provided and cannot contain $.")
def encode(self, password, salt):
"""
Create an encoded database value.
The result is normally formatted as "algorithm$salt$hash" and
must be fewer than 128 characters.
"""
raise NotImplementedError(
"subclasses of BasePasswordHasher must provide an encode() method"
)
def decode(self, encoded):
"""
Return a decoded database value.
The result is a dictionary and should contain `algorithm`, `hash`, and
`salt`. Extra keys can be algorithm specific like `iterations` or
`work_factor`.
"""
raise NotImplementedError(
"subclasses of BasePasswordHasher must provide a decode() method."
)
def safe_summary(self, encoded):
"""
Return a summary of safe values.
The result is a dictionary and will be used where the password field
must be displayed to construct a safe representation of the password.
"""
raise NotImplementedError(
"subclasses of BasePasswordHasher must provide a safe_summary() method"
)
def must_update(self, encoded):
return False
def harden_runtime(self, password, encoded):
"""
Bridge the runtime gap between the work factor supplied in `encoded`
and the work factor suggested by this hasher.
Taking PBKDF2 as an example, if `encoded` contains 20000 iterations and
`self.iterations` is 30000, this method should run password through
another 10000 iterations of PBKDF2. Similar approaches should exist
for any hasher that has a work factor. If not, this method should be
defined as a no-op to silence the warning.
"""
warnings.warn(
"subclasses of BasePasswordHasher should provide a harden_runtime() method"
)
class PBKDF2PasswordHasher(BasePasswordHasher):
"""
Secure password hashing using the PBKDF2 algorithm (recommended)
Configured to use PBKDF2 + HMAC + SHA256.
The result is a 64-byte binary string. Iterations may be changed
safely but you must rename the algorithm if you change SHA256.
"""
algorithm = "pbkdf2_sha256"
iterations = 720000
digest = hashlib.sha256
def encode(self, password, salt, iterations=None):
self._check_encode_args(password, salt)
iterations = iterations or self.iterations
hash = pbkdf2(password, salt, iterations, digest=self.digest)
hash = base64.b64encode(hash).decode("ascii").strip()
return "%s$%d$%s$%s" % (self.algorithm, iterations, salt, hash)
def decode(self, encoded):
algorithm, iterations, salt, hash = encoded.split("$", 3)
assert algorithm == self.algorithm
return {
"algorithm": algorithm,
"hash": hash,
"iterations": int(iterations),
"salt": salt,
}
def verify(self, password, encoded):
decoded = self.decode(encoded)
encoded_2 = self.encode(password, decoded["salt"], decoded["iterations"])
return constant_time_compare(encoded, encoded_2)
def safe_summary(self, encoded):
decoded = self.decode(encoded)
return {
_("algorithm"): decoded["algorithm"],
_("iterations"): decoded["iterations"],
_("salt"): mask_hash(decoded["salt"]),
_("hash"): mask_hash(decoded["hash"]),
}
def must_update(self, encoded):
decoded = self.decode(encoded)
update_salt = must_update_salt(decoded["salt"], self.salt_entropy)
return (decoded["iterations"] != self.iterations) or update_salt
def harden_runtime(self, password, encoded):
decoded = self.decode(encoded)
extra_iterations = self.iterations - decoded["iterations"]
if extra_iterations > 0:
self.encode(password, decoded["salt"], extra_iterations)
class PBKDF2SHA1PasswordHasher(PBKDF2PasswordHasher):
"""
Alternate PBKDF2 hasher which uses SHA1, the default PRF
recommended by PKCS #5. This is compatible with other
implementations of PBKDF2, such as openssl's
PKCS5_PBKDF2_HMAC_SHA1().
"""
algorithm = "pbkdf2_sha1"
digest = hashlib.sha1
class Argon2PasswordHasher(BasePasswordHasher):
"""
Secure password hashing using the argon2 algorithm.
This is the winner of the Password Hashing Competition 2013-2015
(https://password-hashing.net). It requires the argon2-cffi library which
depends on native C code and might cause portability issues.
"""
algorithm = "argon2"
library = "argon2"
time_cost = 2
memory_cost = 102400
parallelism = 8
def encode(self, password, salt):
argon2 = self._load_library()
params = self.params()
data = argon2.low_level.hash_secret(
password.encode(),
salt.encode(),
time_cost=params.time_cost,
memory_cost=params.memory_cost,
parallelism=params.parallelism,
hash_len=params.hash_len,
type=params.type,
)
return self.algorithm + data.decode("ascii")
def decode(self, encoded):
argon2 = self._load_library()
algorithm, rest = encoded.split("$", 1)
assert algorithm == self.algorithm
params = argon2.extract_parameters("$" + rest)
variety, *_, b64salt, hash = rest.split("$")
# Add padding.
b64salt += "=" * (-len(b64salt) % 4)
salt = base64.b64decode(b64salt).decode("latin1")
return {
"algorithm": algorithm,
"hash": hash,
"memory_cost": params.memory_cost,
"parallelism": params.parallelism,
"salt": salt,
"time_cost": params.time_cost,
"variety": variety,
"version": params.version,
"params": params,
}
def verify(self, password, encoded):
argon2 = self._load_library()
algorithm, rest = encoded.split("$", 1)
assert algorithm == self.algorithm
try:
return argon2.PasswordHasher().verify("$" + rest, password)
except argon2.exceptions.VerificationError:
return False
def safe_summary(self, encoded):
decoded = self.decode(encoded)
return {
_("algorithm"): decoded["algorithm"],
_("variety"): decoded["variety"],
_("version"): decoded["version"],
_("memory cost"): decoded["memory_cost"],
_("time cost"): decoded["time_cost"],
_("parallelism"): decoded["parallelism"],
_("salt"): mask_hash(decoded["salt"]),
_("hash"): mask_hash(decoded["hash"]),
}
def must_update(self, encoded):
decoded = self.decode(encoded)
current_params = decoded["params"]
new_params = self.params()
# Set salt_len to the salt_len of the current parameters because salt
# is explicitly passed to argon2.
new_params.salt_len = current_params.salt_len
update_salt = must_update_salt(decoded["salt"], self.salt_entropy)
return (current_params != new_params) or update_salt
def harden_runtime(self, password, encoded):
# The runtime for Argon2 is too complicated to implement a sensible
# hardening algorithm.
pass
def params(self):
argon2 = self._load_library()
# salt_len is a noop, because we provide our own salt.
return argon2.Parameters(
type=argon2.low_level.Type.ID,
version=argon2.low_level.ARGON2_VERSION,
salt_len=argon2.DEFAULT_RANDOM_SALT_LENGTH,
hash_len=argon2.DEFAULT_HASH_LENGTH,
time_cost=self.time_cost,
memory_cost=self.memory_cost,
parallelism=self.parallelism,
)
class BCryptSHA256PasswordHasher(BasePasswordHasher):
"""
Secure password hashing using the bcrypt algorithm (recommended)
This is considered by many to be the most secure algorithm but you
must first install the bcrypt library. Please be warned that
this library depends on native C code and might cause portability
issues.
"""
algorithm = "bcrypt_sha256"
digest = hashlib.sha256
library = ("bcrypt", "bcrypt")
rounds = 12
def salt(self):
bcrypt = self._load_library()
return bcrypt.gensalt(self.rounds)
def encode(self, password, salt):
bcrypt = self._load_library()
password = password.encode()
# Hash the password prior to using bcrypt to prevent password
# truncation as described in #20138.
if self.digest is not None:
# Use binascii.hexlify() so the digest contains no NUL bytes;
# bcrypt truncates passwords at the first NUL byte.
password = binascii.hexlify(self.digest(password).digest())
data = bcrypt.hashpw(password, salt)
return "%s$%s" % (self.algorithm, data.decode("ascii"))
def decode(self, encoded):
algorithm, empty, algostr, work_factor, data = encoded.split("$", 4)
assert algorithm == self.algorithm
return {
"algorithm": algorithm,
"algostr": algostr,
"checksum": data[22:],
"salt": data[:22],
"work_factor": int(work_factor),
}
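# The encoded string laid out against the split above (illustrative; the
# "2b" bcrypt variant is assumed):
#   "bcrypt_sha256$$2b$12$<22-char salt><31-char checksum>"
# i.e. algorithm, an empty segment, the bcrypt variant, the work factor,
# and the salt+checksum payload.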
def verify(self, password, encoded):
algorithm, data = encoded.split("$", 1)
assert algorithm == self.algorithm
encoded_2 = self.encode(password, data.encode("ascii"))
return constant_time_compare(encoded, encoded_2)
def safe_summary(self, encoded):
decoded = self.decode(encoded)
return {
_("algorithm"): decoded["algorithm"],
_("work factor"): decoded["work_factor"],
_("salt"): mask_hash(decoded["salt"]),
_("checksum"): mask_hash(decoded["checksum"]),
}
def must_update(self, encoded):
decoded = self.decode(encoded)
return decoded["work_factor"] != self.rounds
def harden_runtime(self, password, encoded):
_, data = encoded.split("$", 1)
salt = data[:29] # Length of the salt in bcrypt.
rounds = data.split("$")[2]
# The work factor is logarithmic; adding one doubles the load.
diff = 2 ** (self.rounds - int(rounds)) - 1
while diff > 0:
self.encode(password, salt.encode("ascii"))
diff -= 1
class BCryptPasswordHasher(BCryptSHA256PasswordHasher):
"""
Secure password hashing using the bcrypt algorithm
This is considered by many to be the most secure algorithm but you
must first install the bcrypt library. Please be warned that
this library depends on native C code and might cause portability
issues.
This hasher does not first hash the password, which means it is subject
to bcrypt's 72-byte password truncation. Most use cases should prefer the
BCryptSHA256PasswordHasher.
"""
algorithm = "bcrypt"
digest = None
class ScryptPasswordHasher(BasePasswordHasher):
"""
Secure password hashing using the Scrypt algorithm.
"""
algorithm = "scrypt"
block_size = 8
maxmem = 0
parallelism = 1
work_factor = 2**14
def encode(self, password, salt, n=None, r=None, p=None):
self._check_encode_args(password, salt)
n = n or self.work_factor
r = r or self.block_size
p = p or self.parallelism
hash_ = hashlib.scrypt(
password.encode(),
salt=salt.encode(),
n=n,
r=r,
p=p,
maxmem=self.maxmem,
dklen=64,
)
hash_ = base64.b64encode(hash_).decode("ascii").strip()
return "%s$%d$%s$%d$%d$%s" % (self.algorithm, n, salt, r, p, hash_)
def decode(self, encoded):
algorithm, work_factor, salt, block_size, parallelism, hash_ = encoded.split(
"$", 6
)
assert algorithm == self.algorithm
return {
"algorithm": algorithm,
"work_factor": int(work_factor),
"salt": salt,
"block_size": int(block_size),
"parallelism": int(parallelism),
"hash": hash_,
}
def verify(self, password, encoded):
decoded = self.decode(encoded)
encoded_2 = self.encode(
password,
decoded["salt"],
decoded["work_factor"],
decoded["block_size"],
decoded["parallelism"],
)
return constant_time_compare(encoded, encoded_2)
def safe_summary(self, encoded):
decoded = self.decode(encoded)
return {
_("algorithm"): decoded["algorithm"],
_("work factor"): decoded["work_factor"],
_("block size"): decoded["block_size"],
_("parallelism"): decoded["parallelism"],
_("salt"): mask_hash(decoded["salt"]),
_("hash"): mask_hash(decoded["hash"]),
}
def must_update(self, encoded):
decoded = self.decode(encoded)
return (
decoded["work_factor"] != self.work_factor
or decoded["block_size"] != self.block_size
or decoded["parallelism"] != self.parallelism
)
def harden_runtime(self, password, encoded):
# The runtime for Scrypt is too complicated to implement a sensible
# hardening algorithm.
pass
# RemovedInDjango51Warning.
class SHA1PasswordHasher(BasePasswordHasher):
"""
The SHA1 password hashing algorithm (not recommended)
"""
algorithm = "sha1"
def __init__(self, *args, **kwargs):
warnings.warn(
"django.contrib.auth.hashers.SHA1PasswordHasher is deprecated.",
RemovedInDjango51Warning,
stacklevel=2,
)
super().__init__(*args, **kwargs)
def encode(self, password, salt):
self._check_encode_args(password, salt)
hash = hashlib.sha1((salt + password).encode()).hexdigest()
return "%s$%s$%s" % (self.algorithm, salt, hash)
def decode(self, encoded):
algorithm, salt, hash = encoded.split("$", 2)
assert algorithm == self.algorithm
return {
"algorithm": algorithm,
"hash": hash,
"salt": salt,
}
def verify(self, password, encoded):
decoded = self.decode(encoded)
encoded_2 = self.encode(password, decoded["salt"])
return constant_time_compare(encoded, encoded_2)
def safe_summary(self, encoded):
decoded = self.decode(encoded)
return {
_("algorithm"): decoded["algorithm"],
_("salt"): mask_hash(decoded["salt"], show=2),
_("hash"): mask_hash(decoded["hash"]),
}
def must_update(self, encoded):
decoded = self.decode(encoded)
return must_update_salt(decoded["salt"], self.salt_entropy)
def harden_runtime(self, password, encoded):
pass
class MD5PasswordHasher(BasePasswordHasher):
"""
The Salted MD5 password hashing algorithm (not recommended)
"""
algorithm = "md5"
def encode(self, password, salt):
self._check_encode_args(password, salt)
hash = hashlib.md5((salt + password).encode()).hexdigest()
return "%s$%s$%s" % (self.algorithm, salt, hash)
def decode(self, encoded):
algorithm, salt, hash = encoded.split("$", 2)
assert algorithm == self.algorithm
return {
"algorithm": algorithm,
"hash": hash,
"salt": salt,
}
def verify(self, password, encoded):
decoded = self.decode(encoded)
encoded_2 = self.encode(password, decoded["salt"])
return constant_time_compare(encoded, encoded_2)
def safe_summary(self, encoded):
decoded = self.decode(encoded)
return {
_("algorithm"): decoded["algorithm"],
_("salt"): mask_hash(decoded["salt"], show=2),
_("hash"): mask_hash(decoded["hash"]),
}
def must_update(self, encoded):
decoded = self.decode(encoded)
return must_update_salt(decoded["salt"], self.salt_entropy)
def harden_runtime(self, password, encoded):
pass
# RemovedInDjango51Warning.
class UnsaltedSHA1PasswordHasher(BasePasswordHasher):
"""
Very insecure algorithm that you should *never* use; stores SHA1 hashes
with an empty salt.
This class is implemented because Django used to accept such password
hashes. Some older Django installs still have these values lingering
around so we need to handle and upgrade them properly.
"""
algorithm = "unsalted_sha1"
def __init__(self, *args, **kwargs):
warnings.warn(
"django.contrib.auth.hashers.UnsaltedSHA1PasswordHasher is deprecated.",
RemovedInDjango51Warning,
stacklevel=2,
)
super().__init__(*args, **kwargs)
def salt(self):
return ""
def encode(self, password, salt):
if salt != "":
raise ValueError("salt must be empty.")
hash = hashlib.sha1(password.encode()).hexdigest()
return "sha1$$%s" % hash
def decode(self, encoded):
assert encoded.startswith("sha1$$")
return {
"algorithm": self.algorithm,
"hash": encoded[6:],
"salt": None,
}
def verify(self, password, encoded):
encoded_2 = self.encode(password, "")
return constant_time_compare(encoded, encoded_2)
def safe_summary(self, encoded):
decoded = self.decode(encoded)
return {
_("algorithm"): decoded["algorithm"],
_("hash"): mask_hash(decoded["hash"]),
}
def harden_runtime(self, password, encoded):
pass
# RemovedInDjango51Warning.
class UnsaltedMD5PasswordHasher(BasePasswordHasher):
"""
Incredibly insecure algorithm that you should *never* use; stores unsalted
MD5 hashes without the algorithm prefix, and also accepts MD5 hashes
with an empty salt.
This class is implemented because Django used to store passwords this way
and to accept such password hashes. Some older Django installs still have
these values lingering around so we need to handle and upgrade them
properly.
"""
algorithm = "unsalted_md5"
def __init__(self, *args, **kwargs):
warnings.warn(
"django.contrib.auth.hashers.UnsaltedMD5PasswordHasher is deprecated.",
RemovedInDjango51Warning,
stacklevel=2,
)
super().__init__(*args, **kwargs)
def salt(self):
return ""
def encode(self, password, salt):
if salt != "":
raise ValueError("salt must be empty.")
return hashlib.md5(password.encode()).hexdigest()
def decode(self, encoded):
return {
"algorithm": self.algorithm,
"hash": encoded,
"salt": None,
}
def verify(self, password, encoded):
if len(encoded) == 37:
encoded = encoded.removeprefix("md5$$")
encoded_2 = self.encode(password, "")
return constant_time_compare(encoded, encoded_2)
def safe_summary(self, encoded):
decoded = self.decode(encoded)
return {
_("algorithm"): decoded["algorithm"],
_("hash"): mask_hash(decoded["hash"], show=3),
}
def harden_runtime(self, password, encoded):
pass
|
f14f32dd3b1363887ea77b8a6a05f99956c8011eb4e62bfe5d9aa658c47c979a | """
This module allows importing AbstractBaseUser even when django.contrib.auth is
not in INSTALLED_APPS.
"""
import unicodedata
import warnings
from django.conf import settings
from django.contrib.auth import password_validation
from django.contrib.auth.hashers import (
acheck_password,
check_password,
is_password_usable,
make_password,
)
from django.db import models
from django.utils.crypto import get_random_string, salted_hmac
from django.utils.deprecation import RemovedInDjango51Warning
from django.utils.translation import gettext_lazy as _
class BaseUserManager(models.Manager):
@classmethod
def normalize_email(cls, email):
"""
Normalize the email address by lowercasing the domain part of it.
"""
email = email or ""
try:
email_name, domain_part = email.strip().rsplit("@", 1)
except ValueError:
pass
else:
email = email_name + "@" + domain_part.lower()
return email
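# Illustrative usage:
#   >>> BaseUserManager.normalize_email("Jane.Doe@EXAMPLE.COM")
#   'Jane.Doe@example.com'
# Only the domain part is lowercased; the local part is preserved.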
def make_random_password(
self,
length=10,
allowed_chars="abcdefghjkmnpqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23456789",
):
"""
Generate a random password with the given length and given
allowed_chars. The default value of allowed_chars does not have "I" or
"O" or letters and digits that look similar -- just to avoid confusion.
"""
warnings.warn(
"BaseUserManager.make_random_password() is deprecated.",
category=RemovedInDjango51Warning,
stacklevel=2,
)
return get_random_string(length, allowed_chars)
def get_by_natural_key(self, username):
return self.get(**{self.model.USERNAME_FIELD: username})
class AbstractBaseUser(models.Model):
password = models.CharField(_("password"), max_length=128)
last_login = models.DateTimeField(_("last login"), blank=True, null=True)
is_active = True
REQUIRED_FIELDS = []
# Stores the raw password if set_password() is called so that it can
# be passed to password_changed() after the model is saved.
_password = None
class Meta:
abstract = True
def __str__(self):
return self.get_username()
def save(self, *args, **kwargs):
super().save(*args, **kwargs)
if self._password is not None:
password_validation.password_changed(self._password, self)
self._password = None
def get_username(self):
"""Return the username for this User."""
return getattr(self, self.USERNAME_FIELD)
def clean(self):
setattr(self, self.USERNAME_FIELD, self.normalize_username(self.get_username()))
def natural_key(self):
return (self.get_username(),)
@property
def is_anonymous(self):
"""
Always return False. This is a way of comparing User objects to
anonymous users.
"""
return False
@property
def is_authenticated(self):
"""
Always return True. This is a way to tell if the user has been
authenticated in templates.
"""
return True
def set_password(self, raw_password):
self.password = make_password(raw_password)
self._password = raw_password
def check_password(self, raw_password):
"""
Return a boolean of whether the raw_password was correct. Handles
hashing formats behind the scenes.
"""
def setter(raw_password):
self.set_password(raw_password)
# Password hash upgrades shouldn't be considered password changes.
self._password = None
self.save(update_fields=["password"])
return check_password(raw_password, self.password, setter)
async def acheck_password(self, raw_password):
"""See check_password()."""
async def setter(raw_password):
self.set_password(raw_password)
# Password hash upgrades shouldn't be considered password changes.
self._password = None
await self.asave(update_fields=["password"])
return await acheck_password(raw_password, self.password, setter)
def set_unusable_password(self):
# Set a value that will never be a valid hash
self.password = make_password(None)
def has_usable_password(self):
"""
Return False if set_unusable_password() has been called for this user.
"""
return is_password_usable(self.password)
def get_session_auth_hash(self):
"""
Return an HMAC of the password field.
"""
return self._get_session_auth_hash()
def get_session_auth_fallback_hash(self):
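# Yield one hash per entry in SECRET_KEY_FALLBACKS so sessions created
# before a SECRET_KEY rotation keep validating against the old keys.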
for fallback_secret in settings.SECRET_KEY_FALLBACKS:
yield self._get_session_auth_hash(secret=fallback_secret)
def _get_session_auth_hash(self, secret=None):
key_salt = "django.contrib.auth.models.AbstractBaseUser.get_session_auth_hash"
return salted_hmac(
key_salt,
self.password,
secret=secret,
algorithm="sha256",
).hexdigest()
@classmethod
def get_email_field_name(cls):
try:
return cls.EMAIL_FIELD
except AttributeError:
return "email"
@classmethod
def normalize_username(cls, username):
return (
unicodedata.normalize("NFKC", username)
if isinstance(username, str)
else username
)
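# Hedged sketch of a minimal concrete user built on these base classes;
# the email field and "accounts" app label are illustrative assumptions,
# not part of this module:
#
#     class User(AbstractBaseUser):
#         email = models.EmailField(unique=True)
#         objects = BaseUserManager()
#         USERNAME_FIELD = "email"
#
#         class Meta:
#             app_label = "accounts"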
|
bcbd93361805b021fee095c313ed88fc1841db14102f0c781a41cac7b9bc473e | from django import template
register = template.Library()
class AdminLogNode(template.Node):
def __init__(self, limit, varname, user):
self.limit = limit
self.varname = varname
self.user = user
def __repr__(self):
return "<GetAdminLog Node>"
def render(self, context):
entries = context["log_entries"]
if self.user is not None:
user_id = self.user
if not user_id.isdigit():
user_id = context[self.user].pk
entries = entries.filter(user__pk=user_id)
context[self.varname] = entries[: int(self.limit)]
return ""
@register.tag
def get_admin_log(parser, token):
"""
Populate a template variable with the admin log for the given criteria.
Usage::
{% get_admin_log [limit] as [varname] for_user [context_var_containing_user_obj] %}
Examples::
{% get_admin_log 10 as admin_log for_user 23 %}
{% get_admin_log 10 as admin_log for_user user %}
{% get_admin_log 10 as admin_log %}
Note that ``context_var_containing_user_obj`` can be a hard-coded integer
(user ID) or the name of a template context variable containing the user
object whose ID you want.
"""
tokens = token.contents.split()
if len(tokens) < 4:
raise template.TemplateSyntaxError(
"'get_admin_log' statements require two arguments"
)
if not tokens[1].isdigit():
raise template.TemplateSyntaxError(
"First argument to 'get_admin_log' must be an integer"
)
if tokens[2] != "as":
raise template.TemplateSyntaxError(
"Second argument to 'get_admin_log' must be 'as'"
)
if len(tokens) > 4:
if tokens[4] != "for_user":
raise template.TemplateSyntaxError(
"Fourth argument to 'get_admin_log' must be 'for_user'"
)
return AdminLogNode(
limit=tokens[1],
varname=tokens[3],
user=(tokens[5] if len(tokens) > 5 else None),
)
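# Sketch of how the tag resolves (hypothetical values): parsing
# "{% get_admin_log 10 as admin_log for_user user %}" yields
# AdminLogNode(limit="10", varname="admin_log", user="user"); render()
# then filters context["log_entries"] by the resolved user's pk and
# stores the first 10 entries under context["admin_log"].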
|
8fa75c64b61f90bdaa763f3e1f848a4ade400f3d9a2c920a13faf5deb078d91c | from django.db import migrations, models
from django.db.migrations import operations
from django.db.migrations.optimizer import MigrationOptimizer
from django.db.migrations.serializer import serializer_factory
from django.db.models.functions import Abs
from django.test import SimpleTestCase
from .models import EmptyManager, UnicodeModel
class OptimizerTests(SimpleTestCase):
"""
Tests the migration optimizer.
"""
def optimize(self, operations, app_label):
"""
Handy shortcut returning (optimized operations, number of optimizer iterations).
"""
optimizer = MigrationOptimizer()
return optimizer.optimize(operations, app_label), optimizer._iterations
def serialize(self, value):
return serializer_factory(value).serialize()[0]
def assertOptimizesTo(
self, operations, expected, exact=None, less_than=None, app_label=None
):
result, iterations = self.optimize(operations, app_label or "migrations")
result = [self.serialize(f) for f in result]
expected = [self.serialize(f) for f in expected]
self.assertEqual(expected, result)
if exact is not None and iterations != exact:
raise self.failureException(
"Optimization did not take exactly %s iterations (it took %s)"
% (exact, iterations)
)
if less_than is not None and iterations >= less_than:
raise self.failureException(
"Optimization did not take less than %s iterations (it took %s)"
% (less_than, iterations)
)
def assertDoesNotOptimize(self, operations, **kwargs):
self.assertOptimizesTo(operations, operations, **kwargs)
def test_none_app_label(self):
optimizer = MigrationOptimizer()
with self.assertRaisesMessage(TypeError, "app_label must be a str"):
optimizer.optimize([], None)
def test_single(self):
"""
The optimizer does nothing on a single operation,
and it does so in just one pass.
"""
self.assertOptimizesTo(
[migrations.DeleteModel("Foo")],
[migrations.DeleteModel("Foo")],
exact=1,
)
def test_create_delete_model(self):
"""
CreateModel and DeleteModel should collapse into nothing.
"""
self.assertOptimizesTo(
[
migrations.CreateModel(
"Foo", [("name", models.CharField(max_length=255))]
),
migrations.DeleteModel("Foo"),
],
[],
)
def test_create_rename_model(self):
"""
CreateModel should absorb RenameModels.
"""
managers = [("objects", EmptyManager())]
self.assertOptimizesTo(
[
migrations.CreateModel(
name="Foo",
fields=[("name", models.CharField(max_length=255))],
options={"verbose_name": "Foo"},
bases=(UnicodeModel,),
managers=managers,
),
migrations.RenameModel("Foo", "Bar"),
],
[
migrations.CreateModel(
"Bar",
[("name", models.CharField(max_length=255))],
options={"verbose_name": "Foo"},
bases=(UnicodeModel,),
managers=managers,
)
],
)
def test_rename_model_self(self):
"""
RenameModels should absorb themselves.
"""
self.assertOptimizesTo(
[
migrations.RenameModel("Foo", "Baa"),
migrations.RenameModel("Baa", "Bar"),
],
[
migrations.RenameModel("Foo", "Bar"),
],
)
def test_create_alter_model_options(self):
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", fields=[]),
migrations.AlterModelOptions(
name="Foo", options={"verbose_name_plural": "Foozes"}
),
],
[
migrations.CreateModel(
"Foo", fields=[], options={"verbose_name_plural": "Foozes"}
),
],
)
def test_create_alter_model_managers(self):
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", fields=[]),
migrations.AlterModelManagers(
name="Foo",
managers=[
("objects", models.Manager()),
("things", models.Manager()),
],
),
],
[
migrations.CreateModel(
"Foo",
fields=[],
managers=[
("objects", models.Manager()),
("things", models.Manager()),
],
),
],
)
def test_create_model_and_remove_model_options(self):
self.assertOptimizesTo(
[
migrations.CreateModel(
"MyModel",
fields=[],
options={"verbose_name": "My Model"},
),
migrations.AlterModelOptions("MyModel", options={}),
],
[migrations.CreateModel("MyModel", fields=[])],
)
self.assertOptimizesTo(
[
migrations.CreateModel(
"MyModel",
fields=[],
options={
"verbose_name": "My Model",
"verbose_name_plural": "My Model plural",
},
),
migrations.AlterModelOptions(
"MyModel",
options={"verbose_name": "My Model"},
),
],
[
migrations.CreateModel(
"MyModel",
fields=[],
options={"verbose_name": "My Model"},
),
],
)
def _test_create_alter_foo_delete_model(self, alter_foo):
"""
CreateModel, AlterModelTable, AlterUniqueTogether/AlterIndexTogether/
AlterOrderWithRespectTo, and DeleteModel should collapse into nothing.
"""
self.assertOptimizesTo(
[
migrations.CreateModel(
"Foo", [("name", models.CharField(max_length=255))]
),
migrations.AlterModelTable("Foo", "woohoo"),
alter_foo,
migrations.DeleteModel("Foo"),
],
[],
)
def test_create_alter_unique_delete_model(self):
self._test_create_alter_foo_delete_model(
migrations.AlterUniqueTogether("Foo", [["a", "b"]])
)
def test_create_alter_index_delete_model(self):
self._test_create_alter_foo_delete_model(
migrations.AlterIndexTogether("Foo", [["a", "b"]])
)
def test_create_alter_owrt_delete_model(self):
self._test_create_alter_foo_delete_model(
migrations.AlterOrderWithRespectTo("Foo", "a")
)
def _test_alter_alter(self, alter_foo, alter_bar):
"""
Two consecutive AlterUniqueTogether/AlterIndexTogether/
AlterOrderWithRespectTo/AlterField operations should collapse into the second.
"""
self.assertOptimizesTo(
[
alter_foo,
alter_bar,
],
[
alter_bar,
],
)
def test_alter_alter_table_model(self):
self._test_alter_alter(
migrations.AlterModelTable("Foo", "a"),
migrations.AlterModelTable("Foo", "b"),
)
def test_alter_alter_unique_model(self):
self._test_alter_alter(
migrations.AlterUniqueTogether("Foo", [["a", "b"]]),
migrations.AlterUniqueTogether("Foo", [["a", "c"]]),
)
def test_alter_alter_index_model(self):
self._test_alter_alter(
migrations.AlterIndexTogether("Foo", [["a", "b"]]),
migrations.AlterIndexTogether("Foo", [["a", "c"]]),
)
def test_alter_alter_owrt_model(self):
self._test_alter_alter(
migrations.AlterOrderWithRespectTo("Foo", "a"),
migrations.AlterOrderWithRespectTo("Foo", "b"),
)
def test_alter_alter_field(self):
self._test_alter_alter(
migrations.AlterField("Foo", "name", models.IntegerField()),
migrations.AlterField("Foo", "name", models.IntegerField(help_text="help")),
)
def test_optimize_through_create(self):
"""
We should be able to optimize away create/delete through a create or
delete of a different model, but only if the create operation does not
mention the model at all.
"""
# These should work
self.assertOptimizesTo(
[
migrations.CreateModel(
"Foo", [("name", models.CharField(max_length=255))]
),
migrations.CreateModel("Bar", [("size", models.IntegerField())]),
migrations.DeleteModel("Foo"),
],
[
migrations.CreateModel("Bar", [("size", models.IntegerField())]),
],
)
self.assertOptimizesTo(
[
migrations.CreateModel(
"Foo", [("name", models.CharField(max_length=255))]
),
migrations.CreateModel("Bar", [("size", models.IntegerField())]),
migrations.DeleteModel("Bar"),
migrations.DeleteModel("Foo"),
],
[],
)
self.assertOptimizesTo(
[
migrations.CreateModel(
"Foo", [("name", models.CharField(max_length=255))]
),
migrations.CreateModel("Bar", [("size", models.IntegerField())]),
migrations.DeleteModel("Foo"),
migrations.DeleteModel("Bar"),
],
[],
)
# Operations should be optimized if the FK references a model from the
# other app.
self.assertOptimizesTo(
[
migrations.CreateModel(
"Foo", [("name", models.CharField(max_length=255))]
),
migrations.CreateModel(
"Bar", [("other", models.ForeignKey("testapp.Foo", models.CASCADE))]
),
migrations.DeleteModel("Foo"),
],
[
migrations.CreateModel(
"Bar", [("other", models.ForeignKey("testapp.Foo", models.CASCADE))]
),
],
app_label="otherapp",
)
# But it shouldn't work if a FK references a model with the same
# app_label.
self.assertDoesNotOptimize(
[
migrations.CreateModel(
"Foo", [("name", models.CharField(max_length=255))]
),
migrations.CreateModel(
"Bar", [("other", models.ForeignKey("Foo", models.CASCADE))]
),
migrations.DeleteModel("Foo"),
],
)
self.assertDoesNotOptimize(
[
migrations.CreateModel(
"Foo", [("name", models.CharField(max_length=255))]
),
migrations.CreateModel(
"Bar", [("other", models.ForeignKey("testapp.Foo", models.CASCADE))]
),
migrations.DeleteModel("Foo"),
],
app_label="testapp",
)
# This should not work - bases should block it
self.assertDoesNotOptimize(
[
migrations.CreateModel(
"Foo", [("name", models.CharField(max_length=255))]
),
migrations.CreateModel(
"Bar", [("size", models.IntegerField())], bases=("Foo",)
),
migrations.DeleteModel("Foo"),
],
)
self.assertDoesNotOptimize(
[
migrations.CreateModel(
"Foo", [("name", models.CharField(max_length=255))]
),
migrations.CreateModel(
"Bar", [("size", models.IntegerField())], bases=("testapp.Foo",)
),
migrations.DeleteModel("Foo"),
],
app_label="testapp",
)
# The same operations should be optimized if app_label and none of
# bases belong to that app.
self.assertOptimizesTo(
[
migrations.CreateModel(
"Foo", [("name", models.CharField(max_length=255))]
),
migrations.CreateModel(
"Bar", [("size", models.IntegerField())], bases=("testapp.Foo",)
),
migrations.DeleteModel("Foo"),
],
[
migrations.CreateModel(
"Bar", [("size", models.IntegerField())], bases=("testapp.Foo",)
),
],
app_label="otherapp",
)
# But it shouldn't work if some of bases belongs to the specified app.
self.assertDoesNotOptimize(
[
migrations.CreateModel(
"Foo", [("name", models.CharField(max_length=255))]
),
migrations.CreateModel(
"Bar", [("size", models.IntegerField())], bases=("testapp.Foo",)
),
migrations.DeleteModel("Foo"),
],
app_label="testapp",
)
self.assertOptimizesTo(
[
migrations.CreateModel(
"Book", [("name", models.CharField(max_length=255))]
),
migrations.CreateModel(
"Person", [("name", models.CharField(max_length=255))]
),
migrations.AddField(
"book",
"author",
models.ForeignKey("test_app.Person", models.CASCADE),
),
migrations.CreateModel(
"Review",
[("book", models.ForeignKey("test_app.Book", models.CASCADE))],
),
migrations.CreateModel(
"Reviewer", [("name", models.CharField(max_length=255))]
),
migrations.AddField(
"review",
"reviewer",
models.ForeignKey("test_app.Reviewer", models.CASCADE),
),
migrations.RemoveField("book", "author"),
migrations.DeleteModel("Person"),
],
[
migrations.CreateModel(
"Book", [("name", models.CharField(max_length=255))]
),
migrations.CreateModel(
"Reviewer", [("name", models.CharField(max_length=255))]
),
migrations.CreateModel(
"Review",
[
("book", models.ForeignKey("test_app.Book", models.CASCADE)),
(
"reviewer",
models.ForeignKey("test_app.Reviewer", models.CASCADE),
),
],
),
],
app_label="test_app",
)
def test_create_model_add_field(self):
"""
AddField should optimize into CreateModel.
"""
managers = [("objects", EmptyManager())]
self.assertOptimizesTo(
[
migrations.CreateModel(
name="Foo",
fields=[("name", models.CharField(max_length=255))],
options={"verbose_name": "Foo"},
bases=(UnicodeModel,),
managers=managers,
),
migrations.AddField("Foo", "age", models.IntegerField()),
],
[
migrations.CreateModel(
name="Foo",
fields=[
("name", models.CharField(max_length=255)),
("age", models.IntegerField()),
],
options={"verbose_name": "Foo"},
bases=(UnicodeModel,),
managers=managers,
),
],
)
def test_create_model_reordering(self):
"""
AddField optimizes into CreateModel when it adds a FK to a model created
between them (and there's no FK in the other direction), by reordering
the CreateModel operations.
"""
self.assertOptimizesTo(
[
migrations.CreateModel(
"Foo", [("name", models.CharField(max_length=255))]
),
migrations.CreateModel("Link", [("url", models.TextField())]),
migrations.AddField(
"Foo", "link", models.ForeignKey("migrations.Link", models.CASCADE)
),
],
[
migrations.CreateModel("Link", [("url", models.TextField())]),
migrations.CreateModel(
"Foo",
[
("name", models.CharField(max_length=255)),
("link", models.ForeignKey("migrations.Link", models.CASCADE)),
],
),
],
)
def test_create_model_reordering_circular_fk(self):
"""
CreateModel reordering behavior doesn't result in an infinite loop if
there are FKs in both directions.
"""
self.assertOptimizesTo(
[
migrations.CreateModel("Bar", [("url", models.TextField())]),
migrations.CreateModel(
"Foo", [("name", models.CharField(max_length=255))]
),
migrations.AddField(
"Bar", "foo_fk", models.ForeignKey("migrations.Foo", models.CASCADE)
),
migrations.AddField(
"Foo", "bar_fk", models.ForeignKey("migrations.Bar", models.CASCADE)
),
],
[
migrations.CreateModel(
"Foo", [("name", models.CharField(max_length=255))]
),
migrations.CreateModel(
"Bar",
[
("url", models.TextField()),
("foo_fk", models.ForeignKey("migrations.Foo", models.CASCADE)),
],
),
migrations.AddField(
"Foo", "bar_fk", models.ForeignKey("migrations.Bar", models.CASCADE)
),
],
)
def test_create_model_no_reordering_for_unrelated_fk(self):
"""
CreateModel order remains unchanged if the later AddField operation
isn't a FK between them.
"""
self.assertDoesNotOptimize(
[
migrations.CreateModel(
"Foo", [("name", models.CharField(max_length=255))]
),
migrations.CreateModel("Link", [("url", models.TextField())]),
migrations.AddField(
"Other",
"link",
models.ForeignKey("migrations.Link", models.CASCADE),
),
],
)
def test_create_model_no_reordering_of_inherited_model(self):
"""
A CreateModel that inherits from another isn't reordered to avoid
moving it earlier than its parent CreateModel operation.
"""
self.assertOptimizesTo(
[
migrations.CreateModel(
"Other", [("foo", models.CharField(max_length=255))]
),
migrations.CreateModel(
"ParentModel", [("bar", models.CharField(max_length=255))]
),
migrations.CreateModel(
"ChildModel",
[("baz", models.CharField(max_length=255))],
bases=("migrations.parentmodel",),
),
migrations.AddField(
"Other",
"fk",
models.ForeignKey("migrations.ChildModel", models.CASCADE),
),
],
[
migrations.CreateModel(
"ParentModel", [("bar", models.CharField(max_length=255))]
),
migrations.CreateModel(
"ChildModel",
[("baz", models.CharField(max_length=255))],
bases=("migrations.parentmodel",),
),
migrations.CreateModel(
"Other",
[
("foo", models.CharField(max_length=255)),
(
"fk",
models.ForeignKey("migrations.ChildModel", models.CASCADE),
),
],
),
],
)
def test_create_model_add_field_not_through_m2m_through(self):
"""
AddField should NOT optimize into CreateModel if it's an M2M using a
through that's created between them.
"""
self.assertDoesNotOptimize(
[
migrations.CreateModel("Employee", []),
migrations.CreateModel("Employer", []),
migrations.CreateModel(
"Employment",
[
(
"employee",
models.ForeignKey("migrations.Employee", models.CASCADE),
),
(
"employment",
models.ForeignKey("migrations.Employer", models.CASCADE),
),
],
),
migrations.AddField(
"Employer",
"employees",
models.ManyToManyField(
"migrations.Employee",
through="migrations.Employment",
),
),
],
)
def test_create_model_alter_field(self):
"""
AlterField should optimize into CreateModel.
"""
managers = [("objects", EmptyManager())]
self.assertOptimizesTo(
[
migrations.CreateModel(
name="Foo",
fields=[("name", models.CharField(max_length=255))],
options={"verbose_name": "Foo"},
bases=(UnicodeModel,),
managers=managers,
),
migrations.AlterField("Foo", "name", models.IntegerField()),
],
[
migrations.CreateModel(
name="Foo",
fields=[
("name", models.IntegerField()),
],
options={"verbose_name": "Foo"},
bases=(UnicodeModel,),
managers=managers,
),
],
)
def test_create_model_rename_field(self):
"""
RenameField should optimize into CreateModel.
"""
managers = [("objects", EmptyManager())]
self.assertOptimizesTo(
[
migrations.CreateModel(
name="Foo",
fields=[("name", models.CharField(max_length=255))],
options={"verbose_name": "Foo"},
bases=(UnicodeModel,),
managers=managers,
),
migrations.RenameField("Foo", "name", "title"),
],
[
migrations.CreateModel(
name="Foo",
fields=[
("title", models.CharField(max_length=255)),
],
options={"verbose_name": "Foo"},
bases=(UnicodeModel,),
managers=managers,
),
],
)
def test_add_field_rename_field(self):
"""
RenameField should optimize into AddField.
"""
self.assertOptimizesTo(
[
migrations.AddField("Foo", "name", models.CharField(max_length=255)),
migrations.RenameField("Foo", "name", "title"),
],
[
migrations.AddField("Foo", "title", models.CharField(max_length=255)),
],
)
def test_alter_field_rename_field(self):
"""
RenameField should optimize to the other side of AlterField,
and into itself.
"""
self.assertOptimizesTo(
[
migrations.AlterField("Foo", "name", models.CharField(max_length=255)),
migrations.RenameField("Foo", "name", "title"),
migrations.RenameField("Foo", "title", "nom"),
],
[
migrations.RenameField("Foo", "name", "nom"),
migrations.AlterField("Foo", "nom", models.CharField(max_length=255)),
],
)
def test_swapping_fields_names(self):
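# field_a and field_b are swapped through the temporary name field_c;
# no subset of the three renames can collapse without losing the swap,
# so the optimizer must leave the sequence untouched.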
self.assertDoesNotOptimize(
[
migrations.CreateModel(
"MyModel",
[
("field_a", models.IntegerField()),
("field_b", models.IntegerField()),
],
),
migrations.RunPython(migrations.RunPython.noop),
migrations.RenameField("MyModel", "field_a", "field_c"),
migrations.RenameField("MyModel", "field_b", "field_a"),
migrations.RenameField("MyModel", "field_c", "field_b"),
],
)
def test_create_model_remove_field(self):
"""
RemoveField should optimize into CreateModel.
"""
managers = [("objects", EmptyManager())]
self.assertOptimizesTo(
[
migrations.CreateModel(
name="Foo",
fields=[
("name", models.CharField(max_length=255)),
("age", models.IntegerField()),
],
options={"verbose_name": "Foo"},
bases=(UnicodeModel,),
managers=managers,
),
migrations.RemoveField("Foo", "age"),
],
[
migrations.CreateModel(
name="Foo",
fields=[
("name", models.CharField(max_length=255)),
],
options={"verbose_name": "Foo"},
bases=(UnicodeModel,),
managers=managers,
),
],
)
def test_add_field_alter_field(self):
"""
AlterField should optimize into AddField.
"""
self.assertOptimizesTo(
[
migrations.AddField("Foo", "age", models.IntegerField()),
migrations.AlterField("Foo", "age", models.FloatField(default=2.4)),
],
[
migrations.AddField(
"Foo", name="age", field=models.FloatField(default=2.4)
),
],
)
def test_add_field_delete_field(self):
"""
RemoveField should cancel AddField.
"""
self.assertOptimizesTo(
[
migrations.AddField("Foo", "age", models.IntegerField()),
migrations.RemoveField("Foo", "age"),
],
[],
)
def test_alter_field_delete_field(self):
"""
RemoveField should absorb AlterField.
"""
self.assertOptimizesTo(
[
migrations.AlterField("Foo", "age", models.IntegerField()),
migrations.RemoveField("Foo", "age"),
],
[
migrations.RemoveField("Foo", "age"),
],
)
def _test_create_alter_foo_field(self, alter):
"""
CreateModel plus AlterFooTogether/AlterOrderWithRespectTo, followed by an
add/alter/rename/remove field, should optimize into CreateModel with the
corresponding options.
"""
option_value = getattr(alter, alter.option_name)
options = {alter.option_name: option_value}
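# For example, AlterUniqueTogether("Foo", [["a", "b"]]) yields
# options == {"unique_together": {("a", "b")}}, while
# AlterOrderWithRespectTo("Foo", "b") yields
# {"order_with_respect_to": "b"}.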
# AddField
self.assertOptimizesTo(
[
migrations.CreateModel(
"Foo",
[
("a", models.IntegerField()),
("b", models.IntegerField()),
],
),
alter,
migrations.AddField("Foo", "c", models.IntegerField()),
],
[
migrations.CreateModel(
"Foo",
[
("a", models.IntegerField()),
("b", models.IntegerField()),
("c", models.IntegerField()),
],
options=options,
),
],
)
# AlterField
self.assertOptimizesTo(
[
migrations.CreateModel(
"Foo",
[
("a", models.IntegerField()),
("b", models.IntegerField()),
],
),
alter,
migrations.AlterField("Foo", "b", models.CharField(max_length=255)),
],
[
migrations.CreateModel(
"Foo",
[
("a", models.IntegerField()),
("b", models.CharField(max_length=255)),
],
options=options,
),
],
)
self.assertOptimizesTo(
[
migrations.CreateModel(
"Foo",
[
("a", models.IntegerField()),
("b", models.IntegerField()),
("c", models.IntegerField()),
],
),
alter,
migrations.AlterField("Foo", "c", models.CharField(max_length=255)),
],
[
migrations.CreateModel(
"Foo",
[
("a", models.IntegerField()),
("b", models.IntegerField()),
("c", models.CharField(max_length=255)),
],
options=options,
),
],
)
# RenameField
if isinstance(option_value, str):
renamed_options = {alter.option_name: "c"}
else:
renamed_options = {
alter.option_name: {
tuple("c" if value == "b" else value for value in item)
for item in option_value
}
}
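# e.g. {("a", "b")} becomes {("a", "c")} for the *_together options,
# while order_with_respect_to goes straight from "b" to "c".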
self.assertOptimizesTo(
[
migrations.CreateModel(
"Foo",
[
("a", models.IntegerField()),
("b", models.IntegerField()),
],
),
alter,
migrations.RenameField("Foo", "b", "c"),
],
[
migrations.CreateModel(
"Foo",
[
("a", models.IntegerField()),
("c", models.IntegerField()),
],
options=renamed_options,
),
],
)
self.assertOptimizesTo(
[
migrations.CreateModel(
"Foo",
[
("a", models.IntegerField()),
("b", models.IntegerField()),
],
),
alter,
migrations.RenameField("Foo", "b", "x"),
migrations.RenameField("Foo", "x", "c"),
],
[
migrations.CreateModel(
"Foo",
[
("a", models.IntegerField()),
("c", models.IntegerField()),
],
options=renamed_options,
),
],
)
self.assertOptimizesTo(
[
migrations.CreateModel(
"Foo",
[
("a", models.IntegerField()),
("b", models.IntegerField()),
("c", models.IntegerField()),
],
),
alter,
migrations.RenameField("Foo", "c", "d"),
],
[
migrations.CreateModel(
"Foo",
[
("a", models.IntegerField()),
("b", models.IntegerField()),
("d", models.IntegerField()),
],
options=options,
),
],
)
# RemoveField
if isinstance(option_value, str):
removed_options = None
else:
removed_options = {
alter.option_name: {
tuple(value for value in item if value != "b")
for item in option_value
}
}
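# e.g. {("a", "b")} shrinks to {("a",)} for the *_together options,
# while order_with_respect_to is dropped entirely (options=None).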
self.assertOptimizesTo(
[
migrations.CreateModel(
"Foo",
[
("a", models.IntegerField()),
("b", models.IntegerField()),
],
),
alter,
migrations.RemoveField("Foo", "b"),
],
[
migrations.CreateModel(
"Foo",
[
("a", models.IntegerField()),
],
options=removed_options,
),
],
)
self.assertOptimizesTo(
[
migrations.CreateModel(
"Foo",
[
("a", models.IntegerField()),
("b", models.IntegerField()),
("c", models.IntegerField()),
],
),
alter,
migrations.RemoveField("Foo", "c"),
],
[
migrations.CreateModel(
"Foo",
[
("a", models.IntegerField()),
("b", models.IntegerField()),
],
options=options,
),
],
)
def test_create_alter_unique_field(self):
self._test_create_alter_foo_field(
migrations.AlterUniqueTogether("Foo", [["a", "b"]])
)
def test_create_alter_index_field(self):
self._test_create_alter_foo_field(
migrations.AlterIndexTogether("Foo", [["a", "b"]])
)
def test_create_alter_owrt_field(self):
self._test_create_alter_foo_field(
migrations.AlterOrderWithRespectTo("Foo", "b")
)
def test_optimize_through_fields(self):
"""
Field-level through checking works: model Foo should collapse to
nonexistence, and model Bar to a single IntegerField called "width".
"""
self.assertOptimizesTo(
[
migrations.CreateModel(
"Foo", [("name", models.CharField(max_length=255))]
),
migrations.CreateModel("Bar", [("size", models.IntegerField())]),
migrations.AddField("Foo", "age", models.IntegerField()),
migrations.AddField("Bar", "width", models.IntegerField()),
migrations.AlterField("Foo", "age", models.IntegerField()),
migrations.RenameField("Bar", "size", "dimensions"),
migrations.RemoveField("Foo", "age"),
migrations.RenameModel("Foo", "Phou"),
migrations.RemoveField("Bar", "dimensions"),
migrations.RenameModel("Phou", "Fou"),
migrations.DeleteModel("Fou"),
],
[
migrations.CreateModel("Bar", [("width", models.IntegerField())]),
],
)
def test_optimize_elidable_operation(self):
elidable_operation = operations.base.Operation()
elidable_operation.elidable = True
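# Operation.reduce() lets an elidable operation collapse against any
# neighbor, so these placeholders vanish from the optimized result.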
self.assertOptimizesTo(
[
elidable_operation,
migrations.CreateModel(
"Foo", [("name", models.CharField(max_length=255))]
),
elidable_operation,
migrations.CreateModel("Bar", [("size", models.IntegerField())]),
elidable_operation,
migrations.RenameModel("Foo", "Phou"),
migrations.DeleteModel("Bar"),
elidable_operation,
],
[
migrations.CreateModel(
"Phou", [("name", models.CharField(max_length=255))]
),
],
)
def test_rename_index(self):
self.assertOptimizesTo(
[
migrations.RenameIndex(
"Pony", new_name="mid_name", old_fields=("weight", "pink")
),
migrations.RenameIndex(
"Pony", new_name="new_name", old_name="mid_name"
),
],
[
migrations.RenameIndex(
"Pony", new_name="new_name", old_fields=("weight", "pink")
),
],
)
self.assertOptimizesTo(
[
migrations.RenameIndex(
"Pony", new_name="mid_name", old_name="old_name"
),
migrations.RenameIndex(
"Pony", new_name="new_name", old_name="mid_name"
),
],
[migrations.RenameIndex("Pony", new_name="new_name", old_name="old_name")],
)
self.assertDoesNotOptimize(
[
migrations.RenameIndex(
"Pony", new_name="mid_name", old_name="old_name"
),
migrations.RenameIndex(
"Pony", new_name="new_name", old_fields=("weight", "pink")
),
]
)
def test_add_rename_index(self):
tests = [
models.Index(fields=["weight", "pink"], name="mid_name"),
models.Index(Abs("weight"), name="mid_name"),
models.Index(
Abs("weight"), name="mid_name", condition=models.Q(weight__gt=0)
),
]
for index in tests:
with self.subTest(index=index):
renamed_index = index.clone()
renamed_index.name = "new_name"
self.assertOptimizesTo(
[
migrations.AddIndex("Pony", index),
migrations.RenameIndex(
"Pony", new_name="new_name", old_name="mid_name"
),
],
[
migrations.AddIndex("Pony", renamed_index),
],
)
self.assertDoesNotOptimize(
[
migrations.AddIndex("Pony", index),
migrations.RenameIndex(
"Pony", new_name="new_name", old_name="other_name"
),
],
)
def test_add_remove_index(self):
self.assertOptimizesTo(
[
migrations.AddIndex(
"Pony",
models.Index(
fields=["weight", "pink"], name="idx_pony_weight_pink"
),
),
migrations.RemoveIndex("Pony", "idx_pony_weight_pink"),
],
[],
)
def test_add_remove_constraint(self):
gt_constraint = models.CheckConstraint(
check=models.Q(pink__gt=2), name="constraint_pony_pink_gt_2"
)
self.assertOptimizesTo(
[
migrations.AddConstraint("Pony", gt_constraint),
migrations.RemoveConstraint("Pony", gt_constraint.name),
],
[],
)
self.assertDoesNotOptimize(
[
migrations.AddConstraint("Pony", gt_constraint),
migrations.RemoveConstraint("Pony", "other_name"),
],
)
def test_create_model_add_index(self):
self.assertOptimizesTo(
[
migrations.CreateModel(
name="Pony",
fields=[
("weight", models.IntegerField()),
("age", models.IntegerField()),
],
options={
"indexes": [models.Index(fields=["age"], name="idx_pony_age")],
},
),
migrations.AddIndex(
"Pony",
models.Index(fields=["weight"], name="idx_pony_weight"),
),
],
[
migrations.CreateModel(
name="Pony",
fields=[
("weight", models.IntegerField()),
("age", models.IntegerField()),
],
options={
"indexes": [
models.Index(fields=["age"], name="idx_pony_age"),
models.Index(fields=["weight"], name="idx_pony_weight"),
],
},
),
],
)
def test_create_model_remove_index(self):
self.assertOptimizesTo(
[
migrations.CreateModel(
name="Pony",
fields=[
("weight", models.IntegerField()),
("age", models.IntegerField()),
],
options={
"indexes": [
models.Index(fields=["age"], name="idx_pony_age"),
models.Index(fields=["weight"], name="idx_pony_weight"),
],
},
),
migrations.RemoveIndex("Pony", "idx_pony_age"),
],
[
migrations.CreateModel(
name="Pony",
fields=[
("weight", models.IntegerField()),
("age", models.IntegerField()),
],
options={
"indexes": [
models.Index(fields=["weight"], name="idx_pony_weight"),
],
},
),
],
)
def test_create_model_remove_index_together_rename_index(self):
self.assertOptimizesTo(
[
migrations.CreateModel(
name="Pony",
fields=[
("weight", models.IntegerField()),
("age", models.IntegerField()),
],
options={
"index_together": [("age", "weight")],
},
),
migrations.RenameIndex(
"Pony", new_name="idx_pony_age_weight", old_fields=("age", "weight")
),
],
[
migrations.CreateModel(
name="Pony",
fields=[
("weight", models.IntegerField()),
("age", models.IntegerField()),
],
options={
"indexes": [
models.Index(
fields=["age", "weight"], name="idx_pony_age_weight"
),
],
},
),
],
)
def test_create_model_index_together_rename_index(self):
self.assertOptimizesTo(
[
migrations.CreateModel(
name="Pony",
fields=[
("weight", models.IntegerField()),
("age", models.IntegerField()),
("height", models.IntegerField()),
("rank", models.IntegerField()),
],
options={
"index_together": [("age", "weight"), ("height", "rank")],
},
),
migrations.RenameIndex(
"Pony", new_name="idx_pony_age_weight", old_fields=("age", "weight")
),
],
[
migrations.CreateModel(
name="Pony",
fields=[
("weight", models.IntegerField()),
("age", models.IntegerField()),
("height", models.IntegerField()),
("rank", models.IntegerField()),
],
options={
"index_together": {("height", "rank")},
"indexes": [
models.Index(
fields=["age", "weight"], name="idx_pony_age_weight"
),
],
},
),
],
)
def test_create_model_rename_index_no_old_fields(self):
self.assertOptimizesTo(
[
migrations.CreateModel(
name="Pony",
fields=[
("weight", models.IntegerField()),
("age", models.IntegerField()),
],
options={
"indexes": [models.Index(fields=["age"], name="idx_pony_age")],
},
),
migrations.RenameIndex(
"Pony", new_name="idx_pony_age_new", old_name="idx_pony_age"
),
],
[
migrations.CreateModel(
name="Pony",
fields=[
("weight", models.IntegerField()),
("age", models.IntegerField()),
],
options={
"indexes": [models.Index(fields=["age"], name="idx_pony_age")],
},
),
migrations.RenameIndex(
"Pony", new_name="idx_pony_age_new", old_name="idx_pony_age"
),
],
)
|
33c7c290d1c38b8789f8fab12417a75d67bb31a8885c8e8bc41c7e294a6870fb | import datetime
import importlib
import io
import os
import shutil
import sys
from unittest import mock
from django.apps import apps
from django.core.management import CommandError, call_command
from django.db import (
ConnectionHandler,
DatabaseError,
OperationalError,
connection,
connections,
models,
)
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from django.db.backends.utils import truncate_name
from django.db.migrations.exceptions import InconsistentMigrationHistory
from django.db.migrations.recorder import MigrationRecorder
from django.test import TestCase, override_settings, skipUnlessDBFeature
from django.test.utils import captured_stdout
from django.utils import timezone
from django.utils.version import get_docs_version
from .models import UnicodeModel, UnserializableModel
from .routers import TestRouter
from .test_base import MigrationTestBase
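# True when the "black" formatter is on PATH; tests whose expected
# generated-file output depends on formatting consult this flag.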
HAS_BLACK = shutil.which("black")
class MigrateTests(MigrationTestBase):
"""
Tests running the migrate command.
"""
databases = {"default", "other"}
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_migrate(self):
"""
Tests basic usage of the migrate command.
"""
# No tables are created
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableNotExists("migrations_book")
# Run the migrations to 0001 only
stdout = io.StringIO()
call_command(
"migrate", "migrations", "0001", verbosity=2, stdout=stdout, no_color=True
)
stdout = stdout.getvalue()
self.assertIn(
"Target specific migration: 0001_initial, from migrations", stdout
)
self.assertIn("Applying migrations.0001_initial... OK", stdout)
self.assertIn("Running pre-migrate handlers for application migrations", stdout)
self.assertIn(
"Running post-migrate handlers for application migrations", stdout
)
# The correct tables exist
self.assertTableExists("migrations_author")
self.assertTableExists("migrations_tribble")
self.assertTableNotExists("migrations_book")
# Run migrations all the way
call_command("migrate", verbosity=0)
# The correct tables exist
self.assertTableExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableExists("migrations_book")
# Unmigrate everything
stdout = io.StringIO()
call_command(
"migrate", "migrations", "zero", verbosity=2, stdout=stdout, no_color=True
)
stdout = stdout.getvalue()
self.assertIn("Unapply all migrations: migrations", stdout)
self.assertIn("Unapplying migrations.0002_second... OK", stdout)
self.assertIn("Running pre-migrate handlers for application migrations", stdout)
self.assertIn(
"Running post-migrate handlers for application migrations", stdout
)
# Tables are gone
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableNotExists("migrations_book")
@override_settings(
INSTALLED_APPS=[
"django.contrib.auth",
"django.contrib.contenttypes",
"migrations.migrations_test_apps.migrated_app",
]
)
def test_migrate_with_system_checks(self):
out = io.StringIO()
call_command("migrate", skip_checks=False, no_color=True, stdout=out)
self.assertIn("Apply all migrations: migrated_app", out.getvalue())
@override_settings(
INSTALLED_APPS=[
"migrations",
"migrations.migrations_test_apps.unmigrated_app_syncdb",
]
)
def test_app_without_migrations(self):
msg = "App 'unmigrated_app_syncdb' does not have migrations."
with self.assertRaisesMessage(CommandError, msg):
call_command("migrate", app_label="unmigrated_app_syncdb")
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_clashing_prefix"}
)
def test_ambiguous_prefix(self):
msg = (
"More than one migration matches 'a' in app 'migrations'. Please "
"be more specific."
)
with self.assertRaisesMessage(CommandError, msg):
call_command("migrate", app_label="migrations", migration_name="a")
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_unknown_prefix(self):
msg = "Cannot find a migration matching 'nonexistent' from app 'migrations'."
with self.assertRaisesMessage(CommandError, msg):
call_command(
"migrate", app_label="migrations", migration_name="nonexistent"
)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_initial_false"}
)
def test_migrate_initial_false(self):
"""
`Migration.initial = False` skips fake-initial detection.
"""
# Make sure no tables are created
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
# Run the migrations to 0001 only
call_command("migrate", "migrations", "0001", verbosity=0)
# Fake rollback
call_command("migrate", "migrations", "zero", fake=True, verbosity=0)
# Make sure fake-initial detection does not run
with self.assertRaises(DatabaseError):
call_command(
"migrate", "migrations", "0001", fake_initial=True, verbosity=0
)
call_command("migrate", "migrations", "0001", fake=True, verbosity=0)
# Real rollback
call_command("migrate", "migrations", "zero", verbosity=0)
# Make sure it's all gone
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableNotExists("migrations_book")
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations"},
DATABASE_ROUTERS=["migrations.routers.TestRouter"],
)
def test_migrate_fake_initial(self):
"""
--fake-initial only works if all tables created in the initial
migration of an app exist. Database routers must be obeyed when doing
that check.
"""
# Make sure no tables are created
for db in self.databases:
self.assertTableNotExists("migrations_author", using=db)
self.assertTableNotExists("migrations_tribble", using=db)
try:
# Run the migrations to 0001 only
call_command("migrate", "migrations", "0001", verbosity=0)
call_command("migrate", "migrations", "0001", verbosity=0, database="other")
# Make sure the right tables exist
self.assertTableExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
# Also check the "other" database
self.assertTableNotExists("migrations_author", using="other")
self.assertTableExists("migrations_tribble", using="other")
# Fake a roll-back
call_command("migrate", "migrations", "zero", fake=True, verbosity=0)
call_command(
"migrate",
"migrations",
"zero",
fake=True,
verbosity=0,
database="other",
)
# Make sure the tables still exist
self.assertTableExists("migrations_author")
self.assertTableExists("migrations_tribble", using="other")
# Try to run initial migration
with self.assertRaises(DatabaseError):
call_command("migrate", "migrations", "0001", verbosity=0)
# Run initial migration with an explicit --fake-initial
out = io.StringIO()
with mock.patch(
"django.core.management.color.supports_color", lambda *args: False
):
call_command(
"migrate",
"migrations",
"0001",
fake_initial=True,
stdout=out,
verbosity=1,
)
call_command(
"migrate",
"migrations",
"0001",
fake_initial=True,
verbosity=0,
database="other",
)
self.assertIn("migrations.0001_initial... faked", out.getvalue().lower())
# Run migrations all the way.
call_command("migrate", verbosity=0)
call_command("migrate", verbosity=0, database="other")
self.assertTableExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableExists("migrations_book")
self.assertTableNotExists("migrations_author", using="other")
self.assertTableNotExists("migrations_tribble", using="other")
self.assertTableNotExists("migrations_book", using="other")
# Fake a roll-back.
call_command("migrate", "migrations", "zero", fake=True, verbosity=0)
call_command(
"migrate",
"migrations",
"zero",
fake=True,
verbosity=0,
database="other",
)
self.assertTableExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableExists("migrations_book")
# Run initial migration.
with self.assertRaises(DatabaseError):
call_command("migrate", "migrations", verbosity=0)
# Run initial migration with an explicit --fake-initial.
with self.assertRaises(DatabaseError):
# Fails because "migrations_tribble" does not exist but needs
# to in order to make --fake-initial work.
call_command("migrate", "migrations", fake_initial=True, verbosity=0)
# Fake an apply.
call_command("migrate", "migrations", fake=True, verbosity=0)
call_command(
"migrate", "migrations", fake=True, verbosity=0, database="other"
)
finally:
# Unmigrate everything.
call_command("migrate", "migrations", "zero", verbosity=0)
call_command("migrate", "migrations", "zero", verbosity=0, database="other")
# Make sure it's all gone
for db in self.databases:
self.assertTableNotExists("migrations_author", using=db)
self.assertTableNotExists("migrations_tribble", using=db)
self.assertTableNotExists("migrations_book", using=db)
@skipUnlessDBFeature("ignores_table_name_case")
def test_migrate_fake_initial_case_insensitive(self):
with override_settings(
MIGRATION_MODULES={
"migrations": "migrations.test_fake_initial_case_insensitive.initial",
}
):
call_command("migrate", "migrations", "0001", verbosity=0)
call_command("migrate", "migrations", "zero", fake=True, verbosity=0)
with override_settings(
MIGRATION_MODULES={
"migrations": (
"migrations.test_fake_initial_case_insensitive.fake_initial"
),
}
):
out = io.StringIO()
call_command(
"migrate",
"migrations",
"0001",
fake_initial=True,
stdout=out,
verbosity=1,
no_color=True,
)
self.assertIn(
"migrations.0001_initial... faked",
out.getvalue().lower(),
)
@override_settings(
MIGRATION_MODULES={
"migrations": "migrations.test_migrations_fake_split_initial"
}
)
def test_migrate_fake_split_initial(self):
"""
Split initial migrations can be faked with --fake-initial.
"""
try:
call_command("migrate", "migrations", "0002", verbosity=0)
call_command("migrate", "migrations", "zero", fake=True, verbosity=0)
out = io.StringIO()
with mock.patch(
"django.core.management.color.supports_color", lambda *args: False
):
call_command(
"migrate",
"migrations",
"0002",
fake_initial=True,
stdout=out,
verbosity=1,
)
value = out.getvalue().lower()
self.assertIn("migrations.0001_initial... faked", value)
self.assertIn("migrations.0002_second... faked", value)
finally:
# Fake an apply.
call_command("migrate", "migrations", fake=True, verbosity=0)
# Unmigrate everything.
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_conflict"}
)
def test_migrate_conflict_exit(self):
"""
migrate exits if it detects a conflict.
"""
msg = (
"Conflicting migrations detected; multiple leaf nodes in the "
"migration graph: (0002_conflicting_second, 0002_second in "
"migrations).\n"
"To fix them run 'python manage.py makemigrations --merge'"
)
with self.assertRaisesMessage(CommandError, msg):
call_command("migrate", "migrations")
@override_settings(
MIGRATION_MODULES={
"migrations": "migrations.test_migrations",
}
)
def test_migrate_check(self):
with self.assertRaises(SystemExit):
call_command("migrate", "migrations", "0001", check_unapplied=True)
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableNotExists("migrations_book")
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.migrated_app",
]
)
def test_migrate_check_migrated_app(self):
out = io.StringIO()
try:
call_command("migrate", "migrated_app", verbosity=0)
call_command(
"migrate",
"migrated_app",
stdout=out,
check_unapplied=True,
)
self.assertEqual(out.getvalue(), "")
finally:
# Unmigrate everything.
call_command("migrate", "migrated_app", "zero", verbosity=0)
@override_settings(
MIGRATION_MODULES={
"migrations": "migrations.test_migrations_plan",
}
)
def test_migrate_check_plan(self):
out = io.StringIO()
with self.assertRaises(SystemExit):
call_command(
"migrate",
"migrations",
"0001",
check_unapplied=True,
plan=True,
stdout=out,
no_color=True,
)
self.assertEqual(
"Planned operations:\n"
"migrations.0001_initial\n"
" Create model Salamander\n"
" Raw Python operation -> Grow salamander tail.\n",
out.getvalue(),
)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_showmigrations_list(self):
"""
showmigrations --list displays migrations and whether or not they're
applied.
"""
out = io.StringIO()
with mock.patch(
"django.core.management.color.supports_color", lambda *args: True
):
call_command(
"showmigrations", format="list", stdout=out, verbosity=0, no_color=False
)
self.assertEqual(
"\x1b[1mmigrations\n\x1b[0m [ ] 0001_initial\n [ ] 0002_second\n",
out.getvalue().lower(),
)
call_command("migrate", "migrations", "0001", verbosity=0)
out = io.StringIO()
# Passing an explicit app_label exercises the command's selective `show_list` handling.
call_command(
"showmigrations",
"migrations",
format="list",
stdout=out,
verbosity=0,
no_color=True,
)
self.assertEqual(
"migrations\n [x] 0001_initial\n [ ] 0002_second\n", out.getvalue().lower()
)
out = io.StringIO()
# Applied datetimes are displayed at verbosity 2+.
call_command(
"showmigrations", "migrations", stdout=out, verbosity=2, no_color=True
)
migration1 = MigrationRecorder(connection).migration_qs.get(
app="migrations", name="0001_initial"
)
self.assertEqual(
"migrations\n"
" [x] 0001_initial (applied at %s)\n"
" [ ] 0002_second\n" % migration1.applied.strftime("%Y-%m-%d %H:%M:%S"),
out.getvalue().lower(),
)
# Cleanup by unmigrating everything
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
)
def test_showmigrations_list_squashed(self):
out = io.StringIO()
call_command(
"showmigrations", format="list", stdout=out, verbosity=2, no_color=True
)
self.assertEqual(
"migrations\n [ ] 0001_squashed_0002 (2 squashed migrations)\n",
out.getvalue().lower(),
)
out = io.StringIO()
call_command(
"migrate",
"migrations",
"0001_squashed_0002",
stdout=out,
verbosity=2,
no_color=True,
)
try:
self.assertIn(
"operations to perform:\n"
" target specific migration: 0001_squashed_0002, from migrations\n"
"running pre-migrate handlers for application migrations\n"
"running migrations:\n"
" applying migrations.0001_squashed_0002... ok (",
out.getvalue().lower(),
)
out = io.StringIO()
call_command(
"showmigrations", format="list", stdout=out, verbosity=2, no_color=True
)
self.assertEqual(
"migrations\n [x] 0001_squashed_0002 (2 squashed migrations)\n",
out.getvalue().lower(),
)
finally:
# Unmigrate everything.
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_run_before"}
)
def test_showmigrations_plan(self):
"""
Tests the --plan output of the showmigrations command.
"""
out = io.StringIO()
call_command("showmigrations", format="plan", stdout=out)
self.assertEqual(
"[ ] migrations.0001_initial\n"
"[ ] migrations.0003_third\n"
"[ ] migrations.0002_second\n",
out.getvalue().lower(),
)
out = io.StringIO()
call_command("showmigrations", format="plan", stdout=out, verbosity=2)
self.assertEqual(
"[ ] migrations.0001_initial\n"
"[ ] migrations.0003_third ... (migrations.0001_initial)\n"
"[ ] migrations.0002_second ... (migrations.0001_initial, "
"migrations.0003_third)\n",
out.getvalue().lower(),
)
call_command("migrate", "migrations", "0003", verbosity=0)
out = io.StringIO()
call_command("showmigrations", format="plan", stdout=out)
self.assertEqual(
"[x] migrations.0001_initial\n"
"[x] migrations.0003_third\n"
"[ ] migrations.0002_second\n",
out.getvalue().lower(),
)
out = io.StringIO()
call_command("showmigrations", format="plan", stdout=out, verbosity=2)
self.assertEqual(
"[x] migrations.0001_initial\n"
"[x] migrations.0003_third ... (migrations.0001_initial)\n"
"[ ] migrations.0002_second ... (migrations.0001_initial, "
"migrations.0003_third)\n",
out.getvalue().lower(),
)
# Cleanup by unmigrating everything
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_plan"}
)
def test_migrate_plan(self):
"""Tests migrate --plan output."""
out = io.StringIO()
# Show the plan up to the third migration.
call_command(
"migrate", "migrations", "0003", plan=True, stdout=out, no_color=True
)
self.assertEqual(
"Planned operations:\n"
"migrations.0001_initial\n"
" Create model Salamander\n"
" Raw Python operation -> Grow salamander tail.\n"
"migrations.0002_second\n"
" Create model Book\n"
" Raw SQL operation -> ['SELECT * FROM migrations_book']\n"
"migrations.0003_third\n"
" Create model Author\n"
" Raw SQL operation -> ['SELECT * FROM migrations_author']\n",
out.getvalue(),
)
try:
# Migrate to the third migration.
call_command("migrate", "migrations", "0003", verbosity=0)
out = io.StringIO()
# Show the plan for when there is nothing to apply.
call_command(
"migrate", "migrations", "0003", plan=True, stdout=out, no_color=True
)
self.assertEqual(
"Planned operations:\n No planned migration operations.\n",
out.getvalue(),
)
out = io.StringIO()
# Show the plan for reverse migration back to 0001.
call_command(
"migrate", "migrations", "0001", plan=True, stdout=out, no_color=True
)
self.assertEqual(
"Planned operations:\n"
"migrations.0003_third\n"
" Undo Create model Author\n"
" Raw SQL operation -> ['SELECT * FROM migrations_book']\n"
"migrations.0002_second\n"
" Undo Create model Book\n"
" Raw SQL operation -> ['SELECT * FROM migrations_salamand…\n",
out.getvalue(),
)
out = io.StringIO()
# Show the migration plan to fourth, with truncated details.
call_command(
"migrate", "migrations", "0004", plan=True, stdout=out, no_color=True
)
self.assertEqual(
"Planned operations:\n"
"migrations.0004_fourth\n"
" Raw SQL operation -> SELECT * FROM migrations_author WHE…\n",
out.getvalue(),
)
# Show the plan when an operation is irreversible.
# Migrate to the fourth migration.
call_command("migrate", "migrations", "0004", verbosity=0)
out = io.StringIO()
call_command(
"migrate", "migrations", "0003", plan=True, stdout=out, no_color=True
)
self.assertEqual(
"Planned operations:\n"
"migrations.0004_fourth\n"
" Raw SQL operation -> IRREVERSIBLE\n",
out.getvalue(),
)
out = io.StringIO()
call_command(
"migrate", "migrations", "0005", plan=True, stdout=out, no_color=True
)
# Operation is marked as irreversible only in the revert plan.
self.assertEqual(
"Planned operations:\n"
"migrations.0005_fifth\n"
" Raw Python operation\n"
" Raw Python operation\n"
" Raw Python operation -> Feed salamander.\n",
out.getvalue(),
)
call_command("migrate", "migrations", "0005", verbosity=0)
out = io.StringIO()
call_command(
"migrate", "migrations", "0004", plan=True, stdout=out, no_color=True
)
self.assertEqual(
"Planned operations:\n"
"migrations.0005_fifth\n"
" Raw Python operation -> IRREVERSIBLE\n"
" Raw Python operation -> IRREVERSIBLE\n"
" Raw Python operation\n",
out.getvalue(),
)
finally:
# Cleanup by unmigrating everything: fake the irreversible, then
# migrate all to zero.
call_command("migrate", "migrations", "0003", fake=True, verbosity=0)
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_empty"}
)
def test_showmigrations_no_migrations(self):
out = io.StringIO()
call_command("showmigrations", stdout=out, no_color=True)
self.assertEqual("migrations\n (no migrations)\n", out.getvalue().lower())
@override_settings(
INSTALLED_APPS=["migrations.migrations_test_apps.unmigrated_app"]
)
def test_showmigrations_unmigrated_app(self):
out = io.StringIO()
call_command("showmigrations", "unmigrated_app", stdout=out, no_color=True)
try:
self.assertEqual(
"unmigrated_app\n (no migrations)\n", out.getvalue().lower()
)
finally:
# unmigrated_app.SillyModel has a foreign key to
# 'migrations.Tribble', but that model is only defined in a
# migration, so the global app registry never sees it and the
# reference is left dangling. Remove it to avoid problems in
# subsequent tests.
apps._pending_operations.pop(("migrations", "tribble"), None)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_empty"}
)
def test_showmigrations_plan_no_migrations(self):
"""
Tests the --plan output of the showmigrations command without migrations.
"""
out = io.StringIO()
call_command("showmigrations", format="plan", stdout=out, no_color=True)
self.assertEqual("(no migrations)\n", out.getvalue().lower())
out = io.StringIO()
call_command(
"showmigrations", format="plan", stdout=out, verbosity=2, no_color=True
)
self.assertEqual("(no migrations)\n", out.getvalue().lower())
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed_complex"}
)
def test_showmigrations_plan_squashed(self):
"""
Tests the --plan output of the showmigrations command with squashed migrations.
"""
out = io.StringIO()
call_command("showmigrations", format="plan", stdout=out)
self.assertEqual(
"[ ] migrations.1_auto\n"
"[ ] migrations.2_auto\n"
"[ ] migrations.3_squashed_5\n"
"[ ] migrations.6_auto\n"
"[ ] migrations.7_auto\n",
out.getvalue().lower(),
)
out = io.StringIO()
call_command("showmigrations", format="plan", stdout=out, verbosity=2)
self.assertEqual(
"[ ] migrations.1_auto\n"
"[ ] migrations.2_auto ... (migrations.1_auto)\n"
"[ ] migrations.3_squashed_5 ... (migrations.2_auto)\n"
"[ ] migrations.6_auto ... (migrations.3_squashed_5)\n"
"[ ] migrations.7_auto ... (migrations.6_auto)\n",
out.getvalue().lower(),
)
call_command("migrate", "migrations", "3_squashed_5", verbosity=0)
out = io.StringIO()
call_command("showmigrations", format="plan", stdout=out)
self.assertEqual(
"[x] migrations.1_auto\n"
"[x] migrations.2_auto\n"
"[x] migrations.3_squashed_5\n"
"[ ] migrations.6_auto\n"
"[ ] migrations.7_auto\n",
out.getvalue().lower(),
)
out = io.StringIO()
call_command("showmigrations", format="plan", stdout=out, verbosity=2)
self.assertEqual(
"[x] migrations.1_auto\n"
"[x] migrations.2_auto ... (migrations.1_auto)\n"
"[x] migrations.3_squashed_5 ... (migrations.2_auto)\n"
"[ ] migrations.6_auto ... (migrations.3_squashed_5)\n"
"[ ] migrations.7_auto ... (migrations.6_auto)\n",
out.getvalue().lower(),
)
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.mutate_state_b",
"migrations.migrations_test_apps.alter_fk.author_app",
"migrations.migrations_test_apps.alter_fk.book_app",
]
)
def test_showmigrations_plan_single_app_label(self):
"""
`showmigrations --plan app_label` output with a single app_label.
"""
# Single app with no dependencies on other apps.
out = io.StringIO()
call_command("showmigrations", "mutate_state_b", format="plan", stdout=out)
self.assertEqual(
"[ ] mutate_state_b.0001_initial\n[ ] mutate_state_b.0002_add_field\n",
out.getvalue(),
)
# Single app with dependencies.
out = io.StringIO()
call_command("showmigrations", "author_app", format="plan", stdout=out)
self.assertEqual(
"[ ] author_app.0001_initial\n"
"[ ] book_app.0001_initial\n"
"[ ] author_app.0002_alter_id\n",
out.getvalue(),
)
# Some migrations already applied.
call_command("migrate", "author_app", "0001", verbosity=0)
out = io.StringIO()
call_command("showmigrations", "author_app", format="plan", stdout=out)
self.assertEqual(
"[X] author_app.0001_initial\n"
"[ ] book_app.0001_initial\n"
"[ ] author_app.0002_alter_id\n",
out.getvalue(),
)
# Cleanup by unmigrating author_app.
call_command("migrate", "author_app", "zero", verbosity=0)
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.mutate_state_b",
"migrations.migrations_test_apps.alter_fk.author_app",
"migrations.migrations_test_apps.alter_fk.book_app",
]
)
def test_showmigrations_plan_multiple_app_labels(self):
"""
`showmigrations --plan app_label` output with multiple app_labels.
"""
# Multiple apps: author_app depends on book_app; mutate_state_b doesn't
# depend on other apps.
out = io.StringIO()
call_command(
"showmigrations", "mutate_state_b", "author_app", format="plan", stdout=out
)
self.assertEqual(
"[ ] author_app.0001_initial\n"
"[ ] book_app.0001_initial\n"
"[ ] author_app.0002_alter_id\n"
"[ ] mutate_state_b.0001_initial\n"
"[ ] mutate_state_b.0002_add_field\n",
out.getvalue(),
)
# Multiple apps: args order shouldn't matter (the same result is
# expected as above).
out = io.StringIO()
call_command(
"showmigrations", "author_app", "mutate_state_b", format="plan", stdout=out
)
self.assertEqual(
"[ ] author_app.0001_initial\n"
"[ ] book_app.0001_initial\n"
"[ ] author_app.0002_alter_id\n"
"[ ] mutate_state_b.0001_initial\n"
"[ ] mutate_state_b.0002_add_field\n",
out.getvalue(),
)
@override_settings(
INSTALLED_APPS=["migrations.migrations_test_apps.unmigrated_app"]
)
def test_showmigrations_plan_app_label_no_migrations(self):
out = io.StringIO()
call_command(
"showmigrations", "unmigrated_app", format="plan", stdout=out, no_color=True
)
try:
self.assertEqual("(no migrations)\n", out.getvalue())
finally:
# unmigrated_app.SillyModel has a foreign key to
# 'migrations.Tribble', but that model is only defined in a
# migration, so the global app registry never sees it and the
# reference is left dangling. Remove it to avoid problems in
# subsequent tests.
apps._pending_operations.pop(("migrations", "tribble"), None)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_sqlmigrate_forwards(self):
"""
        sqlmigrate outputs forward-looking SQL.
"""
out = io.StringIO()
call_command("sqlmigrate", "migrations", "0001", stdout=out)
lines = out.getvalue().splitlines()
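        # On backends with transactional DDL, sqlmigrate wraps its output in
        # the backend's transaction SQL (typically "BEGIN;" ... "COMMIT;"), so
        # strip the wrapper before inspecting the body.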
if connection.features.can_rollback_ddl:
self.assertEqual(lines[0], connection.ops.start_transaction_sql())
self.assertEqual(lines[-1], connection.ops.end_transaction_sql())
lines = lines[1:-1]
self.assertEqual(
lines[:3],
[
"--",
"-- Create model Author",
"--",
],
)
self.assertIn(
"create table %s" % connection.ops.quote_name("migrations_author").lower(),
lines[3].lower(),
)
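        # Each operation's SQL is preceded by a "--" / "-- <description>" /
        # "--" comment header; searching for the next "--" line finds where
        # the following operation starts.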
pos = lines.index("--", 3)
self.assertEqual(
lines[pos : pos + 3],
[
"--",
"-- Create model Tribble",
"--",
],
)
self.assertIn(
"create table %s" % connection.ops.quote_name("migrations_tribble").lower(),
lines[pos + 3].lower(),
)
pos = lines.index("--", pos + 3)
self.assertEqual(
lines[pos : pos + 3],
[
"--",
"-- Add field bool to tribble",
"--",
],
)
pos = lines.index("--", pos + 3)
self.assertEqual(
lines[pos : pos + 3],
[
"--",
"-- Alter unique_together for author (1 constraint(s))",
"--",
],
)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_sqlmigrate_backwards(self):
"""
        sqlmigrate outputs reverse-looking SQL.
"""
# Cannot generate the reverse SQL unless we've applied the migration.
call_command("migrate", "migrations", verbosity=0)
out = io.StringIO()
call_command("sqlmigrate", "migrations", "0001", stdout=out, backwards=True)
lines = out.getvalue().splitlines()
try:
if connection.features.can_rollback_ddl:
self.assertEqual(lines[0], connection.ops.start_transaction_sql())
self.assertEqual(lines[-1], connection.ops.end_transaction_sql())
lines = lines[1:-1]
self.assertEqual(
lines[:3],
[
"--",
"-- Alter unique_together for author (1 constraint(s))",
"--",
],
)
pos = lines.index("--", 3)
self.assertEqual(
lines[pos : pos + 3],
[
"--",
"-- Add field bool to tribble",
"--",
],
)
pos = lines.index("--", pos + 3)
self.assertEqual(
lines[pos : pos + 3],
[
"--",
"-- Create model Tribble",
"--",
],
)
next_pos = lines.index("--", pos + 3)
drop_table_sql = (
"drop table %s"
% connection.ops.quote_name("migrations_tribble").lower()
)
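            # for/else: the else branch runs only when the loop finishes
            # without a break, i.e. no line contained the DROP TABLE
            # statement.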
for line in lines[pos + 3 : next_pos]:
if drop_table_sql in line.lower():
break
else:
self.fail("DROP TABLE (tribble) not found.")
pos = next_pos
self.assertEqual(
lines[pos : pos + 3],
[
"--",
"-- Create model Author",
"--",
],
)
drop_table_sql = (
"drop table %s" % connection.ops.quote_name("migrations_author").lower()
)
for line in lines[pos + 3 :]:
if drop_table_sql in line.lower():
break
else:
self.fail("DROP TABLE (author) not found.")
finally:
# Unmigrate everything.
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_non_atomic"}
)
def test_sqlmigrate_for_non_atomic_migration(self):
"""
Transaction wrappers aren't shown for non-atomic migrations.
"""
out = io.StringIO()
call_command("sqlmigrate", "migrations", "0001", stdout=out)
output = out.getvalue().lower()
queries = [q.strip() for q in output.splitlines()]
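        # start_transaction_sql() may be an empty string on some backends;
        # only check for the wrapper when there is one to look for.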
if connection.ops.start_transaction_sql():
self.assertNotIn(connection.ops.start_transaction_sql().lower(), queries)
self.assertNotIn(connection.ops.end_transaction_sql().lower(), queries)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_sqlmigrate_for_non_transactional_databases(self):
"""
Transaction wrappers aren't shown for databases that don't support
transactional DDL.
"""
out = io.StringIO()
with mock.patch.object(connection.features, "can_rollback_ddl", False):
call_command("sqlmigrate", "migrations", "0001", stdout=out)
output = out.getvalue().lower()
queries = [q.strip() for q in output.splitlines()]
start_transaction_sql = connection.ops.start_transaction_sql()
if start_transaction_sql:
self.assertNotIn(start_transaction_sql.lower(), queries)
self.assertNotIn(connection.ops.end_transaction_sql().lower(), queries)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
)
def test_sqlmigrate_ambiguous_prefix_squashed_migrations(self):
msg = (
"More than one migration matches '0001' in app 'migrations'. "
"Please be more specific."
)
with self.assertRaisesMessage(CommandError, msg):
call_command("sqlmigrate", "migrations", "0001")
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
)
def test_sqlmigrate_squashed_migration(self):
out = io.StringIO()
call_command("sqlmigrate", "migrations", "0001_squashed_0002", stdout=out)
output = out.getvalue().lower()
self.assertIn("-- create model author", output)
self.assertIn("-- create model book", output)
self.assertNotIn("-- create model tribble", output)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
)
def test_sqlmigrate_replaced_migration(self):
out = io.StringIO()
call_command("sqlmigrate", "migrations", "0001_initial", stdout=out)
output = out.getvalue().lower()
self.assertIn("-- create model author", output)
self.assertIn("-- create model tribble", output)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_no_operations"}
)
def test_sqlmigrate_no_operations(self):
err = io.StringIO()
call_command("sqlmigrate", "migrations", "0001_initial", stderr=err)
self.assertEqual(err.getvalue(), "No operations found.\n")
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_noop"}
)
def test_sqlmigrate_noop(self):
out = io.StringIO()
call_command("sqlmigrate", "migrations", "0001", stdout=out)
lines = out.getvalue().splitlines()
if connection.features.can_rollback_ddl:
lines = lines[1:-1]
self.assertEqual(
lines,
[
"--",
"-- Raw SQL operation",
"--",
"-- (no-op)",
],
)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_manual_porting"}
)
def test_sqlmigrate_unrepresentable(self):
out = io.StringIO()
call_command("sqlmigrate", "migrations", "0002", stdout=out)
lines = out.getvalue().splitlines()
if connection.features.can_rollback_ddl:
lines = lines[1:-1]
self.assertEqual(
lines,
[
"--",
"-- Raw Python operation",
"--",
"-- THIS OPERATION CANNOT BE WRITTEN AS SQL",
],
)
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.migrated_app",
"migrations.migrations_test_apps.migrated_unapplied_app",
"migrations.migrations_test_apps.unmigrated_app",
],
)
def test_regression_22823_unmigrated_fk_to_migrated_model(self):
"""
Assuming you have 3 apps, `A`, `B`, and `C`, such that:
* `A` has migrations
* `B` has a migration we want to apply
* `C` has no migrations, but has an FK to `A`
When we try to migrate "B", an exception occurs because the
"B" was not included in the ProjectState that is used to detect
soft-applied migrations (#22823).
"""
call_command("migrate", "migrated_unapplied_app", verbosity=0)
# unmigrated_app.SillyModel has a foreign key to 'migrations.Tribble',
# but that model is only defined in a migration, so the global app
# registry never sees it and the reference is left dangling. Remove it
# to avoid problems in subsequent tests.
apps._pending_operations.pop(("migrations", "tribble"), None)
@override_settings(
INSTALLED_APPS=["migrations.migrations_test_apps.unmigrated_app_syncdb"]
)
def test_migrate_syncdb_deferred_sql_executed_with_schemaeditor(self):
"""
For an app without migrations, editor.execute() is used for executing
the syncdb deferred SQL.
"""
stdout = io.StringIO()
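        # Patch SchemaEditor.execute() so that every statement the editor
        # issues (including deferred SQL such as foreign key index creation)
        # is recorded rather than run.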
with mock.patch.object(BaseDatabaseSchemaEditor, "execute") as execute:
call_command(
"migrate", run_syncdb=True, verbosity=1, stdout=stdout, no_color=True
)
create_table_count = len(
[call for call in execute.mock_calls if "CREATE TABLE" in str(call)]
)
self.assertEqual(create_table_count, 2)
# There's at least one deferred SQL for creating the foreign key
# index.
self.assertGreater(len(execute.mock_calls), 2)
stdout = stdout.getvalue()
self.assertIn("Synchronize unmigrated apps: unmigrated_app_syncdb", stdout)
self.assertIn("Creating tables...", stdout)
table_name = truncate_name(
"unmigrated_app_syncdb_classroom", connection.ops.max_name_length()
)
self.assertIn("Creating table %s" % table_name, stdout)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_migrate_syncdb_app_with_migrations(self):
msg = "Can't use run_syncdb with app 'migrations' as it has migrations."
with self.assertRaisesMessage(CommandError, msg):
call_command("migrate", "migrations", run_syncdb=True, verbosity=0)
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.unmigrated_app_syncdb",
"migrations.migrations_test_apps.unmigrated_app_simple",
]
)
def test_migrate_syncdb_app_label(self):
"""
Running migrate --run-syncdb with an app_label only creates tables for
the specified app.
"""
stdout = io.StringIO()
with mock.patch.object(BaseDatabaseSchemaEditor, "execute") as execute:
call_command(
"migrate", "unmigrated_app_syncdb", run_syncdb=True, stdout=stdout
)
create_table_count = len(
[call for call in execute.mock_calls if "CREATE TABLE" in str(call)]
)
self.assertEqual(create_table_count, 2)
self.assertGreater(len(execute.mock_calls), 2)
self.assertIn(
"Synchronize unmigrated app: unmigrated_app_syncdb", stdout.getvalue()
)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
)
def test_migrate_record_replaced(self):
"""
Running a single squashed migration should record all of the original
replaced migrations as run.
"""
recorder = MigrationRecorder(connection)
out = io.StringIO()
call_command("migrate", "migrations", verbosity=0)
call_command("showmigrations", "migrations", stdout=out, no_color=True)
self.assertEqual(
"migrations\n [x] 0001_squashed_0002 (2 squashed migrations)\n",
out.getvalue().lower(),
)
applied_migrations = recorder.applied_migrations()
self.assertIn(("migrations", "0001_initial"), applied_migrations)
self.assertIn(("migrations", "0002_second"), applied_migrations)
self.assertIn(("migrations", "0001_squashed_0002"), applied_migrations)
# Rollback changes
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
)
def test_migrate_record_squashed(self):
"""
        Running migrate for a squashed migration should record it as run
        if all of the replaced migrations have been run (#25231).
"""
recorder = MigrationRecorder(connection)
recorder.record_applied("migrations", "0001_initial")
recorder.record_applied("migrations", "0002_second")
out = io.StringIO()
call_command("showmigrations", "migrations", stdout=out, no_color=True)
self.assertEqual(
"migrations\n"
" [-] 0001_squashed_0002 (2 squashed migrations) "
"run 'manage.py migrate' to finish recording.\n",
out.getvalue().lower(),
)
out = io.StringIO()
call_command("migrate", "migrations", verbosity=0)
call_command("showmigrations", "migrations", stdout=out, no_color=True)
self.assertEqual(
"migrations\n [x] 0001_squashed_0002 (2 squashed migrations)\n",
out.getvalue().lower(),
)
self.assertIn(
("migrations", "0001_squashed_0002"), recorder.applied_migrations()
)
# No changes were actually applied so there is nothing to rollback
def test_migrate_partially_applied_squashed_migration(self):
"""
Migrating to a squashed migration specified by name should succeed
even if it is partially applied.
"""
with self.temporary_migration_module(module="migrations.test_migrations"):
recorder = MigrationRecorder(connection)
try:
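                # Apply only the first migration, then squash 0001-0002, so
                # the squashed migration ends up partially applied.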
call_command("migrate", "migrations", "0001_initial", verbosity=0)
call_command(
"squashmigrations",
"migrations",
"0002",
interactive=False,
verbosity=0,
)
call_command(
"migrate",
"migrations",
"0001_squashed_0002_second",
verbosity=0,
)
applied_migrations = recorder.applied_migrations()
self.assertIn(("migrations", "0002_second"), applied_migrations)
finally:
# Unmigrate everything.
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
)
def test_migrate_backward_to_squashed_migration(self):
try:
call_command("migrate", "migrations", "0001_squashed_0002", verbosity=0)
self.assertTableExists("migrations_author")
self.assertTableExists("migrations_book")
call_command("migrate", "migrations", "0001_initial", verbosity=0)
self.assertTableExists("migrations_author")
self.assertTableNotExists("migrations_book")
finally:
# Unmigrate everything.
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_migrate_inconsistent_history(self):
"""
Running migrate with some migrations applied before their dependencies
should not be allowed.
"""
recorder = MigrationRecorder(connection)
recorder.record_applied("migrations", "0002_second")
msg = (
"Migration migrations.0002_second is applied before its dependency "
"migrations.0001_initial"
)
with self.assertRaisesMessage(InconsistentMigrationHistory, msg):
call_command("migrate")
applied_migrations = recorder.applied_migrations()
self.assertNotIn(("migrations", "0001_initial"), applied_migrations)
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.migrated_unapplied_app",
"migrations.migrations_test_apps.migrated_app",
]
)
def test_migrate_not_reflected_changes(self):
class NewModel1(models.Model):
class Meta:
app_label = "migrated_app"
class NewModel2(models.Model):
class Meta:
app_label = "migrated_unapplied_app"
out = io.StringIO()
try:
call_command("migrate", verbosity=0)
call_command("migrate", stdout=out, no_color=True)
self.assertEqual(
"operations to perform:\n"
" apply all migrations: migrated_app, migrated_unapplied_app\n"
"running migrations:\n"
" no migrations to apply.\n"
" your models in app(s): 'migrated_app', "
"'migrated_unapplied_app' have changes that are not yet "
"reflected in a migration, and so won't be applied.\n"
" run 'manage.py makemigrations' to make new migrations, and "
"then re-run 'manage.py migrate' to apply them.\n",
out.getvalue().lower(),
)
finally:
# Unmigrate everything.
call_command("migrate", "migrated_app", "zero", verbosity=0)
call_command("migrate", "migrated_unapplied_app", "zero", verbosity=0)
@override_settings(
MIGRATION_MODULES={
"migrations": "migrations.test_migrations_squashed_no_replaces",
}
)
def test_migrate_prune(self):
"""
With prune=True, references to migration files deleted from the
migrations module (such as after being squashed) are removed from the
django_migrations table.
"""
recorder = MigrationRecorder(connection)
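        # Simulate a post-squash history: the replaced migrations and the
        # squashed migration are all recorded as applied, but only the
        # squashed migration (without a "replaces" attribute) exists on disk.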
recorder.record_applied("migrations", "0001_initial")
recorder.record_applied("migrations", "0002_second")
recorder.record_applied("migrations", "0001_squashed_0002")
out = io.StringIO()
try:
call_command("migrate", "migrations", prune=True, stdout=out, no_color=True)
self.assertEqual(
out.getvalue(),
"Pruning migrations:\n"
" Pruning migrations.0001_initial OK\n"
" Pruning migrations.0002_second OK\n",
)
applied_migrations = [
migration
for migration in recorder.applied_migrations()
if migration[0] == "migrations"
]
self.assertEqual(applied_migrations, [("migrations", "0001_squashed_0002")])
finally:
recorder.record_unapplied("migrations", "0001_initial")
recorder.record_unapplied("migrations", "0001_second")
recorder.record_unapplied("migrations", "0001_squashed_0002")
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
)
def test_prune_deleted_squashed_migrations_in_replaces(self):
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_squashed"
) as migration_dir:
try:
call_command("migrate", "migrations", verbosity=0)
# Delete the replaced migrations.
os.remove(os.path.join(migration_dir, "0001_initial.py"))
os.remove(os.path.join(migration_dir, "0002_second.py"))
# --prune cannot be used before removing the "replaces"
# attribute.
call_command(
"migrate",
"migrations",
prune=True,
stdout=out,
no_color=True,
)
self.assertEqual(
out.getvalue(),
"Pruning migrations:\n"
" Cannot use --prune because the following squashed "
"migrations have their 'replaces' attributes and may not "
"be recorded as applied:\n"
" migrations.0001_squashed_0002\n"
" Re-run 'manage.py migrate' if they are not marked as "
"applied, and remove 'replaces' attributes in their "
"Migration classes.\n",
)
finally:
# Unmigrate everything.
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
)
def test_prune_no_migrations_to_prune(self):
out = io.StringIO()
call_command("migrate", "migrations", prune=True, stdout=out, no_color=True)
self.assertEqual(
out.getvalue(),
"Pruning migrations:\n No migrations to prune.\n",
)
out = io.StringIO()
call_command(
"migrate",
"migrations",
prune=True,
stdout=out,
no_color=True,
verbosity=0,
)
self.assertEqual(out.getvalue(), "")
def test_prune_no_app_label(self):
msg = "Migrations can be pruned only when an app is specified."
with self.assertRaisesMessage(CommandError, msg):
call_command("migrate", prune=True)


class MakeMigrationsTests(MigrationTestBase):
"""
Tests running the makemigrations command.
"""
def setUp(self):
super().setUp()
self._old_models = apps.app_configs["migrations"].models.copy()
def tearDown(self):
apps.app_configs["migrations"].models = self._old_models
apps.all_models["migrations"] = self._old_models
apps.clear_cache()
super().tearDown()
def test_files_content(self):
self.assertTableNotExists("migrations_unicodemodel")
apps.register_model("migrations", UnicodeModel)
with self.temporary_migration_module() as migration_dir:
call_command("makemigrations", "migrations", verbosity=0)
# Check for empty __init__.py file in migrations folder
init_file = os.path.join(migration_dir, "__init__.py")
self.assertTrue(os.path.exists(init_file))
with open(init_file) as fp:
content = fp.read()
self.assertEqual(content, "")
# Check for existing 0001_initial.py file in migration folder
initial_file = os.path.join(migration_dir, "0001_initial.py")
self.assertTrue(os.path.exists(initial_file))
with open(initial_file, encoding="utf-8") as fp:
content = fp.read()
self.assertIn("migrations.CreateModel", content)
self.assertIn("initial = True", content)
self.assertIn("úñí©óðé µóðéø", content) # Meta.verbose_name
self.assertIn("úñí©óðé µóðéøß", content) # Meta.verbose_name_plural
self.assertIn("ÚÑÍ¢ÓÐÉ", content) # title.verbose_name
self.assertIn("“Ðjáñgó”", content) # title.default
def test_makemigrations_order(self):
"""
makemigrations should recognize number-only migrations (0001.py).
"""
module = "migrations.test_migrations_order"
with self.temporary_migration_module(module=module) as migration_dir:
if hasattr(importlib, "invalidate_caches"):
# importlib caches os.listdir() on some platforms like macOS
# (#23850).
importlib.invalidate_caches()
call_command(
"makemigrations", "migrations", "--empty", "-n", "a", "-v", "0"
)
self.assertTrue(os.path.exists(os.path.join(migration_dir, "0002_a.py")))
def test_makemigrations_empty_connections(self):
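        # A database dict without an ENGINE falls back to the dummy backend,
        # as if no database were configured at all.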
empty_connections = ConnectionHandler({"default": {}})
with mock.patch(
"django.core.management.commands.makemigrations.connections",
new=empty_connections,
):
# with no apps
out = io.StringIO()
call_command("makemigrations", stdout=out)
self.assertIn("No changes detected", out.getvalue())
# with an app
with self.temporary_migration_module() as migration_dir:
call_command("makemigrations", "migrations", verbosity=0)
init_file = os.path.join(migration_dir, "__init__.py")
self.assertTrue(os.path.exists(init_file))
@override_settings(INSTALLED_APPS=["migrations", "migrations2"])
def test_makemigrations_consistency_checks_respect_routers(self):
"""
The history consistency checks in makemigrations respect
settings.DATABASE_ROUTERS.
"""
def patched_has_table(migration_recorder):
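            # Returning mock.DEFAULT makes the mock fall back to its normal
            # return value instead of using the side_effect result.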
if migration_recorder.connection is connections["other"]:
raise Exception("Other connection")
else:
return mock.DEFAULT
self.assertTableNotExists("migrations_unicodemodel")
apps.register_model("migrations", UnicodeModel)
with mock.patch.object(
MigrationRecorder, "has_table", autospec=True, side_effect=patched_has_table
) as has_table:
with self.temporary_migration_module() as migration_dir:
call_command("makemigrations", "migrations", verbosity=0)
initial_file = os.path.join(migration_dir, "0001_initial.py")
self.assertTrue(os.path.exists(initial_file))
self.assertEqual(has_table.call_count, 1) # 'default' is checked
# Router says not to migrate 'other' so consistency shouldn't
# be checked.
with self.settings(DATABASE_ROUTERS=["migrations.routers.TestRouter"]):
call_command("makemigrations", "migrations", verbosity=0)
self.assertEqual(has_table.call_count, 2) # 'default' again
# With a router that doesn't prohibit migrating 'other',
# consistency is checked.
with self.settings(
DATABASE_ROUTERS=["migrations.routers.DefaultOtherRouter"]
):
with self.assertRaisesMessage(Exception, "Other connection"):
call_command("makemigrations", "migrations", verbosity=0)
self.assertEqual(has_table.call_count, 4) # 'default' and 'other'
# With a router that doesn't allow migrating on any database,
# no consistency checks are made.
with self.settings(DATABASE_ROUTERS=["migrations.routers.TestRouter"]):
with mock.patch.object(
TestRouter, "allow_migrate", return_value=False
) as allow_migrate:
call_command("makemigrations", "migrations", verbosity=0)
allow_migrate.assert_any_call(
"other", "migrations", model_name="UnicodeModel"
)
# allow_migrate() is called with the correct arguments.
self.assertGreater(len(allow_migrate.mock_calls), 0)
called_aliases = set()
for mock_call in allow_migrate.mock_calls:
_, call_args, call_kwargs = mock_call
connection_alias, app_name = call_args
called_aliases.add(connection_alias)
# Raises an error if invalid app_name/model_name occurs.
apps.get_app_config(app_name).get_model(call_kwargs["model_name"])
self.assertEqual(called_aliases, set(connections))
self.assertEqual(has_table.call_count, 4)
def test_failing_migration(self):
# If a migration fails to serialize, it shouldn't generate an empty file. #21280
apps.register_model("migrations", UnserializableModel)
with self.temporary_migration_module() as migration_dir:
with self.assertRaisesMessage(ValueError, "Cannot serialize"):
call_command("makemigrations", "migrations", verbosity=0)
initial_file = os.path.join(migration_dir, "0001_initial.py")
self.assertFalse(os.path.exists(initial_file))
def test_makemigrations_conflict_exit(self):
"""
makemigrations exits if it detects a conflict.
"""
with self.temporary_migration_module(
module="migrations.test_migrations_conflict"
):
with self.assertRaises(CommandError) as context:
call_command("makemigrations")
self.assertEqual(
str(context.exception),
"Conflicting migrations detected; multiple leaf nodes in the "
"migration graph: (0002_conflicting_second, 0002_second in "
"migrations).\n"
"To fix them run 'python manage.py makemigrations --merge'",
)
def test_makemigrations_merge_no_conflict(self):
"""
makemigrations exits if in merge mode with no conflicts.
"""
out = io.StringIO()
with self.temporary_migration_module(module="migrations.test_migrations"):
call_command("makemigrations", merge=True, stdout=out)
self.assertIn("No conflicts detected to merge.", out.getvalue())
def test_makemigrations_empty_no_app_specified(self):
"""
makemigrations exits if no app is specified with 'empty' mode.
"""
msg = "You must supply at least one app label when using --empty."
with self.assertRaisesMessage(CommandError, msg):
call_command("makemigrations", empty=True)
def test_makemigrations_empty_migration(self):
"""
makemigrations properly constructs an empty migration.
"""
with self.temporary_migration_module() as migration_dir:
call_command("makemigrations", "migrations", empty=True, verbosity=0)
# Check for existing 0001_initial.py file in migration folder
initial_file = os.path.join(migration_dir, "0001_initial.py")
self.assertTrue(os.path.exists(initial_file))
with open(initial_file, encoding="utf-8") as fp:
content = fp.read()
# Remove all whitespace to check for empty dependencies and operations
content = content.replace(" ", "")
self.assertIn(
"dependencies=[]" if HAS_BLACK else "dependencies=[\n]", content
)
self.assertIn(
"operations=[]" if HAS_BLACK else "operations=[\n]", content
)
@override_settings(MIGRATION_MODULES={"migrations": None})
def test_makemigrations_disabled_migrations_for_app(self):
"""
        makemigrations raises a clear error when migrations are disabled for
        an app.
"""
msg = (
"Django can't create migrations for app 'migrations' because migrations "
"have been disabled via the MIGRATION_MODULES setting."
)
with self.assertRaisesMessage(ValueError, msg):
call_command("makemigrations", "migrations", empty=True, verbosity=0)
def test_makemigrations_no_changes_no_apps(self):
"""
makemigrations exits when there are no changes and no apps are specified.
"""
out = io.StringIO()
call_command("makemigrations", stdout=out)
self.assertIn("No changes detected", out.getvalue())
def test_makemigrations_no_changes(self):
"""
makemigrations exits when there are no changes to an app.
"""
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_no_changes"
):
call_command("makemigrations", "migrations", stdout=out)
self.assertIn("No changes detected in app 'migrations'", out.getvalue())
def test_makemigrations_no_apps_initial(self):
"""
        makemigrations should detect that an initial migration is needed on
        empty migration modules if no app is provided.
"""
out = io.StringIO()
with self.temporary_migration_module(module="migrations.test_migrations_empty"):
call_command("makemigrations", stdout=out)
self.assertIn("0001_initial.py", out.getvalue())
def test_makemigrations_no_init(self):
"""Migration directories without an __init__.py file are allowed."""
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_no_init"
):
call_command("makemigrations", stdout=out)
self.assertIn("0001_initial.py", out.getvalue())
def test_makemigrations_migrations_announce(self):
"""
makemigrations announces the migration at the default verbosity level.
"""
out = io.StringIO()
with self.temporary_migration_module():
call_command("makemigrations", "migrations", stdout=out)
self.assertIn("Migrations for 'migrations'", out.getvalue())
def test_makemigrations_no_common_ancestor(self):
"""
makemigrations fails to merge migrations with no common ancestor.
"""
with self.assertRaises(ValueError) as context:
with self.temporary_migration_module(
module="migrations.test_migrations_no_ancestor"
):
call_command("makemigrations", "migrations", merge=True)
exception_message = str(context.exception)
self.assertIn("Could not find common ancestor of", exception_message)
self.assertIn("0002_second", exception_message)
self.assertIn("0002_conflicting_second", exception_message)
def test_makemigrations_interactive_reject(self):
"""
makemigrations enters and exits interactive mode properly.
"""
# Monkeypatch interactive questioner to auto reject
with mock.patch("builtins.input", mock.Mock(return_value="N")):
with self.temporary_migration_module(
module="migrations.test_migrations_conflict"
) as migration_dir:
with captured_stdout():
call_command(
"makemigrations",
"migrations",
name="merge",
merge=True,
interactive=True,
verbosity=0,
)
merge_file = os.path.join(migration_dir, "0003_merge.py")
self.assertFalse(os.path.exists(merge_file))
def test_makemigrations_interactive_accept(self):
"""
makemigrations enters interactive mode and merges properly.
"""
# Monkeypatch interactive questioner to auto accept
with mock.patch("builtins.input", mock.Mock(return_value="y")):
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_conflict"
) as migration_dir:
call_command(
"makemigrations",
"migrations",
name="merge",
merge=True,
interactive=True,
stdout=out,
)
merge_file = os.path.join(migration_dir, "0003_merge.py")
self.assertTrue(os.path.exists(merge_file))
self.assertIn("Created new merge migration", out.getvalue())
def test_makemigrations_default_merge_name(self):
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_conflict"
) as migration_dir:
call_command(
"makemigrations",
"migrations",
merge=True,
interactive=False,
stdout=out,
)
merge_file = os.path.join(
migration_dir,
"0003_merge_0002_conflicting_second_0002_second.py",
)
self.assertIs(os.path.exists(merge_file), True)
with open(merge_file, encoding="utf-8") as fp:
content = fp.read()
if HAS_BLACK:
target_str = '("migrations", "0002_conflicting_second")'
else:
target_str = "('migrations', '0002_conflicting_second')"
self.assertIn(target_str, content)
self.assertIn("Created new merge migration %s" % merge_file, out.getvalue())
@mock.patch("django.db.migrations.utils.datetime")
def test_makemigrations_auto_merge_name(self, mock_datetime):
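        # Freeze "now" so the auto-generated merge migration name is
        # deterministic.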
mock_datetime.datetime.now.return_value = datetime.datetime(2016, 1, 2, 3, 4)
with mock.patch("builtins.input", mock.Mock(return_value="y")):
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_conflict_long_name"
) as migration_dir:
call_command(
"makemigrations",
"migrations",
merge=True,
interactive=True,
stdout=out,
)
merge_file = os.path.join(migration_dir, "0003_merge_20160102_0304.py")
self.assertTrue(os.path.exists(merge_file))
self.assertIn("Created new merge migration", out.getvalue())
def test_makemigrations_non_interactive_not_null_addition(self):
"""
Non-interactive makemigrations fails when a default is missing on a
new not-null field.
"""
class SillyModel(models.Model):
silly_field = models.BooleanField(default=False)
silly_int = models.IntegerField()
class Meta:
app_label = "migrations"
with self.assertRaises(SystemExit):
with self.temporary_migration_module(
module="migrations.test_migrations_no_default"
):
with captured_stdout() as out:
call_command("makemigrations", "migrations", interactive=False)
self.assertIn(
"Field 'silly_int' on model 'sillymodel' not migrated: it is "
"impossible to add a non-nullable field without specifying a "
"default.",
out.getvalue(),
)
def test_makemigrations_interactive_not_null_addition(self):
"""
makemigrations messages when adding a NOT NULL field in interactive
mode.
"""
class Author(models.Model):
silly_field = models.BooleanField(null=False)
class Meta:
app_label = "migrations"
input_msg = (
"It is impossible to add a non-nullable field 'silly_field' to "
"author without specifying a default. This is because the "
"database needs something to populate existing rows.\n"
"Please select a fix:\n"
" 1) Provide a one-off default now (will be set on all existing "
"rows with a null value for this column)\n"
" 2) Quit and manually define a default value in models.py."
)
with self.temporary_migration_module(module="migrations.test_migrations"):
# 2 - quit.
with mock.patch("builtins.input", return_value="2"):
with captured_stdout() as out, self.assertRaises(SystemExit):
call_command("makemigrations", "migrations", interactive=True)
self.assertIn(input_msg, out.getvalue())
# 1 - provide a default.
with mock.patch("builtins.input", return_value="1"):
with captured_stdout() as out:
call_command("makemigrations", "migrations", interactive=True)
output = out.getvalue()
self.assertIn(input_msg, output)
self.assertIn("Please enter the default value as valid Python.", output)
self.assertIn(
"The datetime and django.utils.timezone modules are "
"available, so it is possible to provide e.g. timezone.now as "
"a value",
output,
)
self.assertIn("Type 'exit' to exit this prompt", output)
def test_makemigrations_non_interactive_not_null_alteration(self):
"""
Non-interactive makemigrations fails when a default is missing on a
field changed to not-null.
"""
class Author(models.Model):
name = models.CharField(max_length=255)
slug = models.SlugField()
age = models.IntegerField(default=0)
class Meta:
app_label = "migrations"
with self.temporary_migration_module(module="migrations.test_migrations"):
with captured_stdout() as out:
call_command("makemigrations", "migrations", interactive=False)
self.assertIn("Alter field slug on author", out.getvalue())
self.assertIn(
"Field 'slug' on model 'author' given a default of NOT PROVIDED "
"and must be corrected.",
out.getvalue(),
)
def test_makemigrations_interactive_not_null_alteration(self):
"""
makemigrations messages when changing a NULL field to NOT NULL in
interactive mode.
"""
class Author(models.Model):
slug = models.SlugField(null=False)
class Meta:
app_label = "migrations"
input_msg = (
"It is impossible to change a nullable field 'slug' on author to "
"non-nullable without providing a default. This is because the "
"database needs something to populate existing rows.\n"
"Please select a fix:\n"
" 1) Provide a one-off default now (will be set on all existing "
"rows with a null value for this column)\n"
" 2) Ignore for now. Existing rows that contain NULL values will "
"have to be handled manually, for example with a RunPython or "
"RunSQL operation.\n"
" 3) Quit and manually define a default value in models.py."
)
with self.temporary_migration_module(module="migrations.test_migrations"):
# No message appears if --dry-run.
with captured_stdout() as out:
call_command(
"makemigrations",
"migrations",
interactive=True,
dry_run=True,
)
self.assertNotIn(input_msg, out.getvalue())
# 3 - quit.
with mock.patch("builtins.input", return_value="3"):
with captured_stdout() as out, self.assertRaises(SystemExit):
call_command("makemigrations", "migrations", interactive=True)
self.assertIn(input_msg, out.getvalue())
# 1 - provide a default.
with mock.patch("builtins.input", return_value="1"):
with captured_stdout() as out:
call_command("makemigrations", "migrations", interactive=True)
output = out.getvalue()
self.assertIn(input_msg, output)
self.assertIn("Please enter the default value as valid Python.", output)
self.assertIn(
"The datetime and django.utils.timezone modules are "
"available, so it is possible to provide e.g. timezone.now as "
"a value",
output,
)
self.assertIn("Type 'exit' to exit this prompt", output)
def test_makemigrations_non_interactive_no_model_rename(self):
"""
        In non-interactive mode, makemigrations deletes and re-creates the
        model instead of prompting about a possible rename.
"""
class RenamedModel(models.Model):
silly_field = models.BooleanField(default=False)
class Meta:
app_label = "migrations"
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_no_default"
):
call_command("makemigrations", "migrations", interactive=False, stdout=out)
self.assertIn("Delete model SillyModel", out.getvalue())
self.assertIn("Create model RenamedModel", out.getvalue())
def test_makemigrations_non_interactive_no_field_rename(self):
"""
        In non-interactive mode, makemigrations removes and re-adds the field
        instead of prompting about a possible rename.
"""
class SillyModel(models.Model):
silly_rename = models.BooleanField(default=False)
class Meta:
app_label = "migrations"
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_no_default"
):
call_command("makemigrations", "migrations", interactive=False, stdout=out)
self.assertIn("Remove field silly_field from sillymodel", out.getvalue())
self.assertIn("Add field silly_rename to sillymodel", out.getvalue())
@mock.patch("builtins.input", return_value="Y")
def test_makemigrations_model_rename_interactive(self, mock_input):
class RenamedModel(models.Model):
silly_field = models.BooleanField(default=False)
class Meta:
app_label = "migrations"
with self.temporary_migration_module(
module="migrations.test_migrations_no_default",
):
with captured_stdout() as out:
call_command("makemigrations", "migrations", interactive=True)
self.assertIn("Rename model SillyModel to RenamedModel", out.getvalue())
@mock.patch("builtins.input", return_value="Y")
def test_makemigrations_field_rename_interactive(self, mock_input):
class SillyModel(models.Model):
silly_rename = models.BooleanField(default=False)
class Meta:
app_label = "migrations"
with self.temporary_migration_module(
module="migrations.test_migrations_no_default",
):
with captured_stdout() as out:
call_command("makemigrations", "migrations", interactive=True)
self.assertIn(
"Rename field silly_field on sillymodel to silly_rename",
out.getvalue(),
)
def test_makemigrations_handle_merge(self):
"""
makemigrations properly merges the conflicting migrations with --noinput.
"""
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_conflict"
) as migration_dir:
call_command(
"makemigrations",
"migrations",
name="merge",
merge=True,
interactive=False,
stdout=out,
)
merge_file = os.path.join(migration_dir, "0003_merge.py")
self.assertTrue(os.path.exists(merge_file))
output = out.getvalue()
self.assertIn("Merging migrations", output)
self.assertIn("Branch 0002_second", output)
self.assertIn("Branch 0002_conflicting_second", output)
self.assertIn("Created new merge migration", output)
def test_makemigration_merge_dry_run(self):
"""
makemigrations respects --dry-run option when fixing migration
conflicts (#24427).
"""
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_conflict"
) as migration_dir:
call_command(
"makemigrations",
"migrations",
name="merge",
dry_run=True,
merge=True,
interactive=False,
stdout=out,
)
merge_file = os.path.join(migration_dir, "0003_merge.py")
self.assertFalse(os.path.exists(merge_file))
output = out.getvalue()
self.assertIn("Merging migrations", output)
self.assertIn("Branch 0002_second", output)
self.assertIn("Branch 0002_conflicting_second", output)
self.assertNotIn("Created new merge migration", output)
def test_makemigration_merge_dry_run_verbosity_3(self):
"""
`makemigrations --merge --dry-run` writes the merge migration file to
stdout with `verbosity == 3` (#24427).
"""
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_conflict"
) as migration_dir:
call_command(
"makemigrations",
"migrations",
name="merge",
dry_run=True,
merge=True,
interactive=False,
stdout=out,
verbosity=3,
)
merge_file = os.path.join(migration_dir, "0003_merge.py")
self.assertFalse(os.path.exists(merge_file))
output = out.getvalue()
self.assertIn("Merging migrations", output)
self.assertIn("Branch 0002_second", output)
self.assertIn("Branch 0002_conflicting_second", output)
self.assertNotIn("Created new merge migration", output)
# Additional output caused by verbosity 3
# The complete merge migration file that would be written
self.assertIn("class Migration(migrations.Migration):", output)
self.assertIn("dependencies = [", output)
self.assertIn("('migrations', '0002_second')", output)
self.assertIn("('migrations', '0002_conflicting_second')", output)
self.assertIn("operations = [", output)
self.assertIn("]", output)
def test_makemigrations_dry_run(self):
"""
`makemigrations --dry-run` should not ask for defaults.
"""
class SillyModel(models.Model):
silly_field = models.BooleanField(default=False)
silly_date = models.DateField() # Added field without a default
silly_auto_now = models.DateTimeField(auto_now_add=True)
class Meta:
app_label = "migrations"
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_no_default"
):
call_command("makemigrations", "migrations", dry_run=True, stdout=out)
# Output the expected changes directly, without asking for defaults
self.assertIn("Add field silly_date to sillymodel", out.getvalue())
def test_makemigrations_dry_run_verbosity_3(self):
"""
Allow `makemigrations --dry-run` to output the migrations file to
stdout (with verbosity == 3).
"""
class SillyModel(models.Model):
silly_field = models.BooleanField(default=False)
silly_char = models.CharField(default="")
class Meta:
app_label = "migrations"
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_no_default"
):
call_command(
"makemigrations", "migrations", dry_run=True, stdout=out, verbosity=3
)
# Normal --dry-run output
self.assertIn("- Add field silly_char to sillymodel", out.getvalue())
# Additional output caused by verbosity 3
# The complete migrations file that would be written
self.assertIn("class Migration(migrations.Migration):", out.getvalue())
self.assertIn("dependencies = [", out.getvalue())
self.assertIn("('migrations', '0001_initial'),", out.getvalue())
self.assertIn("migrations.AddField(", out.getvalue())
self.assertIn("model_name='sillymodel',", out.getvalue())
self.assertIn("name='silly_char',", out.getvalue())
def test_makemigrations_scriptable(self):
"""
With scriptable=True, log output is diverted to stderr, and only the
paths of generated migration files are written to stdout.
"""
out = io.StringIO()
err = io.StringIO()
with self.temporary_migration_module(
module="migrations.migrations.test_migrations",
) as migration_dir:
call_command(
"makemigrations",
"migrations",
scriptable=True,
stdout=out,
stderr=err,
)
initial_file = os.path.join(migration_dir, "0001_initial.py")
self.assertEqual(out.getvalue(), f"{initial_file}\n")
self.assertIn(" - Create model ModelWithCustomBase\n", err.getvalue())
@mock.patch("builtins.input", return_value="Y")
def test_makemigrations_scriptable_merge(self, mock_input):
out = io.StringIO()
err = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_conflict",
) as migration_dir:
call_command(
"makemigrations",
"migrations",
merge=True,
name="merge",
scriptable=True,
stdout=out,
stderr=err,
)
merge_file = os.path.join(migration_dir, "0003_merge.py")
self.assertEqual(out.getvalue(), f"{merge_file}\n")
self.assertIn(f"Created new merge migration {merge_file}", err.getvalue())
def test_makemigrations_migrations_modules_path_not_exist(self):
"""
makemigrations creates migrations when specifying a custom location
for migration files using MIGRATION_MODULES if the custom path
doesn't already exist.
"""
class SillyModel(models.Model):
silly_field = models.BooleanField(default=False)
class Meta:
app_label = "migrations"
out = io.StringIO()
migration_module = "migrations.test_migrations_path_doesnt_exist.foo.bar"
with self.temporary_migration_module(module=migration_module) as migration_dir:
call_command("makemigrations", "migrations", stdout=out)
# Migrations file is actually created in the expected path.
initial_file = os.path.join(migration_dir, "0001_initial.py")
self.assertTrue(os.path.exists(initial_file))
# Command output indicates the migration is created.
self.assertIn(" - Create model SillyModel", out.getvalue())
@override_settings(MIGRATION_MODULES={"migrations": "some.nonexistent.path"})
def test_makemigrations_migrations_modules_nonexistent_toplevel_package(self):
msg = (
"Could not locate an appropriate location to create migrations "
"package some.nonexistent.path. Make sure the toplevel package "
"exists and can be imported."
)
with self.assertRaisesMessage(ValueError, msg):
call_command("makemigrations", "migrations", empty=True, verbosity=0)
def test_makemigrations_interactive_by_default(self):
"""
The user is prompted to merge by default if there are conflicts and
merge is True. Answer negative to differentiate it from behavior when
--noinput is specified.
"""
# Monkeypatch interactive questioner to auto reject
out = io.StringIO()
with mock.patch("builtins.input", mock.Mock(return_value="N")):
with self.temporary_migration_module(
module="migrations.test_migrations_conflict"
) as migration_dir:
call_command(
"makemigrations", "migrations", name="merge", merge=True, stdout=out
)
merge_file = os.path.join(migration_dir, "0003_merge.py")
# This will fail if interactive is False by default
self.assertFalse(os.path.exists(merge_file))
self.assertNotIn("Created new merge migration", out.getvalue())
@override_settings(
INSTALLED_APPS=[
"migrations",
"migrations.migrations_test_apps.unspecified_app_with_conflict",
]
)
def test_makemigrations_unspecified_app_with_conflict_no_merge(self):
"""
makemigrations does not raise a CommandError when an unspecified app
has conflicting migrations.
"""
with self.temporary_migration_module(
module="migrations.test_migrations_no_changes"
):
call_command("makemigrations", "migrations", merge=False, verbosity=0)
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.migrated_app",
"migrations.migrations_test_apps.unspecified_app_with_conflict",
]
)
def test_makemigrations_unspecified_app_with_conflict_merge(self):
"""
makemigrations does not create a merge for an unspecified app even if
it has conflicting migrations.
"""
# Monkeypatch interactive questioner to auto accept
with mock.patch("builtins.input", mock.Mock(return_value="y")):
out = io.StringIO()
with self.temporary_migration_module(
app_label="migrated_app"
) as migration_dir:
call_command(
"makemigrations",
"migrated_app",
name="merge",
merge=True,
interactive=True,
stdout=out,
)
merge_file = os.path.join(migration_dir, "0003_merge.py")
self.assertFalse(os.path.exists(merge_file))
self.assertIn("No conflicts detected to merge.", out.getvalue())
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.migrated_app",
"migrations.migrations_test_apps.conflicting_app_with_dependencies",
]
)
def test_makemigrations_merge_dont_output_dependency_operations(self):
"""
        makemigrations --merge does not output any operations from apps that
        weren't specified.
"""
# Monkeypatch interactive questioner to auto accept
with mock.patch("builtins.input", mock.Mock(return_value="N")):
out = io.StringIO()
with mock.patch(
"django.core.management.color.supports_color", lambda *args: False
):
call_command(
"makemigrations",
"conflicting_app_with_dependencies",
merge=True,
interactive=True,
stdout=out,
)
self.assertEqual(
out.getvalue().lower(),
"merging conflicting_app_with_dependencies\n"
" branch 0002_conflicting_second\n"
" - create model something\n"
" branch 0002_second\n"
" - delete model tribble\n"
" - remove field silly_field from author\n"
" - add field rating to author\n"
" - create model book\n"
"\n"
"merging will only work if the operations printed above do not "
"conflict\n"
"with each other (working on different fields or models)\n"
"should these migration branches be merged? [y/n] ",
)
def test_makemigrations_with_custom_name(self):
"""
        makemigrations --name generates a custom migration name.
"""
with self.temporary_migration_module() as migration_dir:
def cmd(migration_count, migration_name, *args):
call_command(
"makemigrations",
"migrations",
"--verbosity",
"0",
"--name",
migration_name,
*args,
)
migration_file = os.path.join(
migration_dir, "%s_%s.py" % (migration_count, migration_name)
)
# Check for existing migration file in migration folder
self.assertTrue(os.path.exists(migration_file))
with open(migration_file, encoding="utf-8") as fp:
content = fp.read()
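            # Remove all whitespace so the assertions don't depend on
            # formatting (e.g. whether Black is installed).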
content = content.replace(" ", "")
return content
# generate an initial migration
migration_name_0001 = "my_initial_migration"
content = cmd("0001", migration_name_0001)
self.assertIn(
"dependencies=[]" if HAS_BLACK else "dependencies=[\n]", content
)
# importlib caches os.listdir() on some platforms like macOS
# (#23850).
if hasattr(importlib, "invalidate_caches"):
importlib.invalidate_caches()
# generate an empty migration
migration_name_0002 = "my_custom_migration"
content = cmd("0002", migration_name_0002, "--empty")
if HAS_BLACK:
template_str = 'dependencies=[\n("migrations","0001_%s"),\n]'
else:
template_str = "dependencies=[\n('migrations','0001_%s'),\n]"
self.assertIn(
template_str % migration_name_0001,
content,
)
self.assertIn("operations=[]" if HAS_BLACK else "operations=[\n]", content)
def test_makemigrations_with_invalid_custom_name(self):
msg = "The migration name must be a valid Python identifier."
with self.assertRaisesMessage(CommandError, msg):
call_command(
"makemigrations", "migrations", "--name", "invalid name", "--empty"
)
def test_makemigrations_check(self):
"""
makemigrations --check should exit with a non-zero status when
there are changes to an app requiring migrations.
"""
with self.temporary_migration_module() as tmpdir:
with self.assertRaises(SystemExit):
call_command("makemigrations", "--check", "migrations", verbosity=0)
self.assertFalse(os.path.exists(tmpdir))
with self.temporary_migration_module(
module="migrations.test_migrations_no_changes"
):
call_command("makemigrations", "--check", "migrations", verbosity=0)
def test_makemigrations_migration_path_output(self):
"""
makemigrations should print the relative paths to the migrations unless
they are outside of the current tree, in which case the absolute path
should be shown.
"""
out = io.StringIO()
apps.register_model("migrations", UnicodeModel)
with self.temporary_migration_module() as migration_dir:
call_command("makemigrations", "migrations", stdout=out)
self.assertIn(
os.path.join(migration_dir, "0001_initial.py"), out.getvalue()
)
def test_makemigrations_migration_path_output_valueerror(self):
"""
makemigrations prints the absolute path if os.path.relpath() raises a
ValueError when it's impossible to obtain a relative path, e.g. on
Windows if Django is installed on a different drive than where the
migration files are created.
"""
out = io.StringIO()
with self.temporary_migration_module() as migration_dir:
with mock.patch("os.path.relpath", side_effect=ValueError):
call_command("makemigrations", "migrations", stdout=out)
self.assertIn(os.path.join(migration_dir, "0001_initial.py"), out.getvalue())
def test_makemigrations_inconsistent_history(self):
"""
        makemigrations should raise an InconsistentMigrationHistory exception
        if some migrations were applied before their dependencies.
"""
recorder = MigrationRecorder(connection)
recorder.record_applied("migrations", "0002_second")
msg = (
"Migration migrations.0002_second is applied before its dependency "
"migrations.0001_initial"
)
with self.temporary_migration_module(module="migrations.test_migrations"):
with self.assertRaisesMessage(InconsistentMigrationHistory, msg):
call_command("makemigrations")
def test_makemigrations_inconsistent_history_db_failure(self):
msg = (
"Got an error checking a consistent migration history performed "
"for database connection 'default': could not connect to server"
)
with mock.patch(
"django.db.migrations.loader.MigrationLoader.check_consistent_history",
side_effect=OperationalError("could not connect to server"),
):
with self.temporary_migration_module():
with self.assertWarns(RuntimeWarning) as cm:
call_command("makemigrations", verbosity=0)
self.assertEqual(str(cm.warning), msg)
@mock.patch("builtins.input", return_value="1")
@mock.patch(
"django.db.migrations.questioner.sys.stdin",
mock.MagicMock(encoding=sys.getdefaultencoding()),
)
def test_makemigrations_auto_now_add_interactive(self, *args):
"""
makemigrations prompts the user when adding auto_now_add to an existing
model.
"""
class Entry(models.Model):
title = models.CharField(max_length=255)
creation_date = models.DateTimeField(auto_now_add=True)
class Meta:
app_label = "migrations"
input_msg = (
"It is impossible to add the field 'creation_date' with "
"'auto_now_add=True' to entry without providing a default. This "
"is because the database needs something to populate existing "
"rows.\n"
" 1) Provide a one-off default now which will be set on all "
"existing rows\n"
" 2) Quit and manually define a default value in models.py."
)
# Monkeypatch interactive questioner to auto accept
prompt_stdout = io.StringIO()
with self.temporary_migration_module(module="migrations.test_auto_now_add"):
call_command(
"makemigrations", "migrations", interactive=True, stdout=prompt_stdout
)
prompt_output = prompt_stdout.getvalue()
self.assertIn(input_msg, prompt_output)
self.assertIn("Please enter the default value as valid Python.", prompt_output)
self.assertIn(
"Accept the default 'timezone.now' by pressing 'Enter' or provide "
"another value.",
prompt_output,
)
self.assertIn("Type 'exit' to exit this prompt", prompt_output)
self.assertIn("Add field creation_date to entry", prompt_output)
@mock.patch("builtins.input", return_value="2")
def test_makemigrations_auto_now_add_interactive_quit(self, mock_input):
class Author(models.Model):
publishing_date = models.DateField(auto_now_add=True)
class Meta:
app_label = "migrations"
with self.temporary_migration_module(module="migrations.test_migrations"):
with captured_stdout():
with self.assertRaises(SystemExit):
call_command("makemigrations", "migrations", interactive=True)
def test_makemigrations_non_interactive_auto_now_add_addition(self):
"""
Non-interactive makemigrations fails when a default is missing on a
new field when auto_now_add=True.
"""
class Entry(models.Model):
creation_date = models.DateTimeField(auto_now_add=True)
class Meta:
app_label = "migrations"
with self.temporary_migration_module(module="migrations.test_auto_now_add"):
with self.assertRaises(SystemExit), captured_stdout() as out:
call_command("makemigrations", "migrations", interactive=False)
self.assertIn(
"Field 'creation_date' on model 'entry' not migrated: it is "
"impossible to add a field with 'auto_now_add=True' without "
"specifying a default.",
out.getvalue(),
)
def test_makemigrations_interactive_unique_callable_default_addition(self):
"""
makemigrations prompts the user when adding a unique field with
a callable default.
"""
class Book(models.Model):
created = models.DateTimeField(unique=True, default=timezone.now)
class Meta:
app_label = "migrations"
version = get_docs_version()
input_msg = (
f"Callable default on unique field book.created will not generate "
f"unique values upon migrating.\n"
f"Please choose how to proceed:\n"
f" 1) Continue making this migration as the first step in writing "
f"a manual migration to generate unique values described here: "
f"https://docs.djangoproject.com/en/{version}/howto/"
f"writing-migrations/#migrations-that-add-unique-fields.\n"
f" 2) Quit and edit field options in models.py.\n"
)
with self.temporary_migration_module(module="migrations.test_migrations"):
# 2 - quit.
with mock.patch("builtins.input", return_value="2"):
with captured_stdout() as out, self.assertRaises(SystemExit):
call_command("makemigrations", "migrations", interactive=True)
out_value = out.getvalue()
self.assertIn(input_msg, out_value)
self.assertNotIn("Add field created to book", out_value)
# 1 - continue.
with mock.patch("builtins.input", return_value="1"):
with captured_stdout() as out:
call_command("makemigrations", "migrations", interactive=True)
out_value = out.getvalue()
self.assertIn(input_msg, out_value)
self.assertIn("Add field created to book", out_value)
def test_makemigrations_non_interactive_unique_callable_default_addition(self):
class Book(models.Model):
created = models.DateTimeField(unique=True, default=timezone.now)
class Meta:
app_label = "migrations"
with self.temporary_migration_module(module="migrations.test_migrations"):
with captured_stdout() as out:
call_command("makemigrations", "migrations", interactive=False)
out_value = out.getvalue()
self.assertIn("Add field created to book", out_value)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"},
)
def test_makemigrations_continues_number_sequence_after_squash(self):
with self.temporary_migration_module(
module="migrations.test_migrations_squashed"
):
with captured_stdout() as out:
call_command(
"makemigrations",
"migrations",
interactive=False,
empty=True,
)
out_value = out.getvalue()
self.assertIn("0003_auto", out_value)
def test_makemigrations_update(self):
with self.temporary_migration_module(
module="migrations.test_migrations"
) as migration_dir:
migration_file = os.path.join(migration_dir, "0002_second.py")
with open(migration_file) as fp:
initial_content = fp.read()
with captured_stdout() as out:
call_command("makemigrations", "migrations", update=True)
self.assertFalse(
any(
filename.startswith("0003")
for filename in os.listdir(migration_dir)
)
)
self.assertIs(os.path.exists(migration_file), False)
new_migration_file = os.path.join(
migration_dir,
"0002_delete_tribble_author_rating_modelwithcustombase_and_more.py",
)
with open(new_migration_file) as fp:
self.assertNotEqual(initial_content, fp.read())
self.assertIn(f"Deleted {migration_file}", out.getvalue())
def test_makemigrations_update_existing_name(self):
with self.temporary_migration_module(
module="migrations.test_auto_now_add"
) as migration_dir:
migration_file = os.path.join(migration_dir, "0001_initial.py")
with open(migration_file) as fp:
initial_content = fp.read()
with captured_stdout() as out:
call_command("makemigrations", "migrations", update=True)
self.assertIs(os.path.exists(migration_file), False)
new_migration_file = os.path.join(
migration_dir,
"0001_initial_updated.py",
)
with open(new_migration_file) as fp:
self.assertNotEqual(initial_content, fp.read())
self.assertIn(f"Deleted {migration_file}", out.getvalue())
def test_makemigrations_update_custom_name(self):
custom_name = "delete_something"
with self.temporary_migration_module(
module="migrations.test_migrations"
) as migration_dir:
old_migration_file = os.path.join(migration_dir, "0002_second.py")
with open(old_migration_file) as fp:
initial_content = fp.read()
with captured_stdout() as out:
call_command(
"makemigrations", "migrations", update=True, name=custom_name
)
self.assertFalse(
any(
filename.startswith("0003")
for filename in os.listdir(migration_dir)
)
)
self.assertIs(os.path.exists(old_migration_file), False)
new_migration_file = os.path.join(migration_dir, f"0002_{custom_name}.py")
self.assertIs(os.path.exists(new_migration_file), True)
with open(new_migration_file) as fp:
self.assertNotEqual(initial_content, fp.read())
self.assertIn(f"Deleted {old_migration_file}", out.getvalue())
def test_makemigrations_update_applied_migration(self):
recorder = MigrationRecorder(connection)
recorder.record_applied("migrations", "0001_initial")
recorder.record_applied("migrations", "0002_second")
with self.temporary_migration_module(module="migrations.test_migrations"):
msg = "Cannot update applied migration 'migrations.0002_second'."
with self.assertRaisesMessage(CommandError, msg):
call_command("makemigrations", "migrations", update=True)
def test_makemigrations_update_no_migration(self):
with self.temporary_migration_module(module="migrations.test_migrations_empty"):
msg = "App migrations has no migration, cannot update last migration."
with self.assertRaisesMessage(CommandError, msg):
call_command("makemigrations", "migrations", update=True)
def test_makemigrations_update_squash_migration(self):
with self.temporary_migration_module(
module="migrations.test_migrations_squashed"
):
msg = "Cannot update squash migration 'migrations.0001_squashed_0002'."
with self.assertRaisesMessage(CommandError, msg):
call_command("makemigrations", "migrations", update=True)
def test_makemigrations_update_manual_porting(self):
with self.temporary_migration_module(
module="migrations.test_migrations_plan"
) as migration_dir:
with captured_stdout() as out:
call_command("makemigrations", "migrations", update=True)
# Previous migration exists.
previous_migration_file = os.path.join(migration_dir, "0005_fifth.py")
self.assertIs(os.path.exists(previous_migration_file), True)
# New updated migration exists.
files = [f for f in os.listdir(migration_dir) if f.startswith("0005_auto")]
updated_migration_file = os.path.join(migration_dir, files[0])
self.assertIs(os.path.exists(updated_migration_file), True)
self.assertIn(
f"Updated migration {updated_migration_file} requires manual porting.\n"
f"Previous migration {previous_migration_file} was kept and must be "
f"deleted after porting functions manually.",
out.getvalue(),
)
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.alter_fk.author_app",
"migrations.migrations_test_apps.alter_fk.book_app",
]
)
def test_makemigrations_update_dependency_migration(self):
with self.temporary_migration_module(app_label="book_app"):
msg = (
"Cannot update migration 'book_app.0001_initial' that migrations "
"'author_app.0002_alter_id' depend on."
)
with self.assertRaisesMessage(CommandError, msg):
call_command("makemigrations", "book_app", update=True)
class SquashMigrationsTests(MigrationTestBase):
"""
Tests running the squashmigrations command.
"""
def test_squashmigrations_squashes(self):
"""
squashmigrations squashes migrations.
"""
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations"
) as migration_dir:
call_command(
"squashmigrations",
"migrations",
"0002",
interactive=False,
stdout=out,
no_color=True,
)
squashed_migration_file = os.path.join(
migration_dir, "0001_squashed_0002_second.py"
)
self.assertTrue(os.path.exists(squashed_migration_file))
self.assertEqual(
out.getvalue(),
"Will squash the following migrations:\n"
" - 0001_initial\n"
" - 0002_second\n"
"Optimizing...\n"
" Optimized from 8 operations to 2 operations.\n"
"Created new squashed migration %s\n"
" You should commit this migration but leave the old ones in place;\n"
" the new migration will be used for new installs. Once you are sure\n"
" all instances of the codebase have applied the migrations you "
"squashed,\n"
" you can delete them.\n" % squashed_migration_file,
)
def test_squashmigrations_initial_attribute(self):
with self.temporary_migration_module(
module="migrations.test_migrations"
) as migration_dir:
call_command(
"squashmigrations", "migrations", "0002", interactive=False, verbosity=0
)
squashed_migration_file = os.path.join(
migration_dir, "0001_squashed_0002_second.py"
)
with open(squashed_migration_file, encoding="utf-8") as fp:
content = fp.read()
self.assertIn("initial = True", content)
def test_squashmigrations_optimizes(self):
"""
squashmigrations optimizes operations.
"""
out = io.StringIO()
with self.temporary_migration_module(module="migrations.test_migrations"):
call_command(
"squashmigrations",
"migrations",
"0002",
interactive=False,
verbosity=1,
stdout=out,
)
self.assertIn("Optimized from 8 operations to 2 operations.", out.getvalue())
def test_ticket_23799_squashmigrations_no_optimize(self):
"""
squashmigrations --no-optimize doesn't optimize operations.
"""
out = io.StringIO()
with self.temporary_migration_module(module="migrations.test_migrations"):
call_command(
"squashmigrations",
"migrations",
"0002",
interactive=False,
verbosity=1,
no_optimize=True,
stdout=out,
)
self.assertIn("Skipping optimization", out.getvalue())
def test_squashmigrations_valid_start(self):
"""
squashmigrations accepts a starting migration.
"""
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_no_changes"
) as migration_dir:
call_command(
"squashmigrations",
"migrations",
"0002",
"0003",
interactive=False,
verbosity=1,
stdout=out,
)
squashed_migration_file = os.path.join(
migration_dir, "0002_second_squashed_0003_third.py"
)
with open(squashed_migration_file, encoding="utf-8") as fp:
content = fp.read()
if HAS_BLACK:
test_str = ' ("migrations", "0001_initial")'
else:
test_str = " ('migrations', '0001_initial')"
self.assertIn(test_str, content)
self.assertNotIn("initial = True", content)
out = out.getvalue()
self.assertNotIn(" - 0001_initial", out)
self.assertIn(" - 0002_second", out)
self.assertIn(" - 0003_third", out)
def test_squashmigrations_invalid_start(self):
"""
squashmigrations doesn't accept a starting migration after the ending
migration.
"""
with self.temporary_migration_module(
module="migrations.test_migrations_no_changes"
):
msg = (
"The migration 'migrations.0003_third' cannot be found. Maybe "
"it comes after the migration 'migrations.0002_second'"
)
with self.assertRaisesMessage(CommandError, msg):
call_command(
"squashmigrations",
"migrations",
"0003",
"0002",
interactive=False,
verbosity=0,
)
def test_squashed_name_with_start_migration_name(self):
"""--squashed-name specifies the new migration's name."""
squashed_name = "squashed_name"
with self.temporary_migration_module(
module="migrations.test_migrations"
) as migration_dir:
call_command(
"squashmigrations",
"migrations",
"0001",
"0002",
squashed_name=squashed_name,
interactive=False,
verbosity=0,
)
squashed_migration_file = os.path.join(
migration_dir, "0001_%s.py" % squashed_name
)
self.assertTrue(os.path.exists(squashed_migration_file))
def test_squashed_name_without_start_migration_name(self):
"""--squashed-name also works if a start migration is omitted."""
squashed_name = "squashed_name"
with self.temporary_migration_module(
module="migrations.test_migrations"
) as migration_dir:
call_command(
"squashmigrations",
"migrations",
"0001",
squashed_name=squashed_name,
interactive=False,
verbosity=0,
)
squashed_migration_file = os.path.join(
migration_dir, "0001_%s.py" % squashed_name
)
self.assertTrue(os.path.exists(squashed_migration_file))
def test_squashed_name_exists(self):
msg = "Migration 0001_initial already exists. Use a different name."
with self.temporary_migration_module(module="migrations.test_migrations"):
with self.assertRaisesMessage(CommandError, msg):
call_command(
"squashmigrations",
"migrations",
"0001",
"0002",
squashed_name="initial",
interactive=False,
verbosity=0,
)
def test_squashmigrations_manual_porting(self):
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_manual_porting",
) as migration_dir:
call_command(
"squashmigrations",
"migrations",
"0002",
interactive=False,
stdout=out,
no_color=True,
)
squashed_migration_file = os.path.join(
migration_dir,
"0001_squashed_0002_second.py",
)
self.assertTrue(os.path.exists(squashed_migration_file))
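# With black installed, the command attempts to format the generated file;
# the manual-porting placeholders can't be formatted, so a warning is
# expected.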
black_warning = ""
if HAS_BLACK:
black_warning = (
"Squashed migration couldn't be formatted using the "
'"black" command. You can call it manually.\n'
)
self.assertEqual(
out.getvalue(),
f"Will squash the following migrations:\n"
f" - 0001_initial\n"
f" - 0002_second\n"
f"Optimizing...\n"
f" No optimizations possible.\n"
f"Created new squashed migration {squashed_migration_file}\n"
f" You should commit this migration but leave the old ones in place;\n"
f" the new migration will be used for new installs. Once you are sure\n"
f" all instances of the codebase have applied the migrations you "
f"squashed,\n"
f" you can delete them.\n"
f"Manual porting required\n"
f" Your migrations contained functions that must be manually copied "
f"over,\n"
f" as we could not safely copy their implementation.\n"
f" See the comment at the top of the squashed migration for details.\n"
+ black_warning,
)
class AppLabelErrorTests(TestCase):
"""
This class inherits TestCase because MigrationTestBase uses
`available_apps = ['migrations']` which means that it's the only installed
app. 'django.contrib.auth' must be in INSTALLED_APPS for some of these
tests.
"""
nonexistent_app_error = "No installed app with label 'nonexistent_app'."
did_you_mean_auth_error = (
"No installed app with label 'django.contrib.auth'. Did you mean 'auth'?"
)
def test_makemigrations_nonexistent_app_label(self):
err = io.StringIO()
with self.assertRaises(SystemExit):
call_command("makemigrations", "nonexistent_app", stderr=err)
self.assertIn(self.nonexistent_app_error, err.getvalue())
def test_makemigrations_app_name_specified_as_label(self):
err = io.StringIO()
with self.assertRaises(SystemExit):
call_command("makemigrations", "django.contrib.auth", stderr=err)
self.assertIn(self.did_you_mean_auth_error, err.getvalue())
def test_migrate_nonexistent_app_label(self):
with self.assertRaisesMessage(CommandError, self.nonexistent_app_error):
call_command("migrate", "nonexistent_app")
def test_migrate_app_name_specified_as_label(self):
with self.assertRaisesMessage(CommandError, self.did_you_mean_auth_error):
call_command("migrate", "django.contrib.auth")
def test_showmigrations_nonexistent_app_label(self):
err = io.StringIO()
with self.assertRaises(SystemExit):
call_command("showmigrations", "nonexistent_app", stderr=err)
self.assertIn(self.nonexistent_app_error, err.getvalue())
def test_showmigrations_app_name_specified_as_label(self):
err = io.StringIO()
with self.assertRaises(SystemExit):
call_command("showmigrations", "django.contrib.auth", stderr=err)
self.assertIn(self.did_you_mean_auth_error, err.getvalue())
def test_sqlmigrate_nonexistent_app_label(self):
with self.assertRaisesMessage(CommandError, self.nonexistent_app_error):
call_command("sqlmigrate", "nonexistent_app", "0002")
def test_sqlmigrate_app_name_specified_as_label(self):
with self.assertRaisesMessage(CommandError, self.did_you_mean_auth_error):
call_command("sqlmigrate", "django.contrib.auth", "0002")
def test_squashmigrations_nonexistent_app_label(self):
with self.assertRaisesMessage(CommandError, self.nonexistent_app_error):
call_command("squashmigrations", "nonexistent_app", "0002")
def test_squashmigrations_app_name_specified_as_label(self):
with self.assertRaisesMessage(CommandError, self.did_you_mean_auth_error):
call_command("squashmigrations", "django.contrib.auth", "0002")
def test_optimizemigration_nonexistent_app_label(self):
with self.assertRaisesMessage(CommandError, self.nonexistent_app_error):
call_command("optimizemigration", "nonexistent_app", "0002")
def test_optimizemigration_app_name_specified_as_label(self):
with self.assertRaisesMessage(CommandError, self.did_you_mean_auth_error):
call_command("optimizemigration", "django.contrib.auth", "0002")
class OptimizeMigrationTests(MigrationTestBase):
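# optimizemigration rewrites a single migration with its operations
# optimized, e.g. python manage.py optimizemigration <app_label>
# <migration_name>.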
def test_no_optimization_possible(self):
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations"
) as migration_dir:
call_command(
"optimizemigration", "migrations", "0002", stdout=out, no_color=True
)
migration_file = os.path.join(migration_dir, "0002_second.py")
self.assertTrue(os.path.exists(migration_file))
call_command(
"optimizemigration",
"migrations",
"0002",
stdout=out,
no_color=True,
verbosity=0,
)
self.assertEqual(out.getvalue(), "No optimizations possible.\n")
def test_optimization(self):
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations"
) as migration_dir:
call_command(
"optimizemigration", "migrations", "0001", stdout=out, no_color=True
)
initial_migration_file = os.path.join(migration_dir, "0001_initial.py")
self.assertTrue(os.path.exists(initial_migration_file))
with open(initial_migration_file) as fp:
content = fp.read()
self.assertIn(
'("bool", models.BooleanField'
if HAS_BLACK
else "('bool', models.BooleanField",
content,
)
self.assertEqual(
out.getvalue(),
f"Optimizing from 4 operations to 2 operations.\n"
f"Optimized migration {initial_migration_file}\n",
)
def test_optimization_no_verbosity(self):
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations"
) as migration_dir:
call_command(
"optimizemigration",
"migrations",
"0001",
stdout=out,
no_color=True,
verbosity=0,
)
initial_migration_file = os.path.join(migration_dir, "0001_initial.py")
self.assertTrue(os.path.exists(initial_migration_file))
with open(initial_migration_file) as fp:
content = fp.read()
self.assertIn(
'("bool", models.BooleanField'
if HAS_BLACK
else "('bool', models.BooleanField",
content,
)
self.assertEqual(out.getvalue(), "")
def test_creates_replace_migration_manual_porting(self):
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_manual_porting"
) as migration_dir:
call_command(
"optimizemigration", "migrations", "0003", stdout=out, no_color=True
)
optimized_migration_file = os.path.join(
migration_dir, "0003_third_optimized.py"
)
self.assertTrue(os.path.exists(optimized_migration_file))
with open(optimized_migration_file) as fp:
content = fp.read()
self.assertIn("replaces = [", content)
black_warning = ""
if HAS_BLACK:
black_warning = (
"Optimized migration couldn't be formatted using the "
'"black" command. You can call it manually.\n'
)
self.assertEqual(
out.getvalue(),
"Optimizing from 3 operations to 2 operations.\n"
"Manual porting required\n"
" Your migrations contained functions that must be manually copied over,\n"
" as we could not safely copy their implementation.\n"
" See the comment at the top of the optimized migration for details.\n"
+ black_warning
+ f"Optimized migration {optimized_migration_file}\n",
)
def test_fails_squash_migration_manual_porting(self):
out = io.StringIO()
with self.temporary_migration_module(
module="migrations.test_migrations_manual_porting"
) as migration_dir:
version = get_docs_version()
msg = (
f"Migration will require manual porting but is already a squashed "
f"migration.\nTransition to a normal migration first: "
f"https://docs.djangoproject.com/en/{version}/topics/migrations/"
f"#squashing-migrations"
)
with self.assertRaisesMessage(CommandError, msg):
call_command("optimizemigration", "migrations", "0004", stdout=out)
optimized_migration_file = os.path.join(
migration_dir, "0004_fourth_optimized.py"
)
self.assertFalse(os.path.exists(optimized_migration_file))
self.assertEqual(
out.getvalue(), "Optimizing from 3 operations to 2 operations.\n"
)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_optimizemigration_check(self):
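# --check exits with a non-zero status when the migration can still be
# optimized (0001) and passes when it can't (0002).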
with self.assertRaises(SystemExit):
call_command(
"optimizemigration", "--check", "migrations", "0001", verbosity=0
)
call_command("optimizemigration", "--check", "migrations", "0002", verbosity=0)
@override_settings(
INSTALLED_APPS=["migrations.migrations_test_apps.unmigrated_app_simple"],
)
def test_app_without_migrations(self):
msg = "App 'unmigrated_app_simple' does not have migrations."
with self.assertRaisesMessage(CommandError, msg):
call_command("optimizemigration", "unmigrated_app_simple", "0001")
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_clashing_prefix"},
)
def test_ambiguous_prefix(self):
msg = (
"More than one migration matches 'a' in app 'migrations'. Please "
"be more specific."
)
with self.assertRaisesMessage(CommandError, msg):
call_command("optimizemigration", "migrations", "a")
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_unknown_prefix(self):
msg = "Cannot find a migration matching 'nonexistent' from app 'migrations'."
with self.assertRaisesMessage(CommandError, msg):
call_command("optimizemigration", "migrations", "nonexistent")
import math
from django.core.exceptions import FieldDoesNotExist
from django.db import IntegrityError, connection, migrations, models, transaction
from django.db.migrations.migration import Migration
from django.db.migrations.operations.fields import FieldOperation
from django.db.migrations.state import ModelState, ProjectState
from django.db.models.expressions import Value
from django.db.models.functions import Abs, Pi
from django.db.transaction import atomic
from django.test import (
SimpleTestCase,
ignore_warnings,
override_settings,
skipIfDBFeature,
skipUnlessDBFeature,
)
from django.test.utils import CaptureQueriesContext
from django.utils.deprecation import RemovedInDjango51Warning
from .models import FoodManager, FoodQuerySet, UnicodeModel
from .test_base import OperationTestBase
class Mixin:
pass
class OperationTests(OperationTestBase):
"""
Tests running the operations and making sure they do what they say they do.
Each test checks the state change and then the database operation, both
forwards and backwards.
"""
def test_create_model(self):
"""
Tests the CreateModel operation.
Most other tests use this operation as part of setup, so check failures
here first.
"""
operation = migrations.CreateModel(
"Pony",
[
("id", models.AutoField(primary_key=True)),
("pink", models.IntegerField(default=1)),
],
)
self.assertEqual(operation.describe(), "Create model Pony")
self.assertEqual(operation.migration_name_fragment, "pony")
# Test the state alteration
project_state = ProjectState()
new_state = project_state.clone()
operation.state_forwards("test_crmo", new_state)
self.assertEqual(new_state.models["test_crmo", "pony"].name, "Pony")
self.assertEqual(len(new_state.models["test_crmo", "pony"].fields), 2)
# Test the database alteration
self.assertTableNotExists("test_crmo_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_crmo", editor, project_state, new_state)
self.assertTableExists("test_crmo_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_crmo", editor, new_state, project_state)
self.assertTableNotExists("test_crmo_pony")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "CreateModel")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["fields", "name"])
# The default manager is not included in the deconstructed kwargs.
operation = migrations.CreateModel(
"Foo", fields=[], managers=[("objects", models.Manager())]
)
definition = operation.deconstruct()
self.assertNotIn("managers", definition[2])
def test_create_model_with_duplicate_field_name(self):
with self.assertRaisesMessage(
ValueError, "Found duplicate value pink in CreateModel fields argument."
):
migrations.CreateModel(
"Pony",
[
("id", models.AutoField(primary_key=True)),
("pink", models.TextField()),
("pink", models.IntegerField(default=1)),
],
)
def test_create_model_with_duplicate_base(self):
message = "Found duplicate value test_crmo.pony in CreateModel bases argument."
with self.assertRaisesMessage(ValueError, message):
migrations.CreateModel(
"Pony",
fields=[],
bases=(
"test_crmo.Pony",
"test_crmo.Pony",
),
)
with self.assertRaisesMessage(ValueError, message):
migrations.CreateModel(
"Pony",
fields=[],
bases=(
"test_crmo.Pony",
"test_crmo.pony",
),
)
message = (
"Found duplicate value migrations.unicodemodel in CreateModel bases "
"argument."
)
with self.assertRaisesMessage(ValueError, message):
migrations.CreateModel(
"Pony",
fields=[],
bases=(
UnicodeModel,
UnicodeModel,
),
)
with self.assertRaisesMessage(ValueError, message):
migrations.CreateModel(
"Pony",
fields=[],
bases=(
UnicodeModel,
"migrations.unicodemodel",
),
)
with self.assertRaisesMessage(ValueError, message):
migrations.CreateModel(
"Pony",
fields=[],
bases=(
UnicodeModel,
"migrations.UnicodeModel",
),
)
message = (
"Found duplicate value <class 'django.db.models.base.Model'> in "
"CreateModel bases argument."
)
with self.assertRaisesMessage(ValueError, message):
migrations.CreateModel(
"Pony",
fields=[],
bases=(
models.Model,
models.Model,
),
)
message = (
"Found duplicate value <class 'migrations.test_operations.Mixin'> in "
"CreateModel bases argument."
)
with self.assertRaisesMessage(ValueError, message):
migrations.CreateModel(
"Pony",
fields=[],
bases=(
Mixin,
Mixin,
),
)
def test_create_model_with_duplicate_manager_name(self):
with self.assertRaisesMessage(
ValueError,
"Found duplicate value objects in CreateModel managers argument.",
):
migrations.CreateModel(
"Pony",
fields=[],
managers=[
("objects", models.Manager()),
("objects", models.Manager()),
],
)
def test_create_model_with_unique_after(self):
"""
Tests the CreateModel operation directly followed by an
AlterUniqueTogether (bug #22844 - sqlite remake issues)
"""
operation1 = migrations.CreateModel(
"Pony",
[
("id", models.AutoField(primary_key=True)),
("pink", models.IntegerField(default=1)),
],
)
operation2 = migrations.CreateModel(
"Rider",
[
("id", models.AutoField(primary_key=True)),
("number", models.IntegerField(default=1)),
("pony", models.ForeignKey("test_crmoua.Pony", models.CASCADE)),
],
)
operation3 = migrations.AlterUniqueTogether(
"Rider",
[
("number", "pony"),
],
)
# Test the database alteration
project_state = ProjectState()
self.assertTableNotExists("test_crmoua_pony")
self.assertTableNotExists("test_crmoua_rider")
with connection.schema_editor() as editor:
new_state = project_state.clone()
operation1.state_forwards("test_crmoua", new_state)
operation1.database_forwards(
"test_crmoua", editor, project_state, new_state
)
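# Advance the state pair: the state just migrated to becomes the from-state
# for the next operation.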
project_state, new_state = new_state, new_state.clone()
operation2.state_forwards("test_crmoua", new_state)
operation2.database_forwards(
"test_crmoua", editor, project_state, new_state
)
project_state, new_state = new_state, new_state.clone()
operation3.state_forwards("test_crmoua", new_state)
operation3.database_forwards(
"test_crmoua", editor, project_state, new_state
)
self.assertTableExists("test_crmoua_pony")
self.assertTableExists("test_crmoua_rider")
def test_create_model_m2m(self):
"""
Test the creation of a model with a ManyToMany field and the
auto-created "through" model.
"""
project_state = self.set_up_test_model("test_crmomm")
operation = migrations.CreateModel(
"Stable",
[
("id", models.AutoField(primary_key=True)),
("ponies", models.ManyToManyField("Pony", related_name="stables")),
],
)
# Test the state alteration
new_state = project_state.clone()
operation.state_forwards("test_crmomm", new_state)
# Test the database alteration
self.assertTableNotExists("test_crmomm_stable_ponies")
with connection.schema_editor() as editor:
operation.database_forwards("test_crmomm", editor, project_state, new_state)
self.assertTableExists("test_crmomm_stable")
self.assertTableExists("test_crmomm_stable_ponies")
self.assertColumnNotExists("test_crmomm_stable", "ponies")
# Make sure the M2M field actually works
with atomic():
Pony = new_state.apps.get_model("test_crmomm", "Pony")
Stable = new_state.apps.get_model("test_crmomm", "Stable")
stable = Stable.objects.create()
p1 = Pony.objects.create(pink=False, weight=4.55)
p2 = Pony.objects.create(pink=True, weight=5.43)
stable.ponies.add(p1, p2)
self.assertEqual(stable.ponies.count(), 2)
stable.ponies.all().delete()
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards(
"test_crmomm", editor, new_state, project_state
)
self.assertTableNotExists("test_crmomm_stable")
self.assertTableNotExists("test_crmomm_stable_ponies")
@skipUnlessDBFeature("supports_collation_on_charfield", "supports_foreign_keys")
def test_create_fk_models_to_pk_field_db_collation(self):
"""Creation of models with a FK to a PK with db_collation."""
collation = connection.features.test_collations.get("non_default")
if not collation:
self.skipTest("Language collations are not supported.")
app_label = "test_cfkmtopkfdbc"
operations = [
migrations.CreateModel(
"Pony",
[
(
"id",
models.CharField(
primary_key=True,
max_length=10,
db_collation=collation,
),
),
],
)
]
project_state = self.apply_operations(app_label, ProjectState(), operations)
# ForeignKey.
new_state = project_state.clone()
operation = migrations.CreateModel(
"Rider",
[
("id", models.AutoField(primary_key=True)),
("pony", models.ForeignKey("Pony", models.CASCADE)),
],
)
operation.state_forwards(app_label, new_state)
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
self.assertColumnCollation(f"{app_label}_rider", "pony_id", collation)
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
# OneToOneField.
new_state = project_state.clone()
operation = migrations.CreateModel(
"ShetlandPony",
[
(
"pony",
models.OneToOneField("Pony", models.CASCADE, primary_key=True),
),
("cuteness", models.IntegerField(default=1)),
],
)
operation.state_forwards(app_label, new_state)
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
self.assertColumnCollation(f"{app_label}_shetlandpony", "pony_id", collation)
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
def test_create_model_inheritance(self):
"""
Tests the CreateModel operation on a multi-table inheritance setup.
"""
project_state = self.set_up_test_model("test_crmoih")
# Test the state alteration
operation = migrations.CreateModel(
"ShetlandPony",
[
(
"pony_ptr",
models.OneToOneField(
"test_crmoih.Pony",
models.CASCADE,
auto_created=True,
primary_key=True,
to_field="id",
serialize=False,
),
),
("cuteness", models.IntegerField(default=1)),
],
)
new_state = project_state.clone()
operation.state_forwards("test_crmoih", new_state)
self.assertIn(("test_crmoih", "shetlandpony"), new_state.models)
# Test the database alteration
self.assertTableNotExists("test_crmoih_shetlandpony")
with connection.schema_editor() as editor:
operation.database_forwards("test_crmoih", editor, project_state, new_state)
self.assertTableExists("test_crmoih_shetlandpony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards(
"test_crmoih", editor, new_state, project_state
)
self.assertTableNotExists("test_crmoih_shetlandpony")
def test_create_proxy_model(self):
"""
CreateModel ignores proxy models.
"""
project_state = self.set_up_test_model("test_crprmo")
# Test the state alteration
operation = migrations.CreateModel(
"ProxyPony",
[],
options={"proxy": True},
bases=("test_crprmo.Pony",),
)
self.assertEqual(operation.describe(), "Create proxy model ProxyPony")
new_state = project_state.clone()
operation.state_forwards("test_crprmo", new_state)
self.assertIn(("test_crprmo", "proxypony"), new_state.models)
# Test the database alteration
self.assertTableNotExists("test_crprmo_proxypony")
self.assertTableExists("test_crprmo_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_crprmo", editor, project_state, new_state)
self.assertTableNotExists("test_crprmo_proxypony")
self.assertTableExists("test_crprmo_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards(
"test_crprmo", editor, new_state, project_state
)
self.assertTableNotExists("test_crprmo_proxypony")
self.assertTableExists("test_crprmo_pony")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "CreateModel")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["bases", "fields", "name", "options"])
def test_create_unmanaged_model(self):
"""
CreateModel ignores unmanaged models.
"""
project_state = self.set_up_test_model("test_crummo")
# Test the state alteration
operation = migrations.CreateModel(
"UnmanagedPony",
[],
options={"proxy": True},
bases=("test_crummo.Pony",),
)
self.assertEqual(operation.describe(), "Create proxy model UnmanagedPony")
new_state = project_state.clone()
operation.state_forwards("test_crummo", new_state)
self.assertIn(("test_crummo", "unmanagedpony"), new_state.models)
# Test the database alteration
self.assertTableNotExists("test_crummo_unmanagedpony")
self.assertTableExists("test_crummo_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_crummo", editor, project_state, new_state)
self.assertTableNotExists("test_crummo_unmanagedpony")
self.assertTableExists("test_crummo_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards(
"test_crummo", editor, new_state, project_state
)
self.assertTableNotExists("test_crummo_unmanagedpony")
self.assertTableExists("test_crummo_pony")
@skipUnlessDBFeature("supports_table_check_constraints")
def test_create_model_with_constraint(self):
where = models.Q(pink__gt=2)
check_constraint = models.CheckConstraint(
check=where, name="test_constraint_pony_pink_gt_2"
)
operation = migrations.CreateModel(
"Pony",
[
("id", models.AutoField(primary_key=True)),
("pink", models.IntegerField(default=3)),
],
options={"constraints": [check_constraint]},
)
# Test the state alteration
project_state = ProjectState()
new_state = project_state.clone()
operation.state_forwards("test_crmo", new_state)
self.assertEqual(
len(new_state.models["test_crmo", "pony"].options["constraints"]), 1
)
# Test database alteration
self.assertTableNotExists("test_crmo_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_crmo", editor, project_state, new_state)
self.assertTableExists("test_crmo_pony")
with connection.cursor() as cursor:
with self.assertRaises(IntegrityError):
cursor.execute("INSERT INTO test_crmo_pony (id, pink) VALUES (1, 1)")
# Test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_crmo", editor, new_state, project_state)
self.assertTableNotExists("test_crmo_pony")
# Test deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "CreateModel")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2]["options"]["constraints"], [check_constraint])
@skipUnlessDBFeature("supports_table_check_constraints")
def test_create_model_with_boolean_expression_in_check_constraint(self):
app_label = "test_crmobechc"
rawsql_constraint = models.CheckConstraint(
check=models.expressions.RawSQL(
"price < %s", (1000,), output_field=models.BooleanField()
),
name=f"{app_label}_price_lt_1000_raw",
)
wrapper_constraint = models.CheckConstraint(
check=models.expressions.ExpressionWrapper(
models.Q(price__gt=500) | models.Q(price__lt=500),
output_field=models.BooleanField(),
),
name=f"{app_label}_price_neq_500_wrap",
)
operation = migrations.CreateModel(
"Product",
[
("id", models.AutoField(primary_key=True)),
("price", models.IntegerField(null=True)),
],
options={"constraints": [rawsql_constraint, wrapper_constraint]},
)
project_state = ProjectState()
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
# Add table.
self.assertTableNotExists(app_label)
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
self.assertTableExists(f"{app_label}_product")
insert_sql = f"INSERT INTO {app_label}_product (id, price) VALUES (%d, %d)"
with connection.cursor() as cursor:
with self.assertRaises(IntegrityError):
cursor.execute(insert_sql % (1, 1000))
cursor.execute(insert_sql % (1, 999))
with self.assertRaises(IntegrityError):
cursor.execute(insert_sql % (2, 500))
cursor.execute(insert_sql % (2, 499))
def test_create_model_with_partial_unique_constraint(self):
partial_unique_constraint = models.UniqueConstraint(
fields=["pink"],
condition=models.Q(weight__gt=5),
name="test_constraint_pony_pink_for_weight_gt_5_uniq",
)
operation = migrations.CreateModel(
"Pony",
[
("id", models.AutoField(primary_key=True)),
("pink", models.IntegerField(default=3)),
("weight", models.FloatField()),
],
options={"constraints": [partial_unique_constraint]},
)
# Test the state alteration
project_state = ProjectState()
new_state = project_state.clone()
operation.state_forwards("test_crmo", new_state)
self.assertEqual(
len(new_state.models["test_crmo", "pony"].options["constraints"]), 1
)
# Test database alteration
self.assertTableNotExists("test_crmo_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_crmo", editor, project_state, new_state)
self.assertTableExists("test_crmo_pony")
# Test constraint works
Pony = new_state.apps.get_model("test_crmo", "Pony")
Pony.objects.create(pink=1, weight=4.0)
Pony.objects.create(pink=1, weight=4.0)
Pony.objects.create(pink=1, weight=6.0)
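# The constraint only covers rows with weight > 5. Backends without partial
# index support skip such constraints entirely, so the duplicate succeeds
# there.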
if connection.features.supports_partial_indexes:
with self.assertRaises(IntegrityError):
Pony.objects.create(pink=1, weight=7.0)
else:
Pony.objects.create(pink=1, weight=7.0)
# Test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_crmo", editor, new_state, project_state)
self.assertTableNotExists("test_crmo_pony")
# Test deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "CreateModel")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2]["options"]["constraints"], [partial_unique_constraint]
)
def test_create_model_with_deferred_unique_constraint(self):
deferred_unique_constraint = models.UniqueConstraint(
fields=["pink"],
name="deferrable_pink_constraint",
deferrable=models.Deferrable.DEFERRED,
)
operation = migrations.CreateModel(
"Pony",
[
("id", models.AutoField(primary_key=True)),
("pink", models.IntegerField(default=3)),
],
options={"constraints": [deferred_unique_constraint]},
)
project_state = ProjectState()
new_state = project_state.clone()
operation.state_forwards("test_crmo", new_state)
self.assertEqual(
len(new_state.models["test_crmo", "pony"].options["constraints"]), 1
)
self.assertTableNotExists("test_crmo_pony")
# Create table.
with connection.schema_editor() as editor:
operation.database_forwards("test_crmo", editor, project_state, new_state)
self.assertTableExists("test_crmo_pony")
Pony = new_state.apps.get_model("test_crmo", "Pony")
Pony.objects.create(pink=1)
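# Backends without deferrable constraint support skip the constraint, so the
# duplicate insert below simply succeeds.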
if connection.features.supports_deferrable_unique_constraints:
# Unique constraint is deferred.
with transaction.atomic():
obj = Pony.objects.create(pink=1)
obj.pink = 2
obj.save()
# Constraint behavior can be changed with SET CONSTRAINTS.
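# Switching the deferred constraint to IMMEDIATE makes the duplicate fail at
# statement time instead of at commit.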
with self.assertRaises(IntegrityError):
with transaction.atomic(), connection.cursor() as cursor:
quoted_name = connection.ops.quote_name(
deferred_unique_constraint.name
)
cursor.execute("SET CONSTRAINTS %s IMMEDIATE" % quoted_name)
obj = Pony.objects.create(pink=1)
obj.pink = 3
obj.save()
else:
Pony.objects.create(pink=1)
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards("test_crmo", editor, new_state, project_state)
self.assertTableNotExists("test_crmo_pony")
# Deconstruction.
definition = operation.deconstruct()
self.assertEqual(definition[0], "CreateModel")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2]["options"]["constraints"],
[deferred_unique_constraint],
)
@skipUnlessDBFeature("supports_covering_indexes")
def test_create_model_with_covering_unique_constraint(self):
covering_unique_constraint = models.UniqueConstraint(
fields=["pink"],
include=["weight"],
name="test_constraint_pony_pink_covering_weight",
)
operation = migrations.CreateModel(
"Pony",
[
("id", models.AutoField(primary_key=True)),
("pink", models.IntegerField(default=3)),
("weight", models.FloatField()),
],
options={"constraints": [covering_unique_constraint]},
)
project_state = ProjectState()
new_state = project_state.clone()
operation.state_forwards("test_crmo", new_state)
self.assertEqual(
len(new_state.models["test_crmo", "pony"].options["constraints"]), 1
)
self.assertTableNotExists("test_crmo_pony")
# Create table.
with connection.schema_editor() as editor:
operation.database_forwards("test_crmo", editor, project_state, new_state)
self.assertTableExists("test_crmo_pony")
Pony = new_state.apps.get_model("test_crmo", "Pony")
Pony.objects.create(pink=1, weight=4.0)
with self.assertRaises(IntegrityError):
Pony.objects.create(pink=1, weight=7.0)
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards("test_crmo", editor, new_state, project_state)
self.assertTableNotExists("test_crmo_pony")
# Deconstruction.
definition = operation.deconstruct()
self.assertEqual(definition[0], "CreateModel")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2]["options"]["constraints"],
[covering_unique_constraint],
)
def test_create_model_managers(self):
"""
The managers on a model are set.
"""
project_state = self.set_up_test_model("test_cmoma")
# Test the state alteration
operation = migrations.CreateModel(
"Food",
fields=[
("id", models.AutoField(primary_key=True)),
],
managers=[
("food_qs", FoodQuerySet.as_manager()),
("food_mgr", FoodManager("a", "b")),
("food_mgr_kwargs", FoodManager("x", "y", 3, 4)),
],
)
self.assertEqual(operation.describe(), "Create model Food")
new_state = project_state.clone()
operation.state_forwards("test_cmoma", new_state)
self.assertIn(("test_cmoma", "food"), new_state.models)
managers = new_state.models["test_cmoma", "food"].managers
self.assertEqual(managers[0][0], "food_qs")
self.assertIsInstance(managers[0][1], models.Manager)
self.assertEqual(managers[1][0], "food_mgr")
self.assertIsInstance(managers[1][1], FoodManager)
self.assertEqual(managers[1][1].args, ("a", "b", 1, 2))
self.assertEqual(managers[2][0], "food_mgr_kwargs")
self.assertIsInstance(managers[2][1], FoodManager)
self.assertEqual(managers[2][1].args, ("x", "y", 3, 4))
def test_delete_model(self):
"""
Tests the DeleteModel operation.
"""
project_state = self.set_up_test_model("test_dlmo")
# Test the state alteration
operation = migrations.DeleteModel("Pony")
self.assertEqual(operation.describe(), "Delete model Pony")
self.assertEqual(operation.migration_name_fragment, "delete_pony")
new_state = project_state.clone()
operation.state_forwards("test_dlmo", new_state)
self.assertNotIn(("test_dlmo", "pony"), new_state.models)
# Test the database alteration
self.assertTableExists("test_dlmo_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_dlmo", editor, project_state, new_state)
self.assertTableNotExists("test_dlmo_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_dlmo", editor, new_state, project_state)
self.assertTableExists("test_dlmo_pony")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "DeleteModel")
self.assertEqual(definition[1], [])
self.assertEqual(list(definition[2]), ["name"])
def test_delete_proxy_model(self):
"""
The DeleteModel operation ignores proxy models.
"""
project_state = self.set_up_test_model("test_dlprmo", proxy_model=True)
# Test the state alteration
operation = migrations.DeleteModel("ProxyPony")
new_state = project_state.clone()
operation.state_forwards("test_dlprmo", new_state)
self.assertIn(("test_dlprmo", "proxypony"), project_state.models)
self.assertNotIn(("test_dlprmo", "proxypony"), new_state.models)
# Test the database alteration
self.assertTableExists("test_dlprmo_pony")
self.assertTableNotExists("test_dlprmo_proxypony")
with connection.schema_editor() as editor:
operation.database_forwards("test_dlprmo", editor, project_state, new_state)
self.assertTableExists("test_dlprmo_pony")
self.assertTableNotExists("test_dlprmo_proxypony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards(
"test_dlprmo", editor, new_state, project_state
)
self.assertTableExists("test_dlprmo_pony")
self.assertTableNotExists("test_dlprmo_proxypony")
def test_delete_mti_model(self):
project_state = self.set_up_test_model("test_dlmtimo", mti_model=True)
# Test the state alteration
operation = migrations.DeleteModel("ShetlandPony")
new_state = project_state.clone()
operation.state_forwards("test_dlmtimo", new_state)
self.assertIn(("test_dlmtimo", "shetlandpony"), project_state.models)
self.assertNotIn(("test_dlmtimo", "shetlandpony"), new_state.models)
# Test the database alteration
self.assertTableExists("test_dlmtimo_pony")
self.assertTableExists("test_dlmtimo_shetlandpony")
self.assertColumnExists("test_dlmtimo_shetlandpony", "pony_ptr_id")
with connection.schema_editor() as editor:
operation.database_forwards(
"test_dlmtimo", editor, project_state, new_state
)
self.assertTableExists("test_dlmtimo_pony")
self.assertTableNotExists("test_dlmtimo_shetlandpony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards(
"test_dlmtimo", editor, new_state, project_state
)
self.assertTableExists("test_dlmtimo_pony")
self.assertTableExists("test_dlmtimo_shetlandpony")
self.assertColumnExists("test_dlmtimo_shetlandpony", "pony_ptr_id")
def test_rename_model(self):
"""
Tests the RenameModel operation.
"""
project_state = self.set_up_test_model("test_rnmo", related_model=True)
# Test the state alteration
operation = migrations.RenameModel("Pony", "Horse")
self.assertEqual(operation.describe(), "Rename model Pony to Horse")
self.assertEqual(operation.migration_name_fragment, "rename_pony_horse")
# Test initial state and database
self.assertIn(("test_rnmo", "pony"), project_state.models)
self.assertNotIn(("test_rnmo", "horse"), project_state.models)
self.assertTableExists("test_rnmo_pony")
self.assertTableNotExists("test_rnmo_horse")
if connection.features.supports_foreign_keys:
self.assertFKExists(
"test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id")
)
self.assertFKNotExists(
"test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id")
)
# Migrate forwards
new_state = project_state.clone()
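# Renames that repoint references may require a non-atomic schema editor on
# some backends (e.g. SQLite versions that emulate the rename by rebuilding
# the table).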
atomic_rename = connection.features.supports_atomic_references_rename
new_state = self.apply_operations(
"test_rnmo", new_state, [operation], atomic=atomic_rename
)
# Test new state and database
self.assertNotIn(("test_rnmo", "pony"), new_state.models)
self.assertIn(("test_rnmo", "horse"), new_state.models)
# RenameModel also repoints all incoming FKs and M2Ms
self.assertEqual(
new_state.models["test_rnmo", "rider"].fields["pony"].remote_field.model,
"test_rnmo.Horse",
)
self.assertTableNotExists("test_rnmo_pony")
self.assertTableExists("test_rnmo_horse")
if connection.features.supports_foreign_keys:
self.assertFKNotExists(
"test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id")
)
self.assertFKExists(
"test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id")
)
# Migrate backwards
original_state = self.unapply_operations(
"test_rnmo", project_state, [operation], atomic=atomic_rename
)
# Test original state and database
self.assertIn(("test_rnmo", "pony"), original_state.models)
self.assertNotIn(("test_rnmo", "horse"), original_state.models)
self.assertEqual(
original_state.models["test_rnmo", "rider"]
.fields["pony"]
.remote_field.model,
"Pony",
)
self.assertTableExists("test_rnmo_pony")
self.assertTableNotExists("test_rnmo_horse")
if connection.features.supports_foreign_keys:
self.assertFKExists(
"test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id")
)
self.assertFKNotExists(
"test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id")
)
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "RenameModel")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {"old_name": "Pony", "new_name": "Horse"})
def test_rename_model_state_forwards(self):
"""
RenameModel operations shouldn't trigger the caching of rendered apps
on state without prior apps.
"""
state = ProjectState()
state.add_model(ModelState("migrations", "Foo", []))
operation = migrations.RenameModel("Foo", "Bar")
operation.state_forwards("migrations", state)
self.assertNotIn("apps", state.__dict__)
self.assertNotIn(("migrations", "foo"), state.models)
self.assertIn(("migrations", "bar"), state.models)
# Now with apps cached.
apps = state.apps
operation = migrations.RenameModel("Bar", "Foo")
operation.state_forwards("migrations", state)
self.assertIs(state.apps, apps)
self.assertNotIn(("migrations", "bar"), state.models)
self.assertIn(("migrations", "foo"), state.models)
def test_rename_model_with_self_referential_fk(self):
"""
Tests the RenameModel operation on model with self referential FK.
"""
project_state = self.set_up_test_model("test_rmwsrf", related_model=True)
# Test the state alteration
operation = migrations.RenameModel("Rider", "HorseRider")
self.assertEqual(operation.describe(), "Rename model Rider to HorseRider")
new_state = project_state.clone()
operation.state_forwards("test_rmwsrf", new_state)
self.assertNotIn(("test_rmwsrf", "rider"), new_state.models)
self.assertIn(("test_rmwsrf", "horserider"), new_state.models)
# Remember, RenameModel also repoints all incoming FKs and M2Ms
self.assertEqual(
"self",
new_state.models["test_rmwsrf", "horserider"]
.fields["friend"]
.remote_field.model,
)
HorseRider = new_state.apps.get_model("test_rmwsrf", "horserider")
self.assertIs(
HorseRider._meta.get_field("horserider").remote_field.model, HorseRider
)
# Test the database alteration
self.assertTableExists("test_rmwsrf_rider")
self.assertTableNotExists("test_rmwsrf_horserider")
if connection.features.supports_foreign_keys:
self.assertFKExists(
"test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_rider", "id")
)
self.assertFKNotExists(
"test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_horserider", "id")
)
atomic_rename = connection.features.supports_atomic_references_rename
with connection.schema_editor(atomic=atomic_rename) as editor:
operation.database_forwards("test_rmwsrf", editor, project_state, new_state)
self.assertTableNotExists("test_rmwsrf_rider")
self.assertTableExists("test_rmwsrf_horserider")
if connection.features.supports_foreign_keys:
self.assertFKNotExists(
"test_rmwsrf_horserider", ["friend_id"], ("test_rmwsrf_rider", "id")
)
self.assertFKExists(
"test_rmwsrf_horserider",
["friend_id"],
("test_rmwsrf_horserider", "id"),
)
# And test reversal
with connection.schema_editor(atomic=atomic_rename) as editor:
operation.database_backwards(
"test_rmwsrf", editor, new_state, project_state
)
self.assertTableExists("test_rmwsrf_rider")
self.assertTableNotExists("test_rmwsrf_horserider")
if connection.features.supports_foreign_keys:
self.assertFKExists(
"test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_rider", "id")
)
self.assertFKNotExists(
"test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_horserider", "id")
)
def test_rename_model_with_superclass_fk(self):
"""
Tests the RenameModel operation on a model which has a superclass that
has a foreign key.
"""
project_state = self.set_up_test_model(
"test_rmwsc", related_model=True, mti_model=True
)
# Test the state alteration
operation = migrations.RenameModel("ShetlandPony", "LittleHorse")
self.assertEqual(
operation.describe(), "Rename model ShetlandPony to LittleHorse"
)
new_state = project_state.clone()
operation.state_forwards("test_rmwsc", new_state)
self.assertNotIn(("test_rmwsc", "shetlandpony"), new_state.models)
self.assertIn(("test_rmwsc", "littlehorse"), new_state.models)
# RenameModel shouldn't repoint the superclass's relations, only local ones
self.assertEqual(
project_state.models["test_rmwsc", "rider"]
.fields["pony"]
.remote_field.model,
new_state.models["test_rmwsc", "rider"].fields["pony"].remote_field.model,
)
# Before running the migration we have a table for Shetland Pony, not
# Little Horse.
self.assertTableExists("test_rmwsc_shetlandpony")
self.assertTableNotExists("test_rmwsc_littlehorse")
if connection.features.supports_foreign_keys:
# and the foreign key on rider points to pony, not shetland pony
self.assertFKExists(
"test_rmwsc_rider", ["pony_id"], ("test_rmwsc_pony", "id")
)
self.assertFKNotExists(
"test_rmwsc_rider", ["pony_id"], ("test_rmwsc_shetlandpony", "id")
)
with connection.schema_editor(
atomic=connection.features.supports_atomic_references_rename
) as editor:
operation.database_forwards("test_rmwsc", editor, project_state, new_state)
# Now we have a little horse table, not shetland pony
self.assertTableNotExists("test_rmwsc_shetlandpony")
self.assertTableExists("test_rmwsc_littlehorse")
if connection.features.supports_foreign_keys:
# but the Foreign keys still point at pony, not little horse
self.assertFKExists(
"test_rmwsc_rider", ["pony_id"], ("test_rmwsc_pony", "id")
)
self.assertFKNotExists(
"test_rmwsc_rider", ["pony_id"], ("test_rmwsc_littlehorse", "id")
)
def test_rename_model_with_self_referential_m2m(self):
app_label = "test_rename_model_with_self_referential_m2m"
project_state = self.apply_operations(
app_label,
ProjectState(),
operations=[
migrations.CreateModel(
"ReflexivePony",
fields=[
("id", models.AutoField(primary_key=True)),
("ponies", models.ManyToManyField("self")),
],
),
],
)
project_state = self.apply_operations(
app_label,
project_state,
operations=[
migrations.RenameModel("ReflexivePony", "ReflexivePony2"),
],
atomic=connection.features.supports_atomic_references_rename,
)
Pony = project_state.apps.get_model(app_label, "ReflexivePony2")
pony = Pony.objects.create()
pony.ponies.add(pony)
def test_rename_model_with_m2m(self):
app_label = "test_rename_model_with_m2m"
project_state = self.apply_operations(
app_label,
ProjectState(),
operations=[
migrations.CreateModel(
"Rider",
fields=[
("id", models.AutoField(primary_key=True)),
],
),
migrations.CreateModel(
"Pony",
fields=[
("id", models.AutoField(primary_key=True)),
("riders", models.ManyToManyField("Rider")),
],
),
],
)
Pony = project_state.apps.get_model(app_label, "Pony")
Rider = project_state.apps.get_model(app_label, "Rider")
pony = Pony.objects.create()
rider = Rider.objects.create()
pony.riders.add(rider)
project_state = self.apply_operations(
app_label,
project_state,
operations=[
migrations.RenameModel("Pony", "Pony2"),
],
atomic=connection.features.supports_atomic_references_rename,
)
Pony = project_state.apps.get_model(app_label, "Pony2")
Rider = project_state.apps.get_model(app_label, "Rider")
pony = Pony.objects.create()
rider = Rider.objects.create()
pony.riders.add(rider)
self.assertEqual(Pony.objects.count(), 2)
self.assertEqual(Rider.objects.count(), 2)
self.assertEqual(
Pony._meta.get_field("riders").remote_field.through.objects.count(), 2
)
def test_rename_model_with_m2m_models_in_different_apps_with_same_name(self):
app_label_1 = "test_rmw_m2m_1"
app_label_2 = "test_rmw_m2m_2"
project_state = self.apply_operations(
app_label_1,
ProjectState(),
operations=[
migrations.CreateModel(
"Rider",
fields=[
("id", models.AutoField(primary_key=True)),
],
),
],
)
project_state = self.apply_operations(
app_label_2,
project_state,
operations=[
migrations.CreateModel(
"Rider",
fields=[
("id", models.AutoField(primary_key=True)),
("riders", models.ManyToManyField(f"{app_label_1}.Rider")),
],
),
],
)
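# Both M2M endpoints are models named Rider, so Django disambiguates the
# through table's columns with from_/to_ prefixes.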
m2m_table = f"{app_label_2}_rider_riders"
self.assertColumnExists(m2m_table, "from_rider_id")
self.assertColumnExists(m2m_table, "to_rider_id")
Rider_1 = project_state.apps.get_model(app_label_1, "Rider")
Rider_2 = project_state.apps.get_model(app_label_2, "Rider")
rider_2 = Rider_2.objects.create()
rider_2.riders.add(Rider_1.objects.create())
# Rename model.
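# Keep a pre-rename copy of the state so the rename can be unapplied at the
# end of the test.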
project_state_2 = project_state.clone()
project_state = self.apply_operations(
app_label_2,
project_state,
operations=[migrations.RenameModel("Rider", "Pony")],
atomic=connection.features.supports_atomic_references_rename,
)
m2m_table = f"{app_label_2}_pony_riders"
self.assertColumnExists(m2m_table, "pony_id")
self.assertColumnExists(m2m_table, "rider_id")
Rider_1 = project_state.apps.get_model(app_label_1, "Rider")
Rider_2 = project_state.apps.get_model(app_label_2, "Pony")
rider_2 = Rider_2.objects.create()
rider_2.riders.add(Rider_1.objects.create())
self.assertEqual(Rider_1.objects.count(), 2)
self.assertEqual(Rider_2.objects.count(), 2)
self.assertEqual(
Rider_2._meta.get_field("riders").remote_field.through.objects.count(), 2
)
# Reversal.
self.unapply_operations(
app_label_2,
project_state_2,
operations=[migrations.RenameModel("Rider", "Pony")],
atomic=connection.features.supports_atomic_references_rename,
)
m2m_table = f"{app_label_2}_rider_riders"
self.assertColumnExists(m2m_table, "to_rider_id")
self.assertColumnExists(m2m_table, "from_rider_id")
def test_rename_model_with_db_table_rename_m2m(self):
app_label = "test_rmwdbrm2m"
project_state = self.apply_operations(
app_label,
ProjectState(),
operations=[
migrations.CreateModel(
"Rider",
fields=[
("id", models.AutoField(primary_key=True)),
],
),
migrations.CreateModel(
"Pony",
fields=[
("id", models.AutoField(primary_key=True)),
("riders", models.ManyToManyField("Rider")),
],
options={"db_table": "pony"},
),
],
)
new_state = self.apply_operations(
app_label,
project_state,
operations=[migrations.RenameModel("Pony", "PinkPony")],
atomic=connection.features.supports_atomic_references_rename,
)
Pony = new_state.apps.get_model(app_label, "PinkPony")
Rider = new_state.apps.get_model(app_label, "Rider")
pony = Pony.objects.create()
rider = Rider.objects.create()
pony.riders.add(rider)
def test_rename_m2m_target_model(self):
app_label = "test_rename_m2m_target_model"
project_state = self.apply_operations(
app_label,
ProjectState(),
operations=[
migrations.CreateModel(
"Rider",
fields=[
("id", models.AutoField(primary_key=True)),
],
),
migrations.CreateModel(
"Pony",
fields=[
("id", models.AutoField(primary_key=True)),
("riders", models.ManyToManyField("Rider")),
],
),
],
)
Pony = project_state.apps.get_model(app_label, "Pony")
Rider = project_state.apps.get_model(app_label, "Rider")
pony = Pony.objects.create()
rider = Rider.objects.create()
pony.riders.add(rider)
project_state = self.apply_operations(
app_label,
project_state,
operations=[
migrations.RenameModel("Rider", "Rider2"),
],
atomic=connection.features.supports_atomic_references_rename,
)
Pony = project_state.apps.get_model(app_label, "Pony")
Rider = project_state.apps.get_model(app_label, "Rider2")
pony = Pony.objects.create()
rider = Rider.objects.create()
pony.riders.add(rider)
self.assertEqual(Pony.objects.count(), 2)
self.assertEqual(Rider.objects.count(), 2)
self.assertEqual(
Pony._meta.get_field("riders").remote_field.through.objects.count(), 2
)
def test_rename_m2m_through_model(self):
app_label = "test_rename_through"
project_state = self.apply_operations(
app_label,
ProjectState(),
operations=[
migrations.CreateModel(
"Rider",
fields=[
("id", models.AutoField(primary_key=True)),
],
),
migrations.CreateModel(
"Pony",
fields=[
("id", models.AutoField(primary_key=True)),
],
),
migrations.CreateModel(
"PonyRider",
fields=[
("id", models.AutoField(primary_key=True)),
(
"rider",
models.ForeignKey(
"test_rename_through.Rider", models.CASCADE
),
),
(
"pony",
models.ForeignKey(
"test_rename_through.Pony", models.CASCADE
),
),
],
),
migrations.AddField(
"Pony",
"riders",
models.ManyToManyField(
"test_rename_through.Rider",
through="test_rename_through.PonyRider",
),
),
],
)
Pony = project_state.apps.get_model(app_label, "Pony")
Rider = project_state.apps.get_model(app_label, "Rider")
PonyRider = project_state.apps.get_model(app_label, "PonyRider")
pony = Pony.objects.create()
rider = Rider.objects.create()
PonyRider.objects.create(pony=pony, rider=rider)
project_state = self.apply_operations(
app_label,
project_state,
operations=[
migrations.RenameModel("PonyRider", "PonyRider2"),
],
)
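        # Rows created through the old through model must still be present.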
Pony = project_state.apps.get_model(app_label, "Pony")
Rider = project_state.apps.get_model(app_label, "Rider")
PonyRider = project_state.apps.get_model(app_label, "PonyRider2")
pony = Pony.objects.first()
rider = Rider.objects.create()
PonyRider.objects.create(pony=pony, rider=rider)
self.assertEqual(Pony.objects.count(), 1)
self.assertEqual(Rider.objects.count(), 2)
self.assertEqual(PonyRider.objects.count(), 2)
self.assertEqual(pony.riders.count(), 2)
def test_rename_m2m_model_after_rename_field(self):
"""RenameModel renames a many-to-many column after a RenameField."""
app_label = "test_rename_multiple"
project_state = self.apply_operations(
app_label,
ProjectState(),
operations=[
migrations.CreateModel(
"Pony",
fields=[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=20)),
],
),
migrations.CreateModel(
"Rider",
fields=[
("id", models.AutoField(primary_key=True)),
(
"pony",
models.ForeignKey(
"test_rename_multiple.Pony", models.CASCADE
),
),
],
),
migrations.CreateModel(
"PonyRider",
fields=[
("id", models.AutoField(primary_key=True)),
("riders", models.ManyToManyField("Rider")),
],
),
migrations.RenameField(
model_name="pony", old_name="name", new_name="fancy_name"
),
migrations.RenameModel(old_name="Rider", new_name="Jockey"),
],
atomic=connection.features.supports_atomic_references_rename,
)
Pony = project_state.apps.get_model(app_label, "Pony")
Jockey = project_state.apps.get_model(app_label, "Jockey")
PonyRider = project_state.apps.get_model(app_label, "PonyRider")
# No "no such column" error means the column was renamed correctly.
pony = Pony.objects.create(fancy_name="a good name")
jockey = Jockey.objects.create(pony=pony)
ponyrider = PonyRider.objects.create()
ponyrider.riders.add(jockey)
def test_add_field(self):
"""
Tests the AddField operation.
"""
# Test the state alteration
operation = migrations.AddField(
"Pony",
"height",
models.FloatField(null=True, default=5),
)
self.assertEqual(operation.describe(), "Add field height to Pony")
self.assertEqual(operation.migration_name_fragment, "pony_height")
project_state, new_state = self.make_test_state("test_adfl", operation)
self.assertEqual(len(new_state.models["test_adfl", "pony"].fields), 6)
field = new_state.models["test_adfl", "pony"].fields["height"]
self.assertEqual(field.default, 5)
# Test the database alteration
self.assertColumnNotExists("test_adfl_pony", "height")
with connection.schema_editor() as editor:
operation.database_forwards("test_adfl", editor, project_state, new_state)
self.assertColumnExists("test_adfl_pony", "height")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_adfl", editor, new_state, project_state)
self.assertColumnNotExists("test_adfl_pony", "height")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AddField")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["field", "model_name", "name"])
def test_add_charfield(self):
"""
        Tests the AddField operation on CharField.
"""
project_state = self.set_up_test_model("test_adchfl")
Pony = project_state.apps.get_model("test_adchfl", "Pony")
pony = Pony.objects.create(weight=42)
new_state = self.apply_operations(
"test_adchfl",
project_state,
[
migrations.AddField(
"Pony",
"text",
models.CharField(max_length=10, default="some text"),
),
migrations.AddField(
"Pony",
"empty",
models.CharField(max_length=10, default=""),
),
                # If not properly quoted, digits would be interpreted as an int.
migrations.AddField(
"Pony",
"digits",
models.CharField(max_length=10, default="42"),
),
# Manual quoting is fragile and could trip on quotes. Refs #xyz.
migrations.AddField(
"Pony",
"quotes",
models.CharField(max_length=10, default='"\'"'),
),
],
)
Pony = new_state.apps.get_model("test_adchfl", "Pony")
pony = Pony.objects.get(pk=pony.pk)
self.assertEqual(pony.text, "some text")
self.assertEqual(pony.empty, "")
self.assertEqual(pony.digits, "42")
self.assertEqual(pony.quotes, '"\'"')
def test_add_textfield(self):
"""
Tests the AddField operation on TextField.
"""
project_state = self.set_up_test_model("test_adtxtfl")
Pony = project_state.apps.get_model("test_adtxtfl", "Pony")
pony = Pony.objects.create(weight=42)
new_state = self.apply_operations(
"test_adtxtfl",
project_state,
[
migrations.AddField(
"Pony",
"text",
models.TextField(default="some text"),
),
migrations.AddField(
"Pony",
"empty",
models.TextField(default=""),
),
                # If not properly quoted, digits would be interpreted as an int.
migrations.AddField(
"Pony",
"digits",
models.TextField(default="42"),
),
# Manual quoting is fragile and could trip on quotes. Refs #xyz.
migrations.AddField(
"Pony",
"quotes",
models.TextField(default='"\'"'),
),
],
)
Pony = new_state.apps.get_model("test_adtxtfl", "Pony")
pony = Pony.objects.get(pk=pony.pk)
self.assertEqual(pony.text, "some text")
self.assertEqual(pony.empty, "")
self.assertEqual(pony.digits, "42")
self.assertEqual(pony.quotes, '"\'"')
def test_add_binaryfield(self):
"""
        Tests the AddField operation on BinaryField.
"""
project_state = self.set_up_test_model("test_adbinfl")
Pony = project_state.apps.get_model("test_adbinfl", "Pony")
pony = Pony.objects.create(weight=42)
new_state = self.apply_operations(
"test_adbinfl",
project_state,
[
migrations.AddField(
"Pony",
"blob",
models.BinaryField(default=b"some text"),
),
migrations.AddField(
"Pony",
"empty",
models.BinaryField(default=b""),
),
                # If not properly quoted, digits would be interpreted as an int.
migrations.AddField(
"Pony",
"digits",
models.BinaryField(default=b"42"),
),
# Manual quoting is fragile and could trip on quotes. Refs #xyz.
migrations.AddField(
"Pony",
"quotes",
models.BinaryField(default=b'"\'"'),
),
],
)
Pony = new_state.apps.get_model("test_adbinfl", "Pony")
pony = Pony.objects.get(pk=pony.pk)
        # SQLite returns buffer/memoryview; cast to bytes for checking.
self.assertEqual(bytes(pony.blob), b"some text")
self.assertEqual(bytes(pony.empty), b"")
self.assertEqual(bytes(pony.digits), b"42")
self.assertEqual(bytes(pony.quotes), b'"\'"')
def test_column_name_quoting(self):
"""
Column names that are SQL keywords shouldn't cause problems when used
in migrations (#22168).
"""
project_state = self.set_up_test_model("test_regr22168")
operation = migrations.AddField(
"Pony",
"order",
models.IntegerField(default=0),
)
new_state = project_state.clone()
operation.state_forwards("test_regr22168", new_state)
with connection.schema_editor() as editor:
operation.database_forwards(
"test_regr22168", editor, project_state, new_state
)
self.assertColumnExists("test_regr22168_pony", "order")
def test_add_field_preserve_default(self):
"""
Tests the AddField operation's state alteration
when preserve_default = False.
"""
project_state = self.set_up_test_model("test_adflpd")
# Test the state alteration
operation = migrations.AddField(
"Pony",
"height",
models.FloatField(null=True, default=4),
preserve_default=False,
)
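        # With preserve_default=False, the default only backfills existing
        # rows; the in-memory state records NOT_PROVIDED.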
new_state = project_state.clone()
operation.state_forwards("test_adflpd", new_state)
self.assertEqual(len(new_state.models["test_adflpd", "pony"].fields), 6)
field = new_state.models["test_adflpd", "pony"].fields["height"]
self.assertEqual(field.default, models.NOT_PROVIDED)
# Test the database alteration
project_state.apps.get_model("test_adflpd", "pony").objects.create(
weight=4,
)
self.assertColumnNotExists("test_adflpd_pony", "height")
with connection.schema_editor() as editor:
operation.database_forwards("test_adflpd", editor, project_state, new_state)
self.assertColumnExists("test_adflpd_pony", "height")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AddField")
self.assertEqual(definition[1], [])
self.assertEqual(
sorted(definition[2]), ["field", "model_name", "name", "preserve_default"]
)
def test_add_field_database_default(self):
"""The AddField operation can set and unset a database default."""
app_label = "test_adfldd"
table_name = f"{app_label}_pony"
project_state = self.set_up_test_model(app_label)
operation = migrations.AddField(
"Pony", "height", models.FloatField(null=True, db_default=4)
)
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
self.assertEqual(len(new_state.models[app_label, "pony"].fields), 6)
field = new_state.models[app_label, "pony"].fields["height"]
self.assertEqual(field.default, models.NOT_PROVIDED)
self.assertEqual(field.db_default, Value(4))
project_state.apps.get_model(app_label, "pony").objects.create(weight=4)
self.assertColumnNotExists(table_name, "height")
# Add field.
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
self.assertColumnExists(table_name, "height")
new_model = new_state.apps.get_model(app_label, "pony")
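        # The pre-existing row is backfilled with the database default.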
old_pony = new_model.objects.get()
self.assertEqual(old_pony.height, 4)
new_pony = new_model.objects.create(weight=5)
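        # Backends without RETURNING support need a reload to observe the
        # database-computed value.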
if not connection.features.can_return_columns_from_insert:
new_pony.refresh_from_db()
self.assertEqual(new_pony.height, 4)
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
self.assertColumnNotExists(table_name, "height")
# Deconstruction.
definition = operation.deconstruct()
self.assertEqual(definition[0], "AddField")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2],
{
"field": field,
"model_name": "Pony",
"name": "height",
},
)
def test_add_field_database_default_special_char_escaping(self):
app_label = "test_adflddsce"
table_name = f"{app_label}_pony"
project_state = self.set_up_test_model(app_label)
old_pony_pk = (
project_state.apps.get_model(app_label, "pony").objects.create(weight=4).pk
)
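        # Each of these characters requires escaping when inlined into DDL.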
tests = ["%", "'", '"']
for db_default in tests:
with self.subTest(db_default=db_default):
operation = migrations.AddField(
"Pony",
"special_char",
models.CharField(max_length=1, db_default=db_default),
)
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
self.assertEqual(len(new_state.models[app_label, "pony"].fields), 6)
field = new_state.models[app_label, "pony"].fields["special_char"]
self.assertEqual(field.default, models.NOT_PROVIDED)
self.assertEqual(field.db_default, Value(db_default))
self.assertColumnNotExists(table_name, "special_char")
with connection.schema_editor() as editor:
operation.database_forwards(
app_label, editor, project_state, new_state
)
self.assertColumnExists(table_name, "special_char")
new_model = new_state.apps.get_model(app_label, "pony")
try:
new_pony = new_model.objects.create(weight=5)
if not connection.features.can_return_columns_from_insert:
new_pony.refresh_from_db()
self.assertEqual(new_pony.special_char, db_default)
old_pony = new_model.objects.get(pk=old_pony_pk)
if connection.vendor != "oracle" or db_default != "'":
                        # On Oracle, the single quotation mark ' is properly
                        # quoted and set for new rows; however, it is not set
                        # on existing rows. Skip the assertion as it's
                        # probably a bug in Oracle.
self.assertEqual(old_pony.special_char, db_default)
finally:
with connection.schema_editor() as editor:
operation.database_backwards(
app_label, editor, new_state, project_state
)
@skipUnlessDBFeature("supports_expression_defaults")
def test_add_field_database_default_function(self):
app_label = "test_adflddf"
table_name = f"{app_label}_pony"
project_state = self.set_up_test_model(app_label)
operation = migrations.AddField(
"Pony", "height", models.FloatField(db_default=Pi())
)
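        # Pi() is evaluated by the database itself, hence the
        # supports_expression_defaults guard on this test.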
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
self.assertEqual(len(new_state.models[app_label, "pony"].fields), 6)
field = new_state.models[app_label, "pony"].fields["height"]
self.assertEqual(field.default, models.NOT_PROVIDED)
self.assertEqual(field.db_default, Pi())
project_state.apps.get_model(app_label, "pony").objects.create(weight=4)
self.assertColumnNotExists(table_name, "height")
# Add field.
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
self.assertColumnExists(table_name, "height")
new_model = new_state.apps.get_model(app_label, "pony")
old_pony = new_model.objects.get()
self.assertAlmostEqual(old_pony.height, math.pi)
new_pony = new_model.objects.create(weight=5)
if not connection.features.can_return_columns_from_insert:
new_pony.refresh_from_db()
        self.assertAlmostEqual(new_pony.height, math.pi)
def test_add_field_both_defaults(self):
"""The AddField operation with both default and db_default."""
app_label = "test_adflbddd"
table_name = f"{app_label}_pony"
project_state = self.set_up_test_model(app_label)
operation = migrations.AddField(
"Pony", "height", models.FloatField(default=3, db_default=4)
)
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
self.assertEqual(len(new_state.models[app_label, "pony"].fields), 6)
field = new_state.models[app_label, "pony"].fields["height"]
self.assertEqual(field.default, 3)
self.assertEqual(field.db_default, Value(4))
project_state.apps.get_model(app_label, "pony").objects.create(weight=4)
self.assertColumnNotExists(table_name, "height")
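        # Existing rows get the database default (4); new ORM instances use
        # the Python-level default (3).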
# Add field.
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
self.assertColumnExists(table_name, "height")
new_model = new_state.apps.get_model(app_label, "pony")
old_pony = new_model.objects.get()
self.assertEqual(old_pony.height, 4)
new_pony = new_model.objects.create(weight=5)
if not connection.features.can_return_columns_from_insert:
new_pony.refresh_from_db()
self.assertEqual(new_pony.height, 3)
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
self.assertColumnNotExists(table_name, "height")
# Deconstruction.
definition = operation.deconstruct()
self.assertEqual(definition[0], "AddField")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2],
{
"field": field,
"model_name": "Pony",
"name": "height",
},
)
def test_add_field_m2m(self):
"""
Tests the AddField operation with a ManyToManyField.
"""
project_state = self.set_up_test_model("test_adflmm", second_model=True)
# Test the state alteration
operation = migrations.AddField(
"Pony", "stables", models.ManyToManyField("Stable", related_name="ponies")
)
new_state = project_state.clone()
operation.state_forwards("test_adflmm", new_state)
self.assertEqual(len(new_state.models["test_adflmm", "pony"].fields), 6)
# Test the database alteration
self.assertTableNotExists("test_adflmm_pony_stables")
with connection.schema_editor() as editor:
operation.database_forwards("test_adflmm", editor, project_state, new_state)
self.assertTableExists("test_adflmm_pony_stables")
self.assertColumnNotExists("test_adflmm_pony", "stables")
# Make sure the M2M field actually works
with atomic():
Pony = new_state.apps.get_model("test_adflmm", "Pony")
p = Pony.objects.create(pink=False, weight=4.55)
p.stables.create()
self.assertEqual(p.stables.count(), 1)
p.stables.all().delete()
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards(
"test_adflmm", editor, new_state, project_state
)
self.assertTableNotExists("test_adflmm_pony_stables")
def test_alter_field_m2m(self):
project_state = self.set_up_test_model("test_alflmm", second_model=True)
project_state = self.apply_operations(
"test_alflmm",
project_state,
operations=[
migrations.AddField(
"Pony",
"stables",
models.ManyToManyField("Stable", related_name="ponies"),
)
],
)
Pony = project_state.apps.get_model("test_alflmm", "Pony")
self.assertFalse(Pony._meta.get_field("stables").blank)
project_state = self.apply_operations(
"test_alflmm",
project_state,
operations=[
migrations.AlterField(
"Pony",
"stables",
models.ManyToManyField(
to="Stable", related_name="ponies", blank=True
),
)
],
)
Pony = project_state.apps.get_model("test_alflmm", "Pony")
self.assertTrue(Pony._meta.get_field("stables").blank)
def test_repoint_field_m2m(self):
project_state = self.set_up_test_model(
"test_alflmm", second_model=True, third_model=True
)
project_state = self.apply_operations(
"test_alflmm",
project_state,
operations=[
migrations.AddField(
"Pony",
"places",
models.ManyToManyField("Stable", related_name="ponies"),
)
],
)
Pony = project_state.apps.get_model("test_alflmm", "Pony")
project_state = self.apply_operations(
"test_alflmm",
project_state,
operations=[
migrations.AlterField(
"Pony",
"places",
models.ManyToManyField(to="Van", related_name="ponies"),
)
],
)
# Ensure the new field actually works
Pony = project_state.apps.get_model("test_alflmm", "Pony")
p = Pony.objects.create(pink=False, weight=4.55)
p.places.create()
self.assertEqual(p.places.count(), 1)
p.places.all().delete()
def test_remove_field_m2m(self):
project_state = self.set_up_test_model("test_rmflmm", second_model=True)
project_state = self.apply_operations(
"test_rmflmm",
project_state,
operations=[
migrations.AddField(
"Pony",
"stables",
models.ManyToManyField("Stable", related_name="ponies"),
)
],
)
self.assertTableExists("test_rmflmm_pony_stables")
with_field_state = project_state.clone()
operations = [migrations.RemoveField("Pony", "stables")]
project_state = self.apply_operations(
"test_rmflmm", project_state, operations=operations
)
self.assertTableNotExists("test_rmflmm_pony_stables")
# And test reversal
self.unapply_operations("test_rmflmm", with_field_state, operations=operations)
self.assertTableExists("test_rmflmm_pony_stables")
def test_remove_field_m2m_with_through(self):
project_state = self.set_up_test_model("test_rmflmmwt", second_model=True)
self.assertTableNotExists("test_rmflmmwt_ponystables")
project_state = self.apply_operations(
"test_rmflmmwt",
project_state,
operations=[
migrations.CreateModel(
"PonyStables",
fields=[
(
"pony",
models.ForeignKey("test_rmflmmwt.Pony", models.CASCADE),
),
(
"stable",
models.ForeignKey("test_rmflmmwt.Stable", models.CASCADE),
),
],
),
migrations.AddField(
"Pony",
"stables",
models.ManyToManyField(
"Stable",
related_name="ponies",
through="test_rmflmmwt.PonyStables",
),
),
],
)
self.assertTableExists("test_rmflmmwt_ponystables")
operations = [
migrations.RemoveField("Pony", "stables"),
migrations.DeleteModel("PonyStables"),
]
self.apply_operations("test_rmflmmwt", project_state, operations=operations)
def test_remove_field(self):
"""
Tests the RemoveField operation.
"""
project_state = self.set_up_test_model("test_rmfl")
# Test the state alteration
operation = migrations.RemoveField("Pony", "pink")
self.assertEqual(operation.describe(), "Remove field pink from Pony")
self.assertEqual(operation.migration_name_fragment, "remove_pony_pink")
new_state = project_state.clone()
operation.state_forwards("test_rmfl", new_state)
self.assertEqual(len(new_state.models["test_rmfl", "pony"].fields), 4)
# Test the database alteration
self.assertColumnExists("test_rmfl_pony", "pink")
with connection.schema_editor() as editor:
operation.database_forwards("test_rmfl", editor, project_state, new_state)
self.assertColumnNotExists("test_rmfl_pony", "pink")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_rmfl", editor, new_state, project_state)
self.assertColumnExists("test_rmfl_pony", "pink")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "RemoveField")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {"model_name": "Pony", "name": "pink"})
def test_remove_fk(self):
"""
Tests the RemoveField operation on a foreign key.
"""
project_state = self.set_up_test_model("test_rfk", related_model=True)
self.assertColumnExists("test_rfk_rider", "pony_id")
operation = migrations.RemoveField("Rider", "pony")
new_state = project_state.clone()
operation.state_forwards("test_rfk", new_state)
with connection.schema_editor() as editor:
operation.database_forwards("test_rfk", editor, project_state, new_state)
self.assertColumnNotExists("test_rfk_rider", "pony_id")
with connection.schema_editor() as editor:
operation.database_backwards("test_rfk", editor, new_state, project_state)
self.assertColumnExists("test_rfk_rider", "pony_id")
def test_alter_model_table(self):
"""
Tests the AlterModelTable operation.
"""
project_state = self.set_up_test_model("test_almota")
# Test the state alteration
operation = migrations.AlterModelTable("Pony", "test_almota_pony_2")
self.assertEqual(
operation.describe(), "Rename table for Pony to test_almota_pony_2"
)
self.assertEqual(operation.migration_name_fragment, "alter_pony_table")
new_state = project_state.clone()
operation.state_forwards("test_almota", new_state)
self.assertEqual(
new_state.models["test_almota", "pony"].options["db_table"],
"test_almota_pony_2",
)
# Test the database alteration
self.assertTableExists("test_almota_pony")
self.assertTableNotExists("test_almota_pony_2")
with connection.schema_editor() as editor:
operation.database_forwards("test_almota", editor, project_state, new_state)
self.assertTableNotExists("test_almota_pony")
self.assertTableExists("test_almota_pony_2")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards(
"test_almota", editor, new_state, project_state
)
self.assertTableExists("test_almota_pony")
self.assertTableNotExists("test_almota_pony_2")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterModelTable")
self.assertEqual(definition[1], [])
        self.assertEqual(
            definition[2], {"name": "Pony", "table": "test_almota_pony_2"}
        )
def test_alter_model_table_none(self):
"""
Tests the AlterModelTable operation if the table name is set to None.
"""
operation = migrations.AlterModelTable("Pony", None)
self.assertEqual(operation.describe(), "Rename table for Pony to (default)")
def test_alter_model_table_noop(self):
"""
Tests the AlterModelTable operation if the table name is not changed.
"""
project_state = self.set_up_test_model("test_almota")
# Test the state alteration
operation = migrations.AlterModelTable("Pony", "test_almota_pony")
new_state = project_state.clone()
operation.state_forwards("test_almota", new_state)
self.assertEqual(
new_state.models["test_almota", "pony"].options["db_table"],
"test_almota_pony",
)
# Test the database alteration
self.assertTableExists("test_almota_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_almota", editor, project_state, new_state)
self.assertTableExists("test_almota_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards(
"test_almota", editor, new_state, project_state
)
self.assertTableExists("test_almota_pony")
def test_alter_model_table_m2m(self):
"""
AlterModelTable should rename auto-generated M2M tables.
"""
app_label = "test_talflmltlm2m"
pony_db_table = "pony_foo"
project_state = self.set_up_test_model(
app_label, second_model=True, db_table=pony_db_table
)
# Add the M2M field
first_state = project_state.clone()
operation = migrations.AddField(
"Pony", "stables", models.ManyToManyField("Stable")
)
operation.state_forwards(app_label, first_state)
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, first_state)
original_m2m_table = "%s_%s" % (pony_db_table, "stables")
new_m2m_table = "%s_%s" % (app_label, "pony_stables")
self.assertTableExists(original_m2m_table)
self.assertTableNotExists(new_m2m_table)
# Rename the Pony db_table which should also rename the m2m table.
second_state = first_state.clone()
operation = migrations.AlterModelTable(name="pony", table=None)
operation.state_forwards(app_label, second_state)
atomic_rename = connection.features.supports_atomic_references_rename
with connection.schema_editor(atomic=atomic_rename) as editor:
operation.database_forwards(app_label, editor, first_state, second_state)
self.assertTableExists(new_m2m_table)
self.assertTableNotExists(original_m2m_table)
# And test reversal
with connection.schema_editor(atomic=atomic_rename) as editor:
operation.database_backwards(app_label, editor, second_state, first_state)
self.assertTableExists(original_m2m_table)
self.assertTableNotExists(new_m2m_table)
def test_alter_model_table_m2m_field(self):
app_label = "test_talm2mfl"
project_state = self.set_up_test_model(app_label, second_model=True)
# Add the M2M field.
project_state = self.apply_operations(
app_label,
project_state,
operations=[
migrations.AddField(
"Pony",
"stables",
models.ManyToManyField("Stable"),
)
],
)
m2m_table = f"{app_label}_pony_stables"
self.assertColumnExists(m2m_table, "pony_id")
self.assertColumnExists(m2m_table, "stable_id")
# Point the M2M field to self.
with_field_state = project_state.clone()
operations = [
migrations.AlterField(
model_name="Pony",
name="stables",
field=models.ManyToManyField("self"),
)
]
project_state = self.apply_operations(
app_label, project_state, operations=operations
)
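        # Pointing the M2M at "self" switches to from_/to_ column names.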
self.assertColumnExists(m2m_table, "from_pony_id")
self.assertColumnExists(m2m_table, "to_pony_id")
# Reversal.
self.unapply_operations(app_label, with_field_state, operations=operations)
self.assertColumnExists(m2m_table, "pony_id")
self.assertColumnExists(m2m_table, "stable_id")
def test_alter_field(self):
"""
Tests the AlterField operation.
"""
project_state = self.set_up_test_model("test_alfl")
# Test the state alteration
operation = migrations.AlterField(
"Pony", "pink", models.IntegerField(null=True)
)
self.assertEqual(operation.describe(), "Alter field pink on Pony")
self.assertEqual(operation.migration_name_fragment, "alter_pony_pink")
new_state = project_state.clone()
operation.state_forwards("test_alfl", new_state)
self.assertIs(
project_state.models["test_alfl", "pony"].fields["pink"].null, False
)
self.assertIs(new_state.models["test_alfl", "pony"].fields["pink"].null, True)
# Test the database alteration
self.assertColumnNotNull("test_alfl_pony", "pink")
with connection.schema_editor() as editor:
operation.database_forwards("test_alfl", editor, project_state, new_state)
self.assertColumnNull("test_alfl_pony", "pink")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_alfl", editor, new_state, project_state)
self.assertColumnNotNull("test_alfl_pony", "pink")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterField")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["field", "model_name", "name"])
def test_alter_field_add_database_default(self):
app_label = "test_alfladd"
project_state = self.set_up_test_model(app_label)
operation = migrations.AlterField(
"Pony", "weight", models.FloatField(db_default=4.5)
)
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
old_weight = project_state.models[app_label, "pony"].fields["weight"]
self.assertIs(old_weight.db_default, models.NOT_PROVIDED)
new_weight = new_state.models[app_label, "pony"].fields["weight"]
self.assertEqual(new_weight.db_default, Value(4.5))
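        # Before the alteration, omitting the NOT NULL weight column raises.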
with self.assertRaises(IntegrityError), transaction.atomic():
project_state.apps.get_model(app_label, "pony").objects.create()
# Alter field.
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
pony = new_state.apps.get_model(app_label, "pony").objects.create()
if not connection.features.can_return_columns_from_insert:
pony.refresh_from_db()
self.assertEqual(pony.weight, 4.5)
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
with self.assertRaises(IntegrityError), transaction.atomic():
project_state.apps.get_model(app_label, "pony").objects.create()
# Deconstruction.
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterField")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2],
{
"field": new_weight,
"model_name": "Pony",
"name": "weight",
},
)
def test_alter_field_change_default_to_database_default(self):
"""The AlterField operation changing default to db_default."""
app_label = "test_alflcdtdd"
project_state = self.set_up_test_model(app_label)
operation = migrations.AlterField(
"Pony", "pink", models.IntegerField(db_default=4)
)
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
old_pink = project_state.models[app_label, "pony"].fields["pink"]
self.assertEqual(old_pink.default, 3)
self.assertIs(old_pink.db_default, models.NOT_PROVIDED)
new_pink = new_state.models[app_label, "pony"].fields["pink"]
self.assertIs(new_pink.default, models.NOT_PROVIDED)
self.assertEqual(new_pink.db_default, Value(4))
pony = project_state.apps.get_model(app_label, "pony").objects.create(weight=1)
self.assertEqual(pony.pink, 3)
# Alter field.
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
pony = new_state.apps.get_model(app_label, "pony").objects.create(weight=1)
if not connection.features.can_return_columns_from_insert:
pony.refresh_from_db()
self.assertEqual(pony.pink, 4)
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
pony = project_state.apps.get_model(app_label, "pony").objects.create(weight=1)
self.assertEqual(pony.pink, 3)
def test_alter_field_change_nullable_to_database_default_not_null(self):
"""
The AlterField operation changing a null field to db_default.
"""
app_label = "test_alflcntddnn"
project_state = self.set_up_test_model(app_label)
operation = migrations.AlterField(
"Pony", "green", models.IntegerField(db_default=4)
)
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
old_green = project_state.models[app_label, "pony"].fields["green"]
self.assertIs(old_green.db_default, models.NOT_PROVIDED)
new_green = new_state.models[app_label, "pony"].fields["green"]
self.assertEqual(new_green.db_default, Value(4))
old_pony = project_state.apps.get_model(app_label, "pony").objects.create(
weight=1
)
self.assertIsNone(old_pony.green)
# Alter field.
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
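        # The existing NULL is replaced by the database default.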
old_pony.refresh_from_db()
self.assertEqual(old_pony.green, 4)
pony = new_state.apps.get_model(app_label, "pony").objects.create(weight=1)
if not connection.features.can_return_columns_from_insert:
pony.refresh_from_db()
self.assertEqual(pony.green, 4)
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
pony = project_state.apps.get_model(app_label, "pony").objects.create(weight=1)
self.assertIsNone(pony.green)
@skipIfDBFeature("interprets_empty_strings_as_nulls")
def test_alter_field_change_blank_nullable_database_default_to_not_null(self):
app_label = "test_alflcbnddnn"
table_name = f"{app_label}_pony"
project_state = self.set_up_test_model(app_label)
default = "Yellow"
operation = migrations.AlterField(
"Pony",
"yellow",
models.CharField(blank=True, db_default=default, max_length=20),
)
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
self.assertColumnNull(table_name, "yellow")
pony = project_state.apps.get_model(app_label, "pony").objects.create(
weight=1, yellow=None
)
self.assertIsNone(pony.yellow)
# Alter field.
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
self.assertColumnNotNull(table_name, "yellow")
pony.refresh_from_db()
self.assertEqual(pony.yellow, default)
pony = new_state.apps.get_model(app_label, "pony").objects.create(weight=1)
if not connection.features.can_return_columns_from_insert:
pony.refresh_from_db()
self.assertEqual(pony.yellow, default)
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
self.assertColumnNull(table_name, "yellow")
pony = project_state.apps.get_model(app_label, "pony").objects.create(
weight=1, yellow=None
)
self.assertIsNone(pony.yellow)
def test_alter_field_add_db_column_noop(self):
"""
AlterField operation is a noop when adding only a db_column and the
column name is not changed.
"""
app_label = "test_afadbn"
project_state = self.set_up_test_model(app_label, related_model=True)
pony_table = "%s_pony" % app_label
new_state = project_state.clone()
operation = migrations.AlterField(
"Pony", "weight", models.FloatField(db_column="weight")
)
operation.state_forwards(app_label, new_state)
self.assertIsNone(
project_state.models[app_label, "pony"].fields["weight"].db_column,
)
self.assertEqual(
new_state.models[app_label, "pony"].fields["weight"].db_column,
"weight",
)
self.assertColumnExists(pony_table, "weight")
with connection.schema_editor() as editor:
with self.assertNumQueries(0):
operation.database_forwards(app_label, editor, project_state, new_state)
self.assertColumnExists(pony_table, "weight")
with connection.schema_editor() as editor:
with self.assertNumQueries(0):
operation.database_backwards(
app_label, editor, new_state, project_state
)
self.assertColumnExists(pony_table, "weight")
rider_table = "%s_rider" % app_label
new_state = project_state.clone()
operation = migrations.AlterField(
"Rider",
"pony",
models.ForeignKey("Pony", models.CASCADE, db_column="pony_id"),
)
operation.state_forwards(app_label, new_state)
self.assertIsNone(
project_state.models[app_label, "rider"].fields["pony"].db_column,
)
        self.assertEqual(
new_state.models[app_label, "rider"].fields["pony"].db_column,
"pony_id",
)
self.assertColumnExists(rider_table, "pony_id")
with connection.schema_editor() as editor:
with self.assertNumQueries(0):
operation.database_forwards(app_label, editor, project_state, new_state)
self.assertColumnExists(rider_table, "pony_id")
with connection.schema_editor() as editor:
with self.assertNumQueries(0):
                operation.database_backwards(
                    app_label, editor, new_state, project_state
                )
self.assertColumnExists(rider_table, "pony_id")
@skipUnlessDBFeature("supports_comments")
def test_alter_model_table_comment(self):
app_label = "test_almotaco"
project_state = self.set_up_test_model(app_label)
pony_table = f"{app_label}_pony"
# Add table comment.
operation = migrations.AlterModelTableComment("Pony", "Custom pony comment")
self.assertEqual(operation.describe(), "Alter Pony table comment")
self.assertEqual(operation.migration_name_fragment, "alter_pony_table_comment")
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
self.assertEqual(
new_state.models[app_label, "pony"].options["db_table_comment"],
"Custom pony comment",
)
self.assertTableCommentNotExists(pony_table)
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
self.assertTableComment(pony_table, "Custom pony comment")
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
self.assertTableCommentNotExists(pony_table)
# Deconstruction.
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterModelTableComment")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2], {"name": "Pony", "table_comment": "Custom pony comment"}
)
def test_alter_field_pk(self):
"""
The AlterField operation on primary keys (things like PostgreSQL's
SERIAL weirdness).
"""
project_state = self.set_up_test_model("test_alflpk")
# Test the state alteration
operation = migrations.AlterField(
"Pony", "id", models.IntegerField(primary_key=True)
)
new_state = project_state.clone()
operation.state_forwards("test_alflpk", new_state)
self.assertIsInstance(
project_state.models["test_alflpk", "pony"].fields["id"],
models.AutoField,
)
self.assertIsInstance(
new_state.models["test_alflpk", "pony"].fields["id"],
models.IntegerField,
)
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_alflpk", editor, project_state, new_state)
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards(
"test_alflpk", editor, new_state, project_state
)
@skipUnlessDBFeature("supports_foreign_keys")
def test_alter_field_pk_fk(self):
"""
        The AlterField operation on primary keys changes any FKs pointing to it.
"""
project_state = self.set_up_test_model("test_alflpkfk", related_model=True)
project_state = self.apply_operations(
"test_alflpkfk",
project_state,
[
migrations.CreateModel(
"Stable",
fields=[
("ponies", models.ManyToManyField("Pony")),
],
),
migrations.AddField(
"Pony",
"stables",
models.ManyToManyField("Stable"),
),
],
)
# Test the state alteration
operation = migrations.AlterField(
"Pony", "id", models.FloatField(primary_key=True)
)
new_state = project_state.clone()
operation.state_forwards("test_alflpkfk", new_state)
self.assertIsInstance(
project_state.models["test_alflpkfk", "pony"].fields["id"],
models.AutoField,
)
self.assertIsInstance(
new_state.models["test_alflpkfk", "pony"].fields["id"],
models.FloatField,
)
def assertIdTypeEqualsFkType():
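            # The PK column and every FK column referencing it must agree on
            # introspected type and nullability.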
with connection.cursor() as cursor:
id_type, id_null = [
(c.type_code, c.null_ok)
for c in connection.introspection.get_table_description(
cursor, "test_alflpkfk_pony"
)
if c.name == "id"
][0]
fk_type, fk_null = [
(c.type_code, c.null_ok)
for c in connection.introspection.get_table_description(
cursor, "test_alflpkfk_rider"
)
if c.name == "pony_id"
][0]
m2m_fk_type, m2m_fk_null = [
(c.type_code, c.null_ok)
for c in connection.introspection.get_table_description(
cursor,
"test_alflpkfk_pony_stables",
)
if c.name == "pony_id"
][0]
remote_m2m_fk_type, remote_m2m_fk_null = [
(c.type_code, c.null_ok)
for c in connection.introspection.get_table_description(
cursor,
"test_alflpkfk_stable_ponies",
)
if c.name == "pony_id"
][0]
self.assertEqual(id_type, fk_type)
self.assertEqual(id_type, m2m_fk_type)
self.assertEqual(id_type, remote_m2m_fk_type)
self.assertEqual(id_null, fk_null)
self.assertEqual(id_null, m2m_fk_null)
self.assertEqual(id_null, remote_m2m_fk_null)
assertIdTypeEqualsFkType()
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards(
"test_alflpkfk", editor, project_state, new_state
)
assertIdTypeEqualsFkType()
if connection.features.supports_foreign_keys:
self.assertFKExists(
"test_alflpkfk_pony_stables",
["pony_id"],
("test_alflpkfk_pony", "id"),
)
self.assertFKExists(
"test_alflpkfk_stable_ponies",
["pony_id"],
("test_alflpkfk_pony", "id"),
)
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards(
"test_alflpkfk", editor, new_state, project_state
)
assertIdTypeEqualsFkType()
if connection.features.supports_foreign_keys:
self.assertFKExists(
"test_alflpkfk_pony_stables",
["pony_id"],
("test_alflpkfk_pony", "id"),
)
self.assertFKExists(
"test_alflpkfk_stable_ponies",
["pony_id"],
("test_alflpkfk_pony", "id"),
)
@skipUnlessDBFeature("supports_collation_on_charfield", "supports_foreign_keys")
def test_alter_field_pk_fk_db_collation(self):
"""
AlterField operation of db_collation on primary keys changes any FKs
pointing to it.
"""
collation = connection.features.test_collations.get("non_default")
if not collation:
self.skipTest("Language collations are not supported.")
app_label = "test_alflpkfkdbc"
project_state = self.apply_operations(
app_label,
ProjectState(),
[
migrations.CreateModel(
"Pony",
[
("id", models.CharField(primary_key=True, max_length=10)),
],
),
migrations.CreateModel(
"Rider",
[
("pony", models.ForeignKey("Pony", models.CASCADE)),
],
),
migrations.CreateModel(
"Stable",
[
("ponies", models.ManyToManyField("Pony")),
],
),
],
)
# State alteration.
operation = migrations.AlterField(
"Pony",
"id",
models.CharField(
primary_key=True,
max_length=10,
db_collation=collation,
),
)
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
# Database alteration.
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
self.assertColumnCollation(f"{app_label}_pony", "id", collation)
self.assertColumnCollation(f"{app_label}_rider", "pony_id", collation)
self.assertColumnCollation(f"{app_label}_stable_ponies", "pony_id", collation)
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
def test_alter_field_pk_mti_fk(self):
app_label = "test_alflpkmtifk"
project_state = self.set_up_test_model(app_label, mti_model=True)
project_state = self.apply_operations(
app_label,
project_state,
[
migrations.CreateModel(
"ShetlandRider",
fields=[
(
"pony",
models.ForeignKey(
f"{app_label}.ShetlandPony", models.CASCADE
),
),
],
),
],
)
operation = migrations.AlterField(
"Pony",
"id",
models.BigAutoField(primary_key=True),
)
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
self.assertIsInstance(
new_state.models[app_label, "pony"].fields["id"],
models.BigAutoField,
)
def _get_column_id_type(cursor, table, column):
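            # Return the introspected type code for the given table column.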
return [
c.type_code
for c in connection.introspection.get_table_description(
cursor,
f"{app_label}_{table}",
)
if c.name == column
][0]
def assertIdTypeEqualsMTIFkType():
with connection.cursor() as cursor:
parent_id_type = _get_column_id_type(cursor, "pony", "id")
child_id_type = _get_column_id_type(
cursor, "shetlandpony", "pony_ptr_id"
)
mti_id_type = _get_column_id_type(cursor, "shetlandrider", "pony_id")
self.assertEqual(parent_id_type, child_id_type)
self.assertEqual(parent_id_type, mti_id_type)
assertIdTypeEqualsMTIFkType()
# Alter primary key.
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
assertIdTypeEqualsMTIFkType()
if connection.features.supports_foreign_keys:
self.assertFKExists(
f"{app_label}_shetlandpony",
["pony_ptr_id"],
(f"{app_label}_pony", "id"),
)
self.assertFKExists(
f"{app_label}_shetlandrider",
["pony_id"],
(f"{app_label}_shetlandpony", "pony_ptr_id"),
)
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
assertIdTypeEqualsMTIFkType()
if connection.features.supports_foreign_keys:
self.assertFKExists(
f"{app_label}_shetlandpony",
["pony_ptr_id"],
(f"{app_label}_pony", "id"),
)
self.assertFKExists(
f"{app_label}_shetlandrider",
["pony_id"],
(f"{app_label}_shetlandpony", "pony_ptr_id"),
)
def test_alter_field_pk_mti_and_fk_to_base(self):
app_label = "test_alflpkmtiftb"
project_state = self.set_up_test_model(
app_label,
mti_model=True,
related_model=True,
)
operation = migrations.AlterField(
"Pony",
"id",
models.BigAutoField(primary_key=True),
)
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
self.assertIsInstance(
new_state.models[app_label, "pony"].fields["id"],
models.BigAutoField,
)
def _get_column_id_type(cursor, table, column):
return [
c.type_code
for c in connection.introspection.get_table_description(
cursor,
f"{app_label}_{table}",
)
if c.name == column
][0]
def assertIdTypeEqualsMTIFkType():
with connection.cursor() as cursor:
parent_id_type = _get_column_id_type(cursor, "pony", "id")
fk_id_type = _get_column_id_type(cursor, "rider", "pony_id")
child_id_type = _get_column_id_type(
cursor, "shetlandpony", "pony_ptr_id"
)
self.assertEqual(parent_id_type, child_id_type)
self.assertEqual(parent_id_type, fk_id_type)
assertIdTypeEqualsMTIFkType()
# Alter primary key.
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
assertIdTypeEqualsMTIFkType()
if connection.features.supports_foreign_keys:
self.assertFKExists(
f"{app_label}_shetlandpony",
["pony_ptr_id"],
(f"{app_label}_pony", "id"),
)
self.assertFKExists(
f"{app_label}_rider",
["pony_id"],
(f"{app_label}_pony", "id"),
)
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
assertIdTypeEqualsMTIFkType()
if connection.features.supports_foreign_keys:
self.assertFKExists(
f"{app_label}_shetlandpony",
["pony_ptr_id"],
(f"{app_label}_pony", "id"),
)
self.assertFKExists(
f"{app_label}_rider",
["pony_id"],
(f"{app_label}_pony", "id"),
)
@skipUnlessDBFeature("supports_foreign_keys")
def test_alter_field_reloads_state_on_fk_with_to_field_target_type_change(self):
app_label = "test_alflrsfkwtflttc"
project_state = self.apply_operations(
app_label,
ProjectState(),
operations=[
migrations.CreateModel(
"Rider",
fields=[
("id", models.AutoField(primary_key=True)),
("code", models.IntegerField(unique=True)),
],
),
migrations.CreateModel(
"Pony",
fields=[
("id", models.AutoField(primary_key=True)),
(
"rider",
models.ForeignKey(
"%s.Rider" % app_label, models.CASCADE, to_field="code"
),
),
],
),
],
)
operation = migrations.AlterField(
"Rider",
"code",
models.CharField(max_length=100, unique=True),
)
self.apply_operations(app_label, project_state, operations=[operation])
id_type, id_null = [
(c.type_code, c.null_ok)
for c in self.get_table_description("%s_rider" % app_label)
if c.name == "code"
][0]
fk_type, fk_null = [
(c.type_code, c.null_ok)
for c in self.get_table_description("%s_pony" % app_label)
if c.name == "rider_id"
][0]
self.assertEqual(id_type, fk_type)
self.assertEqual(id_null, fk_null)
@skipUnlessDBFeature("supports_foreign_keys")
def test_alter_field_reloads_state_fk_with_to_field_related_name_target_type_change(
self,
):
app_label = "test_alflrsfkwtflrnttc"
project_state = self.apply_operations(
app_label,
ProjectState(),
operations=[
migrations.CreateModel(
"Rider",
fields=[
("id", models.AutoField(primary_key=True)),
("code", models.PositiveIntegerField(unique=True)),
],
),
migrations.CreateModel(
"Pony",
fields=[
("id", models.AutoField(primary_key=True)),
(
"rider",
models.ForeignKey(
"%s.Rider" % app_label,
models.CASCADE,
to_field="code",
related_name="+",
),
),
],
),
],
)
operation = migrations.AlterField(
"Rider",
"code",
models.CharField(max_length=100, unique=True),
)
self.apply_operations(app_label, project_state, operations=[operation])
def test_alter_field_reloads_state_on_fk_target_changes(self):
"""
If AlterField doesn't reload state appropriately, the second AlterField
crashes on MySQL due to not dropping the PonyRider.pony foreign key
constraint before modifying the column.
"""
app_label = "alter_alter_field_reloads_state_on_fk_target_changes"
project_state = self.apply_operations(
app_label,
ProjectState(),
operations=[
migrations.CreateModel(
"Rider",
fields=[
("id", models.CharField(primary_key=True, max_length=100)),
],
),
migrations.CreateModel(
"Pony",
fields=[
("id", models.CharField(primary_key=True, max_length=100)),
(
"rider",
models.ForeignKey("%s.Rider" % app_label, models.CASCADE),
),
],
),
migrations.CreateModel(
"PonyRider",
fields=[
("id", models.AutoField(primary_key=True)),
(
"pony",
models.ForeignKey("%s.Pony" % app_label, models.CASCADE),
),
],
),
],
)
project_state = self.apply_operations(
app_label,
project_state,
operations=[
migrations.AlterField(
"Rider", "id", models.CharField(primary_key=True, max_length=99)
),
migrations.AlterField(
"Pony", "id", models.CharField(primary_key=True, max_length=99)
),
],
)
def test_alter_field_reloads_state_on_fk_with_to_field_target_changes(self):
"""
If AlterField doesn't reload state appropriately, the second AlterField
crashes on MySQL due to not dropping the PonyRider.pony foreign key
constraint before modifying the column.
"""
app_label = "alter_alter_field_reloads_state_on_fk_with_to_field_target_changes"
project_state = self.apply_operations(
app_label,
ProjectState(),
operations=[
migrations.CreateModel(
"Rider",
fields=[
("id", models.CharField(primary_key=True, max_length=100)),
("slug", models.CharField(unique=True, max_length=100)),
],
),
migrations.CreateModel(
"Pony",
fields=[
("id", models.CharField(primary_key=True, max_length=100)),
(
"rider",
models.ForeignKey(
"%s.Rider" % app_label, models.CASCADE, to_field="slug"
),
),
("slug", models.CharField(unique=True, max_length=100)),
],
),
migrations.CreateModel(
"PonyRider",
fields=[
("id", models.AutoField(primary_key=True)),
(
"pony",
models.ForeignKey(
"%s.Pony" % app_label, models.CASCADE, to_field="slug"
),
),
],
),
],
)
project_state = self.apply_operations(
app_label,
project_state,
operations=[
migrations.AlterField(
"Rider", "slug", models.CharField(unique=True, max_length=99)
),
migrations.AlterField(
"Pony", "slug", models.CharField(unique=True, max_length=99)
),
],
)
def test_alter_field_pk_fk_char_to_int(self):
app_label = "alter_field_pk_fk_char_to_int"
project_state = self.apply_operations(
app_label,
ProjectState(),
operations=[
migrations.CreateModel(
name="Parent",
fields=[
("id", models.CharField(max_length=255, primary_key=True)),
],
),
migrations.CreateModel(
name="Child",
fields=[
("id", models.BigAutoField(primary_key=True)),
(
"parent",
models.ForeignKey(
f"{app_label}.Parent",
on_delete=models.CASCADE,
),
),
],
),
],
)
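        # Altering the parent PK type must also retype the child FK column.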
self.apply_operations(
app_label,
project_state,
operations=[
migrations.AlterField(
model_name="parent",
name="id",
field=models.BigIntegerField(primary_key=True),
),
],
)
def test_rename_field_reloads_state_on_fk_target_changes(self):
"""
If RenameField doesn't reload state appropriately, the AlterField
crashes on MySQL due to not dropping the PonyRider.pony foreign key
constraint before modifying the column.
"""
app_label = "alter_rename_field_reloads_state_on_fk_target_changes"
project_state = self.apply_operations(
app_label,
ProjectState(),
operations=[
migrations.CreateModel(
"Rider",
fields=[
("id", models.CharField(primary_key=True, max_length=100)),
],
),
migrations.CreateModel(
"Pony",
fields=[
("id", models.CharField(primary_key=True, max_length=100)),
(
"rider",
models.ForeignKey("%s.Rider" % app_label, models.CASCADE),
),
],
),
migrations.CreateModel(
"PonyRider",
fields=[
("id", models.AutoField(primary_key=True)),
(
"pony",
models.ForeignKey("%s.Pony" % app_label, models.CASCADE),
),
],
),
],
)
project_state = self.apply_operations(
app_label,
project_state,
operations=[
migrations.RenameField("Rider", "id", "id2"),
migrations.AlterField(
"Pony", "id", models.CharField(primary_key=True, max_length=99)
),
],
atomic=connection.features.supports_atomic_references_rename,
)
def test_rename_field(self):
"""
Tests the RenameField operation.
"""
project_state = self.set_up_test_model("test_rnfl")
operation = migrations.RenameField("Pony", "pink", "blue")
self.assertEqual(operation.describe(), "Rename field pink on Pony to blue")
self.assertEqual(operation.migration_name_fragment, "rename_pink_pony_blue")
new_state = project_state.clone()
operation.state_forwards("test_rnfl", new_state)
self.assertIn("blue", new_state.models["test_rnfl", "pony"].fields)
self.assertNotIn("pink", new_state.models["test_rnfl", "pony"].fields)
# Rename field.
self.assertColumnExists("test_rnfl_pony", "pink")
self.assertColumnNotExists("test_rnfl_pony", "blue")
with connection.schema_editor() as editor:
operation.database_forwards("test_rnfl", editor, project_state, new_state)
self.assertColumnExists("test_rnfl_pony", "blue")
self.assertColumnNotExists("test_rnfl_pony", "pink")
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards("test_rnfl", editor, new_state, project_state)
self.assertColumnExists("test_rnfl_pony", "pink")
self.assertColumnNotExists("test_rnfl_pony", "blue")
# Deconstruction.
definition = operation.deconstruct()
self.assertEqual(definition[0], "RenameField")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2],
{"model_name": "Pony", "old_name": "pink", "new_name": "blue"},
)
def test_rename_field_unique_together(self):
project_state = self.set_up_test_model("test_rnflut", unique_together=True)
operation = migrations.RenameField("Pony", "pink", "blue")
new_state = project_state.clone()
operation.state_forwards("test_rnflut", new_state)
# unique_together has the renamed column.
self.assertIn(
"blue",
new_state.models["test_rnflut", "pony"].options["unique_together"][0],
)
self.assertNotIn(
"pink",
new_state.models["test_rnflut", "pony"].options["unique_together"][0],
)
# Rename field.
self.assertColumnExists("test_rnflut_pony", "pink")
self.assertColumnNotExists("test_rnflut_pony", "blue")
with connection.schema_editor() as editor:
operation.database_forwards("test_rnflut", editor, project_state, new_state)
self.assertColumnExists("test_rnflut_pony", "blue")
self.assertColumnNotExists("test_rnflut_pony", "pink")
# The unique constraint has been ported over.
with connection.cursor() as cursor:
cursor.execute("INSERT INTO test_rnflut_pony (blue, weight) VALUES (1, 1)")
with self.assertRaises(IntegrityError):
with atomic():
cursor.execute(
"INSERT INTO test_rnflut_pony (blue, weight) VALUES (1, 1)"
)
cursor.execute("DELETE FROM test_rnflut_pony")
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards(
"test_rnflut", editor, new_state, project_state
)
self.assertColumnExists("test_rnflut_pony", "pink")
self.assertColumnNotExists("test_rnflut_pony", "blue")
@ignore_warnings(category=RemovedInDjango51Warning)
def test_rename_field_index_together(self):
project_state = self.set_up_test_model("test_rnflit", index_together=True)
operation = migrations.RenameField("Pony", "pink", "blue")
new_state = project_state.clone()
operation.state_forwards("test_rnflit", new_state)
self.assertIn("blue", new_state.models["test_rnflit", "pony"].fields)
self.assertNotIn("pink", new_state.models["test_rnflit", "pony"].fields)
# index_together has the renamed column.
self.assertIn(
"blue", new_state.models["test_rnflit", "pony"].options["index_together"][0]
)
self.assertNotIn(
"pink", new_state.models["test_rnflit", "pony"].options["index_together"][0]
)
# Rename field.
self.assertColumnExists("test_rnflit_pony", "pink")
self.assertColumnNotExists("test_rnflit_pony", "blue")
with connection.schema_editor() as editor:
operation.database_forwards("test_rnflit", editor, project_state, new_state)
self.assertColumnExists("test_rnflit_pony", "blue")
self.assertColumnNotExists("test_rnflit_pony", "pink")
# The index constraint has been ported over.
self.assertIndexExists("test_rnflit_pony", ["weight", "blue"])
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards(
"test_rnflit", editor, new_state, project_state
)
self.assertIndexExists("test_rnflit_pony", ["weight", "pink"])
def test_rename_field_with_db_column(self):
project_state = self.apply_operations(
"test_rfwdbc",
ProjectState(),
operations=[
migrations.CreateModel(
"Pony",
fields=[
("id", models.AutoField(primary_key=True)),
("field", models.IntegerField(db_column="db_field")),
(
"fk_field",
models.ForeignKey(
"Pony",
models.CASCADE,
db_column="db_fk_field",
),
),
],
),
],
)
new_state = project_state.clone()
operation = migrations.RenameField("Pony", "field", "renamed_field")
operation.state_forwards("test_rfwdbc", new_state)
self.assertIn("renamed_field", new_state.models["test_rfwdbc", "pony"].fields)
self.assertNotIn("field", new_state.models["test_rfwdbc", "pony"].fields)
self.assertColumnExists("test_rfwdbc_pony", "db_field")
with connection.schema_editor() as editor:
with self.assertNumQueries(0):
operation.database_forwards(
"test_rfwdbc", editor, project_state, new_state
)
self.assertColumnExists("test_rfwdbc_pony", "db_field")
with connection.schema_editor() as editor:
with self.assertNumQueries(0):
operation.database_backwards(
"test_rfwdbc", editor, new_state, project_state
)
self.assertColumnExists("test_rfwdbc_pony", "db_field")
new_state = project_state.clone()
operation = migrations.RenameField("Pony", "fk_field", "renamed_fk_field")
operation.state_forwards("test_rfwdbc", new_state)
self.assertIn(
"renamed_fk_field", new_state.models["test_rfwdbc", "pony"].fields
)
self.assertNotIn("fk_field", new_state.models["test_rfwdbc", "pony"].fields)
self.assertColumnExists("test_rfwdbc_pony", "db_fk_field")
with connection.schema_editor() as editor:
with self.assertNumQueries(0):
operation.database_forwards(
"test_rfwdbc", editor, project_state, new_state
)
self.assertColumnExists("test_rfwdbc_pony", "db_fk_field")
with connection.schema_editor() as editor:
with self.assertNumQueries(0):
operation.database_backwards(
"test_rfwdbc", editor, new_state, project_state
)
self.assertColumnExists("test_rfwdbc_pony", "db_fk_field")
def test_rename_field_case(self):
project_state = self.apply_operations(
"test_rfmx",
ProjectState(),
operations=[
migrations.CreateModel(
"Pony",
fields=[
("id", models.AutoField(primary_key=True)),
("field", models.IntegerField()),
],
),
],
)
new_state = project_state.clone()
operation = migrations.RenameField("Pony", "field", "FiElD")
operation.state_forwards("test_rfmx", new_state)
self.assertIn("FiElD", new_state.models["test_rfmx", "pony"].fields)
self.assertColumnExists("test_rfmx_pony", "field")
with connection.schema_editor() as editor:
operation.database_forwards("test_rfmx", editor, project_state, new_state)
self.assertColumnExists(
"test_rfmx_pony",
connection.introspection.identifier_converter("FiElD"),
)
with connection.schema_editor() as editor:
operation.database_backwards("test_rfmx", editor, new_state, project_state)
self.assertColumnExists("test_rfmx_pony", "field")

    def test_rename_missing_field(self):
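        """
        Renaming a nonexistent field raises FieldDoesNotExist.
        """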
state = ProjectState()
state.add_model(ModelState("app", "model", []))
with self.assertRaisesMessage(
FieldDoesNotExist, "app.model has no field named 'field'"
):
migrations.RenameField("model", "field", "new_field").state_forwards(
"app", state
)

    def test_rename_referenced_field_state_forward(self):
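        """
        RenameField updates from_fields/to_fields references on ForeignKey
        and ForeignObject fields in the project state, whether the renamed
        field is the remote target or the local field.
        """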
state = ProjectState()
state.add_model(
ModelState(
"app",
"Model",
[
("id", models.AutoField(primary_key=True)),
("field", models.IntegerField(unique=True)),
],
)
)
state.add_model(
ModelState(
"app",
"OtherModel",
[
("id", models.AutoField(primary_key=True)),
(
"fk",
models.ForeignKey("Model", models.CASCADE, to_field="field"),
),
(
"fo",
models.ForeignObject(
"Model",
models.CASCADE,
from_fields=("fk",),
to_fields=("field",),
),
),
],
)
)
operation = migrations.RenameField("Model", "field", "renamed")
new_state = state.clone()
operation.state_forwards("app", new_state)
self.assertEqual(
new_state.models["app", "othermodel"].fields["fk"].remote_field.field_name,
"renamed",
)
self.assertEqual(
new_state.models["app", "othermodel"].fields["fk"].from_fields, ["self"]
)
self.assertEqual(
new_state.models["app", "othermodel"].fields["fk"].to_fields, ("renamed",)
)
self.assertEqual(
new_state.models["app", "othermodel"].fields["fo"].from_fields, ("fk",)
)
self.assertEqual(
new_state.models["app", "othermodel"].fields["fo"].to_fields, ("renamed",)
)
operation = migrations.RenameField("OtherModel", "fk", "renamed_fk")
new_state = state.clone()
operation.state_forwards("app", new_state)
self.assertEqual(
new_state.models["app", "othermodel"]
.fields["renamed_fk"]
.remote_field.field_name,
"renamed",
)
self.assertEqual(
new_state.models["app", "othermodel"].fields["renamed_fk"].from_fields,
("self",),
)
self.assertEqual(
new_state.models["app", "othermodel"].fields["renamed_fk"].to_fields,
("renamed",),
)
self.assertEqual(
new_state.models["app", "othermodel"].fields["fo"].from_fields,
("renamed_fk",),
)
self.assertEqual(
new_state.models["app", "othermodel"].fields["fo"].to_fields, ("renamed",)
)

    def test_alter_unique_together(self):
"""
Tests the AlterUniqueTogether operation.
"""
project_state = self.set_up_test_model("test_alunto")
# Test the state alteration
operation = migrations.AlterUniqueTogether("Pony", [("pink", "weight")])
self.assertEqual(
operation.describe(), "Alter unique_together for Pony (1 constraint(s))"
)
self.assertEqual(
operation.migration_name_fragment,
"alter_pony_unique_together",
)
new_state = project_state.clone()
operation.state_forwards("test_alunto", new_state)
self.assertEqual(
len(
project_state.models["test_alunto", "pony"].options.get(
"unique_together", set()
)
),
0,
)
self.assertEqual(
len(
new_state.models["test_alunto", "pony"].options.get(
"unique_together", set()
)
),
1,
)
# Make sure we can insert duplicate rows
with connection.cursor() as cursor:
cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
cursor.execute("DELETE FROM test_alunto_pony")
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards(
"test_alunto", editor, project_state, new_state
)
cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
with self.assertRaises(IntegrityError):
with atomic():
cursor.execute(
"INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)"
)
cursor.execute("DELETE FROM test_alunto_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards(
"test_alunto", editor, new_state, project_state
)
cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
cursor.execute("DELETE FROM test_alunto_pony")
# Test flat unique_together
operation = migrations.AlterUniqueTogether("Pony", ("pink", "weight"))
operation.state_forwards("test_alunto", new_state)
self.assertEqual(
len(
new_state.models["test_alunto", "pony"].options.get(
"unique_together", set()
)
),
1,
)
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterUniqueTogether")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2], {"name": "Pony", "unique_together": {("pink", "weight")}}
)

    def test_alter_unique_together_remove(self):
operation = migrations.AlterUniqueTogether("Pony", None)
self.assertEqual(
operation.describe(), "Alter unique_together for Pony (0 constraint(s))"
)

    @skipUnlessDBFeature("allows_multiple_constraints_on_same_fields")
def test_remove_unique_together_on_pk_field(self):
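        """
        Clearing unique_together on the primary key field drops only the
        unique_together constraint; the primary key constraint is kept.
        """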
app_label = "test_rutopkf"
project_state = self.apply_operations(
app_label,
ProjectState(),
operations=[
migrations.CreateModel(
"Pony",
fields=[("id", models.AutoField(primary_key=True))],
options={"unique_together": {("id",)}},
),
],
)
table_name = f"{app_label}_pony"
pk_constraint_name = f"{table_name}_pkey"
unique_together_constraint_name = f"{table_name}_id_fb61f881_uniq"
self.assertConstraintExists(table_name, pk_constraint_name, value=False)
self.assertConstraintExists(
table_name, unique_together_constraint_name, value=False
)
new_state = project_state.clone()
operation = migrations.AlterUniqueTogether("Pony", set())
operation.state_forwards(app_label, new_state)
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
self.assertConstraintExists(table_name, pk_constraint_name, value=False)
self.assertConstraintNotExists(table_name, unique_together_constraint_name)

    @skipUnlessDBFeature("allows_multiple_constraints_on_same_fields")
def test_remove_unique_together_on_unique_field(self):
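        """
        Clearing unique_together on a unique field drops only the
        unique_together constraint; the field's own unique constraint is
        kept.
        """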
app_label = "test_rutouf"
project_state = self.apply_operations(
app_label,
ProjectState(),
operations=[
migrations.CreateModel(
"Pony",
fields=[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=30, unique=True)),
],
options={"unique_together": {("name",)}},
),
],
)
table_name = f"{app_label}_pony"
unique_constraint_name = f"{table_name}_name_key"
unique_together_constraint_name = f"{table_name}_name_694f3b9f_uniq"
self.assertConstraintExists(table_name, unique_constraint_name, value=False)
self.assertConstraintExists(
table_name, unique_together_constraint_name, value=False
)
new_state = project_state.clone()
operation = migrations.AlterUniqueTogether("Pony", set())
operation.state_forwards(app_label, new_state)
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
self.assertConstraintExists(table_name, unique_constraint_name, value=False)
self.assertConstraintNotExists(table_name, unique_together_constraint_name)

    def test_add_index(self):
"""
Test the AddIndex operation.
"""
project_state = self.set_up_test_model("test_adin")
msg = (
"Indexes passed to AddIndex operations require a name argument. "
"<Index: fields=['pink']> doesn't have one."
)
with self.assertRaisesMessage(ValueError, msg):
migrations.AddIndex("Pony", models.Index(fields=["pink"]))
index = models.Index(fields=["pink"], name="test_adin_pony_pink_idx")
operation = migrations.AddIndex("Pony", index)
self.assertEqual(
operation.describe(),
"Create index test_adin_pony_pink_idx on field(s) pink of model Pony",
)
self.assertEqual(
operation.migration_name_fragment,
"pony_test_adin_pony_pink_idx",
)
new_state = project_state.clone()
operation.state_forwards("test_adin", new_state)
# Test the database alteration
self.assertEqual(
len(new_state.models["test_adin", "pony"].options["indexes"]), 1
)
self.assertIndexNotExists("test_adin_pony", ["pink"])
with connection.schema_editor() as editor:
operation.database_forwards("test_adin", editor, project_state, new_state)
self.assertIndexExists("test_adin_pony", ["pink"])
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_adin", editor, new_state, project_state)
self.assertIndexNotExists("test_adin_pony", ["pink"])
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AddIndex")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {"model_name": "Pony", "index": index})

    def test_remove_index(self):
"""
Test the RemoveIndex operation.
"""
project_state = self.set_up_test_model("test_rmin", multicol_index=True)
self.assertTableExists("test_rmin_pony")
self.assertIndexExists("test_rmin_pony", ["pink", "weight"])
operation = migrations.RemoveIndex("Pony", "pony_test_idx")
self.assertEqual(operation.describe(), "Remove index pony_test_idx from Pony")
self.assertEqual(
operation.migration_name_fragment,
"remove_pony_pony_test_idx",
)
new_state = project_state.clone()
operation.state_forwards("test_rmin", new_state)
# Test the state alteration
self.assertEqual(
len(new_state.models["test_rmin", "pony"].options["indexes"]), 0
)
self.assertIndexExists("test_rmin_pony", ["pink", "weight"])
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_rmin", editor, project_state, new_state)
self.assertIndexNotExists("test_rmin_pony", ["pink", "weight"])
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_rmin", editor, new_state, project_state)
self.assertIndexExists("test_rmin_pony", ["pink", "weight"])
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "RemoveIndex")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {"model_name": "Pony", "name": "pony_test_idx"})
# Also test a field dropped with index - sqlite remake issue
operations = [
migrations.RemoveIndex("Pony", "pony_test_idx"),
migrations.RemoveField("Pony", "pink"),
]
self.assertColumnExists("test_rmin_pony", "pink")
self.assertIndexExists("test_rmin_pony", ["pink", "weight"])
# Test database alteration
new_state = project_state.clone()
self.apply_operations("test_rmin", new_state, operations=operations)
self.assertColumnNotExists("test_rmin_pony", "pink")
self.assertIndexNotExists("test_rmin_pony", ["pink", "weight"])
# And test reversal
self.unapply_operations("test_rmin", project_state, operations=operations)
self.assertIndexExists("test_rmin_pony", ["pink", "weight"])

    def test_rename_index(self):
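        """
        RenameIndex renames a named index in one query on backends that can
        rename indexes and in two queries elsewhere; the operation is
        reversible.
        """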
app_label = "test_rnin"
project_state = self.set_up_test_model(app_label, index=True)
table_name = app_label + "_pony"
self.assertIndexNameExists(table_name, "pony_pink_idx")
self.assertIndexNameNotExists(table_name, "new_pony_test_idx")
operation = migrations.RenameIndex(
"Pony", new_name="new_pony_test_idx", old_name="pony_pink_idx"
)
self.assertEqual(
operation.describe(),
"Rename index pony_pink_idx on Pony to new_pony_test_idx",
)
self.assertEqual(
operation.migration_name_fragment,
"rename_pony_pink_idx_new_pony_test_idx",
)
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
# Rename index.
expected_queries = 1 if connection.features.can_rename_index else 2
with connection.schema_editor() as editor, self.assertNumQueries(
expected_queries
):
operation.database_forwards(app_label, editor, project_state, new_state)
self.assertIndexNameNotExists(table_name, "pony_pink_idx")
self.assertIndexNameExists(table_name, "new_pony_test_idx")
# Reversal.
with connection.schema_editor() as editor, self.assertNumQueries(
expected_queries
):
operation.database_backwards(app_label, editor, new_state, project_state)
self.assertIndexNameExists(table_name, "pony_pink_idx")
self.assertIndexNameNotExists(table_name, "new_pony_test_idx")
# Deconstruction.
definition = operation.deconstruct()
self.assertEqual(definition[0], "RenameIndex")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2],
{
"model_name": "Pony",
"old_name": "pony_pink_idx",
"new_name": "new_pony_test_idx",
},
)

    def test_rename_index_arguments(self):
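        """
        RenameIndex requires exactly one of the old_name and old_fields
        arguments.
        """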
msg = "RenameIndex.old_name and old_fields are mutually exclusive."
with self.assertRaisesMessage(ValueError, msg):
migrations.RenameIndex(
"Pony",
new_name="new_idx_name",
old_name="old_idx_name",
old_fields=("weight", "pink"),
)
msg = "RenameIndex requires one of old_name and old_fields arguments to be set."
with self.assertRaisesMessage(ValueError, msg):
migrations.RenameIndex("Pony", new_name="new_idx_name")

    @ignore_warnings(category=RemovedInDjango51Warning)
def test_rename_index_unnamed_index(self):
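        """
        RenameIndex can rename an unnamed index_together index via
        old_fields; reversing it is a no-op.
        """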
app_label = "test_rninui"
project_state = self.set_up_test_model(app_label, index_together=True)
table_name = app_label + "_pony"
self.assertIndexNameNotExists(table_name, "new_pony_test_idx")
operation = migrations.RenameIndex(
"Pony", new_name="new_pony_test_idx", old_fields=("weight", "pink")
)
self.assertEqual(
operation.describe(),
"Rename unnamed index for ('weight', 'pink') on Pony to new_pony_test_idx",
)
self.assertEqual(
operation.migration_name_fragment,
"rename_pony_weight_pink_new_pony_test_idx",
)
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
# Rename index.
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
self.assertIndexNameExists(table_name, "new_pony_test_idx")
# Reverse is a no-op.
with connection.schema_editor() as editor, self.assertNumQueries(0):
operation.database_backwards(app_label, editor, new_state, project_state)
self.assertIndexNameExists(table_name, "new_pony_test_idx")
        # Reapplying the operation is a no-op when the old and new names
        # match.
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, new_state, project_state)
self.assertIndexNameExists(table_name, "new_pony_test_idx")
# Deconstruction.
definition = operation.deconstruct()
self.assertEqual(definition[0], "RenameIndex")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2],
{
"model_name": "Pony",
"new_name": "new_pony_test_idx",
"old_fields": ("weight", "pink"),
},
)

    def test_rename_index_unknown_unnamed_index(self):
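        """
        RenameIndex with old_fields raises ValueError when no matching index
        exists.
        """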
app_label = "test_rninuui"
project_state = self.set_up_test_model(app_label)
operation = migrations.RenameIndex(
"Pony", new_name="new_pony_test_idx", old_fields=("weight", "pink")
)
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
msg = "Found wrong number (0) of indexes for test_rninuui_pony(weight, pink)."
with connection.schema_editor() as editor:
with self.assertRaisesMessage(ValueError, msg):
operation.database_forwards(app_label, editor, project_state, new_state)

    def test_add_index_state_forwards(self):
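        """
        AddIndex.state_forwards() re-renders the model.
        """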
project_state = self.set_up_test_model("test_adinsf")
index = models.Index(fields=["pink"], name="test_adinsf_pony_pink_idx")
old_model = project_state.apps.get_model("test_adinsf", "Pony")
new_state = project_state.clone()
operation = migrations.AddIndex("Pony", index)
operation.state_forwards("test_adinsf", new_state)
new_model = new_state.apps.get_model("test_adinsf", "Pony")
self.assertIsNot(old_model, new_model)

    def test_remove_index_state_forwards(self):
project_state = self.set_up_test_model("test_rminsf")
index = models.Index(fields=["pink"], name="test_rminsf_pony_pink_idx")
migrations.AddIndex("Pony", index).state_forwards("test_rminsf", project_state)
old_model = project_state.apps.get_model("test_rminsf", "Pony")
new_state = project_state.clone()
operation = migrations.RemoveIndex("Pony", "test_rminsf_pony_pink_idx")
operation.state_forwards("test_rminsf", new_state)
new_model = new_state.apps.get_model("test_rminsf", "Pony")
self.assertIsNot(old_model, new_model)

    def test_rename_index_state_forwards(self):
app_label = "test_rnidsf"
project_state = self.set_up_test_model(app_label, index=True)
old_model = project_state.apps.get_model(app_label, "Pony")
new_state = project_state.clone()
operation = migrations.RenameIndex(
"Pony", new_name="new_pony_pink_idx", old_name="pony_pink_idx"
)
operation.state_forwards(app_label, new_state)
new_model = new_state.apps.get_model(app_label, "Pony")
self.assertIsNot(old_model, new_model)
self.assertEqual(new_model._meta.indexes[0].name, "new_pony_pink_idx")

    @ignore_warnings(category=RemovedInDjango51Warning)
def test_rename_index_state_forwards_unnamed_index(self):
app_label = "test_rnidsfui"
project_state = self.set_up_test_model(app_label, index_together=True)
old_model = project_state.apps.get_model(app_label, "Pony")
new_state = project_state.clone()
operation = migrations.RenameIndex(
"Pony", new_name="new_pony_pink_idx", old_fields=("weight", "pink")
)
operation.state_forwards(app_label, new_state)
new_model = new_state.apps.get_model(app_label, "Pony")
self.assertIsNot(old_model, new_model)
self.assertEqual(new_model._meta.index_together, tuple())
self.assertEqual(new_model._meta.indexes[0].name, "new_pony_pink_idx")

    @skipUnlessDBFeature("supports_expression_indexes")
def test_add_func_index(self):
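        """
        AddIndex supports expression-based (functional) indexes and is
        reversible.
        """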
app_label = "test_addfuncin"
index_name = f"{app_label}_pony_abs_idx"
table_name = f"{app_label}_pony"
project_state = self.set_up_test_model(app_label)
index = models.Index(Abs("weight"), name=index_name)
operation = migrations.AddIndex("Pony", index)
self.assertEqual(
operation.describe(),
"Create index test_addfuncin_pony_abs_idx on Abs(F(weight)) on model Pony",
)
self.assertEqual(
operation.migration_name_fragment,
"pony_test_addfuncin_pony_abs_idx",
)
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
self.assertEqual(len(new_state.models[app_label, "pony"].options["indexes"]), 1)
self.assertIndexNameNotExists(table_name, index_name)
# Add index.
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
self.assertIndexNameExists(table_name, index_name)
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
self.assertIndexNameNotExists(table_name, index_name)
# Deconstruction.
definition = operation.deconstruct()
self.assertEqual(definition[0], "AddIndex")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {"model_name": "Pony", "index": index})

    @skipUnlessDBFeature("supports_expression_indexes")
def test_remove_func_index(self):
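        """
        RemoveIndex removes an expression-based (functional) index and is
        reversible.
        """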
app_label = "test_rmfuncin"
index_name = f"{app_label}_pony_abs_idx"
table_name = f"{app_label}_pony"
project_state = self.set_up_test_model(
app_label,
indexes=[
models.Index(Abs("weight"), name=index_name),
],
)
self.assertTableExists(table_name)
self.assertIndexNameExists(table_name, index_name)
operation = migrations.RemoveIndex("Pony", index_name)
self.assertEqual(
operation.describe(),
"Remove index test_rmfuncin_pony_abs_idx from Pony",
)
self.assertEqual(
operation.migration_name_fragment,
"remove_pony_test_rmfuncin_pony_abs_idx",
)
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
self.assertEqual(len(new_state.models[app_label, "pony"].options["indexes"]), 0)
# Remove index.
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
self.assertIndexNameNotExists(table_name, index_name)
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
self.assertIndexNameExists(table_name, index_name)
# Deconstruction.
definition = operation.deconstruct()
self.assertEqual(definition[0], "RemoveIndex")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {"model_name": "Pony", "name": index_name})

    @skipUnlessDBFeature("supports_expression_indexes")
def test_alter_field_with_func_index(self):
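        """
        AlterField keeps an expression-based index in place, both forwards
        and backwards.
        """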
app_label = "test_alfuncin"
index_name = f"{app_label}_pony_idx"
table_name = f"{app_label}_pony"
project_state = self.set_up_test_model(
app_label,
indexes=[models.Index(Abs("pink"), name=index_name)],
)
operation = migrations.AlterField(
"Pony", "pink", models.IntegerField(null=True)
)
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
self.assertIndexNameExists(table_name, index_name)
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
self.assertIndexNameExists(table_name, index_name)

    def test_alter_field_with_index(self):
"""
Test AlterField operation with an index to ensure indexes created via
Meta.indexes don't get dropped with sqlite3 remake.
"""
project_state = self.set_up_test_model("test_alflin", index=True)
operation = migrations.AlterField(
"Pony", "pink", models.IntegerField(null=True)
)
new_state = project_state.clone()
operation.state_forwards("test_alflin", new_state)
# Test the database alteration
self.assertColumnNotNull("test_alflin_pony", "pink")
with connection.schema_editor() as editor:
operation.database_forwards("test_alflin", editor, project_state, new_state)
# Index hasn't been dropped
self.assertIndexExists("test_alflin_pony", ["pink"])
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards(
"test_alflin", editor, new_state, project_state
)
# Ensure the index is still there
self.assertIndexExists("test_alflin_pony", ["pink"])

    @ignore_warnings(category=RemovedInDjango51Warning)
def test_alter_index_together(self):
"""
Tests the AlterIndexTogether operation.
"""
project_state = self.set_up_test_model("test_alinto")
# Test the state alteration
operation = migrations.AlterIndexTogether("Pony", [("pink", "weight")])
self.assertEqual(
operation.describe(), "Alter index_together for Pony (1 constraint(s))"
)
self.assertEqual(
operation.migration_name_fragment,
"alter_pony_index_together",
)
new_state = project_state.clone()
operation.state_forwards("test_alinto", new_state)
self.assertEqual(
len(
project_state.models["test_alinto", "pony"].options.get(
"index_together", set()
)
),
0,
)
self.assertEqual(
len(
new_state.models["test_alinto", "pony"].options.get(
"index_together", set()
)
),
1,
)
# Make sure there's no matching index
self.assertIndexNotExists("test_alinto_pony", ["pink", "weight"])
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_alinto", editor, project_state, new_state)
self.assertIndexExists("test_alinto_pony", ["pink", "weight"])
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards(
"test_alinto", editor, new_state, project_state
)
self.assertIndexNotExists("test_alinto_pony", ["pink", "weight"])
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterIndexTogether")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2], {"name": "Pony", "index_together": {("pink", "weight")}}
)

    def test_alter_index_together_remove(self):
operation = migrations.AlterIndexTogether("Pony", None)
self.assertEqual(
operation.describe(), "Alter index_together for Pony (0 constraint(s))"
)

    @skipUnlessDBFeature("allows_multiple_constraints_on_same_fields")
@ignore_warnings(category=RemovedInDjango51Warning)
def test_alter_index_together_remove_with_unique_together(self):
app_label = "test_alintoremove_wunto"
table_name = "%s_pony" % app_label
project_state = self.set_up_test_model(app_label, unique_together=True)
self.assertUniqueConstraintExists(table_name, ["pink", "weight"])
# Add index together.
new_state = project_state.clone()
operation = migrations.AlterIndexTogether("Pony", [("pink", "weight")])
operation.state_forwards(app_label, new_state)
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
self.assertIndexExists(table_name, ["pink", "weight"])
# Remove index together.
project_state = new_state
new_state = project_state.clone()
operation = migrations.AlterIndexTogether("Pony", set())
operation.state_forwards(app_label, new_state)
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
self.assertIndexNotExists(table_name, ["pink", "weight"])
self.assertUniqueConstraintExists(table_name, ["pink", "weight"])

    def test_add_constraint(self):
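        """
        AddConstraint adds a CHECK constraint where supported; backends
        without check constraint support must not emit any CHECK SQL.
        """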
project_state = self.set_up_test_model("test_addconstraint")
gt_check = models.Q(pink__gt=2)
gt_constraint = models.CheckConstraint(
check=gt_check, name="test_add_constraint_pony_pink_gt_2"
)
gt_operation = migrations.AddConstraint("Pony", gt_constraint)
self.assertEqual(
gt_operation.describe(),
"Create constraint test_add_constraint_pony_pink_gt_2 on model Pony",
)
self.assertEqual(
gt_operation.migration_name_fragment,
"pony_test_add_constraint_pony_pink_gt_2",
)
# Test the state alteration
new_state = project_state.clone()
gt_operation.state_forwards("test_addconstraint", new_state)
self.assertEqual(
len(new_state.models["test_addconstraint", "pony"].options["constraints"]),
1,
)
Pony = new_state.apps.get_model("test_addconstraint", "Pony")
self.assertEqual(len(Pony._meta.constraints), 1)
# Test the database alteration
with (
CaptureQueriesContext(connection) as ctx,
connection.schema_editor() as editor,
):
gt_operation.database_forwards(
"test_addconstraint", editor, project_state, new_state
)
if connection.features.supports_table_check_constraints:
with self.assertRaises(IntegrityError), transaction.atomic():
Pony.objects.create(pink=1, weight=1.0)
else:
self.assertIs(
any("CHECK" in query["sql"] for query in ctx.captured_queries), False
)
Pony.objects.create(pink=1, weight=1.0)
# Add another one.
lt_check = models.Q(pink__lt=100)
lt_constraint = models.CheckConstraint(
check=lt_check, name="test_add_constraint_pony_pink_lt_100"
)
lt_operation = migrations.AddConstraint("Pony", lt_constraint)
lt_operation.state_forwards("test_addconstraint", new_state)
self.assertEqual(
len(new_state.models["test_addconstraint", "pony"].options["constraints"]),
2,
)
Pony = new_state.apps.get_model("test_addconstraint", "Pony")
self.assertEqual(len(Pony._meta.constraints), 2)
with (
CaptureQueriesContext(connection) as ctx,
connection.schema_editor() as editor,
):
lt_operation.database_forwards(
"test_addconstraint", editor, project_state, new_state
)
if connection.features.supports_table_check_constraints:
with self.assertRaises(IntegrityError), transaction.atomic():
Pony.objects.create(pink=100, weight=1.0)
else:
self.assertIs(
any("CHECK" in query["sql"] for query in ctx.captured_queries), False
)
Pony.objects.create(pink=100, weight=1.0)
# Test reversal
with connection.schema_editor() as editor:
gt_operation.database_backwards(
"test_addconstraint", editor, new_state, project_state
)
Pony.objects.create(pink=1, weight=1.0)
# Test deconstruction
definition = gt_operation.deconstruct()
self.assertEqual(definition[0], "AddConstraint")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2], {"model_name": "Pony", "constraint": gt_constraint}
)

    @skipUnlessDBFeature("supports_table_check_constraints")
def test_add_constraint_percent_escaping(self):
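        """
        CHECK constraint SQL containing "%" characters must not be mangled
        by parameter interpolation.
        """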
app_label = "add_constraint_string_quoting"
operations = [
migrations.CreateModel(
"Author",
fields=[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=100)),
("surname", models.CharField(max_length=100, default="")),
("rebate", models.CharField(max_length=100)),
],
),
]
from_state = self.apply_operations(app_label, ProjectState(), operations)
checks = [
# "%" generated in startswith lookup should be escaped in a way
# that is considered a leading wildcard.
(
models.Q(name__startswith="Albert"),
{"name": "Alberta"},
{"name": "Artur"},
),
# Literal "%" should be escaped in a way that is not a considered a
# wildcard.
(models.Q(rebate__endswith="%"), {"rebate": "10%"}, {"rebate": "10%$"}),
# Right-hand-side baked "%" literals should not be used for
# parameters interpolation.
(
~models.Q(surname__startswith=models.F("name")),
{"name": "Albert"},
{"name": "Albert", "surname": "Alberto"},
),
# Exact matches against "%" literals should also be supported.
(
models.Q(name="%"),
{"name": "%"},
{"name": "Albert"},
),
]
for check, valid, invalid in checks:
with self.subTest(check=check, valid=valid, invalid=invalid):
constraint = models.CheckConstraint(check=check, name="constraint")
operation = migrations.AddConstraint("Author", constraint)
to_state = from_state.clone()
operation.state_forwards(app_label, to_state)
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, from_state, to_state)
Author = to_state.apps.get_model(app_label, "Author")
try:
with transaction.atomic():
Author.objects.create(**valid).delete()
with self.assertRaises(IntegrityError), transaction.atomic():
Author.objects.create(**invalid)
finally:
with connection.schema_editor() as editor:
operation.database_backwards(
app_label, editor, from_state, to_state
)

    @skipUnlessDBFeature("supports_table_check_constraints")
def test_add_or_constraint(self):
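        """
        A CHECK constraint built from ORed Q objects enforces the whole
        boolean expression.
        """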
app_label = "test_addorconstraint"
constraint_name = "add_constraint_or"
from_state = self.set_up_test_model(app_label)
check = models.Q(pink__gt=2, weight__gt=2) | models.Q(weight__lt=0)
constraint = models.CheckConstraint(check=check, name=constraint_name)
operation = migrations.AddConstraint("Pony", constraint)
to_state = from_state.clone()
operation.state_forwards(app_label, to_state)
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, from_state, to_state)
Pony = to_state.apps.get_model(app_label, "Pony")
with self.assertRaises(IntegrityError), transaction.atomic():
Pony.objects.create(pink=2, weight=3.0)
with self.assertRaises(IntegrityError), transaction.atomic():
Pony.objects.create(pink=3, weight=1.0)
Pony.objects.bulk_create(
[
Pony(pink=3, weight=-1.0),
Pony(pink=1, weight=-1.0),
Pony(pink=3, weight=3.0),
]
)

    @skipUnlessDBFeature("supports_table_check_constraints")
def test_add_constraint_combinable(self):
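        """
        A CHECK constraint can compare a field against a combined
        (arithmetic) expression.
        """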
app_label = "test_addconstraint_combinable"
operations = [
migrations.CreateModel(
"Book",
fields=[
("id", models.AutoField(primary_key=True)),
("read", models.PositiveIntegerField()),
("unread", models.PositiveIntegerField()),
],
),
]
from_state = self.apply_operations(app_label, ProjectState(), operations)
constraint = models.CheckConstraint(
check=models.Q(read=(100 - models.F("unread"))),
name="test_addconstraint_combinable_sum_100",
)
operation = migrations.AddConstraint("Book", constraint)
to_state = from_state.clone()
operation.state_forwards(app_label, to_state)
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, from_state, to_state)
Book = to_state.apps.get_model(app_label, "Book")
with self.assertRaises(IntegrityError), transaction.atomic():
Book.objects.create(read=70, unread=10)
Book.objects.create(read=70, unread=30)

    def test_remove_constraint(self):
project_state = self.set_up_test_model(
"test_removeconstraint",
constraints=[
models.CheckConstraint(
check=models.Q(pink__gt=2),
name="test_remove_constraint_pony_pink_gt_2",
),
models.CheckConstraint(
check=models.Q(pink__lt=100),
name="test_remove_constraint_pony_pink_lt_100",
),
],
)
gt_operation = migrations.RemoveConstraint(
"Pony", "test_remove_constraint_pony_pink_gt_2"
)
self.assertEqual(
gt_operation.describe(),
"Remove constraint test_remove_constraint_pony_pink_gt_2 from model Pony",
)
self.assertEqual(
gt_operation.migration_name_fragment,
"remove_pony_test_remove_constraint_pony_pink_gt_2",
)
# Test state alteration
new_state = project_state.clone()
gt_operation.state_forwards("test_removeconstraint", new_state)
self.assertEqual(
len(
new_state.models["test_removeconstraint", "pony"].options["constraints"]
),
1,
)
Pony = new_state.apps.get_model("test_removeconstraint", "Pony")
self.assertEqual(len(Pony._meta.constraints), 1)
# Test database alteration
with connection.schema_editor() as editor:
gt_operation.database_forwards(
"test_removeconstraint", editor, project_state, new_state
)
Pony.objects.create(pink=1, weight=1.0).delete()
if connection.features.supports_table_check_constraints:
with self.assertRaises(IntegrityError), transaction.atomic():
Pony.objects.create(pink=100, weight=1.0)
else:
Pony.objects.create(pink=100, weight=1.0)
# Remove the other one.
lt_operation = migrations.RemoveConstraint(
"Pony", "test_remove_constraint_pony_pink_lt_100"
)
lt_operation.state_forwards("test_removeconstraint", new_state)
self.assertEqual(
len(
new_state.models["test_removeconstraint", "pony"].options["constraints"]
),
0,
)
Pony = new_state.apps.get_model("test_removeconstraint", "Pony")
self.assertEqual(len(Pony._meta.constraints), 0)
with connection.schema_editor() as editor:
lt_operation.database_forwards(
"test_removeconstraint", editor, project_state, new_state
)
Pony.objects.create(pink=100, weight=1.0).delete()
# Test reversal
with connection.schema_editor() as editor:
gt_operation.database_backwards(
"test_removeconstraint", editor, new_state, project_state
)
if connection.features.supports_table_check_constraints:
with self.assertRaises(IntegrityError), transaction.atomic():
Pony.objects.create(pink=1, weight=1.0)
else:
Pony.objects.create(pink=1, weight=1.0)
# Test deconstruction
definition = gt_operation.deconstruct()
self.assertEqual(definition[0], "RemoveConstraint")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2],
{"model_name": "Pony", "name": "test_remove_constraint_pony_pink_gt_2"},
)

    def test_add_partial_unique_constraint(self):
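        """
        A UniqueConstraint with a condition is enforced only on backends
        that support partial indexes.
        """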
project_state = self.set_up_test_model("test_addpartialuniqueconstraint")
partial_unique_constraint = models.UniqueConstraint(
fields=["pink"],
condition=models.Q(weight__gt=5),
name="test_constraint_pony_pink_for_weight_gt_5_uniq",
)
operation = migrations.AddConstraint("Pony", partial_unique_constraint)
self.assertEqual(
operation.describe(),
"Create constraint test_constraint_pony_pink_for_weight_gt_5_uniq "
"on model Pony",
)
# Test the state alteration
new_state = project_state.clone()
operation.state_forwards("test_addpartialuniqueconstraint", new_state)
self.assertEqual(
len(
new_state.models["test_addpartialuniqueconstraint", "pony"].options[
"constraints"
]
),
1,
)
Pony = new_state.apps.get_model("test_addpartialuniqueconstraint", "Pony")
self.assertEqual(len(Pony._meta.constraints), 1)
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards(
"test_addpartialuniqueconstraint", editor, project_state, new_state
)
# Test constraint works
Pony.objects.create(pink=1, weight=4.0)
Pony.objects.create(pink=1, weight=4.0)
Pony.objects.create(pink=1, weight=6.0)
if connection.features.supports_partial_indexes:
with self.assertRaises(IntegrityError), transaction.atomic():
Pony.objects.create(pink=1, weight=7.0)
else:
Pony.objects.create(pink=1, weight=7.0)
# Test reversal
with connection.schema_editor() as editor:
operation.database_backwards(
"test_addpartialuniqueconstraint", editor, new_state, project_state
)
# Test constraint doesn't work
Pony.objects.create(pink=1, weight=7.0)
# Test deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AddConstraint")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2],
{"model_name": "Pony", "constraint": partial_unique_constraint},
)

    def test_remove_partial_unique_constraint(self):
project_state = self.set_up_test_model(
"test_removepartialuniqueconstraint",
constraints=[
models.UniqueConstraint(
fields=["pink"],
condition=models.Q(weight__gt=5),
name="test_constraint_pony_pink_for_weight_gt_5_uniq",
),
],
)
gt_operation = migrations.RemoveConstraint(
"Pony", "test_constraint_pony_pink_for_weight_gt_5_uniq"
)
self.assertEqual(
gt_operation.describe(),
"Remove constraint test_constraint_pony_pink_for_weight_gt_5_uniq from "
"model Pony",
)
# Test state alteration
new_state = project_state.clone()
gt_operation.state_forwards("test_removepartialuniqueconstraint", new_state)
self.assertEqual(
len(
new_state.models["test_removepartialuniqueconstraint", "pony"].options[
"constraints"
]
),
0,
)
Pony = new_state.apps.get_model("test_removepartialuniqueconstraint", "Pony")
self.assertEqual(len(Pony._meta.constraints), 0)
# Test database alteration
with connection.schema_editor() as editor:
gt_operation.database_forwards(
"test_removepartialuniqueconstraint", editor, project_state, new_state
)
# Test constraint doesn't work
Pony.objects.create(pink=1, weight=4.0)
Pony.objects.create(pink=1, weight=4.0)
Pony.objects.create(pink=1, weight=6.0)
Pony.objects.create(pink=1, weight=7.0).delete()
# Test reversal
with connection.schema_editor() as editor:
gt_operation.database_backwards(
"test_removepartialuniqueconstraint", editor, new_state, project_state
)
# Test constraint works
if connection.features.supports_partial_indexes:
with self.assertRaises(IntegrityError), transaction.atomic():
Pony.objects.create(pink=1, weight=7.0)
else:
Pony.objects.create(pink=1, weight=7.0)
# Test deconstruction
definition = gt_operation.deconstruct()
self.assertEqual(definition[0], "RemoveConstraint")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2],
{
"model_name": "Pony",
"name": "test_constraint_pony_pink_for_weight_gt_5_uniq",
},
)

    def test_add_deferred_unique_constraint(self):
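        """
        A deferred unique constraint is enforced at commit time, or earlier
        with SET CONSTRAINTS ... IMMEDIATE; backends without deferrable
        constraint support skip the DDL entirely.
        """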
app_label = "test_adddeferred_uc"
project_state = self.set_up_test_model(app_label)
deferred_unique_constraint = models.UniqueConstraint(
fields=["pink"],
name="deferred_pink_constraint_add",
deferrable=models.Deferrable.DEFERRED,
)
operation = migrations.AddConstraint("Pony", deferred_unique_constraint)
self.assertEqual(
operation.describe(),
"Create constraint deferred_pink_constraint_add on model Pony",
)
# Add constraint.
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
self.assertEqual(
len(new_state.models[app_label, "pony"].options["constraints"]), 1
)
Pony = new_state.apps.get_model(app_label, "Pony")
self.assertEqual(len(Pony._meta.constraints), 1)
with connection.schema_editor() as editor, CaptureQueriesContext(
connection
) as ctx:
operation.database_forwards(app_label, editor, project_state, new_state)
Pony.objects.create(pink=1, weight=4.0)
if connection.features.supports_deferrable_unique_constraints:
# Unique constraint is deferred.
with transaction.atomic():
obj = Pony.objects.create(pink=1, weight=4.0)
obj.pink = 2
obj.save()
# Constraint behavior can be changed with SET CONSTRAINTS.
with self.assertRaises(IntegrityError):
with transaction.atomic(), connection.cursor() as cursor:
quoted_name = connection.ops.quote_name(
deferred_unique_constraint.name
)
cursor.execute("SET CONSTRAINTS %s IMMEDIATE" % quoted_name)
obj = Pony.objects.create(pink=1, weight=4.0)
obj.pink = 3
obj.save()
else:
self.assertEqual(len(ctx), 0)
Pony.objects.create(pink=1, weight=4.0)
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
# Constraint doesn't work.
Pony.objects.create(pink=1, weight=4.0)
# Deconstruction.
definition = operation.deconstruct()
self.assertEqual(definition[0], "AddConstraint")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2],
{"model_name": "Pony", "constraint": deferred_unique_constraint},
)

    def test_remove_deferred_unique_constraint(self):
app_label = "test_removedeferred_uc"
deferred_unique_constraint = models.UniqueConstraint(
fields=["pink"],
name="deferred_pink_constraint_rm",
deferrable=models.Deferrable.DEFERRED,
)
project_state = self.set_up_test_model(
app_label, constraints=[deferred_unique_constraint]
)
operation = migrations.RemoveConstraint("Pony", deferred_unique_constraint.name)
self.assertEqual(
operation.describe(),
"Remove constraint deferred_pink_constraint_rm from model Pony",
)
# Remove constraint.
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
self.assertEqual(
len(new_state.models[app_label, "pony"].options["constraints"]), 0
)
Pony = new_state.apps.get_model(app_label, "Pony")
self.assertEqual(len(Pony._meta.constraints), 0)
with connection.schema_editor() as editor, CaptureQueriesContext(
connection
) as ctx:
operation.database_forwards(app_label, editor, project_state, new_state)
# Constraint doesn't work.
Pony.objects.create(pink=1, weight=4.0)
Pony.objects.create(pink=1, weight=4.0).delete()
if not connection.features.supports_deferrable_unique_constraints:
self.assertEqual(len(ctx), 0)
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
if connection.features.supports_deferrable_unique_constraints:
# Unique constraint is deferred.
with transaction.atomic():
obj = Pony.objects.create(pink=1, weight=4.0)
obj.pink = 2
obj.save()
# Constraint behavior can be changed with SET CONSTRAINTS.
with self.assertRaises(IntegrityError):
with transaction.atomic(), connection.cursor() as cursor:
quoted_name = connection.ops.quote_name(
deferred_unique_constraint.name
)
cursor.execute("SET CONSTRAINTS %s IMMEDIATE" % quoted_name)
obj = Pony.objects.create(pink=1, weight=4.0)
obj.pink = 3
obj.save()
else:
Pony.objects.create(pink=1, weight=4.0)
# Deconstruction.
definition = operation.deconstruct()
self.assertEqual(definition[0], "RemoveConstraint")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2],
{
"model_name": "Pony",
"name": "deferred_pink_constraint_rm",
},
)

    def test_add_covering_unique_constraint(self):
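        """
        A unique constraint with included (covering) columns is enforced on
        backends that support covering indexes and skipped elsewhere.
        """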
app_label = "test_addcovering_uc"
project_state = self.set_up_test_model(app_label)
covering_unique_constraint = models.UniqueConstraint(
fields=["pink"],
name="covering_pink_constraint_add",
include=["weight"],
)
operation = migrations.AddConstraint("Pony", covering_unique_constraint)
self.assertEqual(
operation.describe(),
"Create constraint covering_pink_constraint_add on model Pony",
)
# Add constraint.
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
self.assertEqual(
len(new_state.models[app_label, "pony"].options["constraints"]), 1
)
Pony = new_state.apps.get_model(app_label, "Pony")
self.assertEqual(len(Pony._meta.constraints), 1)
with connection.schema_editor() as editor, CaptureQueriesContext(
connection
) as ctx:
operation.database_forwards(app_label, editor, project_state, new_state)
Pony.objects.create(pink=1, weight=4.0)
if connection.features.supports_covering_indexes:
with self.assertRaises(IntegrityError):
Pony.objects.create(pink=1, weight=4.0)
else:
self.assertEqual(len(ctx), 0)
Pony.objects.create(pink=1, weight=4.0)
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
# Constraint doesn't work.
Pony.objects.create(pink=1, weight=4.0)
# Deconstruction.
definition = operation.deconstruct()
self.assertEqual(definition[0], "AddConstraint")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2],
{"model_name": "Pony", "constraint": covering_unique_constraint},
)

    def test_remove_covering_unique_constraint(self):
app_label = "test_removecovering_uc"
covering_unique_constraint = models.UniqueConstraint(
fields=["pink"],
name="covering_pink_constraint_rm",
include=["weight"],
)
project_state = self.set_up_test_model(
app_label, constraints=[covering_unique_constraint]
)
operation = migrations.RemoveConstraint("Pony", covering_unique_constraint.name)
self.assertEqual(
operation.describe(),
"Remove constraint covering_pink_constraint_rm from model Pony",
)
# Remove constraint.
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
self.assertEqual(
len(new_state.models[app_label, "pony"].options["constraints"]), 0
)
Pony = new_state.apps.get_model(app_label, "Pony")
self.assertEqual(len(Pony._meta.constraints), 0)
with connection.schema_editor() as editor, CaptureQueriesContext(
connection
) as ctx:
operation.database_forwards(app_label, editor, project_state, new_state)
# Constraint doesn't work.
Pony.objects.create(pink=1, weight=4.0)
Pony.objects.create(pink=1, weight=4.0).delete()
if not connection.features.supports_covering_indexes:
self.assertEqual(len(ctx), 0)
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
if connection.features.supports_covering_indexes:
with self.assertRaises(IntegrityError):
Pony.objects.create(pink=1, weight=4.0)
else:
Pony.objects.create(pink=1, weight=4.0)
# Deconstruction.
definition = operation.deconstruct()
self.assertEqual(definition[0], "RemoveConstraint")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2],
{
"model_name": "Pony",
"name": "covering_pink_constraint_rm",
},
)

    def test_alter_field_with_func_unique_constraint(self):
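        """
        AlterField keeps an expression-based unique constraint in place,
        both forwards and backwards, on backends with expression index
        support.
        """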
app_label = "test_alfuncuc"
constraint_name = f"{app_label}_pony_uq"
table_name = f"{app_label}_pony"
project_state = self.set_up_test_model(
app_label,
constraints=[
models.UniqueConstraint("pink", "weight", name=constraint_name)
],
)
operation = migrations.AlterField(
"Pony", "pink", models.IntegerField(null=True)
)
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
if connection.features.supports_expression_indexes:
self.assertIndexNameExists(table_name, constraint_name)
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
if connection.features.supports_expression_indexes:
self.assertIndexNameExists(table_name, constraint_name)

    def test_add_func_unique_constraint(self):
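        """
        AddConstraint supports expression-based unique constraints; backends
        without expression index support skip the DDL.
        """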
app_label = "test_adfuncuc"
constraint_name = f"{app_label}_pony_abs_uq"
table_name = f"{app_label}_pony"
project_state = self.set_up_test_model(app_label)
constraint = models.UniqueConstraint(Abs("weight"), name=constraint_name)
operation = migrations.AddConstraint("Pony", constraint)
self.assertEqual(
operation.describe(),
"Create constraint test_adfuncuc_pony_abs_uq on model Pony",
)
self.assertEqual(
operation.migration_name_fragment,
"pony_test_adfuncuc_pony_abs_uq",
)
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
self.assertEqual(
len(new_state.models[app_label, "pony"].options["constraints"]), 1
)
self.assertIndexNameNotExists(table_name, constraint_name)
# Add constraint.
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
Pony = new_state.apps.get_model(app_label, "Pony")
Pony.objects.create(weight=4.0)
if connection.features.supports_expression_indexes:
self.assertIndexNameExists(table_name, constraint_name)
with self.assertRaises(IntegrityError):
Pony.objects.create(weight=-4.0)
else:
self.assertIndexNameNotExists(table_name, constraint_name)
Pony.objects.create(weight=-4.0)
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
self.assertIndexNameNotExists(table_name, constraint_name)
# Constraint doesn't work.
Pony.objects.create(weight=-4.0)
# Deconstruction.
definition = operation.deconstruct()
self.assertEqual(definition[0], "AddConstraint")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2],
{"model_name": "Pony", "constraint": constraint},
)

    def test_remove_func_unique_constraint(self):
app_label = "test_rmfuncuc"
constraint_name = f"{app_label}_pony_abs_uq"
table_name = f"{app_label}_pony"
project_state = self.set_up_test_model(
app_label,
constraints=[
models.UniqueConstraint(Abs("weight"), name=constraint_name),
],
)
self.assertTableExists(table_name)
if connection.features.supports_expression_indexes:
self.assertIndexNameExists(table_name, constraint_name)
operation = migrations.RemoveConstraint("Pony", constraint_name)
self.assertEqual(
operation.describe(),
"Remove constraint test_rmfuncuc_pony_abs_uq from model Pony",
)
self.assertEqual(
operation.migration_name_fragment,
"remove_pony_test_rmfuncuc_pony_abs_uq",
)
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
self.assertEqual(
len(new_state.models[app_label, "pony"].options["constraints"]), 0
)
Pony = new_state.apps.get_model(app_label, "Pony")
self.assertEqual(len(Pony._meta.constraints), 0)
# Remove constraint.
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
self.assertIndexNameNotExists(table_name, constraint_name)
# Constraint doesn't work.
Pony.objects.create(pink=1, weight=4.0)
Pony.objects.create(pink=1, weight=-4.0).delete()
# Reversal.
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
if connection.features.supports_expression_indexes:
self.assertIndexNameExists(table_name, constraint_name)
with self.assertRaises(IntegrityError):
Pony.objects.create(weight=-4.0)
else:
self.assertIndexNameNotExists(table_name, constraint_name)
Pony.objects.create(weight=-4.0)
# Deconstruction.
definition = operation.deconstruct()
self.assertEqual(definition[0], "RemoveConstraint")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {"model_name": "Pony", "name": constraint_name})

    def test_alter_model_options(self):
"""
Tests the AlterModelOptions operation.
"""
project_state = self.set_up_test_model("test_almoop")
# Test the state alteration (no DB alteration to test)
operation = migrations.AlterModelOptions(
"Pony", {"permissions": [("can_groom", "Can groom")]}
)
self.assertEqual(operation.describe(), "Change Meta options on Pony")
self.assertEqual(operation.migration_name_fragment, "alter_pony_options")
new_state = project_state.clone()
operation.state_forwards("test_almoop", new_state)
self.assertEqual(
len(
project_state.models["test_almoop", "pony"].options.get(
"permissions", []
)
),
0,
)
self.assertEqual(
len(new_state.models["test_almoop", "pony"].options.get("permissions", [])),
1,
)
self.assertEqual(
new_state.models["test_almoop", "pony"].options["permissions"][0][0],
"can_groom",
)
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterModelOptions")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2],
{"name": "Pony", "options": {"permissions": [("can_groom", "Can groom")]}},
)

    def test_alter_model_options_emptying(self):
"""
The AlterModelOptions operation removes keys from the dict (#23121)
"""
project_state = self.set_up_test_model("test_almoop", options=True)
# Test the state alteration (no DB alteration to test)
operation = migrations.AlterModelOptions("Pony", {})
self.assertEqual(operation.describe(), "Change Meta options on Pony")
new_state = project_state.clone()
operation.state_forwards("test_almoop", new_state)
self.assertEqual(
len(
project_state.models["test_almoop", "pony"].options.get(
"permissions", []
)
),
1,
)
self.assertEqual(
len(new_state.models["test_almoop", "pony"].options.get("permissions", [])),
0,
)
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterModelOptions")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {"name": "Pony", "options": {}})

    def test_alter_order_with_respect_to(self):
"""
Tests the AlterOrderWithRespectTo operation.
"""
project_state = self.set_up_test_model("test_alorwrtto", related_model=True)
# Test the state alteration
operation = migrations.AlterOrderWithRespectTo("Rider", "pony")
self.assertEqual(
operation.describe(), "Set order_with_respect_to on Rider to pony"
)
self.assertEqual(
operation.migration_name_fragment,
"alter_rider_order_with_respect_to",
)
new_state = project_state.clone()
operation.state_forwards("test_alorwrtto", new_state)
self.assertIsNone(
project_state.models["test_alorwrtto", "rider"].options.get(
"order_with_respect_to", None
)
)
self.assertEqual(
new_state.models["test_alorwrtto", "rider"].options.get(
"order_with_respect_to", None
),
"pony",
)
        # Make sure there's no _order column yet
self.assertColumnNotExists("test_alorwrtto_rider", "_order")
# Create some rows before alteration
rendered_state = project_state.apps
pony = rendered_state.get_model("test_alorwrtto", "Pony").objects.create(
weight=50
)
rider1 = rendered_state.get_model("test_alorwrtto", "Rider").objects.create(
pony=pony
)
rider1.friend = rider1
rider1.save()
rider2 = rendered_state.get_model("test_alorwrtto", "Rider").objects.create(
pony=pony
)
rider2.friend = rider2
rider2.save()
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards(
"test_alorwrtto", editor, project_state, new_state
)
self.assertColumnExists("test_alorwrtto_rider", "_order")
# Check for correct value in rows
updated_riders = new_state.apps.get_model(
"test_alorwrtto", "Rider"
).objects.all()
self.assertEqual(updated_riders[0]._order, 0)
self.assertEqual(updated_riders[1]._order, 0)
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards(
"test_alorwrtto", editor, new_state, project_state
)
self.assertColumnNotExists("test_alorwrtto_rider", "_order")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterOrderWithRespectTo")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2], {"name": "Rider", "order_with_respect_to": "pony"}
)

    def test_alter_model_managers(self):
"""
The managers on a model are set.
"""
project_state = self.set_up_test_model("test_almoma")
# Test the state alteration
operation = migrations.AlterModelManagers(
"Pony",
managers=[
("food_qs", FoodQuerySet.as_manager()),
("food_mgr", FoodManager("a", "b")),
("food_mgr_kwargs", FoodManager("x", "y", 3, 4)),
],
)
self.assertEqual(operation.describe(), "Change managers on Pony")
self.assertEqual(operation.migration_name_fragment, "alter_pony_managers")
managers = project_state.models["test_almoma", "pony"].managers
self.assertEqual(managers, [])
new_state = project_state.clone()
operation.state_forwards("test_almoma", new_state)
self.assertIn(("test_almoma", "pony"), new_state.models)
managers = new_state.models["test_almoma", "pony"].managers
self.assertEqual(managers[0][0], "food_qs")
self.assertIsInstance(managers[0][1], models.Manager)
self.assertEqual(managers[1][0], "food_mgr")
self.assertIsInstance(managers[1][1], FoodManager)
self.assertEqual(managers[1][1].args, ("a", "b", 1, 2))
self.assertEqual(managers[2][0], "food_mgr_kwargs")
self.assertIsInstance(managers[2][1], FoodManager)
self.assertEqual(managers[2][1].args, ("x", "y", 3, 4))
rendered_state = new_state.apps
model = rendered_state.get_model("test_almoma", "pony")
self.assertIsInstance(model.food_qs, models.Manager)
self.assertIsInstance(model.food_mgr, FoodManager)
self.assertIsInstance(model.food_mgr_kwargs, FoodManager)

    def test_alter_model_managers_emptying(self):
"""
The managers on a model are set.
"""
project_state = self.set_up_test_model("test_almomae", manager_model=True)
# Test the state alteration
operation = migrations.AlterModelManagers("Food", managers=[])
self.assertEqual(operation.describe(), "Change managers on Food")
self.assertIn(("test_almomae", "food"), project_state.models)
managers = project_state.models["test_almomae", "food"].managers
self.assertEqual(managers[0][0], "food_qs")
self.assertIsInstance(managers[0][1], models.Manager)
self.assertEqual(managers[1][0], "food_mgr")
self.assertIsInstance(managers[1][1], FoodManager)
self.assertEqual(managers[1][1].args, ("a", "b", 1, 2))
self.assertEqual(managers[2][0], "food_mgr_kwargs")
self.assertIsInstance(managers[2][1], FoodManager)
self.assertEqual(managers[2][1].args, ("x", "y", 3, 4))
new_state = project_state.clone()
operation.state_forwards("test_almomae", new_state)
managers = new_state.models["test_almomae", "food"].managers
self.assertEqual(managers, [])

    def test_alter_fk(self):
"""
Creating and then altering an FK works correctly
and deals with the pending SQL (#23091)
"""
project_state = self.set_up_test_model("test_alfk")
# Test adding and then altering the FK in one go
create_operation = migrations.CreateModel(
name="Rider",
fields=[
("id", models.AutoField(primary_key=True)),
("pony", models.ForeignKey("Pony", models.CASCADE)),
],
)
create_state = project_state.clone()
create_operation.state_forwards("test_alfk", create_state)
alter_operation = migrations.AlterField(
model_name="Rider",
name="pony",
field=models.ForeignKey("Pony", models.CASCADE, editable=False),
)
alter_state = create_state.clone()
alter_operation.state_forwards("test_alfk", alter_state)
with connection.schema_editor() as editor:
create_operation.database_forwards(
"test_alfk", editor, project_state, create_state
)
alter_operation.database_forwards(
"test_alfk", editor, create_state, alter_state
)
def test_alter_fk_non_fk(self):
"""
Altering an FK to a non-FK works (#23244)
"""
# Test the state alteration
operation = migrations.AlterField(
model_name="Rider",
name="pony",
field=models.FloatField(),
)
project_state, new_state = self.make_test_state(
"test_afknfk", operation, related_model=True
)
# Test the database alteration
self.assertColumnExists("test_afknfk_rider", "pony_id")
self.assertColumnNotExists("test_afknfk_rider", "pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_afknfk", editor, project_state, new_state)
self.assertColumnExists("test_afknfk_rider", "pony")
self.assertColumnNotExists("test_afknfk_rider", "pony_id")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards(
"test_afknfk", editor, new_state, project_state
)
self.assertColumnExists("test_afknfk_rider", "pony_id")
self.assertColumnNotExists("test_afknfk_rider", "pony")
def test_run_sql(self):
"""
Tests the RunSQL operation.
"""
project_state = self.set_up_test_model("test_runsql")
# Create the operation
operation = migrations.RunSQL(
# Use a multi-line string with a comment to test splitting on
# SQLite and MySQL respectively.
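            # Mixing doubled (%%) and single (%) percent signs checks that
            # literal wildcards survive whether or not they are escaped,
            # since no query parameters are supplied here.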
"CREATE TABLE i_love_ponies (id int, special_thing varchar(15));\n"
"INSERT INTO i_love_ponies (id, special_thing) "
"VALUES (1, 'i love ponies'); -- this is magic!\n"
"INSERT INTO i_love_ponies (id, special_thing) "
"VALUES (2, 'i love django');\n"
"UPDATE i_love_ponies SET special_thing = 'Ponies' "
"WHERE special_thing LIKE '%%ponies';"
"UPDATE i_love_ponies SET special_thing = 'Django' "
"WHERE special_thing LIKE '%django';",
# Run delete queries to test for parameter substitution failure
# reported in #23426
"DELETE FROM i_love_ponies WHERE special_thing LIKE '%Django%';"
"DELETE FROM i_love_ponies WHERE special_thing LIKE '%%Ponies%%';"
"DROP TABLE i_love_ponies",
state_operations=[
migrations.CreateModel(
"SomethingElse", [("id", models.AutoField(primary_key=True))]
)
],
)
self.assertEqual(operation.describe(), "Raw SQL operation")
# Test the state alteration
new_state = project_state.clone()
operation.state_forwards("test_runsql", new_state)
self.assertEqual(
len(new_state.models["test_runsql", "somethingelse"].fields), 1
)
# Make sure there's no table
self.assertTableNotExists("i_love_ponies")
# Test SQL collection
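        # (collect_sql=True records the statements instead of executing them.)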
with connection.schema_editor(collect_sql=True) as editor:
operation.database_forwards("test_runsql", editor, project_state, new_state)
self.assertIn("LIKE '%%ponies';", "\n".join(editor.collected_sql))
operation.database_backwards(
"test_runsql", editor, project_state, new_state
)
self.assertIn("LIKE '%%Ponies%%';", "\n".join(editor.collected_sql))
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_runsql", editor, project_state, new_state)
self.assertTableExists("i_love_ponies")
# Make sure all the SQL was processed
with connection.cursor() as cursor:
cursor.execute("SELECT COUNT(*) FROM i_love_ponies")
self.assertEqual(cursor.fetchall()[0][0], 2)
cursor.execute(
"SELECT COUNT(*) FROM i_love_ponies WHERE special_thing = 'Django'"
)
self.assertEqual(cursor.fetchall()[0][0], 1)
cursor.execute(
"SELECT COUNT(*) FROM i_love_ponies WHERE special_thing = 'Ponies'"
)
self.assertEqual(cursor.fetchall()[0][0], 1)
# And test reversal
self.assertTrue(operation.reversible)
with connection.schema_editor() as editor:
operation.database_backwards(
"test_runsql", editor, new_state, project_state
)
self.assertTableNotExists("i_love_ponies")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "RunSQL")
self.assertEqual(definition[1], [])
self.assertEqual(
sorted(definition[2]), ["reverse_sql", "sql", "state_operations"]
)
# And elidable reduction
self.assertIs(False, operation.reduce(operation, []))
elidable_operation = migrations.RunSQL("SELECT 1 FROM void;", elidable=True)
self.assertEqual(elidable_operation.reduce(operation, []), [operation])
def test_run_sql_params(self):
"""
#23426 - RunSQL should accept parameters.
"""
project_state = self.set_up_test_model("test_runsql")
# Create the operation
operation = migrations.RunSQL(
["CREATE TABLE i_love_ponies (id int, special_thing varchar(15));"],
["DROP TABLE i_love_ponies"],
)
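        # RunSQL accepts each statement as a plain string, a [sql, params]
        # list, or a (sql, params) tuple; the operation below exercises all
        # three forms in both directions.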
param_operation = migrations.RunSQL(
# forwards
(
"INSERT INTO i_love_ponies (id, special_thing) VALUES (1, 'Django');",
[
"INSERT INTO i_love_ponies (id, special_thing) VALUES (2, %s);",
["Ponies"],
],
(
"INSERT INTO i_love_ponies (id, special_thing) VALUES (%s, %s);",
(
3,
"Python",
),
),
),
# backwards
[
"DELETE FROM i_love_ponies WHERE special_thing = 'Django';",
["DELETE FROM i_love_ponies WHERE special_thing = 'Ponies';", None],
(
"DELETE FROM i_love_ponies WHERE id = %s OR special_thing = %s;",
[3, "Python"],
),
],
)
# Make sure there's no table
self.assertTableNotExists("i_love_ponies")
new_state = project_state.clone()
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_runsql", editor, project_state, new_state)
# Test parameter passing
with connection.schema_editor() as editor:
param_operation.database_forwards(
"test_runsql", editor, project_state, new_state
)
# Make sure all the SQL was processed
with connection.cursor() as cursor:
cursor.execute("SELECT COUNT(*) FROM i_love_ponies")
self.assertEqual(cursor.fetchall()[0][0], 3)
with connection.schema_editor() as editor:
param_operation.database_backwards(
"test_runsql", editor, new_state, project_state
)
with connection.cursor() as cursor:
cursor.execute("SELECT COUNT(*) FROM i_love_ponies")
self.assertEqual(cursor.fetchall()[0][0], 0)
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards(
"test_runsql", editor, new_state, project_state
)
self.assertTableNotExists("i_love_ponies")
def test_run_sql_params_invalid(self):
"""
#23426 - RunSQL should fail when a list of statements with an incorrect
number of tuples is given.
"""
project_state = self.set_up_test_model("test_runsql")
new_state = project_state.clone()
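        # Each parametrized statement must be an exact (sql, params) 2-tuple;
        # a 1-element list and a 3-element tuple should both be rejected.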
operation = migrations.RunSQL(
# forwards
[["INSERT INTO foo (bar) VALUES ('buz');"]],
# backwards
(("DELETE FROM foo WHERE bar = 'buz';", "invalid", "parameter count"),),
)
with connection.schema_editor() as editor:
with self.assertRaisesMessage(ValueError, "Expected a 2-tuple but got 1"):
operation.database_forwards(
"test_runsql", editor, project_state, new_state
)
with connection.schema_editor() as editor:
with self.assertRaisesMessage(ValueError, "Expected a 2-tuple but got 3"):
operation.database_backwards(
"test_runsql", editor, new_state, project_state
)
def test_run_sql_noop(self):
"""
#24098 - Tests no-op RunSQL operations.
"""
operation = migrations.RunSQL(migrations.RunSQL.noop, migrations.RunSQL.noop)
with connection.schema_editor() as editor:
operation.database_forwards("test_runsql", editor, None, None)
operation.database_backwards("test_runsql", editor, None, None)
def test_run_sql_add_missing_semicolon_on_collect_sql(self):
project_state = self.set_up_test_model("test_runsql")
new_state = project_state.clone()
tests = [
"INSERT INTO test_runsql_pony (pink, weight) VALUES (1, 1);\n",
"INSERT INTO test_runsql_pony (pink, weight) VALUES (1, 1)\n",
]
for sql in tests:
with self.subTest(sql=sql):
                operation = migrations.RunSQL(sql, migrations.RunSQL.noop)
with connection.schema_editor(collect_sql=True) as editor:
operation.database_forwards(
"test_runsql", editor, project_state, new_state
)
collected_sql = "\n".join(editor.collected_sql)
self.assertEqual(collected_sql.count(";"), 1)
def test_run_python(self):
"""
        Tests the RunPython operation.
"""
project_state = self.set_up_test_model("test_runpython", mti_model=True)
# Create the operation
def inner_method(models, schema_editor):
Pony = models.get_model("test_runpython", "Pony")
Pony.objects.create(pink=1, weight=3.55)
Pony.objects.create(weight=5)
def inner_method_reverse(models, schema_editor):
Pony = models.get_model("test_runpython", "Pony")
Pony.objects.filter(pink=1, weight=3.55).delete()
Pony.objects.filter(weight=5).delete()
operation = migrations.RunPython(
inner_method, reverse_code=inner_method_reverse
)
self.assertEqual(operation.describe(), "Raw Python operation")
# Test the state alteration does nothing
new_state = project_state.clone()
operation.state_forwards("test_runpython", new_state)
self.assertEqual(new_state, project_state)
# Test the database alteration
self.assertEqual(
project_state.apps.get_model("test_runpython", "Pony").objects.count(), 0
)
with connection.schema_editor() as editor:
operation.database_forwards(
"test_runpython", editor, project_state, new_state
)
self.assertEqual(
project_state.apps.get_model("test_runpython", "Pony").objects.count(), 2
)
# Now test reversal
self.assertTrue(operation.reversible)
with connection.schema_editor() as editor:
operation.database_backwards(
"test_runpython", editor, project_state, new_state
)
self.assertEqual(
project_state.apps.get_model("test_runpython", "Pony").objects.count(), 0
)
# Now test we can't use a string
with self.assertRaisesMessage(
ValueError, "RunPython must be supplied with a callable"
):
migrations.RunPython("print 'ahahaha'")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "RunPython")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["code", "reverse_code"])
# Also test reversal fails, with an operation identical to above but
# without reverse_code set.
no_reverse_operation = migrations.RunPython(inner_method)
self.assertFalse(no_reverse_operation.reversible)
with connection.schema_editor() as editor:
no_reverse_operation.database_forwards(
"test_runpython", editor, project_state, new_state
)
with self.assertRaises(NotImplementedError):
no_reverse_operation.database_backwards(
"test_runpython", editor, new_state, project_state
)
self.assertEqual(
project_state.apps.get_model("test_runpython", "Pony").objects.count(), 2
)
def create_ponies(models, schema_editor):
Pony = models.get_model("test_runpython", "Pony")
pony1 = Pony.objects.create(pink=1, weight=3.55)
self.assertIsNot(pony1.pk, None)
pony2 = Pony.objects.create(weight=5)
self.assertIsNot(pony2.pk, None)
self.assertNotEqual(pony1.pk, pony2.pk)
operation = migrations.RunPython(create_ponies)
with connection.schema_editor() as editor:
operation.database_forwards(
"test_runpython", editor, project_state, new_state
)
self.assertEqual(
project_state.apps.get_model("test_runpython", "Pony").objects.count(), 4
)
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "RunPython")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["code"])
def create_shetlandponies(models, schema_editor):
ShetlandPony = models.get_model("test_runpython", "ShetlandPony")
pony1 = ShetlandPony.objects.create(weight=4.0)
self.assertIsNot(pony1.pk, None)
pony2 = ShetlandPony.objects.create(weight=5.0)
self.assertIsNot(pony2.pk, None)
self.assertNotEqual(pony1.pk, pony2.pk)
operation = migrations.RunPython(create_shetlandponies)
with connection.schema_editor() as editor:
operation.database_forwards(
"test_runpython", editor, project_state, new_state
)
self.assertEqual(
project_state.apps.get_model("test_runpython", "Pony").objects.count(), 6
)
self.assertEqual(
project_state.apps.get_model(
"test_runpython", "ShetlandPony"
).objects.count(),
2,
)
# And elidable reduction
self.assertIs(False, operation.reduce(operation, []))
elidable_operation = migrations.RunPython(inner_method, elidable=True)
self.assertEqual(elidable_operation.reduce(operation, []), [operation])
def test_run_python_atomic(self):
"""
Tests the RunPython operation correctly handles the "atomic" keyword
"""
project_state = self.set_up_test_model("test_runpythonatomic", mti_model=True)
def inner_method(models, schema_editor):
Pony = models.get_model("test_runpythonatomic", "Pony")
Pony.objects.create(pink=1, weight=3.55)
raise ValueError("Adrian hates ponies.")
# Verify atomicity when applying.
atomic_migration = Migration("test", "test_runpythonatomic")
atomic_migration.operations = [
migrations.RunPython(inner_method, reverse_code=inner_method)
]
non_atomic_migration = Migration("test", "test_runpythonatomic")
non_atomic_migration.operations = [
migrations.RunPython(inner_method, reverse_code=inner_method, atomic=False)
]
# If we're a fully-transactional database, both versions should rollback
if connection.features.can_rollback_ddl:
self.assertEqual(
project_state.apps.get_model(
"test_runpythonatomic", "Pony"
).objects.count(),
0,
)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
atomic_migration.apply(project_state, editor)
self.assertEqual(
project_state.apps.get_model(
"test_runpythonatomic", "Pony"
).objects.count(),
0,
)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
non_atomic_migration.apply(project_state, editor)
self.assertEqual(
project_state.apps.get_model(
"test_runpythonatomic", "Pony"
).objects.count(),
0,
)
# Otherwise, the non-atomic operation should leave a row there
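        # (An atomic RunPython still runs inside its own transaction, so its
        # data change rolls back even without transactional DDL.)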
else:
self.assertEqual(
project_state.apps.get_model(
"test_runpythonatomic", "Pony"
).objects.count(),
0,
)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
atomic_migration.apply(project_state, editor)
self.assertEqual(
project_state.apps.get_model(
"test_runpythonatomic", "Pony"
).objects.count(),
0,
)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
non_atomic_migration.apply(project_state, editor)
self.assertEqual(
project_state.apps.get_model(
"test_runpythonatomic", "Pony"
).objects.count(),
1,
)
# Reset object count to zero and verify atomicity when unapplying.
project_state.apps.get_model(
"test_runpythonatomic", "Pony"
).objects.all().delete()
# On a fully-transactional database, both versions rollback.
if connection.features.can_rollback_ddl:
self.assertEqual(
project_state.apps.get_model(
"test_runpythonatomic", "Pony"
).objects.count(),
0,
)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
atomic_migration.unapply(project_state, editor)
self.assertEqual(
project_state.apps.get_model(
"test_runpythonatomic", "Pony"
).objects.count(),
0,
)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
non_atomic_migration.unapply(project_state, editor)
self.assertEqual(
project_state.apps.get_model(
"test_runpythonatomic", "Pony"
).objects.count(),
0,
)
# Otherwise, the non-atomic operation leaves a row there.
else:
self.assertEqual(
project_state.apps.get_model(
"test_runpythonatomic", "Pony"
).objects.count(),
0,
)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
atomic_migration.unapply(project_state, editor)
self.assertEqual(
project_state.apps.get_model(
"test_runpythonatomic", "Pony"
).objects.count(),
0,
)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
non_atomic_migration.unapply(project_state, editor)
self.assertEqual(
project_state.apps.get_model(
"test_runpythonatomic", "Pony"
).objects.count(),
1,
)
# Verify deconstruction.
definition = non_atomic_migration.operations[0].deconstruct()
self.assertEqual(definition[0], "RunPython")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["atomic", "code", "reverse_code"])
def test_run_python_related_assignment(self):
"""
#24282 - Model changes to a FK reverse side update the model
on the FK side as well.
"""
def inner_method(models, schema_editor):
Author = models.get_model("test_authors", "Author")
Book = models.get_model("test_books", "Book")
author = Author.objects.create(name="Hemingway")
Book.objects.create(title="Old Man and The Sea", author=author)
create_author = migrations.CreateModel(
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=100)),
],
options={},
)
create_book = migrations.CreateModel(
"Book",
[
("id", models.AutoField(primary_key=True)),
("title", models.CharField(max_length=100)),
("author", models.ForeignKey("test_authors.Author", models.CASCADE)),
],
options={},
)
add_hometown = migrations.AddField(
"Author",
"hometown",
models.CharField(max_length=100),
)
create_old_man = migrations.RunPython(inner_method, inner_method)
project_state = ProjectState()
new_state = project_state.clone()
with connection.schema_editor() as editor:
create_author.state_forwards("test_authors", new_state)
create_author.database_forwards(
"test_authors", editor, project_state, new_state
)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
create_book.state_forwards("test_books", new_state)
create_book.database_forwards(
"test_books", editor, project_state, new_state
)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
add_hometown.state_forwards("test_authors", new_state)
add_hometown.database_forwards(
"test_authors", editor, project_state, new_state
)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
create_old_man.state_forwards("test_books", new_state)
create_old_man.database_forwards(
"test_books", editor, project_state, new_state
)
def test_model_with_bigautofield(self):
"""
A model with BigAutoField can be created.
"""
def create_data(models, schema_editor):
Author = models.get_model("test_author", "Author")
Book = models.get_model("test_book", "Book")
author1 = Author.objects.create(name="Hemingway")
Book.objects.create(title="Old Man and The Sea", author=author1)
Book.objects.create(id=2**33, title="A farewell to arms", author=author1)
author2 = Author.objects.create(id=2**33, name="Remarque")
Book.objects.create(title="All quiet on the western front", author=author2)
Book.objects.create(title="Arc de Triomphe", author=author2)
create_author = migrations.CreateModel(
"Author",
[
("id", models.BigAutoField(primary_key=True)),
("name", models.CharField(max_length=100)),
],
options={},
)
create_book = migrations.CreateModel(
"Book",
[
("id", models.BigAutoField(primary_key=True)),
("title", models.CharField(max_length=100)),
(
"author",
models.ForeignKey(
to="test_author.Author", on_delete=models.CASCADE
),
),
],
options={},
)
fill_data = migrations.RunPython(create_data)
project_state = ProjectState()
new_state = project_state.clone()
with connection.schema_editor() as editor:
create_author.state_forwards("test_author", new_state)
create_author.database_forwards(
"test_author", editor, project_state, new_state
)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
create_book.state_forwards("test_book", new_state)
create_book.database_forwards("test_book", editor, project_state, new_state)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
fill_data.state_forwards("fill_data", new_state)
fill_data.database_forwards("fill_data", editor, project_state, new_state)
def _test_autofield_foreignfield_growth(
self, source_field, target_field, target_value
):
"""
A field may be migrated in the following ways:
- AutoField to BigAutoField
- SmallAutoField to AutoField
- SmallAutoField to BigAutoField
"""
def create_initial_data(models, schema_editor):
Article = models.get_model("test_article", "Article")
Blog = models.get_model("test_blog", "Blog")
blog = Blog.objects.create(name="web development done right")
Article.objects.create(name="Frameworks", blog=blog)
Article.objects.create(name="Programming Languages", blog=blog)
def create_big_data(models, schema_editor):
Article = models.get_model("test_article", "Article")
Blog = models.get_model("test_blog", "Blog")
blog2 = Blog.objects.create(name="Frameworks", id=target_value)
Article.objects.create(name="Django", blog=blog2)
Article.objects.create(id=target_value, name="Django2", blog=blog2)
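        # target_value exceeds the range of source_field, so create_big_data
        # succeeds only if both the primary key and the foreign key columns
        # were actually widened.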
create_blog = migrations.CreateModel(
"Blog",
[
("id", source_field(primary_key=True)),
("name", models.CharField(max_length=100)),
],
options={},
)
create_article = migrations.CreateModel(
"Article",
[
("id", source_field(primary_key=True)),
(
"blog",
models.ForeignKey(to="test_blog.Blog", on_delete=models.CASCADE),
),
("name", models.CharField(max_length=100)),
("data", models.TextField(default="")),
],
options={},
)
fill_initial_data = migrations.RunPython(
create_initial_data, create_initial_data
)
fill_big_data = migrations.RunPython(create_big_data, create_big_data)
grow_article_id = migrations.AlterField(
"Article", "id", target_field(primary_key=True)
)
grow_blog_id = migrations.AlterField(
"Blog", "id", target_field(primary_key=True)
)
project_state = ProjectState()
new_state = project_state.clone()
with connection.schema_editor() as editor:
create_blog.state_forwards("test_blog", new_state)
create_blog.database_forwards("test_blog", editor, project_state, new_state)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
create_article.state_forwards("test_article", new_state)
create_article.database_forwards(
"test_article", editor, project_state, new_state
)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
fill_initial_data.state_forwards("fill_initial_data", new_state)
fill_initial_data.database_forwards(
"fill_initial_data", editor, project_state, new_state
)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
grow_article_id.state_forwards("test_article", new_state)
grow_article_id.database_forwards(
"test_article", editor, project_state, new_state
)
state = new_state.clone()
article = state.apps.get_model("test_article.Article")
self.assertIsInstance(article._meta.pk, target_field)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
grow_blog_id.state_forwards("test_blog", new_state)
grow_blog_id.database_forwards(
"test_blog", editor, project_state, new_state
)
state = new_state.clone()
blog = state.apps.get_model("test_blog.Blog")
self.assertIsInstance(blog._meta.pk, target_field)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
fill_big_data.state_forwards("fill_big_data", new_state)
fill_big_data.database_forwards(
"fill_big_data", editor, project_state, new_state
)
def test_autofield__bigautofield_foreignfield_growth(self):
"""A field may be migrated from AutoField to BigAutoField."""
self._test_autofield_foreignfield_growth(
models.AutoField,
models.BigAutoField,
2**33,
)
def test_smallfield_autofield_foreignfield_growth(self):
"""A field may be migrated from SmallAutoField to AutoField."""
self._test_autofield_foreignfield_growth(
models.SmallAutoField,
models.AutoField,
2**22,
)
def test_smallfield_bigautofield_foreignfield_growth(self):
"""A field may be migrated from SmallAutoField to BigAutoField."""
self._test_autofield_foreignfield_growth(
models.SmallAutoField,
models.BigAutoField,
2**33,
)
def test_run_python_noop(self):
"""
#24098 - Tests no-op RunPython operations.
"""
project_state = ProjectState()
new_state = project_state.clone()
operation = migrations.RunPython(
migrations.RunPython.noop, migrations.RunPython.noop
)
with connection.schema_editor() as editor:
operation.database_forwards(
"test_runpython", editor, project_state, new_state
)
operation.database_backwards(
"test_runpython", editor, new_state, project_state
)
def test_separate_database_and_state(self):
"""
Tests the SeparateDatabaseAndState operation.
"""
project_state = self.set_up_test_model("test_separatedatabaseandstate")
# Create the operation
database_operation = migrations.RunSQL(
"CREATE TABLE i_love_ponies (id int, special_thing int);",
"DROP TABLE i_love_ponies;",
)
state_operation = migrations.CreateModel(
"SomethingElse", [("id", models.AutoField(primary_key=True))]
)
operation = migrations.SeparateDatabaseAndState(
state_operations=[state_operation], database_operations=[database_operation]
)
self.assertEqual(
operation.describe(), "Custom state/database change combination"
)
# Test the state alteration
new_state = project_state.clone()
operation.state_forwards("test_separatedatabaseandstate", new_state)
self.assertEqual(
len(
new_state.models[
"test_separatedatabaseandstate", "somethingelse"
].fields
),
1,
)
# Make sure there's no table
self.assertTableNotExists("i_love_ponies")
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards(
"test_separatedatabaseandstate", editor, project_state, new_state
)
self.assertTableExists("i_love_ponies")
# And test reversal
self.assertTrue(operation.reversible)
with connection.schema_editor() as editor:
operation.database_backwards(
"test_separatedatabaseandstate", editor, new_state, project_state
)
self.assertTableNotExists("i_love_ponies")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "SeparateDatabaseAndState")
self.assertEqual(definition[1], [])
self.assertEqual(
sorted(definition[2]), ["database_operations", "state_operations"]
)
def test_separate_database_and_state2(self):
"""
A complex SeparateDatabaseAndState operation: Multiple operations both
for state and database. Verify the state dependencies within each list
and that state ops don't affect the database.
"""
app_label = "test_separatedatabaseandstate2"
project_state = self.set_up_test_model(app_label)
# Create the operation
database_operations = [
migrations.CreateModel(
"ILovePonies",
[("id", models.AutoField(primary_key=True))],
options={"db_table": "iloveponies"},
),
migrations.CreateModel(
"ILoveMorePonies",
# We use IntegerField and not AutoField because
# the model is going to be deleted immediately
# and with an AutoField this fails on Oracle
[("id", models.IntegerField(primary_key=True))],
options={"db_table": "ilovemoreponies"},
),
migrations.DeleteModel("ILoveMorePonies"),
migrations.CreateModel(
"ILoveEvenMorePonies",
[("id", models.AutoField(primary_key=True))],
options={"db_table": "iloveevenmoreponies"},
),
]
state_operations = [
migrations.CreateModel(
"SomethingElse",
[("id", models.AutoField(primary_key=True))],
options={"db_table": "somethingelse"},
),
migrations.DeleteModel("SomethingElse"),
migrations.CreateModel(
"SomethingCompletelyDifferent",
[("id", models.AutoField(primary_key=True))],
options={"db_table": "somethingcompletelydifferent"},
),
]
operation = migrations.SeparateDatabaseAndState(
state_operations=state_operations,
database_operations=database_operations,
)
# Test the state alteration
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
def assertModelsAndTables(after_db):
# Tables and models exist, or don't, as they should:
self.assertNotIn((app_label, "somethingelse"), new_state.models)
self.assertEqual(
len(new_state.models[app_label, "somethingcompletelydifferent"].fields),
1,
)
self.assertNotIn((app_label, "iloveponiesonies"), new_state.models)
self.assertNotIn((app_label, "ilovemoreponies"), new_state.models)
self.assertNotIn((app_label, "iloveevenmoreponies"), new_state.models)
self.assertTableNotExists("somethingelse")
self.assertTableNotExists("somethingcompletelydifferent")
self.assertTableNotExists("ilovemoreponies")
if after_db:
self.assertTableExists("iloveponies")
self.assertTableExists("iloveevenmoreponies")
else:
self.assertTableNotExists("iloveponies")
self.assertTableNotExists("iloveevenmoreponies")
assertModelsAndTables(after_db=False)
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
assertModelsAndTables(after_db=True)
# And test reversal
self.assertTrue(operation.reversible)
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
assertModelsAndTables(after_db=False)
class SwappableOperationTests(OperationTestBase):
"""
Key operations ignore swappable models
(we don't want to replicate all of them here, as the functionality
is in a common base class anyway)
"""
available_apps = ["migrations"]
@override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel")
def test_create_ignore_swapped(self):
"""
The CreateTable operation ignores swapped models.
"""
operation = migrations.CreateModel(
"Pony",
[
("id", models.AutoField(primary_key=True)),
("pink", models.IntegerField(default=1)),
],
options={
"swappable": "TEST_SWAP_MODEL",
},
)
# Test the state alteration (it should still be there!)
project_state = ProjectState()
new_state = project_state.clone()
operation.state_forwards("test_crigsw", new_state)
self.assertEqual(new_state.models["test_crigsw", "pony"].name, "Pony")
self.assertEqual(len(new_state.models["test_crigsw", "pony"].fields), 2)
# Test the database alteration
self.assertTableNotExists("test_crigsw_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_crigsw", editor, project_state, new_state)
self.assertTableNotExists("test_crigsw_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards(
"test_crigsw", editor, new_state, project_state
)
self.assertTableNotExists("test_crigsw_pony")
@override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel")
def test_delete_ignore_swapped(self):
"""
Tests the DeleteModel operation ignores swapped models.
"""
operation = migrations.DeleteModel("Pony")
project_state, new_state = self.make_test_state("test_dligsw", operation)
# Test the database alteration
self.assertTableNotExists("test_dligsw_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_dligsw", editor, project_state, new_state)
self.assertTableNotExists("test_dligsw_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards(
"test_dligsw", editor, new_state, project_state
)
self.assertTableNotExists("test_dligsw_pony")
@override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel")
def test_add_field_ignore_swapped(self):
"""
Tests the AddField operation.
"""
# Test the state alteration
operation = migrations.AddField(
"Pony",
"height",
models.FloatField(null=True, default=5),
)
project_state, new_state = self.make_test_state("test_adfligsw", operation)
# Test the database alteration
self.assertTableNotExists("test_adfligsw_pony")
with connection.schema_editor() as editor:
operation.database_forwards(
"test_adfligsw", editor, project_state, new_state
)
self.assertTableNotExists("test_adfligsw_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards(
"test_adfligsw", editor, new_state, project_state
)
self.assertTableNotExists("test_adfligsw_pony")
@override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel")
def test_indexes_ignore_swapped(self):
"""
Add/RemoveIndex operations ignore swapped models.
"""
operation = migrations.AddIndex(
"Pony", models.Index(fields=["pink"], name="my_name_idx")
)
project_state, new_state = self.make_test_state("test_adinigsw", operation)
with connection.schema_editor() as editor:
# No database queries should be run for swapped models
operation.database_forwards(
"test_adinigsw", editor, project_state, new_state
)
operation.database_backwards(
"test_adinigsw", editor, new_state, project_state
)
operation = migrations.RemoveIndex(
"Pony", models.Index(fields=["pink"], name="my_name_idx")
)
project_state, new_state = self.make_test_state("test_rminigsw", operation)
with connection.schema_editor() as editor:
operation.database_forwards(
"test_rminigsw", editor, project_state, new_state
)
operation.database_backwards(
"test_rminigsw", editor, new_state, project_state
)
class TestCreateModel(SimpleTestCase):
def test_references_model_mixin(self):
migrations.CreateModel(
"name",
fields=[],
bases=(Mixin, models.Model),
).references_model("other_model", "migrations")
class FieldOperationTests(SimpleTestCase):
def test_references_model(self):
operation = FieldOperation(
"MoDel", "field", models.ForeignKey("Other", models.CASCADE)
)
# Model name match.
self.assertIs(operation.references_model("mOdEl", "migrations"), True)
        # Model referenced by the field.
self.assertIs(operation.references_model("oTher", "migrations"), True)
# Doesn't reference.
self.assertIs(operation.references_model("Whatever", "migrations"), False)
def test_references_field_by_name(self):
operation = FieldOperation("MoDel", "field", models.BooleanField(default=False))
self.assertIs(operation.references_field("model", "field", "migrations"), True)
def test_references_field_by_remote_field_model(self):
operation = FieldOperation(
"Model", "field", models.ForeignKey("Other", models.CASCADE)
)
self.assertIs(
operation.references_field("Other", "whatever", "migrations"), True
)
self.assertIs(
operation.references_field("Missing", "whatever", "migrations"), False
)
def test_references_field_by_from_fields(self):
operation = FieldOperation(
"Model",
"field",
models.fields.related.ForeignObject(
"Other", models.CASCADE, ["from"], ["to"]
),
)
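        # from_fields name columns on the declaring model; to_fields name
        # columns on the remote model.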
self.assertIs(operation.references_field("Model", "from", "migrations"), True)
self.assertIs(operation.references_field("Model", "to", "migrations"), False)
self.assertIs(operation.references_field("Other", "from", "migrations"), False)
self.assertIs(operation.references_field("Model", "to", "migrations"), False)
def test_references_field_by_to_fields(self):
operation = FieldOperation(
"Model",
"field",
models.ForeignKey("Other", models.CASCADE, to_field="field"),
)
self.assertIs(operation.references_field("Other", "field", "migrations"), True)
self.assertIs(
operation.references_field("Other", "whatever", "migrations"), False
)
self.assertIs(
operation.references_field("Missing", "whatever", "migrations"), False
)
def test_references_field_by_through(self):
operation = FieldOperation(
"Model", "field", models.ManyToManyField("Other", through="Through")
)
self.assertIs(
operation.references_field("Other", "whatever", "migrations"), True
)
self.assertIs(
operation.references_field("Through", "whatever", "migrations"), True
)
self.assertIs(
operation.references_field("Missing", "whatever", "migrations"), False
)
    def test_references_field_by_through_fields(self):
operation = FieldOperation(
"Model",
"field",
models.ManyToManyField(
"Other", through="Through", through_fields=("first", "second")
),
)
self.assertIs(
operation.references_field("Other", "whatever", "migrations"), True
)
self.assertIs(
operation.references_field("Through", "whatever", "migrations"), False
)
self.assertIs(
operation.references_field("Through", "first", "migrations"), True
)
self.assertIs(
operation.references_field("Through", "second", "migrations"), True
)
|
46efdf831863fadf22de7efc9f682334b609fbbd1eddcb6f0182cf624960dd66 | import os
import shutil
import tempfile
from contextlib import contextmanager
from importlib import import_module
from django.apps import apps
from django.db import connection, connections, migrations, models
from django.db.migrations.migration import Migration
from django.db.migrations.recorder import MigrationRecorder
from django.db.migrations.state import ProjectState
from django.test import TransactionTestCase
from django.test.utils import extend_sys_path
from django.utils.module_loading import module_dir
class MigrationTestBase(TransactionTestCase):
"""
Contains an extended set of asserts for testing migrations and schema operations.
"""
available_apps = ["migrations"]
databases = {"default", "other"}
def tearDown(self):
# Reset applied-migrations state.
for db in self.databases:
recorder = MigrationRecorder(connections[db])
recorder.migration_qs.filter(app="migrations").delete()
def get_table_description(self, table, using="default"):
with connections[using].cursor() as cursor:
return connections[using].introspection.get_table_description(cursor, table)
def assertTableExists(self, table, using="default"):
with connections[using].cursor() as cursor:
self.assertIn(table, connections[using].introspection.table_names(cursor))
def assertTableNotExists(self, table, using="default"):
with connections[using].cursor() as cursor:
self.assertNotIn(
table, connections[using].introspection.table_names(cursor)
)
def assertColumnExists(self, table, column, using="default"):
self.assertIn(
column, [c.name for c in self.get_table_description(table, using=using)]
)
def assertColumnNotExists(self, table, column, using="default"):
self.assertNotIn(
column, [c.name for c in self.get_table_description(table, using=using)]
)
def _get_column_allows_null(self, table, column, using):
return [
c.null_ok
for c in self.get_table_description(table, using=using)
if c.name == column
][0]
def assertColumnNull(self, table, column, using="default"):
self.assertTrue(self._get_column_allows_null(table, column, using))
def assertColumnNotNull(self, table, column, using="default"):
self.assertFalse(self._get_column_allows_null(table, column, using))
def _get_column_collation(self, table, column, using):
return next(
f.collation
for f in self.get_table_description(table, using=using)
if f.name == column
)
def assertColumnCollation(self, table, column, collation, using="default"):
self.assertEqual(self._get_column_collation(table, column, using), collation)
def _get_table_comment(self, table, using):
with connections[using].cursor() as cursor:
return next(
t.comment
for t in connections[using].introspection.get_table_list(cursor)
if t.name == table
)
def assertTableComment(self, table, comment, using="default"):
self.assertEqual(self._get_table_comment(table, using), comment)
def assertTableCommentNotExists(self, table, using="default"):
self.assertIn(self._get_table_comment(table, using), [None, ""])
def assertIndexExists(
self, table, columns, value=True, using="default", index_type=None
):
with connections[using].cursor() as cursor:
self.assertEqual(
value,
any(
c["index"]
for c in connections[using]
.introspection.get_constraints(cursor, table)
.values()
if (
c["columns"] == list(columns)
and (index_type is None or c["type"] == index_type)
and not c["unique"]
)
),
)
def assertIndexNotExists(self, table, columns):
return self.assertIndexExists(table, columns, False)
def assertIndexNameExists(self, table, index, using="default"):
with connections[using].cursor() as cursor:
self.assertIn(
index,
                connections[using].introspection.get_constraints(cursor, table),
)
def assertIndexNameNotExists(self, table, index, using="default"):
with connections[using].cursor() as cursor:
self.assertNotIn(
index,
                connections[using].introspection.get_constraints(cursor, table),
)
def assertConstraintExists(self, table, name, value=True, using="default"):
with connections[using].cursor() as cursor:
constraints = (
connections[using].introspection.get_constraints(cursor, table).items()
)
self.assertEqual(
value,
any(c["check"] for n, c in constraints if n == name),
)
def assertConstraintNotExists(self, table, name):
return self.assertConstraintExists(table, name, False)
    def assertUniqueConstraintExists(
        self, table, columns, value=True, using="default"
    ):
with connections[using].cursor() as cursor:
constraints = (
connections[using].introspection.get_constraints(cursor, table).values()
)
self.assertEqual(
value,
any(c["unique"] for c in constraints if c["columns"] == list(columns)),
)
def assertFKExists(self, table, columns, to, value=True, using="default"):
if not connections[using].features.can_introspect_foreign_keys:
return
with connections[using].cursor() as cursor:
self.assertEqual(
value,
any(
c["foreign_key"] == to
for c in connections[using]
.introspection.get_constraints(cursor, table)
.values()
if c["columns"] == list(columns)
),
)
def assertFKNotExists(self, table, columns, to):
return self.assertFKExists(table, columns, to, False)
@contextmanager
def temporary_migration_module(self, app_label="migrations", module=None):
"""
Allows testing management commands in a temporary migrations module.
Wrap all invocations to makemigrations and squashmigrations with this
context manager in order to avoid creating migration files in your
source tree inadvertently.
Takes the application label that will be passed to makemigrations or
squashmigrations and the Python path to a migrations module.
The migrations module is used as a template for creating the temporary
migrations module. If it isn't provided, the application's migrations
module is used, if it exists.
Returns the filesystem path to the temporary migrations module.
"""
with tempfile.TemporaryDirectory() as temp_dir:
target_dir = tempfile.mkdtemp(dir=temp_dir)
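            # mkdtemp() gives the package a unique basename, so repeated
            # runs cannot collide on sys.path.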
with open(os.path.join(target_dir, "__init__.py"), "w"):
pass
target_migrations_dir = os.path.join(target_dir, "migrations")
if module is None:
module = apps.get_app_config(app_label).name + ".migrations"
try:
source_migrations_dir = module_dir(import_module(module))
except (ImportError, ValueError):
pass
else:
shutil.copytree(source_migrations_dir, target_migrations_dir)
with extend_sys_path(temp_dir):
new_module = os.path.basename(target_dir) + ".migrations"
with self.settings(MIGRATION_MODULES={app_label: new_module}):
yield target_migrations_dir
class OperationTestBase(MigrationTestBase):
"""Common functions to help test operations."""
@classmethod
def setUpClass(cls):
super().setUpClass()
cls._initial_table_names = frozenset(connection.introspection.table_names())
def tearDown(self):
self.cleanup_test_tables()
super().tearDown()
def cleanup_test_tables(self):
table_names = (
frozenset(connection.introspection.table_names())
- self._initial_table_names
)
with connection.schema_editor() as editor:
with connection.constraint_checks_disabled():
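                # With foreign key checks disabled, leftover tables can be
                # dropped in any order.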
for table_name in table_names:
editor.execute(
editor.sql_delete_table
% {
"table": editor.quote_name(table_name),
}
)
def apply_operations(self, app_label, project_state, operations, atomic=True):
migration = Migration("name", app_label)
migration.operations = operations
with connection.schema_editor(atomic=atomic) as editor:
return migration.apply(project_state, editor)
def unapply_operations(self, app_label, project_state, operations, atomic=True):
migration = Migration("name", app_label)
migration.operations = operations
with connection.schema_editor(atomic=atomic) as editor:
return migration.unapply(project_state, editor)
def make_test_state(self, app_label, operation, **kwargs):
"""
Makes a test state using set_up_test_model and returns the
original state and the state after the migration is applied.
"""
project_state = self.set_up_test_model(app_label, **kwargs)
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
return project_state, new_state
def set_up_test_model(
self,
app_label,
second_model=False,
third_model=False,
index=False,
multicol_index=False,
related_model=False,
mti_model=False,
proxy_model=False,
manager_model=False,
unique_together=False,
options=False,
db_table=None,
index_together=False, # RemovedInDjango51Warning.
constraints=None,
indexes=None,
):
"""Creates a test model state and database table."""
# Make the "current" state.
model_options = {
"swappable": "TEST_SWAP_MODEL",
# RemovedInDjango51Warning.
"index_together": [["weight", "pink"]] if index_together else [],
"unique_together": [["pink", "weight"]] if unique_together else [],
}
if options:
model_options["permissions"] = [("can_groom", "Can groom")]
if db_table:
model_options["db_table"] = db_table
operations = [
migrations.CreateModel(
"Pony",
[
("id", models.AutoField(primary_key=True)),
("pink", models.IntegerField(default=3)),
("weight", models.FloatField()),
("green", models.IntegerField(null=True)),
(
"yellow",
models.CharField(
blank=True, null=True, db_default="Yellow", max_length=20
),
),
],
options=model_options,
)
]
if index:
operations.append(
migrations.AddIndex(
"Pony",
models.Index(fields=["pink"], name="pony_pink_idx"),
)
)
if multicol_index:
operations.append(
migrations.AddIndex(
"Pony",
models.Index(fields=["pink", "weight"], name="pony_test_idx"),
)
)
if indexes:
for index in indexes:
operations.append(migrations.AddIndex("Pony", index))
if constraints:
for constraint in constraints:
operations.append(migrations.AddConstraint("Pony", constraint))
if second_model:
operations.append(
migrations.CreateModel(
"Stable",
[
("id", models.AutoField(primary_key=True)),
],
)
)
if third_model:
operations.append(
migrations.CreateModel(
"Van",
[
("id", models.AutoField(primary_key=True)),
],
)
)
if related_model:
operations.append(
migrations.CreateModel(
"Rider",
[
("id", models.AutoField(primary_key=True)),
("pony", models.ForeignKey("Pony", models.CASCADE)),
(
"friend",
models.ForeignKey("self", models.CASCADE, null=True),
),
],
)
)
if mti_model:
operations.append(
migrations.CreateModel(
"ShetlandPony",
fields=[
(
"pony_ptr",
models.OneToOneField(
"Pony",
models.CASCADE,
auto_created=True,
parent_link=True,
primary_key=True,
to_field="id",
serialize=False,
),
),
("cuteness", models.IntegerField(default=1)),
],
bases=["%s.Pony" % app_label],
)
)
if proxy_model:
operations.append(
migrations.CreateModel(
"ProxyPony",
fields=[],
options={"proxy": True},
bases=["%s.Pony" % app_label],
)
)
if manager_model:
from .models import FoodManager, FoodQuerySet
operations.append(
migrations.CreateModel(
"Food",
fields=[
("id", models.AutoField(primary_key=True)),
],
managers=[
("food_qs", FoodQuerySet.as_manager()),
("food_mgr", FoodManager("a", "b")),
("food_mgr_kwargs", FoodManager("x", "y", 3, 4)),
],
)
)
return self.apply_operations(app_label, ProjectState(), operations)
|
21e4acb7e559d0cdf51058a4b8e008bbf76188163a907d3833368ed3bcf47b99 | import functools
import re
from unittest import mock
from django.apps import apps
from django.conf import settings
from django.contrib.auth.models import AbstractBaseUser
from django.core.validators import RegexValidator, validate_slug
from django.db import connection, migrations, models
from django.db.migrations.autodetector import MigrationAutodetector
from django.db.migrations.graph import MigrationGraph
from django.db.migrations.loader import MigrationLoader
from django.db.migrations.questioner import MigrationQuestioner
from django.db.migrations.state import ModelState, ProjectState
from django.test import SimpleTestCase, TestCase, ignore_warnings, override_settings
from django.test.utils import isolate_lru_cache
from django.utils.deprecation import RemovedInDjango51Warning
from .models import FoodManager, FoodQuerySet
class DeconstructibleObject:
"""
A custom deconstructible object.
"""
def __init__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
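    # deconstruct() returns the (import path, args, kwargs) triple that the
    # autodetector compares to decide whether a field default has changed.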
def deconstruct(self):
        return (
            self.__module__ + "." + self.__class__.__name__,
            self.args,
            self.kwargs,
        )
class BaseAutodetectorTests(TestCase):
def repr_changes(self, changes, include_dependencies=False):
output = ""
for app_label, migrations_ in sorted(changes.items()):
output += " %s:\n" % app_label
for migration in migrations_:
output += " %s\n" % migration.name
for operation in migration.operations:
output += " %s\n" % operation
if include_dependencies:
output += " Dependencies:\n"
if migration.dependencies:
for dep in migration.dependencies:
output += " %s\n" % (dep,)
else:
output += " None\n"
return output
def assertNumberMigrations(self, changes, app_label, number):
if len(changes.get(app_label, [])) != number:
self.fail(
"Incorrect number of migrations (%s) for %s (expected %s)\n%s"
% (
len(changes.get(app_label, [])),
app_label,
number,
self.repr_changes(changes),
)
)
def assertMigrationDependencies(self, changes, app_label, position, dependencies):
if not changes.get(app_label):
self.fail(
"No migrations found for %s\n%s"
% (app_label, self.repr_changes(changes))
)
if len(changes[app_label]) < position + 1:
self.fail(
"No migration at index %s for %s\n%s"
% (position, app_label, self.repr_changes(changes))
)
migration = changes[app_label][position]
if set(migration.dependencies) != set(dependencies):
self.fail(
"Migration dependencies mismatch for %s.%s (expected %s):\n%s"
% (
app_label,
migration.name,
dependencies,
self.repr_changes(changes, include_dependencies=True),
)
)
def assertOperationTypes(self, changes, app_label, position, types):
if not changes.get(app_label):
self.fail(
"No migrations found for %s\n%s"
% (app_label, self.repr_changes(changes))
)
if len(changes[app_label]) < position + 1:
self.fail(
"No migration at index %s for %s\n%s"
% (position, app_label, self.repr_changes(changes))
)
migration = changes[app_label][position]
real_types = [
operation.__class__.__name__ for operation in migration.operations
]
if types != real_types:
self.fail(
"Operation type mismatch for %s.%s (expected %s):\n%s"
% (
app_label,
migration.name,
types,
self.repr_changes(changes),
)
)
def assertOperationAttributes(
self, changes, app_label, position, operation_position, **attrs
):
if not changes.get(app_label):
self.fail(
"No migrations found for %s\n%s"
% (app_label, self.repr_changes(changes))
)
if len(changes[app_label]) < position + 1:
self.fail(
"No migration at index %s for %s\n%s"
% (position, app_label, self.repr_changes(changes))
)
migration = changes[app_label][position]
if len(changes[app_label]) < position + 1:
self.fail(
"No operation at index %s for %s.%s\n%s"
% (
operation_position,
app_label,
migration.name,
self.repr_changes(changes),
)
)
operation = migration.operations[operation_position]
for attr, value in attrs.items():
if getattr(operation, attr, None) != value:
self.fail(
"Attribute mismatch for %s.%s op #%s, %s (expected %r, got %r):\n%s"
% (
app_label,
migration.name,
operation_position,
attr,
value,
getattr(operation, attr, None),
self.repr_changes(changes),
)
)
def assertOperationFieldAttributes(
self, changes, app_label, position, operation_position, **attrs
):
if not changes.get(app_label):
self.fail(
"No migrations found for %s\n%s"
% (app_label, self.repr_changes(changes))
)
if len(changes[app_label]) < position + 1:
self.fail(
"No migration at index %s for %s\n%s"
% (position, app_label, self.repr_changes(changes))
)
migration = changes[app_label][position]
if len(changes[app_label]) < position + 1:
self.fail(
"No operation at index %s for %s.%s\n%s"
% (
operation_position,
app_label,
migration.name,
self.repr_changes(changes),
)
)
operation = migration.operations[operation_position]
if not hasattr(operation, "field"):
self.fail(
"No field attribute for %s.%s op #%s."
% (
app_label,
migration.name,
operation_position,
)
)
field = operation.field
for attr, value in attrs.items():
if getattr(field, attr, None) != value:
self.fail(
"Field attribute mismatch for %s.%s op #%s, field.%s (expected %r, "
"got %r):\n%s"
% (
app_label,
migration.name,
operation_position,
attr,
value,
getattr(field, attr, None),
self.repr_changes(changes),
)
)
def make_project_state(self, model_states):
"Shortcut to make ProjectStates from lists of predefined models"
project_state = ProjectState()
for model_state in model_states:
project_state.add_model(model_state.clone())
return project_state
def get_changes(self, before_states, after_states, questioner=None):
if not isinstance(before_states, ProjectState):
before_states = self.make_project_state(before_states)
if not isinstance(after_states, ProjectState):
after_states = self.make_project_state(after_states)
return MigrationAutodetector(
before_states,
after_states,
questioner,
)._detect_changes()
class AutodetectorTests(BaseAutodetectorTests):
"""
Tests the migration autodetector.
"""
author_empty = ModelState(
"testapp", "Author", [("id", models.AutoField(primary_key=True))]
)
author_name = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
],
)
author_name_null = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200, null=True)),
],
)
author_name_longer = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=400)),
],
)
author_name_renamed = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("names", models.CharField(max_length=200)),
],
)
author_name_default = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200, default="Ada Lovelace")),
],
)
author_name_db_default = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200, db_default="Ada Lovelace")),
],
)
author_name_check_constraint = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
],
{
"constraints": [
models.CheckConstraint(
check=models.Q(name__contains="Bob"), name="name_contains_bob"
)
]
},
)
author_dates_of_birth_auto_now = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("date_of_birth", models.DateField(auto_now=True)),
("date_time_of_birth", models.DateTimeField(auto_now=True)),
("time_of_birth", models.TimeField(auto_now=True)),
],
)
author_dates_of_birth_auto_now_add = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("date_of_birth", models.DateField(auto_now_add=True)),
("date_time_of_birth", models.DateTimeField(auto_now_add=True)),
("time_of_birth", models.TimeField(auto_now_add=True)),
],
)
author_name_deconstructible_1 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200, default=DeconstructibleObject())),
],
)
author_name_deconstructible_2 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200, default=DeconstructibleObject())),
],
)
author_name_deconstructible_3 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200, default=models.IntegerField())),
],
)
author_name_deconstructible_4 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200, default=models.IntegerField())),
],
)
author_name_deconstructible_list_1 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200, default=[DeconstructibleObject(), 123]
),
),
],
)
author_name_deconstructible_list_2 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200, default=[DeconstructibleObject(), 123]
),
),
],
)
author_name_deconstructible_list_3 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200, default=[DeconstructibleObject(), 999]
),
),
],
)
author_name_deconstructible_tuple_1 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200, default=(DeconstructibleObject(), 123)
),
),
],
)
author_name_deconstructible_tuple_2 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200, default=(DeconstructibleObject(), 123)
),
),
],
)
author_name_deconstructible_tuple_3 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200, default=(DeconstructibleObject(), 999)
),
),
],
)
author_name_deconstructible_dict_1 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200,
default={"item": DeconstructibleObject(), "otheritem": 123},
),
),
],
)
author_name_deconstructible_dict_2 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200,
default={"item": DeconstructibleObject(), "otheritem": 123},
),
),
],
)
author_name_deconstructible_dict_3 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200,
default={"item": DeconstructibleObject(), "otheritem": 999},
),
),
],
)
author_name_nested_deconstructible_1 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200,
default=DeconstructibleObject(
DeconstructibleObject(1),
(
DeconstructibleObject("t1"),
DeconstructibleObject("t2"),
),
a=DeconstructibleObject("A"),
b=DeconstructibleObject(B=DeconstructibleObject("c")),
),
),
),
],
)
author_name_nested_deconstructible_2 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200,
default=DeconstructibleObject(
DeconstructibleObject(1),
(
DeconstructibleObject("t1"),
DeconstructibleObject("t2"),
),
a=DeconstructibleObject("A"),
b=DeconstructibleObject(B=DeconstructibleObject("c")),
),
),
),
],
)
author_name_nested_deconstructible_changed_arg = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200,
default=DeconstructibleObject(
DeconstructibleObject(1),
(
DeconstructibleObject("t1"),
DeconstructibleObject("t2-changed"),
),
a=DeconstructibleObject("A"),
b=DeconstructibleObject(B=DeconstructibleObject("c")),
),
),
),
],
)
author_name_nested_deconstructible_extra_arg = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200,
default=DeconstructibleObject(
DeconstructibleObject(1),
(
DeconstructibleObject("t1"),
DeconstructibleObject("t2"),
),
None,
a=DeconstructibleObject("A"),
b=DeconstructibleObject(B=DeconstructibleObject("c")),
),
),
),
],
)
author_name_nested_deconstructible_changed_kwarg = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200,
default=DeconstructibleObject(
DeconstructibleObject(1),
(
DeconstructibleObject("t1"),
DeconstructibleObject("t2"),
),
a=DeconstructibleObject("A"),
b=DeconstructibleObject(B=DeconstructibleObject("c-changed")),
),
),
),
],
)
author_name_nested_deconstructible_extra_kwarg = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200,
default=DeconstructibleObject(
DeconstructibleObject(1),
(
DeconstructibleObject("t1"),
DeconstructibleObject("t2"),
),
a=DeconstructibleObject("A"),
b=DeconstructibleObject(B=DeconstructibleObject("c")),
c=None,
),
),
),
],
)
author_custom_pk = ModelState(
"testapp", "Author", [("pk_field", models.IntegerField(primary_key=True))]
)
author_with_biography_non_blank = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField()),
("biography", models.TextField()),
],
)
author_with_biography_blank = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(blank=True)),
("biography", models.TextField(blank=True)),
],
)
author_with_book = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
],
)
author_with_book_order_wrt = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
],
options={"order_with_respect_to": "book"},
)
author_renamed_with_book = ModelState(
"testapp",
"Writer",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
],
)
author_with_publisher_string = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("publisher_name", models.CharField(max_length=200)),
],
)
author_with_publisher = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("publisher", models.ForeignKey("testapp.Publisher", models.CASCADE)),
],
)
author_with_user = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("user", models.ForeignKey("auth.User", models.CASCADE)),
],
)
author_with_custom_user = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("user", models.ForeignKey("thirdapp.CustomUser", models.CASCADE)),
],
)
author_proxy = ModelState(
"testapp", "AuthorProxy", [], {"proxy": True}, ("testapp.author",)
)
author_proxy_options = ModelState(
"testapp",
"AuthorProxy",
[],
{
"proxy": True,
"verbose_name": "Super Author",
},
("testapp.author",),
)
author_proxy_notproxy = ModelState(
"testapp", "AuthorProxy", [], {}, ("testapp.author",)
)
author_proxy_third = ModelState(
"thirdapp", "AuthorProxy", [], {"proxy": True}, ("testapp.author",)
)
author_proxy_third_notproxy = ModelState(
"thirdapp", "AuthorProxy", [], {}, ("testapp.author",)
)
author_proxy_proxy = ModelState(
"testapp", "AAuthorProxyProxy", [], {"proxy": True}, ("testapp.authorproxy",)
)
author_unmanaged = ModelState(
"testapp", "AuthorUnmanaged", [], {"managed": False}, ("testapp.author",)
)
author_unmanaged_managed = ModelState(
"testapp", "AuthorUnmanaged", [], {}, ("testapp.author",)
)
author_unmanaged_default_pk = ModelState(
"testapp", "Author", [("id", models.AutoField(primary_key=True))]
)
author_unmanaged_custom_pk = ModelState(
"testapp",
"Author",
[
("pk_field", models.IntegerField(primary_key=True)),
],
)
author_with_m2m = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("publishers", models.ManyToManyField("testapp.Publisher")),
],
)
author_with_m2m_blank = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("publishers", models.ManyToManyField("testapp.Publisher", blank=True)),
],
)
author_with_m2m_through = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"publishers",
models.ManyToManyField("testapp.Publisher", through="testapp.Contract"),
),
],
)
author_with_renamed_m2m_through = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"publishers",
models.ManyToManyField("testapp.Publisher", through="testapp.Deal"),
),
],
)
author_with_former_m2m = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("publishers", models.CharField(max_length=100)),
],
)
author_with_options = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
],
{
"permissions": [("can_hire", "Can hire")],
"verbose_name": "Authi",
},
)
author_with_db_table_comment = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
],
{"db_table_comment": "Table comment"},
)
author_with_db_table_options = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
],
{"db_table": "author_one"},
)
author_with_new_db_table_options = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
],
{"db_table": "author_two"},
)
author_renamed_with_db_table_options = ModelState(
"testapp",
"NewAuthor",
[
("id", models.AutoField(primary_key=True)),
],
{"db_table": "author_one"},
)
author_renamed_with_new_db_table_options = ModelState(
"testapp",
"NewAuthor",
[
("id", models.AutoField(primary_key=True)),
],
{"db_table": "author_three"},
)
contract = ModelState(
"testapp",
"Contract",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("publisher", models.ForeignKey("testapp.Publisher", models.CASCADE)),
],
)
contract_renamed = ModelState(
"testapp",
"Deal",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("publisher", models.ForeignKey("testapp.Publisher", models.CASCADE)),
],
)
publisher = ModelState(
"testapp",
"Publisher",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=100)),
],
)
publisher_with_author = ModelState(
"testapp",
"Publisher",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("name", models.CharField(max_length=100)),
],
)
publisher_with_aardvark_author = ModelState(
"testapp",
"Publisher",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Aardvark", models.CASCADE)),
("name", models.CharField(max_length=100)),
],
)
publisher_with_book = ModelState(
"testapp",
"Publisher",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("otherapp.Book", models.CASCADE)),
("name", models.CharField(max_length=100)),
],
)
other_pony = ModelState(
"otherapp",
"Pony",
[
("id", models.AutoField(primary_key=True)),
],
)
other_pony_food = ModelState(
"otherapp",
"Pony",
[
("id", models.AutoField(primary_key=True)),
],
managers=[
("food_qs", FoodQuerySet.as_manager()),
("food_mgr", FoodManager("a", "b")),
("food_mgr_kwargs", FoodManager("x", "y", 3, 4)),
],
)
other_stable = ModelState(
"otherapp", "Stable", [("id", models.AutoField(primary_key=True))]
)
third_thing = ModelState(
"thirdapp", "Thing", [("id", models.AutoField(primary_key=True))]
)
book = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
)
book_proxy_fk = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("thirdapp.AuthorProxy", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
)
book_proxy_proxy_fk = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.AAuthorProxyProxy", models.CASCADE)),
],
)
book_migrations_fk = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("migrations.UnmigratedModel", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
)
book_with_no_author_fk = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.IntegerField()),
("title", models.CharField(max_length=200)),
],
)
book_with_no_author = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("title", models.CharField(max_length=200)),
],
)
book_with_author_renamed = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Writer", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
)
book_with_field_and_author_renamed = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("writer", models.ForeignKey("testapp.Writer", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
)
book_with_multiple_authors = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("authors", models.ManyToManyField("testapp.Author")),
("title", models.CharField(max_length=200)),
],
)
book_with_multiple_authors_through_attribution = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
(
"authors",
models.ManyToManyField(
"testapp.Author", through="otherapp.Attribution"
),
),
("title", models.CharField(max_length=200)),
],
)
book_indexes = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
{
"indexes": [
models.Index(fields=["author", "title"], name="book_title_author_idx")
],
},
)
book_unordered_indexes = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
{
"indexes": [
models.Index(fields=["title", "author"], name="book_author_title_idx")
],
},
)
book_unique_together = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
{
"unique_together": {("author", "title")},
},
)
book_unique_together_2 = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
{
"unique_together": {("title", "author")},
},
)
book_unique_together_3 = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("newfield", models.IntegerField()),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
{
"unique_together": {("title", "newfield")},
},
)
book_unique_together_4 = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("newfield2", models.IntegerField()),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
{
"unique_together": {("title", "newfield2")},
},
)
attribution = ModelState(
"otherapp",
"Attribution",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
],
)
edition = ModelState(
"thirdapp",
"Edition",
[
("id", models.AutoField(primary_key=True)),
("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
],
)
custom_user = ModelState(
"thirdapp",
"CustomUser",
[
("id", models.AutoField(primary_key=True)),
("username", models.CharField(max_length=255)),
],
bases=(AbstractBaseUser,),
)
custom_user_no_inherit = ModelState(
"thirdapp",
"CustomUser",
[
("id", models.AutoField(primary_key=True)),
("username", models.CharField(max_length=255)),
],
)
aardvark = ModelState(
"thirdapp", "Aardvark", [("id", models.AutoField(primary_key=True))]
)
aardvark_testapp = ModelState(
"testapp", "Aardvark", [("id", models.AutoField(primary_key=True))]
)
aardvark_based_on_author = ModelState(
"testapp", "Aardvark", [], bases=("testapp.Author",)
)
aardvark_pk_fk_author = ModelState(
"testapp",
"Aardvark",
[
(
"id",
models.OneToOneField(
"testapp.Author", models.CASCADE, primary_key=True
),
),
],
)
knight = ModelState("eggs", "Knight", [("id", models.AutoField(primary_key=True))])
rabbit = ModelState(
"eggs",
"Rabbit",
[
("id", models.AutoField(primary_key=True)),
("knight", models.ForeignKey("eggs.Knight", models.CASCADE)),
("parent", models.ForeignKey("eggs.Rabbit", models.CASCADE)),
],
{
"unique_together": {("parent", "knight")},
"indexes": [
models.Index(
fields=["parent", "knight"], name="rabbit_circular_fk_index"
)
],
},
)
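    # The class attributes above are ModelState fixtures: in-memory snapshots
    # of model definitions that the autodetector diffs instead of importing
    # real models.py modules. For illustration only (this class is
    # hypothetical, not part of the suite), `author_with_book` corresponds
    # roughly to:
    #
    #     # testapp/models.py
    #     class Author(models.Model):
    #         name = models.CharField(max_length=200)
    #         book = models.ForeignKey("otherapp.Book", models.CASCADE)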
def test_arrange_for_graph(self):
"""Tests auto-naming of migrations for graph matching."""
# Make a fake graph
graph = MigrationGraph()
graph.add_node(("testapp", "0001_initial"), None)
graph.add_node(("testapp", "0002_foobar"), None)
graph.add_node(("otherapp", "0001_initial"), None)
graph.add_dependency(
"testapp.0002_foobar",
("testapp", "0002_foobar"),
("testapp", "0001_initial"),
)
graph.add_dependency(
"testapp.0002_foobar",
("testapp", "0002_foobar"),
("otherapp", "0001_initial"),
)
# Use project state to make a new migration change set
before = self.make_project_state([self.publisher, self.other_pony])
after = self.make_project_state(
[
self.author_empty,
self.publisher,
self.other_pony,
self.other_stable,
]
)
autodetector = MigrationAutodetector(before, after)
changes = autodetector._detect_changes()
# Run through arrange_for_graph
changes = autodetector.arrange_for_graph(changes, graph)
# Make sure there's a new name, deps match, etc.
self.assertEqual(changes["testapp"][0].name, "0003_author")
self.assertEqual(
changes["testapp"][0].dependencies, [("testapp", "0002_foobar")]
)
self.assertEqual(changes["otherapp"][0].name, "0002_stable")
self.assertEqual(
changes["otherapp"][0].dependencies, [("otherapp", "0001_initial")]
)
def test_arrange_for_graph_with_multiple_initial(self):
# Make a fake graph.
graph = MigrationGraph()
# Use project state to make a new migration change set.
before = self.make_project_state([])
after = self.make_project_state(
[self.author_with_book, self.book, self.attribution]
)
autodetector = MigrationAutodetector(
before, after, MigrationQuestioner({"ask_initial": True})
)
changes = autodetector._detect_changes()
changes = autodetector.arrange_for_graph(changes, graph)
self.assertEqual(changes["otherapp"][0].name, "0001_initial")
self.assertEqual(changes["otherapp"][0].dependencies, [])
self.assertEqual(changes["otherapp"][1].name, "0002_initial")
self.assertCountEqual(
changes["otherapp"][1].dependencies,
[("testapp", "0001_initial"), ("otherapp", "0001_initial")],
)
self.assertEqual(changes["testapp"][0].name, "0001_initial")
self.assertEqual(
changes["testapp"][0].dependencies, [("otherapp", "0001_initial")]
)
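    # A note on the questioner pattern used throughout: a defaults dict such
    # as MigrationQuestioner({"ask_initial": True}) pre-seeds the questioner's
    # answers, so prompts that would normally be interactive (here, whether
    # the apps are new; later, "ask_rename"/"ask_rename_model" for renames)
    # are answered without user input.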
def test_trim_apps(self):
"""
Trim does not remove dependencies but does remove unwanted apps.
"""
# Use project state to make a new migration change set
before = self.make_project_state([])
after = self.make_project_state(
[self.author_empty, self.other_pony, self.other_stable, self.third_thing]
)
autodetector = MigrationAutodetector(
before, after, MigrationQuestioner({"ask_initial": True})
)
changes = autodetector._detect_changes()
# Run through arrange_for_graph
graph = MigrationGraph()
changes = autodetector.arrange_for_graph(changes, graph)
changes["testapp"][0].dependencies.append(("otherapp", "0001_initial"))
changes = autodetector._trim_to_apps(changes, {"testapp"})
# Make sure there's the right set of migrations
self.assertEqual(changes["testapp"][0].name, "0001_initial")
self.assertEqual(changes["otherapp"][0].name, "0001_initial")
self.assertNotIn("thirdapp", changes)
def test_custom_migration_name(self):
"""Tests custom naming of migrations for graph matching."""
# Make a fake graph
graph = MigrationGraph()
graph.add_node(("testapp", "0001_initial"), None)
graph.add_node(("testapp", "0002_foobar"), None)
graph.add_node(("otherapp", "0001_initial"), None)
graph.add_dependency(
"testapp.0002_foobar",
("testapp", "0002_foobar"),
("testapp", "0001_initial"),
)
# Use project state to make a new migration change set
before = self.make_project_state([])
after = self.make_project_state(
[self.author_empty, self.other_pony, self.other_stable]
)
autodetector = MigrationAutodetector(before, after)
changes = autodetector._detect_changes()
# Run through arrange_for_graph
migration_name = "custom_name"
changes = autodetector.arrange_for_graph(changes, graph, migration_name)
# Make sure there's a new name, deps match, etc.
self.assertEqual(changes["testapp"][0].name, "0003_%s" % migration_name)
self.assertEqual(
changes["testapp"][0].dependencies, [("testapp", "0002_foobar")]
)
self.assertEqual(changes["otherapp"][0].name, "0002_%s" % migration_name)
self.assertEqual(
changes["otherapp"][0].dependencies, [("otherapp", "0001_initial")]
)
def test_new_model(self):
"""Tests autodetection of new models."""
changes = self.get_changes([], [self.other_pony_food])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "otherapp", 0, 0, name="Pony")
self.assertEqual(
[name for name, mgr in changes["otherapp"][0].operations[0].managers],
["food_qs", "food_mgr", "food_mgr_kwargs"],
)
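    # For illustration, the CreateModel asserted above carries the custom
    # managers into the generated migration; its shape is roughly the
    # following (inferred from the assertions, not captured from real
    # output):
    #
    #     migrations.CreateModel(
    #         name="Pony",
    #         fields=[("id", models.AutoField(primary_key=True))],
    #         managers=[
    #             ("food_qs", FoodQuerySet.as_manager()),
    #             ("food_mgr", FoodManager("a", "b")),
    #             ("food_mgr_kwargs", FoodManager("x", "y", 3, 4)),
    #         ],
    #     )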
def test_old_model(self):
"""Tests deletion of old models."""
changes = self.get_changes([self.author_empty], [])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["DeleteModel"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author")
def test_add_field(self):
"""Tests autodetection of new fields."""
changes = self.get_changes([self.author_empty], [self.author_name])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AddField"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="name")
@mock.patch(
"django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition",
side_effect=AssertionError("Should not have prompted for not null addition"),
)
def test_add_not_null_field_with_db_default(self, mocked_ask_method):
changes = self.get_changes([self.author_empty], [self.author_name_db_default])
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AddField"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="name", preserve_default=True
)
self.assertOperationFieldAttributes(
changes, "testapp", 0, 0, db_default=models.Value("Ada Lovelace")
)
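    # No prompt is expected above because db_default supplies the value at
    # the database level, so existing rows can be populated without asking
    # the user for a one-off default.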
@mock.patch(
"django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition",
side_effect=AssertionError("Should not have prompted for not null addition"),
)
def test_add_date_fields_with_auto_now_not_asking_for_default(
self, mocked_ask_method
):
changes = self.get_changes(
[self.author_empty], [self.author_dates_of_birth_auto_now]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes, "testapp", 0, ["AddField", "AddField", "AddField"]
)
self.assertOperationFieldAttributes(changes, "testapp", 0, 0, auto_now=True)
self.assertOperationFieldAttributes(changes, "testapp", 0, 1, auto_now=True)
self.assertOperationFieldAttributes(changes, "testapp", 0, 2, auto_now=True)
@mock.patch(
"django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition",
side_effect=AssertionError("Should not have prompted for not null addition"),
)
def test_add_date_fields_with_auto_now_add_not_asking_for_null_addition(
self, mocked_ask_method
):
changes = self.get_changes(
[self.author_empty], [self.author_dates_of_birth_auto_now_add]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes, "testapp", 0, ["AddField", "AddField", "AddField"]
)
self.assertOperationFieldAttributes(changes, "testapp", 0, 0, auto_now_add=True)
self.assertOperationFieldAttributes(changes, "testapp", 0, 1, auto_now_add=True)
self.assertOperationFieldAttributes(changes, "testapp", 0, 2, auto_now_add=True)
@mock.patch(
"django.db.migrations.questioner.MigrationQuestioner.ask_auto_now_add_addition"
)
def test_add_date_fields_with_auto_now_add_asking_for_default(
self, mocked_ask_method
):
changes = self.get_changes(
[self.author_empty], [self.author_dates_of_birth_auto_now_add]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes, "testapp", 0, ["AddField", "AddField", "AddField"]
)
self.assertOperationFieldAttributes(changes, "testapp", 0, 0, auto_now_add=True)
self.assertOperationFieldAttributes(changes, "testapp", 0, 1, auto_now_add=True)
self.assertOperationFieldAttributes(changes, "testapp", 0, 2, auto_now_add=True)
self.assertEqual(mocked_ask_method.call_count, 3)
def test_remove_field(self):
"""Tests autodetection of removed fields."""
changes = self.get_changes([self.author_name], [self.author_empty])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["RemoveField"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="name")
def test_alter_field(self):
"""Tests autodetection of new fields."""
changes = self.get_changes([self.author_name], [self.author_name_longer])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="name", preserve_default=True
)
def test_supports_functools_partial(self):
def _content_file_name(instance, filename, key, **kwargs):
return "{}/{}".format(instance, filename)
def content_file_name(key, **kwargs):
return functools.partial(_content_file_name, key, **kwargs)
# An unchanged partial reference.
before = [
ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"file",
models.FileField(
max_length=200, upload_to=content_file_name("file")
),
),
],
)
]
after = [
ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"file",
models.FileField(
max_length=200, upload_to=content_file_name("file")
),
),
],
)
]
changes = self.get_changes(before, after)
self.assertNumberMigrations(changes, "testapp", 0)
# A changed partial reference.
args_changed = [
ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"file",
models.FileField(
max_length=200, upload_to=content_file_name("other-file")
),
),
],
)
]
changes = self.get_changes(before, args_changed)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])
# Can't use assertOperationFieldAttributes because we need the
# deconstructed version, i.e., the exploded func/args/keywords rather
# than the partial: we don't care if it's not the same instance of the
# partial, only if it's the same source function, args, and keywords.
value = changes["testapp"][0].operations[0].field.upload_to
self.assertEqual(
(_content_file_name, ("other-file",), {}),
(value.func, value.args, value.keywords),
)
kwargs_changed = [
ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"file",
models.FileField(
max_length=200,
upload_to=content_file_name("file", spam="eggs"),
),
),
],
)
]
changes = self.get_changes(before, kwargs_changed)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])
value = changes["testapp"][0].operations[0].field.upload_to
self.assertEqual(
(_content_file_name, ("file",), {"spam": "eggs"}),
(value.func, value.args, value.keywords),
)
@mock.patch(
"django.db.migrations.questioner.MigrationQuestioner.ask_not_null_alteration",
side_effect=AssertionError("Should not have prompted for not null addition"),
)
def test_alter_field_to_not_null_with_default(self, mocked_ask_method):
"""
#23609 - Tests autodetection of nullable to non-nullable alterations.
"""
changes = self.get_changes([self.author_name_null], [self.author_name_default])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="name", preserve_default=True
)
self.assertOperationFieldAttributes(
changes, "testapp", 0, 0, default="Ada Lovelace"
)
@mock.patch(
"django.db.migrations.questioner.MigrationQuestioner.ask_not_null_alteration",
side_effect=AssertionError("Should not have prompted for not null alteration"),
)
def test_alter_field_to_not_null_with_db_default(self, mocked_ask_method):
changes = self.get_changes(
[self.author_name_null], [self.author_name_db_default]
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="name", preserve_default=True
)
self.assertOperationFieldAttributes(
changes, "testapp", 0, 0, db_default=models.Value("Ada Lovelace")
)
@mock.patch(
"django.db.migrations.questioner.MigrationQuestioner.ask_not_null_alteration",
return_value=models.NOT_PROVIDED,
)
def test_alter_field_to_not_null_without_default(self, mocked_ask_method):
"""
#23609 - Tests autodetection of nullable to non-nullable alterations.
"""
changes = self.get_changes([self.author_name_null], [self.author_name])
self.assertEqual(mocked_ask_method.call_count, 1)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="name", preserve_default=True
)
self.assertOperationFieldAttributes(
changes, "testapp", 0, 0, default=models.NOT_PROVIDED
)
@mock.patch(
"django.db.migrations.questioner.MigrationQuestioner.ask_not_null_alteration",
return_value="Some Name",
)
def test_alter_field_to_not_null_oneoff_default(self, mocked_ask_method):
"""
#23609 - Tests autodetection of nullable to non-nullable alterations.
"""
changes = self.get_changes([self.author_name_null], [self.author_name])
self.assertEqual(mocked_ask_method.call_count, 1)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="name", preserve_default=False
)
self.assertOperationFieldAttributes(
changes, "testapp", 0, 0, default="Some Name"
)
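    # Summary of preserve_default in the alterations above: a default defined
    # on the field keeps preserve_default=True, while a one-off value entered
    # at the prompt yields preserve_default=False, i.e. the value is only
    # used to populate existing rows during the migration and is not kept on
    # the field afterwards.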
def test_rename_field(self):
"""Tests autodetection of renamed fields."""
changes = self.get_changes(
[self.author_name],
[self.author_name_renamed],
MigrationQuestioner({"ask_rename": True}),
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["RenameField"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, old_name="name", new_name="names"
)
def test_rename_field_foreign_key_to_field(self):
before = [
ModelState(
"app",
"Foo",
[
("id", models.AutoField(primary_key=True)),
("field", models.IntegerField(unique=True)),
],
),
ModelState(
"app",
"Bar",
[
("id", models.AutoField(primary_key=True)),
(
"foo",
models.ForeignKey("app.Foo", models.CASCADE, to_field="field"),
),
],
),
]
after = [
ModelState(
"app",
"Foo",
[
("id", models.AutoField(primary_key=True)),
("renamed_field", models.IntegerField(unique=True)),
],
),
ModelState(
"app",
"Bar",
[
("id", models.AutoField(primary_key=True)),
(
"foo",
models.ForeignKey(
"app.Foo", models.CASCADE, to_field="renamed_field"
),
),
],
),
]
changes = self.get_changes(
before, after, MigrationQuestioner({"ask_rename": True})
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "app", 1)
self.assertOperationTypes(changes, "app", 0, ["RenameField"])
self.assertOperationAttributes(
changes, "app", 0, 0, old_name="field", new_name="renamed_field"
)
def test_rename_foreign_object_fields(self):
fields = ("first", "second")
renamed_fields = ("first_renamed", "second_renamed")
before = [
ModelState(
"app",
"Foo",
[
("id", models.AutoField(primary_key=True)),
("first", models.IntegerField()),
("second", models.IntegerField()),
],
options={"unique_together": {fields}},
),
ModelState(
"app",
"Bar",
[
("id", models.AutoField(primary_key=True)),
("first", models.IntegerField()),
("second", models.IntegerField()),
(
"foo",
models.ForeignObject(
"app.Foo",
models.CASCADE,
from_fields=fields,
to_fields=fields,
),
),
],
),
]
# Case 1: to_fields renames.
after = [
ModelState(
"app",
"Foo",
[
("id", models.AutoField(primary_key=True)),
("first_renamed", models.IntegerField()),
("second_renamed", models.IntegerField()),
],
options={"unique_together": {renamed_fields}},
),
ModelState(
"app",
"Bar",
[
("id", models.AutoField(primary_key=True)),
("first", models.IntegerField()),
("second", models.IntegerField()),
(
"foo",
models.ForeignObject(
"app.Foo",
models.CASCADE,
from_fields=fields,
to_fields=renamed_fields,
),
),
],
),
]
changes = self.get_changes(
before, after, MigrationQuestioner({"ask_rename": True})
)
self.assertNumberMigrations(changes, "app", 1)
self.assertOperationTypes(
changes, "app", 0, ["RenameField", "RenameField", "AlterUniqueTogether"]
)
self.assertOperationAttributes(
changes,
"app",
0,
0,
model_name="foo",
old_name="first",
new_name="first_renamed",
)
self.assertOperationAttributes(
changes,
"app",
0,
1,
model_name="foo",
old_name="second",
new_name="second_renamed",
)
# Case 2: from_fields renames.
after = [
ModelState(
"app",
"Foo",
[
("id", models.AutoField(primary_key=True)),
("first", models.IntegerField()),
("second", models.IntegerField()),
],
options={"unique_together": {fields}},
),
ModelState(
"app",
"Bar",
[
("id", models.AutoField(primary_key=True)),
("first_renamed", models.IntegerField()),
("second_renamed", models.IntegerField()),
(
"foo",
models.ForeignObject(
"app.Foo",
models.CASCADE,
from_fields=renamed_fields,
to_fields=fields,
),
),
],
),
]
changes = self.get_changes(
before, after, MigrationQuestioner({"ask_rename": True})
)
self.assertNumberMigrations(changes, "app", 1)
self.assertOperationTypes(changes, "app", 0, ["RenameField", "RenameField"])
self.assertOperationAttributes(
changes,
"app",
0,
0,
model_name="bar",
old_name="first",
new_name="first_renamed",
)
self.assertOperationAttributes(
changes,
"app",
0,
1,
model_name="bar",
old_name="second",
new_name="second_renamed",
)
def test_rename_referenced_primary_key(self):
before = [
ModelState(
"app",
"Foo",
[
("id", models.CharField(primary_key=True, serialize=False)),
],
),
ModelState(
"app",
"Bar",
[
("id", models.AutoField(primary_key=True)),
("foo", models.ForeignKey("app.Foo", models.CASCADE)),
],
),
]
after = [
ModelState(
"app",
"Foo",
[("renamed_id", models.CharField(primary_key=True, serialize=False))],
),
ModelState(
"app",
"Bar",
[
("id", models.AutoField(primary_key=True)),
("foo", models.ForeignKey("app.Foo", models.CASCADE)),
],
),
]
changes = self.get_changes(
before, after, MigrationQuestioner({"ask_rename": True})
)
self.assertNumberMigrations(changes, "app", 1)
self.assertOperationTypes(changes, "app", 0, ["RenameField"])
self.assertOperationAttributes(
changes, "app", 0, 0, old_name="id", new_name="renamed_id"
)
def test_rename_field_preserved_db_column(self):
"""
        RenameField is used if a field is renamed and, at the same time, a
        db_column equal to the old field's column name is added.
"""
before = [
ModelState(
"app",
"Foo",
[
("id", models.AutoField(primary_key=True)),
("field", models.IntegerField()),
],
),
]
after = [
ModelState(
"app",
"Foo",
[
("id", models.AutoField(primary_key=True)),
("renamed_field", models.IntegerField(db_column="field")),
],
),
]
changes = self.get_changes(
before, after, MigrationQuestioner({"ask_rename": True})
)
self.assertNumberMigrations(changes, "app", 1)
self.assertOperationTypes(changes, "app", 0, ["AlterField", "RenameField"])
self.assertOperationAttributes(
changes,
"app",
0,
0,
model_name="foo",
name="field",
)
self.assertEqual(
changes["app"][0].operations[0].field.deconstruct(),
(
"field",
"django.db.models.IntegerField",
[],
{"db_column": "field"},
),
)
self.assertOperationAttributes(
changes,
"app",
0,
1,
model_name="foo",
old_name="field",
new_name="renamed_field",
)
def test_rename_related_field_preserved_db_column(self):
before = [
ModelState(
"app",
"Foo",
[
("id", models.AutoField(primary_key=True)),
],
),
ModelState(
"app",
"Bar",
[
("id", models.AutoField(primary_key=True)),
("foo", models.ForeignKey("app.Foo", models.CASCADE)),
],
),
]
after = [
ModelState(
"app",
"Foo",
[
("id", models.AutoField(primary_key=True)),
],
),
ModelState(
"app",
"Bar",
[
("id", models.AutoField(primary_key=True)),
(
"renamed_foo",
models.ForeignKey(
"app.Foo", models.CASCADE, db_column="foo_id"
),
),
],
),
]
changes = self.get_changes(
before, after, MigrationQuestioner({"ask_rename": True})
)
self.assertNumberMigrations(changes, "app", 1)
self.assertOperationTypes(changes, "app", 0, ["AlterField", "RenameField"])
self.assertOperationAttributes(
changes,
"app",
0,
0,
model_name="bar",
name="foo",
)
self.assertEqual(
changes["app"][0].operations[0].field.deconstruct(),
(
"foo",
"django.db.models.ForeignKey",
[],
{"to": "app.foo", "on_delete": models.CASCADE, "db_column": "foo_id"},
),
)
self.assertOperationAttributes(
changes,
"app",
0,
1,
model_name="bar",
old_name="foo",
new_name="renamed_foo",
)
def test_rename_field_with_renamed_model(self):
changes = self.get_changes(
[self.author_name],
[
ModelState(
"testapp",
"RenamedAuthor",
[
("id", models.AutoField(primary_key=True)),
("renamed_name", models.CharField(max_length=200)),
],
),
],
MigrationQuestioner({"ask_rename_model": True, "ask_rename": True}),
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["RenameModel", "RenameField"])
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
old_name="Author",
new_name="RenamedAuthor",
)
self.assertOperationAttributes(
changes,
"testapp",
0,
1,
old_name="name",
new_name="renamed_name",
)
def test_rename_model(self):
"""Tests autodetection of renamed models."""
changes = self.get_changes(
[self.author_with_book, self.book],
[self.author_renamed_with_book, self.book_with_author_renamed],
MigrationQuestioner({"ask_rename_model": True}),
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["RenameModel"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, old_name="Author", new_name="Writer"
)
# Now that RenameModel handles related fields too, there should be
# no AlterField for the related field.
self.assertNumberMigrations(changes, "otherapp", 0)
def test_rename_model_case(self):
"""
        Model names are case-insensitive. Changing only the case doesn't lead
        to any autodetected operations.
"""
author_renamed = ModelState(
"testapp",
"author",
[
("id", models.AutoField(primary_key=True)),
],
)
changes = self.get_changes(
[self.author_empty, self.book],
[author_renamed, self.book],
questioner=MigrationQuestioner({"ask_rename_model": True}),
)
self.assertNumberMigrations(changes, "testapp", 0)
self.assertNumberMigrations(changes, "otherapp", 0)
def test_renamed_referenced_m2m_model_case(self):
publisher_renamed = ModelState(
"testapp",
"publisher",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=100)),
],
)
changes = self.get_changes(
[self.publisher, self.author_with_m2m],
[publisher_renamed, self.author_with_m2m],
questioner=MigrationQuestioner({"ask_rename_model": True}),
)
self.assertNumberMigrations(changes, "testapp", 0)
self.assertNumberMigrations(changes, "otherapp", 0)
def test_rename_m2m_through_model(self):
"""
Tests autodetection of renamed models that are used in M2M relations as
through models.
"""
changes = self.get_changes(
[self.author_with_m2m_through, self.publisher, self.contract],
[
self.author_with_renamed_m2m_through,
self.publisher,
self.contract_renamed,
],
MigrationQuestioner({"ask_rename_model": True}),
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["RenameModel"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, old_name="Contract", new_name="Deal"
)
def test_rename_model_with_renamed_rel_field(self):
"""
Tests autodetection of renamed models while simultaneously renaming one
of the fields that relate to the renamed model.
"""
changes = self.get_changes(
[self.author_with_book, self.book],
[self.author_renamed_with_book, self.book_with_field_and_author_renamed],
MigrationQuestioner({"ask_rename": True, "ask_rename_model": True}),
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["RenameModel"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, old_name="Author", new_name="Writer"
)
# Right number/type of migrations for related field rename?
# Alter is already taken care of.
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["RenameField"])
self.assertOperationAttributes(
changes, "otherapp", 0, 0, old_name="author", new_name="writer"
)
def test_rename_model_with_fks_in_different_position(self):
"""
#24537 - The order of fields in a model does not influence
the RenameModel detection.
"""
before = [
ModelState(
"testapp",
"EntityA",
[
("id", models.AutoField(primary_key=True)),
],
),
ModelState(
"testapp",
"EntityB",
[
("id", models.AutoField(primary_key=True)),
("some_label", models.CharField(max_length=255)),
("entity_a", models.ForeignKey("testapp.EntityA", models.CASCADE)),
],
),
]
after = [
ModelState(
"testapp",
"EntityA",
[
("id", models.AutoField(primary_key=True)),
],
),
ModelState(
"testapp",
"RenamedEntityB",
[
("id", models.AutoField(primary_key=True)),
("entity_a", models.ForeignKey("testapp.EntityA", models.CASCADE)),
("some_label", models.CharField(max_length=255)),
],
),
]
changes = self.get_changes(
before, after, MigrationQuestioner({"ask_rename_model": True})
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["RenameModel"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, old_name="EntityB", new_name="RenamedEntityB"
)
def test_rename_model_reverse_relation_dependencies(self):
"""
The migration to rename a model pointed to by a foreign key in another
app must run after the other app's migration that adds the foreign key
        with the model's original name. Therefore, the renaming migration has a
dependency on that other migration.
"""
before = [
ModelState(
"testapp",
"EntityA",
[
("id", models.AutoField(primary_key=True)),
],
),
ModelState(
"otherapp",
"EntityB",
[
("id", models.AutoField(primary_key=True)),
("entity_a", models.ForeignKey("testapp.EntityA", models.CASCADE)),
],
),
]
after = [
ModelState(
"testapp",
"RenamedEntityA",
[
("id", models.AutoField(primary_key=True)),
],
),
ModelState(
"otherapp",
"EntityB",
[
("id", models.AutoField(primary_key=True)),
(
"entity_a",
models.ForeignKey("testapp.RenamedEntityA", models.CASCADE),
),
],
),
]
changes = self.get_changes(
before, after, MigrationQuestioner({"ask_rename_model": True})
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertMigrationDependencies(
changes, "testapp", 0, [("otherapp", "__first__")]
)
self.assertOperationTypes(changes, "testapp", 0, ["RenameModel"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, old_name="EntityA", new_name="RenamedEntityA"
)
def test_fk_dependency(self):
"""Having a ForeignKey automatically adds a dependency."""
# Note that testapp (author) has no dependencies,
# otherapp (book) depends on testapp (author),
# thirdapp (edition) depends on otherapp (book)
changes = self.get_changes([], [self.author_name, self.book, self.edition])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author")
self.assertMigrationDependencies(changes, "testapp", 0, [])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "otherapp", 0, 0, name="Book")
self.assertMigrationDependencies(
changes, "otherapp", 0, [("testapp", "auto_1")]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "thirdapp", 1)
self.assertOperationTypes(changes, "thirdapp", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "thirdapp", 0, 0, name="Edition")
self.assertMigrationDependencies(
changes, "thirdapp", 0, [("otherapp", "auto_1")]
)
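    # The ("testapp", "auto_1") style dependencies asserted above use the
    # autodetector's internal placeholder names; arrange_for_graph() (tested
    # earlier) is what later replaces them with real migration names such as
    # "0001_initial".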
def test_proxy_fk_dependency(self):
"""FK dependencies still work on proxy models."""
# Note that testapp (author) has no dependencies,
# otherapp (book) depends on testapp (authorproxy)
changes = self.get_changes(
[], [self.author_empty, self.author_proxy_third, self.book_proxy_fk]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author")
self.assertMigrationDependencies(changes, "testapp", 0, [])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "otherapp", 0, 0, name="Book")
self.assertMigrationDependencies(
changes, "otherapp", 0, [("thirdapp", "auto_1")]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "thirdapp", 1)
self.assertOperationTypes(changes, "thirdapp", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "thirdapp", 0, 0, name="AuthorProxy")
self.assertMigrationDependencies(
changes, "thirdapp", 0, [("testapp", "auto_1")]
)
def test_same_app_no_fk_dependency(self):
"""
A migration with a FK between two models of the same app
        does not have a dependency on itself.
"""
changes = self.get_changes([], [self.author_with_publisher, self.publisher])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel", "CreateModel"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Publisher")
self.assertOperationAttributes(changes, "testapp", 0, 1, name="Author")
self.assertMigrationDependencies(changes, "testapp", 0, [])
def test_circular_fk_dependency(self):
"""
        A circular ForeignKey dependency between two apps is automatically
        resolved into two migrations on one side and one on the other.
"""
changes = self.get_changes(
[], [self.author_with_book, self.book, self.publisher_with_book]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel", "CreateModel"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author")
self.assertOperationAttributes(changes, "testapp", 0, 1, name="Publisher")
self.assertMigrationDependencies(
changes, "testapp", 0, [("otherapp", "auto_1")]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 2)
self.assertOperationTypes(changes, "otherapp", 0, ["CreateModel"])
self.assertOperationTypes(changes, "otherapp", 1, ["AddField"])
self.assertMigrationDependencies(changes, "otherapp", 0, [])
self.assertMigrationDependencies(
changes, "otherapp", 1, [("otherapp", "auto_1"), ("testapp", "auto_1")]
)
# both split migrations should be `initial`
self.assertTrue(changes["otherapp"][0].initial)
self.assertTrue(changes["otherapp"][1].initial)
def test_same_app_circular_fk_dependency(self):
"""
A migration with a FK between two models of the same app does
        not have a dependency on itself.
"""
changes = self.get_changes(
[], [self.author_with_publisher, self.publisher_with_author]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes, "testapp", 0, ["CreateModel", "CreateModel", "AddField"]
)
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author")
self.assertOperationAttributes(changes, "testapp", 0, 1, name="Publisher")
self.assertOperationAttributes(changes, "testapp", 0, 2, name="publisher")
self.assertMigrationDependencies(changes, "testapp", 0, [])
def test_same_app_circular_fk_dependency_with_unique_together_and_indexes(self):
"""
        #22275 - A migration with a circular FK dependency does not try to
        create the unique_together constraint and indexes before all required
        fields are created.
"""
changes = self.get_changes([], [self.knight, self.rabbit])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "eggs", 1)
self.assertOperationTypes(
changes,
"eggs",
0,
["CreateModel", "CreateModel"],
)
self.assertNotIn("unique_together", changes["eggs"][0].operations[0].options)
self.assertMigrationDependencies(changes, "eggs", 0, [])
def test_alter_db_table_add(self):
"""Tests detection for adding db_table in model's options."""
changes = self.get_changes(
[self.author_empty], [self.author_with_db_table_options]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterModelTable"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="author", table="author_one"
)
def test_alter_db_table_change(self):
"""Tests detection for changing db_table in model's options'."""
changes = self.get_changes(
[self.author_with_db_table_options], [self.author_with_new_db_table_options]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterModelTable"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="author", table="author_two"
)
def test_alter_db_table_remove(self):
"""Tests detection for removing db_table in model's options."""
changes = self.get_changes(
[self.author_with_db_table_options], [self.author_empty]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterModelTable"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="author", table=None
)
def test_alter_db_table_no_changes(self):
"""
        No AlterModelTable migration is generated if db_table is unchanged.
"""
changes = self.get_changes(
[self.author_with_db_table_options], [self.author_with_db_table_options]
)
# Right number of migrations?
self.assertEqual(len(changes), 0)
def test_keep_db_table_with_model_change(self):
"""
        When a model is renamed but its db_table stays as-is, the autodetector
        must not create more than one operation.
"""
changes = self.get_changes(
[self.author_with_db_table_options],
[self.author_renamed_with_db_table_options],
MigrationQuestioner({"ask_rename_model": True}),
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["RenameModel"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, old_name="Author", new_name="NewAuthor"
)
def test_alter_db_table_with_model_change(self):
"""
        When both the model name and db_table change, the autodetector must
        create two operations.
"""
changes = self.get_changes(
[self.author_with_db_table_options],
[self.author_renamed_with_new_db_table_options],
MigrationQuestioner({"ask_rename_model": True}),
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes, "testapp", 0, ["RenameModel", "AlterModelTable"]
)
self.assertOperationAttributes(
changes, "testapp", 0, 0, old_name="Author", new_name="NewAuthor"
)
self.assertOperationAttributes(
changes, "testapp", 0, 1, name="newauthor", table="author_three"
)
def test_alter_db_table_comment_add(self):
changes = self.get_changes(
[self.author_empty], [self.author_with_db_table_comment]
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterModelTableComment"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="author", table_comment="Table comment"
)
def test_alter_db_table_comment_change(self):
author_with_new_db_table_comment = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
],
{"db_table_comment": "New table comment"},
)
changes = self.get_changes(
[self.author_with_db_table_comment],
[author_with_new_db_table_comment],
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterModelTableComment"])
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
name="author",
table_comment="New table comment",
)
def test_alter_db_table_comment_remove(self):
changes = self.get_changes(
[self.author_with_db_table_comment],
[self.author_empty],
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterModelTableComment"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="author", db_table_comment=None
)
def test_alter_db_table_comment_no_changes(self):
changes = self.get_changes(
[self.author_with_db_table_comment],
[self.author_with_db_table_comment],
)
self.assertNumberMigrations(changes, "testapp", 0)
def test_identical_regex_doesnt_alter(self):
from_state = ModelState(
"testapp",
"model",
[
(
"id",
models.AutoField(
primary_key=True,
validators=[
RegexValidator(
re.compile("^[-a-zA-Z0-9_]+\\Z"),
"Enter a valid “slug” consisting of letters, numbers, "
"underscores or hyphens.",
"invalid",
)
],
),
)
],
)
to_state = ModelState(
"testapp",
"model",
[("id", models.AutoField(primary_key=True, validators=[validate_slug]))],
)
changes = self.get_changes([from_state], [to_state])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 0)
def test_different_regex_does_alter(self):
from_state = ModelState(
"testapp",
"model",
[
(
"id",
models.AutoField(
primary_key=True,
validators=[
RegexValidator(
re.compile("^[a-z]+\\Z", 32),
"Enter a valid “slug” consisting of letters, numbers, "
"underscores or hyphens.",
"invalid",
)
],
),
)
],
)
to_state = ModelState(
"testapp",
"model",
[("id", models.AutoField(primary_key=True, validators=[validate_slug]))],
)
changes = self.get_changes([from_state], [to_state])
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])
def test_alter_regex_string_to_compiled_regex(self):
regex_string = "^[a-z]+$"
from_state = ModelState(
"testapp",
"model",
[
(
"id",
models.AutoField(
primary_key=True, validators=[RegexValidator(regex_string)]
),
)
],
)
to_state = ModelState(
"testapp",
"model",
[
(
"id",
models.AutoField(
primary_key=True,
validators=[RegexValidator(re.compile(regex_string))],
),
)
],
)
changes = self.get_changes([from_state], [to_state])
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])
def test_empty_unique_together(self):
"""Empty unique_together shouldn't generate a migration."""
# Explicitly testing for not specified, since this is the case after
# a CreateModel operation w/o any definition on the original model
model_state_not_specified = ModelState(
"a", "model", [("id", models.AutoField(primary_key=True))]
)
# Explicitly testing for None, since this was the issue in #23452 after
# an AlterUniqueTogether operation with e.g. () as value
model_state_none = ModelState(
"a",
"model",
[("id", models.AutoField(primary_key=True))],
{
"unique_together": None,
},
)
# Explicitly testing for the empty set, since we now always have sets.
# During removal (('col1', 'col2'),) --> () this becomes set([])
model_state_empty = ModelState(
"a",
"model",
[("id", models.AutoField(primary_key=True))],
{
"unique_together": set(),
},
)
def test(from_state, to_state, msg):
changes = self.get_changes([from_state], [to_state])
if changes:
ops = ", ".join(
o.__class__.__name__ for o in changes["a"][0].operations
)
self.fail("Created operation(s) %s from %s" % (ops, msg))
tests = (
(
model_state_not_specified,
model_state_not_specified,
'"not specified" to "not specified"',
),
(model_state_not_specified, model_state_none, '"not specified" to "None"'),
(
model_state_not_specified,
model_state_empty,
'"not specified" to "empty"',
),
(model_state_none, model_state_not_specified, '"None" to "not specified"'),
(model_state_none, model_state_none, '"None" to "None"'),
(model_state_none, model_state_empty, '"None" to "empty"'),
(
model_state_empty,
model_state_not_specified,
'"empty" to "not specified"',
),
(model_state_empty, model_state_none, '"empty" to "None"'),
(model_state_empty, model_state_empty, '"empty" to "empty"'),
)
for t in tests:
test(*t)
def test_create_model_with_indexes(self):
"""Test creation of new model with indexes already defined."""
added_index = models.Index(
fields=["name"], name="create_model_with_indexes_idx"
)
author = ModelState(
"otherapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
],
{
"indexes": [added_index],
},
)
changes = self.get_changes([], [author])
# Right number of migrations?
self.assertEqual(len(changes["otherapp"]), 1)
# Right number of actions?
migration = changes["otherapp"][0]
self.assertEqual(len(migration.operations), 1)
# Right actions order?
self.assertOperationTypes(changes, "otherapp", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "otherapp", 0, 0, name="Author")
self.assertOperationAttributes(
changes,
"otherapp",
0,
0,
name="Author",
options={"indexes": [added_index]},
)
def test_add_indexes(self):
"""Test change detection of new indexes."""
changes = self.get_changes(
[self.author_empty, self.book], [self.author_empty, self.book_indexes]
)
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["AddIndex"])
added_index = models.Index(
fields=["author", "title"], name="book_title_author_idx"
)
self.assertOperationAttributes(
changes, "otherapp", 0, 0, model_name="book", index=added_index
)
def test_remove_indexes(self):
"""Test change detection of removed indexes."""
changes = self.get_changes(
[self.author_empty, self.book_indexes], [self.author_empty, self.book]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["RemoveIndex"])
self.assertOperationAttributes(
changes, "otherapp", 0, 0, model_name="book", name="book_title_author_idx"
)
def test_rename_indexes(self):
book_renamed_indexes = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
{
"indexes": [
models.Index(
fields=["author", "title"], name="renamed_book_title_author_idx"
)
],
},
)
changes = self.get_changes(
[self.author_empty, self.book_indexes],
[self.author_empty, book_renamed_indexes],
)
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["RenameIndex"])
self.assertOperationAttributes(
changes,
"otherapp",
0,
0,
model_name="book",
new_name="renamed_book_title_author_idx",
old_name="book_title_author_idx",
)
def test_order_fields_indexes(self):
"""Test change detection of reordering of fields in indexes."""
changes = self.get_changes(
[self.author_empty, self.book_indexes],
[self.author_empty, self.book_unordered_indexes],
)
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["RemoveIndex", "AddIndex"])
self.assertOperationAttributes(
changes, "otherapp", 0, 0, model_name="book", name="book_title_author_idx"
)
added_index = models.Index(
fields=["title", "author"], name="book_author_title_idx"
)
self.assertOperationAttributes(
changes, "otherapp", 0, 1, model_name="book", index=added_index
)
def test_create_model_with_check_constraint(self):
"""Test creation of new model with constraints already defined."""
author = ModelState(
"otherapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
],
{
"constraints": [
models.CheckConstraint(
check=models.Q(name__contains="Bob"), name="name_contains_bob"
)
]
},
)
changes = self.get_changes([], [author])
added_constraint = models.CheckConstraint(
check=models.Q(name__contains="Bob"), name="name_contains_bob"
)
# Right number of migrations?
self.assertEqual(len(changes["otherapp"]), 1)
# Right number of actions?
migration = changes["otherapp"][0]
self.assertEqual(len(migration.operations), 2)
# Right actions order?
self.assertOperationTypes(
changes, "otherapp", 0, ["CreateModel", "AddConstraint"]
)
self.assertOperationAttributes(changes, "otherapp", 0, 0, name="Author")
self.assertOperationAttributes(
changes, "otherapp", 0, 1, model_name="author", constraint=added_constraint
)
def test_add_constraints(self):
"""Test change detection of new constraints."""
changes = self.get_changes(
[self.author_name], [self.author_name_check_constraint]
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AddConstraint"])
added_constraint = models.CheckConstraint(
check=models.Q(name__contains="Bob"), name="name_contains_bob"
)
self.assertOperationAttributes(
changes, "testapp", 0, 0, model_name="author", constraint=added_constraint
)
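    # Rough sketch of the user-facing change this simulates (illustrative;
    # the real fixture is self.author_name_check_constraint):
    #
    #     class Author(models.Model):
    #         name = models.CharField(max_length=200)
    #
    #         class Meta:
    #             constraints = [
    #                 models.CheckConstraint(
    #                     check=models.Q(name__contains="Bob"),
    #                     name="name_contains_bob",
    #                 ),
    #             ]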
def test_add_constraints_with_new_model(self):
book_with_unique_title_and_pony = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("title", models.CharField(max_length=200)),
("pony", models.ForeignKey("otherapp.Pony", models.CASCADE)),
],
{
"constraints": [
models.UniqueConstraint(
fields=["title", "pony"],
name="unique_title_pony",
)
]
},
)
changes = self.get_changes(
[self.book_with_no_author],
[book_with_unique_title_and_pony, self.other_pony],
)
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(
changes,
"otherapp",
0,
["CreateModel", "AddField", "AddConstraint"],
)
def test_add_index_with_new_model(self):
book_with_index_title_and_pony = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("title", models.CharField(max_length=200)),
("pony", models.ForeignKey("otherapp.Pony", models.CASCADE)),
],
{
"indexes": [
models.Index(fields=["title", "pony"], name="index_title_pony"),
]
},
)
changes = self.get_changes(
[self.book_with_no_author],
[book_with_index_title_and_pony, self.other_pony],
)
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(
changes,
"otherapp",
0,
["CreateModel", "AddField", "AddIndex"],
)
def test_remove_constraints(self):
"""Test change detection of removed constraints."""
changes = self.get_changes(
[self.author_name_check_constraint], [self.author_name]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["RemoveConstraint"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, model_name="author", name="name_contains_bob"
)
def test_add_unique_together(self):
"""Tests unique_together detection."""
changes = self.get_changes(
[self.author_empty, self.book],
[self.author_empty, self.book_unique_together],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["AlterUniqueTogether"])
self.assertOperationAttributes(
changes,
"otherapp",
0,
0,
name="book",
unique_together={("author", "title")},
)
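    # The fixture delta is roughly equivalent to adding (illustrative):
    #
    #     class Meta:
    #         unique_together = {("author", "title")}
    #
    # which the autodetector maps onto a single AlterUniqueTogether
    # operation rather than per-tuple add/remove operations.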
def test_remove_unique_together(self):
"""Tests unique_together detection."""
changes = self.get_changes(
[self.author_empty, self.book_unique_together],
[self.author_empty, self.book],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["AlterUniqueTogether"])
self.assertOperationAttributes(
changes, "otherapp", 0, 0, name="book", unique_together=set()
)
def test_unique_together_remove_fk(self):
"""Tests unique_together and field removal detection & ordering"""
changes = self.get_changes(
[self.author_empty, self.book_unique_together],
[self.author_empty, self.book_with_no_author],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(
changes,
"otherapp",
0,
["AlterUniqueTogether", "RemoveField"],
)
self.assertOperationAttributes(
changes, "otherapp", 0, 0, name="book", unique_together=set()
)
self.assertOperationAttributes(
changes, "otherapp", 0, 1, model_name="book", name="author"
)
def test_unique_together_no_changes(self):
"""
unique_together doesn't generate a migration if no
changes have been made.
"""
changes = self.get_changes(
[self.author_empty, self.book_unique_together],
[self.author_empty, self.book_unique_together],
)
# Right number of migrations?
self.assertEqual(len(changes), 0)
def test_unique_together_ordering(self):
"""
unique_together also triggers on ordering changes.
"""
changes = self.get_changes(
[self.author_empty, self.book_unique_together],
[self.author_empty, self.book_unique_together_2],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(
changes,
"otherapp",
0,
["AlterUniqueTogether"],
)
self.assertOperationAttributes(
changes,
"otherapp",
0,
0,
name="book",
unique_together={("title", "author")},
)
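    # unique_together entries are ordered tuples: swapping ("author",
    # "title") to ("title", "author") changes the column order of the
    # underlying composite unique index, so it must be detected as a change
    # even though the set of columns is identical.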
def test_add_field_and_unique_together(self):
"""
Added fields will be created before using them in unique_together.
"""
changes = self.get_changes(
[self.author_empty, self.book],
[self.author_empty, self.book_unique_together_3],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(
changes,
"otherapp",
0,
["AddField", "AlterUniqueTogether"],
)
self.assertOperationAttributes(
changes,
"otherapp",
0,
1,
name="book",
unique_together={("title", "newfield")},
)
def test_create_model_and_unique_together(self):
author = ModelState(
"otherapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
],
)
book_with_author = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("otherapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
{
"unique_together": {("title", "author")},
},
)
changes = self.get_changes(
[self.book_with_no_author], [author, book_with_author]
)
# Right number of migrations?
self.assertEqual(len(changes["otherapp"]), 1)
# Right number of actions?
migration = changes["otherapp"][0]
self.assertEqual(len(migration.operations), 3)
# Right actions order?
self.assertOperationTypes(
changes,
"otherapp",
0,
["CreateModel", "AddField", "AlterUniqueTogether"],
)
def test_remove_field_and_unique_together(self):
"""
Removed fields will be removed after updating unique_together.
"""
changes = self.get_changes(
[self.author_empty, self.book_unique_together_3],
[self.author_empty, self.book_unique_together],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(
changes,
"otherapp",
0,
["AlterUniqueTogether", "RemoveField"],
)
self.assertOperationAttributes(
changes,
"otherapp",
0,
0,
name="book",
unique_together={("author", "title")},
)
self.assertOperationAttributes(
changes,
"otherapp",
0,
1,
model_name="book",
name="newfield",
)
def test_alter_field_and_unique_together(self):
"""Fields are altered after deleting some unique_together."""
initial_author = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("age", models.IntegerField(db_index=True)),
],
{
"unique_together": {("name",)},
},
)
author_reversed_constraints = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200, unique=True)),
("age", models.IntegerField()),
],
{
"unique_together": {("age",)},
},
)
changes = self.get_changes([initial_author], [author_reversed_constraints])
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes,
"testapp",
0,
[
"AlterUniqueTogether",
"AlterField",
"AlterField",
"AlterUniqueTogether",
],
)
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
name="author",
unique_together=set(),
)
self.assertOperationAttributes(
changes,
"testapp",
0,
1,
model_name="author",
name="age",
)
self.assertOperationAttributes(
changes,
"testapp",
0,
2,
model_name="author",
name="name",
)
self.assertOperationAttributes(
changes,
"testapp",
0,
3,
name="author",
unique_together={("age",)},
)
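    # Note the "sandwich" ordering asserted above: the old unique_together
    # is dropped first, the fields are altered, then the new unique_together
    # is added, so constraint changes never reference a stale column
    # definition (e.g. the unique constraint on "name" is removed before the
    # column itself gains unique=True).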
def test_partly_alter_unique_together_increase(self):
initial_author = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("age", models.IntegerField()),
],
{
"unique_together": {("name",)},
},
)
author_new_constraints = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("age", models.IntegerField()),
],
{
"unique_together": {("name",), ("age",)},
},
)
changes = self.get_changes([initial_author], [author_new_constraints])
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes,
"testapp",
0,
["AlterUniqueTogether"],
)
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
name="author",
unique_together={("name",), ("age",)},
)
def test_partly_alter_unique_together_decrease(self):
initial_author = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("age", models.IntegerField()),
],
{
"unique_together": {("name",), ("age",)},
},
)
author_new_constraints = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("age", models.IntegerField()),
],
{
"unique_together": {("name",)},
},
)
changes = self.get_changes([initial_author], [author_new_constraints])
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes,
"testapp",
0,
["AlterUniqueTogether"],
)
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
name="author",
unique_together={("name",)},
)
def test_rename_field_and_unique_together(self):
"""Fields are renamed before updating unique_together."""
changes = self.get_changes(
[self.author_empty, self.book_unique_together_3],
[self.author_empty, self.book_unique_together_4],
MigrationQuestioner({"ask_rename": True}),
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(
changes,
"otherapp",
0,
["RenameField", "AlterUniqueTogether"],
)
self.assertOperationAttributes(
changes,
"otherapp",
0,
1,
name="book",
unique_together={("title", "newfield2")},
)
def test_proxy(self):
"""The autodetector correctly deals with proxy models."""
# First, we test adding a proxy model
changes = self.get_changes(
[self.author_empty], [self.author_empty, self.author_proxy]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
name="AuthorProxy",
options={"proxy": True, "indexes": [], "constraints": []},
)
# Now, we test turning a proxy model into a non-proxy model
# It should delete the proxy then make the real one
changes = self.get_changes(
[self.author_empty, self.author_proxy],
[self.author_empty, self.author_proxy_notproxy],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["DeleteModel", "CreateModel"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="AuthorProxy")
self.assertOperationAttributes(
changes, "testapp", 0, 1, name="AuthorProxy", options={}
)
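    # A proxy model adds no table, so the CreateModel here only records
    # state (illustrative sketch of the fixture):
    #
    #     class AuthorProxy(Author):
    #         class Meta:
    #             proxy = True
    #
    # Dropping Meta.proxy turns it into a concrete model, which is why the
    # second half asserts DeleteModel + CreateModel rather than a plain
    # options change.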
def test_proxy_non_model_parent(self):
class Mixin:
pass
author_proxy_non_model_parent = ModelState(
"testapp",
"AuthorProxy",
[],
{"proxy": True},
(Mixin, "testapp.author"),
)
changes = self.get_changes(
[self.author_empty],
[self.author_empty, author_proxy_non_model_parent],
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
name="AuthorProxy",
options={"proxy": True, "indexes": [], "constraints": []},
bases=(Mixin, "testapp.author"),
)
def test_proxy_custom_pk(self):
"""
#23415 - The autodetector must correctly deal with custom FK on proxy
models.
"""
# First, we test the default pk field name
changes = self.get_changes(
[], [self.author_empty, self.author_proxy_third, self.book_proxy_fk]
)
# The model the FK is pointing from and to.
self.assertEqual(
changes["otherapp"][0].operations[0].fields[2][1].remote_field.model,
"thirdapp.AuthorProxy",
)
# Now, we test the custom pk field name
changes = self.get_changes(
[], [self.author_custom_pk, self.author_proxy_third, self.book_proxy_fk]
)
# The model the FK is pointing from and to.
self.assertEqual(
changes["otherapp"][0].operations[0].fields[2][1].remote_field.model,
"thirdapp.AuthorProxy",
)
def test_proxy_to_mti_with_fk_to_proxy(self):
# First, test the pk table and field name.
to_state = self.make_project_state(
[self.author_empty, self.author_proxy_third, self.book_proxy_fk],
)
changes = self.get_changes([], to_state)
fk_field = changes["otherapp"][0].operations[0].fields[2][1]
self.assertEqual(
to_state.get_concrete_model_key(fk_field.remote_field.model),
("testapp", "author"),
)
self.assertEqual(fk_field.remote_field.model, "thirdapp.AuthorProxy")
# Change AuthorProxy to use MTI.
from_state = to_state.clone()
to_state = self.make_project_state(
[self.author_empty, self.author_proxy_third_notproxy, self.book_proxy_fk],
)
changes = self.get_changes(from_state, to_state)
# Right number/type of migrations for the AuthorProxy model?
self.assertNumberMigrations(changes, "thirdapp", 1)
self.assertOperationTypes(
changes, "thirdapp", 0, ["DeleteModel", "CreateModel"]
)
# Right number/type of migrations for the Book model with a FK to
# AuthorProxy?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["AlterField"])
# otherapp should depend on thirdapp.
self.assertMigrationDependencies(
changes, "otherapp", 0, [("thirdapp", "auto_1")]
)
# Now, test the pk table and field name.
fk_field = changes["otherapp"][0].operations[0].field
self.assertEqual(
to_state.get_concrete_model_key(fk_field.remote_field.model),
("thirdapp", "authorproxy"),
)
self.assertEqual(fk_field.remote_field.model, "thirdapp.AuthorProxy")
def test_proxy_to_mti_with_fk_to_proxy_proxy(self):
# First, test the pk table and field name.
to_state = self.make_project_state(
[
self.author_empty,
self.author_proxy,
self.author_proxy_proxy,
self.book_proxy_proxy_fk,
]
)
changes = self.get_changes([], to_state)
fk_field = changes["otherapp"][0].operations[0].fields[1][1]
self.assertEqual(
to_state.get_concrete_model_key(fk_field.remote_field.model),
("testapp", "author"),
)
self.assertEqual(fk_field.remote_field.model, "testapp.AAuthorProxyProxy")
# Change AuthorProxy to use MTI. FK still points to AAuthorProxyProxy,
# a proxy of AuthorProxy.
from_state = to_state.clone()
to_state = self.make_project_state(
[
self.author_empty,
self.author_proxy_notproxy,
self.author_proxy_proxy,
self.book_proxy_proxy_fk,
]
)
changes = self.get_changes(from_state, to_state)
# Right number/type of migrations for the AuthorProxy model?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["DeleteModel", "CreateModel"])
# Right number/type of migrations for the Book model with a FK to
# AAuthorProxyProxy?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["AlterField"])
# otherapp should depend on testapp.
self.assertMigrationDependencies(
changes, "otherapp", 0, [("testapp", "auto_1")]
)
# Now, test the pk table and field name.
fk_field = changes["otherapp"][0].operations[0].field
self.assertEqual(
to_state.get_concrete_model_key(fk_field.remote_field.model),
("testapp", "authorproxy"),
)
self.assertEqual(fk_field.remote_field.model, "testapp.AAuthorProxyProxy")
def test_unmanaged_create(self):
"""The autodetector correctly deals with managed models."""
# First, we test adding an unmanaged model
changes = self.get_changes(
[self.author_empty], [self.author_empty, self.author_unmanaged]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="AuthorUnmanaged", options={"managed": False}
)
def test_unmanaged_delete(self):
changes = self.get_changes(
[self.author_empty, self.author_unmanaged], [self.author_empty]
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["DeleteModel"])
def test_unmanaged_to_managed(self):
# Now, we test turning an unmanaged model into a managed model
changes = self.get_changes(
[self.author_empty, self.author_unmanaged],
[self.author_empty, self.author_unmanaged_managed],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="authorunmanaged", options={}
)
def test_managed_to_unmanaged(self):
# Now, we turn managed to unmanaged.
changes = self.get_changes(
[self.author_empty, self.author_unmanaged_managed],
[self.author_empty, self.author_unmanaged],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="authorunmanaged", options={"managed": False}
)
def test_unmanaged_custom_pk(self):
"""
#23415 - The autodetector must correctly deal with custom FK on
unmanaged models.
"""
# First, we test the default pk field name
changes = self.get_changes([], [self.author_unmanaged_default_pk, self.book])
# The model the FK on the book model points to.
fk_field = changes["otherapp"][0].operations[0].fields[2][1]
self.assertEqual(fk_field.remote_field.model, "testapp.Author")
# Now, we test the custom pk field name
changes = self.get_changes([], [self.author_unmanaged_custom_pk, self.book])
# The model the FK on the book model points to.
fk_field = changes["otherapp"][0].operations[0].fields[2][1]
self.assertEqual(fk_field.remote_field.model, "testapp.Author")
@override_settings(AUTH_USER_MODEL="thirdapp.CustomUser")
def test_swappable(self):
with isolate_lru_cache(apps.get_swappable_settings_name):
changes = self.get_changes(
[self.custom_user], [self.custom_user, self.author_with_custom_user]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author")
self.assertMigrationDependencies(
changes, "testapp", 0, [("__setting__", "AUTH_USER_MODEL")]
)
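    # Sketch of the kind of FK the fixture models (illustrative):
    #
    #     user = models.ForeignKey(settings.AUTH_USER_MODEL, models.CASCADE)
    #
    # References to the swappable model are recorded as the special
    # ("__setting__", "AUTH_USER_MODEL") dependency so the migration keeps
    # working if AUTH_USER_MODEL is later repointed.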
def test_swappable_lowercase(self):
model_state = ModelState(
"testapp",
"Document",
[
("id", models.AutoField(primary_key=True)),
(
"owner",
models.ForeignKey(
settings.AUTH_USER_MODEL.lower(),
models.CASCADE,
),
),
],
)
with isolate_lru_cache(apps.get_swappable_settings_name):
changes = self.get_changes([], [model_state])
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Document")
self.assertMigrationDependencies(
changes,
"testapp",
0,
[("__setting__", "AUTH_USER_MODEL")],
)
@override_settings(AUTH_USER_MODEL="thirdapp.CustomUser")
def test_swappable_many_to_many_model_case(self):
document_lowercase = ModelState(
"testapp",
"Document",
[
("id", models.AutoField(primary_key=True)),
("owners", models.ManyToManyField(settings.AUTH_USER_MODEL.lower())),
],
)
document = ModelState(
"testapp",
"Document",
[
("id", models.AutoField(primary_key=True)),
("owners", models.ManyToManyField(settings.AUTH_USER_MODEL)),
],
)
with isolate_lru_cache(apps.get_swappable_settings_name):
changes = self.get_changes(
[self.custom_user, document_lowercase],
[self.custom_user, document],
)
self.assertEqual(len(changes), 0)
def test_swappable_changed(self):
with isolate_lru_cache(apps.get_swappable_settings_name):
before = self.make_project_state([self.custom_user, self.author_with_user])
with override_settings(AUTH_USER_MODEL="thirdapp.CustomUser"):
after = self.make_project_state(
[self.custom_user, self.author_with_custom_user]
)
autodetector = MigrationAutodetector(before, after)
changes = autodetector._detect_changes()
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, model_name="author", name="user"
)
fk_field = changes["testapp"][0].operations[0].field
self.assertEqual(fk_field.remote_field.model, "thirdapp.CustomUser")
def test_add_field_with_default(self):
"""#22030 - Adding a field with a default should work."""
changes = self.get_changes([self.author_empty], [self.author_name_default])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AddField"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="name")
def test_custom_deconstructible(self):
"""
Two instances which deconstruct to the same value aren't considered a
change.
"""
changes = self.get_changes(
[self.author_name_deconstructible_1], [self.author_name_deconstructible_2]
)
# Right number of migrations?
self.assertEqual(len(changes), 0)
def test_deconstruct_field_kwarg(self):
"""Field instances are handled correctly by nested deconstruction."""
changes = self.get_changes(
[self.author_name_deconstructible_3], [self.author_name_deconstructible_4]
)
self.assertEqual(changes, {})
def test_deconstructible_list(self):
"""Nested deconstruction descends into lists."""
# When lists contain items that deconstruct to identical values, those lists
# should be considered equal for the purpose of detecting state changes
# (even if the original items are unequal).
changes = self.get_changes(
[self.author_name_deconstructible_list_1],
[self.author_name_deconstructible_list_2],
)
self.assertEqual(changes, {})
# Legitimate differences within the deconstructed lists should be reported
# as a change
changes = self.get_changes(
[self.author_name_deconstructible_list_1],
[self.author_name_deconstructible_list_3],
)
self.assertEqual(len(changes), 1)
def test_deconstructible_tuple(self):
"""Nested deconstruction descends into tuples."""
# When tuples contain items that deconstruct to identical values, those tuples
# should be considered equal for the purpose of detecting state changes
# (even if the original items are unequal).
changes = self.get_changes(
[self.author_name_deconstructible_tuple_1],
[self.author_name_deconstructible_tuple_2],
)
self.assertEqual(changes, {})
# Legitimate differences within the deconstructed tuples should be reported
# as a change
changes = self.get_changes(
[self.author_name_deconstructible_tuple_1],
[self.author_name_deconstructible_tuple_3],
)
self.assertEqual(len(changes), 1)
def test_deconstructible_dict(self):
"""Nested deconstruction descends into dict values."""
# When dicts contain items whose values deconstruct to identical values,
# those dicts should be considered equal for the purpose of detecting
# state changes (even if the original values are unequal).
changes = self.get_changes(
[self.author_name_deconstructible_dict_1],
[self.author_name_deconstructible_dict_2],
)
self.assertEqual(changes, {})
# Legitimate differences within the deconstructed dicts should be reported
# as a change
changes = self.get_changes(
[self.author_name_deconstructible_dict_1],
[self.author_name_deconstructible_dict_3],
)
self.assertEqual(len(changes), 1)
def test_nested_deconstructible_objects(self):
"""
Nested deconstruction is applied recursively to the args/kwargs of
deconstructed objects.
"""
# If the items within a deconstructed object's args/kwargs have the same
# deconstructed values - whether or not the items themselves are different
# instances - then the object as a whole is regarded as unchanged.
changes = self.get_changes(
[self.author_name_nested_deconstructible_1],
[self.author_name_nested_deconstructible_2],
)
self.assertEqual(changes, {})
# Differences that exist solely within the args list of a deconstructed object
# should be reported as changes
changes = self.get_changes(
[self.author_name_nested_deconstructible_1],
[self.author_name_nested_deconstructible_changed_arg],
)
self.assertEqual(len(changes), 1)
# Additional args should also be reported as a change
changes = self.get_changes(
[self.author_name_nested_deconstructible_1],
[self.author_name_nested_deconstructible_extra_arg],
)
self.assertEqual(len(changes), 1)
# Differences that exist solely within the kwargs dict of a deconstructed object
# should be reported as changes
changes = self.get_changes(
[self.author_name_nested_deconstructible_1],
[self.author_name_nested_deconstructible_changed_kwarg],
)
self.assertEqual(len(changes), 1)
# Additional kwargs should also be reported as a change
changes = self.get_changes(
[self.author_name_nested_deconstructible_1],
[self.author_name_nested_deconstructible_extra_kwarg],
)
self.assertEqual(len(changes), 1)
def test_deconstruct_type(self):
"""
#22951 -- Uninstantiated classes with deconstruct are correctly returned
by deep_deconstruct during serialization.
"""
author = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200,
# IntegerField intentionally not instantiated.
default=models.IntegerField,
),
),
],
)
changes = self.get_changes([], [author])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
def test_replace_string_with_foreignkey(self):
"""
#22300 - Adding an FK in the same "spot" as a deleted CharField should
work.
"""
changes = self.get_changes(
[self.author_with_publisher_string],
[self.author_with_publisher, self.publisher],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes, "testapp", 0, ["CreateModel", "RemoveField", "AddField"]
)
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Publisher")
self.assertOperationAttributes(changes, "testapp", 0, 1, name="publisher_name")
self.assertOperationAttributes(changes, "testapp", 0, 2, name="publisher")
def test_foreign_key_removed_before_target_model(self):
"""
Removing an FK and the model it targets in the same change must remove
the FK field before the model to maintain consistency.
"""
changes = self.get_changes(
[self.author_with_publisher, self.publisher], [self.author_name]
) # removes both the model and FK
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["RemoveField", "DeleteModel"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="publisher")
self.assertOperationAttributes(changes, "testapp", 0, 1, name="Publisher")
@mock.patch(
"django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition",
side_effect=AssertionError("Should not have prompted for not null addition"),
)
def test_add_many_to_many(self, mocked_ask_method):
"""#22435 - Adding a ManyToManyField should not prompt for a default."""
changes = self.get_changes(
[self.author_empty, self.publisher], [self.author_with_m2m, self.publisher]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AddField"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="publishers")
def test_alter_many_to_many(self):
changes = self.get_changes(
[self.author_with_m2m, self.publisher],
[self.author_with_m2m_blank, self.publisher],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="publishers")
def test_create_with_through_model(self):
"""
        Adding an m2m with a through model and the models that use it should be
ordered correctly.
"""
changes = self.get_changes(
[], [self.author_with_m2m_through, self.publisher, self.contract]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes,
"testapp",
0,
[
"CreateModel",
"CreateModel",
"CreateModel",
"AddField",
],
)
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author")
self.assertOperationAttributes(changes, "testapp", 0, 1, name="Publisher")
self.assertOperationAttributes(changes, "testapp", 0, 2, name="Contract")
self.assertOperationAttributes(
changes, "testapp", 0, 3, model_name="author", name="publishers"
)
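    # Sketch of the m2m-with-through setup this exercises (illustrative; the
    # real fixtures are self.author_with_m2m_through and self.contract):
    #
    #     publishers = models.ManyToManyField(
    #         "testapp.Publisher", through="testapp.Contract"
    #     )
    #
    # The through model must exist before the m2m field can be added, hence
    # the three CreateModel operations precede the AddField.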
def test_create_with_through_model_separate_apps(self):
author_with_m2m_through = ModelState(
"authors",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"publishers",
models.ManyToManyField(
"testapp.Publisher", through="contract.Contract"
),
),
],
)
contract = ModelState(
"contract",
"Contract",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("authors.Author", models.CASCADE)),
("publisher", models.ForeignKey("testapp.Publisher", models.CASCADE)),
],
)
changes = self.get_changes(
[], [author_with_m2m_through, self.publisher, contract]
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertNumberMigrations(changes, "contract", 1)
self.assertNumberMigrations(changes, "authors", 2)
self.assertMigrationDependencies(
changes,
"authors",
1,
{("authors", "auto_1"), ("contract", "auto_1"), ("testapp", "auto_1")},
)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Publisher")
self.assertOperationTypes(changes, "contract", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "contract", 0, 0, name="Contract")
self.assertOperationTypes(changes, "authors", 0, ["CreateModel"])
self.assertOperationTypes(changes, "authors", 1, ["AddField"])
self.assertOperationAttributes(changes, "authors", 0, 0, name="Author")
self.assertOperationAttributes(
changes, "authors", 1, 0, model_name="author", name="publishers"
)
def test_many_to_many_removed_before_through_model(self):
"""
Removing a ManyToManyField and the "through" model in the same change
must remove the field before the model to maintain consistency.
"""
changes = self.get_changes(
[
self.book_with_multiple_authors_through_attribution,
self.author_name,
self.attribution,
],
[self.book_with_no_author, self.author_name],
)
# Remove both the through model and ManyToMany
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(
changes, "otherapp", 0, ["RemoveField", "DeleteModel"]
)
self.assertOperationAttributes(
changes, "otherapp", 0, 0, name="authors", model_name="book"
)
self.assertOperationAttributes(changes, "otherapp", 0, 1, name="Attribution")
def test_many_to_many_removed_before_through_model_2(self):
"""
Removing a model that contains a ManyToManyField and the "through" model
in the same change must remove the field before the model to maintain
consistency.
"""
changes = self.get_changes(
[
self.book_with_multiple_authors_through_attribution,
self.author_name,
self.attribution,
],
[self.author_name],
)
# Remove both the through model and ManyToMany
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(
changes, "otherapp", 0, ["RemoveField", "DeleteModel", "DeleteModel"]
)
self.assertOperationAttributes(
changes, "otherapp", 0, 0, name="authors", model_name="book"
)
self.assertOperationAttributes(changes, "otherapp", 0, 1, name="Attribution")
self.assertOperationAttributes(changes, "otherapp", 0, 2, name="Book")
def test_m2m_w_through_multistep_remove(self):
"""
        A model with an m2m field that specifies a "through" model cannot be
        removed at the same time as that through model without the schema
        passing through an inconsistent state. The autodetector should order
        the operations so the through model's FK fields are removed before
        either model is deleted.
"""
changes = self.get_changes(
[self.author_with_m2m_through, self.publisher, self.contract],
[self.publisher],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes,
"testapp",
0,
["RemoveField", "RemoveField", "DeleteModel", "DeleteModel"],
)
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="author", model_name="contract"
)
self.assertOperationAttributes(
changes, "testapp", 0, 1, name="publisher", model_name="contract"
)
self.assertOperationAttributes(changes, "testapp", 0, 2, name="Author")
self.assertOperationAttributes(changes, "testapp", 0, 3, name="Contract")
def test_concrete_field_changed_to_many_to_many(self):
"""
#23938 - Changing a concrete field into a ManyToManyField
first removes the concrete field and then adds the m2m field.
"""
changes = self.get_changes(
[self.author_with_former_m2m], [self.author_with_m2m, self.publisher]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes, "testapp", 0, ["CreateModel", "RemoveField", "AddField"]
)
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Publisher")
self.assertOperationAttributes(
changes, "testapp", 0, 1, name="publishers", model_name="author"
)
self.assertOperationAttributes(
changes, "testapp", 0, 2, name="publishers", model_name="author"
)
def test_many_to_many_changed_to_concrete_field(self):
"""
#23938 - Changing a ManyToManyField into a concrete field
first removes the m2m field and then adds the concrete field.
"""
changes = self.get_changes(
[self.author_with_m2m, self.publisher], [self.author_with_former_m2m]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes, "testapp", 0, ["RemoveField", "DeleteModel", "AddField"]
)
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="publishers", model_name="author"
)
self.assertOperationAttributes(changes, "testapp", 0, 1, name="Publisher")
self.assertOperationAttributes(
changes, "testapp", 0, 2, name="publishers", model_name="author"
)
self.assertOperationFieldAttributes(changes, "testapp", 0, 2, max_length=100)
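    # Because the old m2m field and the new concrete field share the name
    # "publishers", the autodetector must emit RemoveField before AddField;
    # the final assertion confirms the re-added field is the concrete
    # CharField (max_length=100) rather than the m2m.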
def test_non_circular_foreignkey_dependency_removal(self):
"""
If two models with a ForeignKey from one to the other are removed at the
same time, the autodetector should remove them in the correct order.
"""
changes = self.get_changes(
[self.author_with_publisher, self.publisher_with_author], []
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes, "testapp", 0, ["RemoveField", "DeleteModel", "DeleteModel"]
)
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="author", model_name="publisher"
)
self.assertOperationAttributes(changes, "testapp", 0, 1, name="Author")
self.assertOperationAttributes(changes, "testapp", 0, 2, name="Publisher")
def test_alter_model_options(self):
"""Changing a model's options should make a change."""
changes = self.get_changes([self.author_empty], [self.author_with_options])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"])
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
options={
"permissions": [("can_hire", "Can hire")],
"verbose_name": "Authi",
},
)
# Changing them back to empty should also make a change
changes = self.get_changes([self.author_with_options], [self.author_empty])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="author", options={}
)
def test_alter_model_options_proxy(self):
"""Changing a proxy model's options should also make a change."""
changes = self.get_changes(
[self.author_proxy, self.author_empty],
[self.author_proxy_options, self.author_empty],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"])
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
name="authorproxy",
options={"verbose_name": "Super Author"},
)
def test_set_alter_order_with_respect_to(self):
"""Setting order_with_respect_to adds a field."""
changes = self.get_changes(
[self.book, self.author_with_book],
[self.book, self.author_with_book_order_wrt],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterOrderWithRespectTo"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="author", order_with_respect_to="book"
)
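    # order_with_respect_to is a Meta option (illustrative):
    #
    #     class Meta:
    #         order_with_respect_to = "book"
    #
    # Setting it implicitly adds a hidden "_order" column, which is why the
    # autodetector models it as a dedicated AlterOrderWithRespectTo
    # operation rather than a plain AlterModelOptions.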
def test_add_alter_order_with_respect_to(self):
"""
        Setting order_with_respect_to at the same time as adding the FK
        it refers to performs the operations in the right order.
"""
changes = self.get_changes(
[self.author_name], [self.book, self.author_with_book_order_wrt]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes, "testapp", 0, ["AddField", "AlterOrderWithRespectTo"]
)
self.assertOperationAttributes(
changes, "testapp", 0, 0, model_name="author", name="book"
)
self.assertOperationAttributes(
changes, "testapp", 0, 1, name="author", order_with_respect_to="book"
)
def test_remove_alter_order_with_respect_to(self):
"""
        Removing order_with_respect_to at the same time as removing the FK
        it refers to performs the operations in the right order.
"""
changes = self.get_changes(
[self.book, self.author_with_book_order_wrt], [self.author_name]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes, "testapp", 0, ["AlterOrderWithRespectTo", "RemoveField"]
)
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="author", order_with_respect_to=None
)
self.assertOperationAttributes(
changes, "testapp", 0, 1, model_name="author", name="book"
)
def test_add_model_order_with_respect_to(self):
"""
        Setting order_with_respect_to when adding the whole model keeps
        the option inside the CreateModel operation.
"""
changes = self.get_changes([], [self.book, self.author_with_book_order_wrt])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
name="Author",
options={"order_with_respect_to": "book"},
)
self.assertNotIn(
"_order",
[name for name, field in changes["testapp"][0].operations[0].fields],
)
def test_add_model_order_with_respect_to_unique_together(self):
changes = self.get_changes(
[],
[
self.book,
ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
],
options={
"order_with_respect_to": "book",
"unique_together": {("id", "_order")},
},
),
],
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
name="Author",
options={
"order_with_respect_to": "book",
"unique_together": {("id", "_order")},
},
)
def test_add_model_order_with_respect_to_constraint(self):
after = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
],
options={
"order_with_respect_to": "book",
"constraints": [
models.CheckConstraint(
check=models.Q(_order__gt=1), name="book_order_gt_1"
),
],
},
)
changes = self.get_changes([], [self.book, after])
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes,
"testapp",
0,
["CreateModel", "AddConstraint"],
)
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
name="Author",
options={"order_with_respect_to": "book"},
)
def test_add_model_order_with_respect_to_index(self):
after = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
],
options={
"order_with_respect_to": "book",
"indexes": [models.Index(fields=["_order"], name="book_order_idx")],
},
)
changes = self.get_changes([], [self.book, after])
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
name="Author",
options={
"order_with_respect_to": "book",
"indexes": [models.Index(fields=["_order"], name="book_order_idx")],
},
)
def test_set_alter_order_with_respect_to_index_constraint_unique_together(self):
tests = [
(
"AddIndex",
{
"indexes": [
models.Index(fields=["_order"], name="book_order_idx"),
]
},
),
(
"AddConstraint",
{
"constraints": [
models.CheckConstraint(
check=models.Q(_order__gt=1),
name="book_order_gt_1",
),
]
},
),
("AlterUniqueTogether", {"unique_together": {("id", "_order")}}),
]
for operation, extra_option in tests:
with self.subTest(operation=operation):
after = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
],
options={
"order_with_respect_to": "book",
**extra_option,
},
)
changes = self.get_changes(
[self.book, self.author_with_book],
[self.book, after],
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes,
"testapp",
0,
[
"AlterOrderWithRespectTo",
operation,
],
)
def test_alter_model_managers(self):
"""
Changing the model managers adds a new operation.
"""
changes = self.get_changes([self.other_pony], [self.other_pony_food])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["AlterModelManagers"])
self.assertOperationAttributes(changes, "otherapp", 0, 0, name="pony")
self.assertEqual(
[name for name, mgr in changes["otherapp"][0].operations[0].managers],
["food_qs", "food_mgr", "food_mgr_kwargs"],
)
self.assertEqual(
changes["otherapp"][0].operations[0].managers[1][1].args, ("a", "b", 1, 2)
)
self.assertEqual(
changes["otherapp"][0].operations[0].managers[2][1].args, ("x", "y", 3, 4)
)
def test_swappable_first_inheritance(self):
"""Swappable models get their CreateModel first."""
changes = self.get_changes([], [self.custom_user, self.aardvark])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "thirdapp", 1)
self.assertOperationTypes(
changes, "thirdapp", 0, ["CreateModel", "CreateModel"]
)
self.assertOperationAttributes(changes, "thirdapp", 0, 0, name="CustomUser")
self.assertOperationAttributes(changes, "thirdapp", 0, 1, name="Aardvark")
def test_default_related_name_option(self):
model_state = ModelState(
"app",
"model",
[
("id", models.AutoField(primary_key=True)),
],
options={"default_related_name": "related_name"},
)
changes = self.get_changes([], [model_state])
self.assertNumberMigrations(changes, "app", 1)
self.assertOperationTypes(changes, "app", 0, ["CreateModel"])
self.assertOperationAttributes(
changes,
"app",
0,
0,
name="model",
options={"default_related_name": "related_name"},
)
altered_model_state = ModelState(
"app",
"Model",
[
("id", models.AutoField(primary_key=True)),
],
)
changes = self.get_changes([model_state], [altered_model_state])
self.assertNumberMigrations(changes, "app", 1)
self.assertOperationTypes(changes, "app", 0, ["AlterModelOptions"])
self.assertOperationAttributes(changes, "app", 0, 0, name="model", options={})
@override_settings(AUTH_USER_MODEL="thirdapp.CustomUser")
def test_swappable_first_setting(self):
"""Swappable models get their CreateModel first."""
with isolate_lru_cache(apps.get_swappable_settings_name):
changes = self.get_changes([], [self.custom_user_no_inherit, self.aardvark])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "thirdapp", 1)
self.assertOperationTypes(
changes, "thirdapp", 0, ["CreateModel", "CreateModel"]
)
self.assertOperationAttributes(changes, "thirdapp", 0, 0, name="CustomUser")
self.assertOperationAttributes(changes, "thirdapp", 0, 1, name="Aardvark")
def test_bases_first(self):
"""Bases of other models come first."""
changes = self.get_changes(
[], [self.aardvark_based_on_author, self.author_name]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel", "CreateModel"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author")
self.assertOperationAttributes(changes, "testapp", 0, 1, name="Aardvark")
def test_bases_first_mixed_case_app_label(self):
app_label = "MiXedCaseApp"
changes = self.get_changes(
[],
[
ModelState(
app_label,
"owner",
[
("id", models.AutoField(primary_key=True)),
],
),
ModelState(
app_label,
"place",
[
("id", models.AutoField(primary_key=True)),
(
"owner",
models.ForeignKey("MiXedCaseApp.owner", models.CASCADE),
),
],
),
ModelState(app_label, "restaurant", [], bases=("MiXedCaseApp.place",)),
],
)
self.assertNumberMigrations(changes, app_label, 1)
self.assertOperationTypes(
changes,
app_label,
0,
[
"CreateModel",
"CreateModel",
"CreateModel",
],
)
self.assertOperationAttributes(changes, app_label, 0, 0, name="owner")
self.assertOperationAttributes(changes, app_label, 0, 1, name="place")
self.assertOperationAttributes(changes, app_label, 0, 2, name="restaurant")
def test_multiple_bases(self):
"""
Inheriting models doesn't move *_ptr fields into AddField operations.
"""
A = ModelState("app", "A", [("a_id", models.AutoField(primary_key=True))])
B = ModelState("app", "B", [("b_id", models.AutoField(primary_key=True))])
C = ModelState("app", "C", [], bases=("app.A", "app.B"))
D = ModelState("app", "D", [], bases=("app.A", "app.B"))
E = ModelState("app", "E", [], bases=("app.A", "app.B"))
changes = self.get_changes([], [A, B, C, D, E])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "app", 1)
self.assertOperationTypes(
changes,
"app",
0,
["CreateModel", "CreateModel", "CreateModel", "CreateModel", "CreateModel"],
)
self.assertOperationAttributes(changes, "app", 0, 0, name="A")
self.assertOperationAttributes(changes, "app", 0, 1, name="B")
self.assertOperationAttributes(changes, "app", 0, 2, name="C")
self.assertOperationAttributes(changes, "app", 0, 3, name="D")
self.assertOperationAttributes(changes, "app", 0, 4, name="E")
def test_proxy_bases_first(self):
"""Bases of proxies come first."""
changes = self.get_changes(
[], [self.author_empty, self.author_proxy, self.author_proxy_proxy]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes, "testapp", 0, ["CreateModel", "CreateModel", "CreateModel"]
)
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author")
self.assertOperationAttributes(changes, "testapp", 0, 1, name="AuthorProxy")
self.assertOperationAttributes(
changes, "testapp", 0, 2, name="AAuthorProxyProxy"
)
def test_pk_fk_included(self):
"""
A relation used as the primary key is kept as part of CreateModel.
"""
changes = self.get_changes([], [self.aardvark_pk_fk_author, self.author_name])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel", "CreateModel"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author")
self.assertOperationAttributes(changes, "testapp", 0, 1, name="Aardvark")
def test_first_dependency(self):
"""
A dependency to an app with no migrations uses __first__.
"""
# Load graph
loader = MigrationLoader(connection)
before = self.make_project_state([])
after = self.make_project_state([self.book_migrations_fk])
after.real_apps = {"migrations"}
autodetector = MigrationAutodetector(before, after)
changes = autodetector._detect_changes(graph=loader.graph)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "otherapp", 0, 0, name="Book")
self.assertMigrationDependencies(
changes, "otherapp", 0, [("migrations", "__first__")]
)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_last_dependency(self):
"""
A dependency to an app with existing migrations uses the
last migration of that app.
"""
# Load graph
loader = MigrationLoader(connection)
before = self.make_project_state([])
after = self.make_project_state([self.book_migrations_fk])
after.real_apps = {"migrations"}
autodetector = MigrationAutodetector(before, after)
changes = autodetector._detect_changes(graph=loader.graph)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "otherapp", 0, 0, name="Book")
self.assertMigrationDependencies(
changes, "otherapp", 0, [("migrations", "0002_second")]
)
def test_alter_fk_before_model_deletion(self):
"""
        ForeignKeys are altered _before_ the model they used to
        refer to is deleted.
"""
changes = self.get_changes(
[self.author_name, self.publisher_with_author],
[self.aardvark_testapp, self.publisher_with_aardvark_author],
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes, "testapp", 0, ["CreateModel", "AlterField", "DeleteModel"]
)
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Aardvark")
self.assertOperationAttributes(changes, "testapp", 0, 1, name="author")
self.assertOperationAttributes(changes, "testapp", 0, 2, name="Author")
def test_fk_dependency_other_app(self):
"""
#23100 - ForeignKeys correctly depend on other apps' models.
"""
changes = self.get_changes(
[self.author_name, self.book], [self.author_with_book, self.book]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AddField"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="book")
self.assertMigrationDependencies(
changes, "testapp", 0, [("otherapp", "__first__")]
)
def test_alter_unique_together_fk_to_m2m(self):
changes = self.get_changes(
[self.author_name, self.book_unique_together],
[
self.author_name,
ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ManyToManyField("testapp.Author")),
("title", models.CharField(max_length=200)),
],
),
],
)
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(
changes, "otherapp", 0, ["AlterUniqueTogether", "RemoveField", "AddField"]
)
self.assertOperationAttributes(
changes, "otherapp", 0, 0, name="book", unique_together=set()
)
self.assertOperationAttributes(
changes, "otherapp", 0, 1, model_name="book", name="author"
)
self.assertOperationAttributes(
changes, "otherapp", 0, 2, model_name="book", name="author"
)
def test_alter_field_to_fk_dependency_other_app(self):
changes = self.get_changes(
[self.author_empty, self.book_with_no_author_fk],
[self.author_empty, self.book],
)
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["AlterField"])
self.assertMigrationDependencies(
changes, "otherapp", 0, [("testapp", "__first__")]
)
def test_circular_dependency_mixed_addcreate(self):
"""
#23315 - The dependency resolver knows to put all CreateModel
before AddField and not become unsolvable.
"""
address = ModelState(
"a",
"Address",
[
("id", models.AutoField(primary_key=True)),
("country", models.ForeignKey("b.DeliveryCountry", models.CASCADE)),
],
)
person = ModelState(
"a",
"Person",
[
("id", models.AutoField(primary_key=True)),
],
)
apackage = ModelState(
"b",
"APackage",
[
("id", models.AutoField(primary_key=True)),
("person", models.ForeignKey("a.Person", models.CASCADE)),
],
)
country = ModelState(
"b",
"DeliveryCountry",
[
("id", models.AutoField(primary_key=True)),
],
)
changes = self.get_changes([], [address, person, apackage, country])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "a", 2)
self.assertNumberMigrations(changes, "b", 1)
self.assertOperationTypes(changes, "a", 0, ["CreateModel", "CreateModel"])
self.assertOperationTypes(changes, "a", 1, ["AddField"])
self.assertOperationTypes(changes, "b", 0, ["CreateModel", "CreateModel"])
@override_settings(AUTH_USER_MODEL="a.Tenant")
def test_circular_dependency_swappable(self):
"""
#23322 - The dependency resolver knows to explicitly resolve
swappable models.
"""
with isolate_lru_cache(apps.get_swappable_settings_name):
tenant = ModelState(
"a",
"Tenant",
[
("id", models.AutoField(primary_key=True)),
("primary_address", models.ForeignKey("b.Address", models.CASCADE)),
],
bases=(AbstractBaseUser,),
)
address = ModelState(
"b",
"Address",
[
("id", models.AutoField(primary_key=True)),
(
"tenant",
models.ForeignKey(settings.AUTH_USER_MODEL, models.CASCADE),
),
],
)
changes = self.get_changes([], [address, tenant])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "a", 2)
self.assertOperationTypes(changes, "a", 0, ["CreateModel"])
self.assertOperationTypes(changes, "a", 1, ["AddField"])
self.assertMigrationDependencies(changes, "a", 0, [])
self.assertMigrationDependencies(
changes, "a", 1, [("a", "auto_1"), ("b", "auto_1")]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "b", 1)
self.assertOperationTypes(changes, "b", 0, ["CreateModel"])
self.assertMigrationDependencies(
changes, "b", 0, [("__setting__", "AUTH_USER_MODEL")]
)
@override_settings(AUTH_USER_MODEL="b.Tenant")
def test_circular_dependency_swappable2(self):
"""
#23322 - The dependency resolver knows to explicitly resolve
swappable models but with the swappable not being the first migrated
model.
"""
with isolate_lru_cache(apps.get_swappable_settings_name):
address = ModelState(
"a",
"Address",
[
("id", models.AutoField(primary_key=True)),
(
"tenant",
models.ForeignKey(settings.AUTH_USER_MODEL, models.CASCADE),
),
],
)
tenant = ModelState(
"b",
"Tenant",
[
("id", models.AutoField(primary_key=True)),
("primary_address", models.ForeignKey("a.Address", models.CASCADE)),
],
bases=(AbstractBaseUser,),
)
changes = self.get_changes([], [address, tenant])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "a", 2)
self.assertOperationTypes(changes, "a", 0, ["CreateModel"])
self.assertOperationTypes(changes, "a", 1, ["AddField"])
self.assertMigrationDependencies(changes, "a", 0, [])
self.assertMigrationDependencies(
changes, "a", 1, [("__setting__", "AUTH_USER_MODEL"), ("a", "auto_1")]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "b", 1)
self.assertOperationTypes(changes, "b", 0, ["CreateModel"])
self.assertMigrationDependencies(changes, "b", 0, [("a", "auto_1")])
@override_settings(AUTH_USER_MODEL="a.Person")
def test_circular_dependency_swappable_self(self):
"""
#23322 - The dependency resolver knows to explicitly resolve
swappable models.
"""
with isolate_lru_cache(apps.get_swappable_settings_name):
person = ModelState(
"a",
"Person",
[
("id", models.AutoField(primary_key=True)),
(
"parent1",
models.ForeignKey(
settings.AUTH_USER_MODEL,
models.CASCADE,
related_name="children",
),
),
],
)
changes = self.get_changes([], [person])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "a", 1)
self.assertOperationTypes(changes, "a", 0, ["CreateModel"])
self.assertMigrationDependencies(changes, "a", 0, [])
@override_settings(AUTH_USER_MODEL="a.User")
def test_swappable_circular_multi_mti(self):
with isolate_lru_cache(apps.get_swappable_settings_name):
parent = ModelState(
"a",
"Parent",
[("user", models.ForeignKey(settings.AUTH_USER_MODEL, models.CASCADE))],
)
child = ModelState("a", "Child", [], bases=("a.Parent",))
user = ModelState("a", "User", [], bases=(AbstractBaseUser, "a.Child"))
changes = self.get_changes([], [parent, child, user])
self.assertNumberMigrations(changes, "a", 1)
self.assertOperationTypes(
changes, "a", 0, ["CreateModel", "CreateModel", "CreateModel", "AddField"]
)
@mock.patch(
"django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition",
side_effect=AssertionError("Should not have prompted for not null addition"),
)
def test_add_blank_textfield_and_charfield(self, mocked_ask_method):
"""
#23405 - Adding a NOT NULL and blank `CharField` or `TextField`
without default should not prompt for a default.
"""
changes = self.get_changes(
[self.author_empty], [self.author_with_biography_blank]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AddField", "AddField"])
self.assertOperationAttributes(changes, "testapp", 0, 0)
@mock.patch(
"django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition"
)
def test_add_non_blank_textfield_and_charfield(self, mocked_ask_method):
"""
#23405 - Adding a NOT NULL and non-blank `CharField` or `TextField`
without default should prompt for a default.
"""
changes = self.get_changes(
[self.author_empty], [self.author_with_biography_non_blank]
)
self.assertEqual(mocked_ask_method.call_count, 2)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AddField", "AddField"])
self.assertOperationAttributes(changes, "testapp", 0, 0)
def test_mti_inheritance_model_removal(self):
Animal = ModelState(
"app",
"Animal",
[
("id", models.AutoField(primary_key=True)),
],
)
Dog = ModelState("app", "Dog", [], bases=("app.Animal",))
changes = self.get_changes([Animal, Dog], [Animal])
self.assertNumberMigrations(changes, "app", 1)
self.assertOperationTypes(changes, "app", 0, ["DeleteModel"])
self.assertOperationAttributes(changes, "app", 0, 0, name="Dog")
def test_add_model_with_field_removed_from_base_model(self):
"""
Removing a base field takes place before adding a new inherited model
that has a field with the same name.
"""
before = [
ModelState(
"app",
"readable",
[
("id", models.AutoField(primary_key=True)),
("title", models.CharField(max_length=200)),
],
),
]
after = [
ModelState(
"app",
"readable",
[
("id", models.AutoField(primary_key=True)),
],
),
ModelState(
"app",
"book",
[
("title", models.CharField(max_length=200)),
],
bases=("app.readable",),
),
]
changes = self.get_changes(before, after)
self.assertNumberMigrations(changes, "app", 1)
self.assertOperationTypes(changes, "app", 0, ["RemoveField", "CreateModel"])
self.assertOperationAttributes(
changes, "app", 0, 0, name="title", model_name="readable"
)
self.assertOperationAttributes(changes, "app", 0, 1, name="book")
def test_parse_number(self):
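        # parse_number() returns the number of the last "_squashed_" segment
        # when present, otherwise the migration's leading number, and None
        # when the name carries no number at all.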
tests = [
("no_number", None),
("0001_initial", 1),
("0002_model3", 2),
("0002_auto_20380101_1112", 2),
("0002_squashed_0003", 3),
("0002_model2_squashed_0003_other4", 3),
("0002_squashed_0003_squashed_0004", 4),
("0002_model2_squashed_0003_other4_squashed_0005_other6", 5),
("0002_custom_name_20380101_1112_squashed_0003_model", 3),
("2_squashed_4", 4),
]
for migration_name, expected_number in tests:
with self.subTest(migration_name=migration_name):
self.assertEqual(
MigrationAutodetector.parse_number(migration_name),
expected_number,
)
def test_add_custom_fk_with_hardcoded_to(self):
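        # A ForeignKey whose target is hardcoded in __init__() and stripped
        # from deconstruct() forces the autodetector to read the dependency
        # off the field instance itself.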
class HardcodedForeignKey(models.ForeignKey):
def __init__(self, *args, **kwargs):
kwargs["to"] = "testapp.Author"
super().__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
del kwargs["to"]
return name, path, args, kwargs
book_hardcoded_fk_to = ModelState(
"testapp",
"Book",
[
("author", HardcodedForeignKey(on_delete=models.CASCADE)),
],
)
changes = self.get_changes(
[self.author_empty],
[self.author_empty, book_hardcoded_fk_to],
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Book")
@ignore_warnings(category=RemovedInDjango51Warning)
class AutodetectorIndexTogetherTests(BaseAutodetectorTests):
book_index_together = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
{
"index_together": {("author", "title")},
},
)
book_index_together_2 = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
{
"index_together": {("title", "author")},
},
)
book_index_together_3 = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("newfield", models.IntegerField()),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
{
"index_together": {("title", "newfield")},
},
)
book_index_together_4 = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("newfield2", models.IntegerField()),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
{
"index_together": {("title", "newfield2")},
},
)
def test_empty_index_together(self):
"""Empty index_together shouldn't generate a migration."""
        # Explicitly testing for "not specified", since this is the case
        # after a CreateModel operation without any definition on the
        # original model.
model_state_not_specified = ModelState(
"a", "model", [("id", models.AutoField(primary_key=True))]
)
# Explicitly testing for None, since this was the issue in #23452 after
# an AlterIndexTogether operation with e.g. () as value
model_state_none = ModelState(
"a",
"model",
[("id", models.AutoField(primary_key=True))],
{
"index_together": None,
},
)
# Explicitly testing for the empty set, since we now always have sets.
        # During removal, (('col1', 'col2'),) --> () becomes set().
model_state_empty = ModelState(
"a",
"model",
[("id", models.AutoField(primary_key=True))],
{
"index_together": set(),
},
)
def test(from_state, to_state, msg):
changes = self.get_changes([from_state], [to_state])
if changes:
ops = ", ".join(
o.__class__.__name__ for o in changes["a"][0].operations
)
self.fail("Created operation(s) %s from %s" % (ops, msg))
tests = (
(
model_state_not_specified,
model_state_not_specified,
'"not specified" to "not specified"',
),
(model_state_not_specified, model_state_none, '"not specified" to "None"'),
(
model_state_not_specified,
model_state_empty,
'"not specified" to "empty"',
),
(model_state_none, model_state_not_specified, '"None" to "not specified"'),
(model_state_none, model_state_none, '"None" to "None"'),
(model_state_none, model_state_empty, '"None" to "empty"'),
(
model_state_empty,
model_state_not_specified,
'"empty" to "not specified"',
),
(model_state_empty, model_state_none, '"empty" to "None"'),
(model_state_empty, model_state_empty, '"empty" to "empty"'),
)
for t in tests:
test(*t)
def test_rename_index_together_to_index(self):
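        # index_together entries whose fields exactly match a new Index are
        # converted into a RenameIndex operation keyed by old_fields.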
changes = self.get_changes(
[AutodetectorTests.author_empty, self.book_index_together],
[AutodetectorTests.author_empty, AutodetectorTests.book_indexes],
)
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["RenameIndex"])
self.assertOperationAttributes(
changes,
"otherapp",
0,
0,
model_name="book",
new_name="book_title_author_idx",
old_fields=("author", "title"),
)
def test_rename_index_together_to_index_extra_options(self):
# Indexes with extra options don't match indexes in index_together.
book_partial_index = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
{
"indexes": [
models.Index(
fields=["author", "title"],
condition=models.Q(title__startswith="The"),
name="book_title_author_idx",
)
],
},
)
changes = self.get_changes(
[AutodetectorTests.author_empty, self.book_index_together],
[AutodetectorTests.author_empty, book_partial_index],
)
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(
changes,
"otherapp",
0,
["AlterIndexTogether", "AddIndex"],
)
def test_rename_index_together_to_index_order_fields(self):
# Indexes with reordered fields don't match indexes in index_together.
changes = self.get_changes(
[AutodetectorTests.author_empty, self.book_index_together],
[AutodetectorTests.author_empty, AutodetectorTests.book_unordered_indexes],
)
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(
changes,
"otherapp",
0,
["AlterIndexTogether", "AddIndex"],
)
def test_add_index_together(self):
changes = self.get_changes(
[AutodetectorTests.author_empty, AutodetectorTests.book],
[AutodetectorTests.author_empty, self.book_index_together],
)
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["AlterIndexTogether"])
self.assertOperationAttributes(
changes, "otherapp", 0, 0, name="book", index_together={("author", "title")}
)
def test_remove_index_together(self):
changes = self.get_changes(
[AutodetectorTests.author_empty, self.book_index_together],
[AutodetectorTests.author_empty, AutodetectorTests.book],
)
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["AlterIndexTogether"])
self.assertOperationAttributes(
changes, "otherapp", 0, 0, name="book", index_together=set()
)
def test_index_together_remove_fk(self):
changes = self.get_changes(
[AutodetectorTests.author_empty, self.book_index_together],
[AutodetectorTests.author_empty, AutodetectorTests.book_with_no_author],
)
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(
changes,
"otherapp",
0,
["AlterIndexTogether", "RemoveField"],
)
self.assertOperationAttributes(
changes, "otherapp", 0, 0, name="book", index_together=set()
)
self.assertOperationAttributes(
changes, "otherapp", 0, 1, model_name="book", name="author"
)
def test_index_together_no_changes(self):
"""
index_together doesn't generate a migration if no changes have been
made.
"""
changes = self.get_changes(
[AutodetectorTests.author_empty, self.book_index_together],
[AutodetectorTests.author_empty, self.book_index_together],
)
self.assertEqual(len(changes), 0)
def test_index_together_ordering(self):
"""index_together triggers on ordering changes."""
changes = self.get_changes(
[AutodetectorTests.author_empty, self.book_index_together],
[AutodetectorTests.author_empty, self.book_index_together_2],
)
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(
changes,
"otherapp",
0,
["AlterIndexTogether"],
)
self.assertOperationAttributes(
changes,
"otherapp",
0,
0,
name="book",
index_together={("title", "author")},
)
def test_add_field_and_index_together(self):
"""
Added fields will be created before using them in index_together.
"""
changes = self.get_changes(
[AutodetectorTests.author_empty, AutodetectorTests.book],
[AutodetectorTests.author_empty, self.book_index_together_3],
)
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(
changes,
"otherapp",
0,
["AddField", "AlterIndexTogether"],
)
self.assertOperationAttributes(
changes,
"otherapp",
0,
1,
name="book",
index_together={("title", "newfield")},
)
def test_create_model_and_index_together(self):
author = ModelState(
"otherapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
],
)
book_with_author = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("otherapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
{
"index_together": {("title", "author")},
},
)
changes = self.get_changes(
[AutodetectorTests.book_with_no_author], [author, book_with_author]
)
self.assertEqual(len(changes["otherapp"]), 1)
migration = changes["otherapp"][0]
self.assertEqual(len(migration.operations), 3)
self.assertOperationTypes(
changes,
"otherapp",
0,
["CreateModel", "AddField", "AlterIndexTogether"],
)
def test_remove_field_and_index_together(self):
"""
Removed fields will be removed after updating index_together.
"""
changes = self.get_changes(
[AutodetectorTests.author_empty, self.book_index_together_3],
[AutodetectorTests.author_empty, self.book_index_together],
)
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(
changes,
"otherapp",
0,
["AlterIndexTogether", "RemoveField"],
)
self.assertOperationAttributes(
changes,
"otherapp",
0,
0,
name="book",
index_together={("author", "title")},
)
self.assertOperationAttributes(
changes,
"otherapp",
0,
1,
model_name="book",
name="newfield",
)
def test_alter_field_and_index_together(self):
"""Fields are altered after deleting some index_together."""
initial_author = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("age", models.IntegerField(db_index=True)),
],
{
"index_together": {("name",)},
},
)
author_reversed_constraints = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200, unique=True)),
("age", models.IntegerField()),
],
{
"index_together": {("age",)},
},
)
changes = self.get_changes([initial_author], [author_reversed_constraints])
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes,
"testapp",
0,
[
"AlterIndexTogether",
"AlterField",
"AlterField",
"AlterIndexTogether",
],
)
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
name="author",
index_together=set(),
)
self.assertOperationAttributes(
changes,
"testapp",
0,
1,
model_name="author",
name="age",
)
self.assertOperationAttributes(
changes,
"testapp",
0,
2,
model_name="author",
name="name",
)
self.assertOperationAttributes(
changes,
"testapp",
0,
3,
name="author",
index_together={("age",)},
)
def test_partly_alter_index_together_increase(self):
initial_author = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("age", models.IntegerField()),
],
{
"index_together": {("name",)},
},
)
author_new_constraints = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("age", models.IntegerField()),
],
{
"index_together": {("name",), ("age",)},
},
)
changes = self.get_changes([initial_author], [author_new_constraints])
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes,
"testapp",
0,
["AlterIndexTogether"],
)
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
name="author",
index_together={("name",), ("age",)},
)
def test_partly_alter_index_together_decrease(self):
initial_author = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("age", models.IntegerField()),
],
{
"index_together": {("name",), ("age",)},
},
)
author_new_constraints = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("age", models.IntegerField()),
],
{
"index_together": {("age",)},
},
)
changes = self.get_changes([initial_author], [author_new_constraints])
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes,
"testapp",
0,
["AlterIndexTogether"],
)
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
name="author",
index_together={("age",)},
)
def test_rename_field_and_index_together(self):
"""Fields are renamed before updating index_together."""
changes = self.get_changes(
[AutodetectorTests.author_empty, self.book_index_together_3],
[AutodetectorTests.author_empty, self.book_index_together_4],
MigrationQuestioner({"ask_rename": True}),
)
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(
changes,
"otherapp",
0,
["RenameField", "AlterIndexTogether"],
)
self.assertOperationAttributes(
changes,
"otherapp",
0,
1,
name="book",
index_together={("title", "newfield2")},
)
def test_add_model_order_with_respect_to_index_together(self):
changes = self.get_changes(
[],
[
AutodetectorTests.book,
ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
],
options={
"order_with_respect_to": "book",
"index_together": {("name", "_order")},
},
),
],
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
self.assertOperationAttributes(
changes,
"testapp",
0,
0,
name="Author",
options={
"order_with_respect_to": "book",
"index_together": {("name", "_order")},
},
)
def test_set_alter_order_with_respect_to_index_together(self):
after = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
],
options={
"order_with_respect_to": "book",
"index_together": {("name", "_order")},
},
)
changes = self.get_changes(
[AutodetectorTests.book, AutodetectorTests.author_with_book],
[AutodetectorTests.book, after],
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(
changes,
"testapp",
0,
["AlterOrderWithRespectTo", "AlterIndexTogether"],
)
class MigrationSuggestNameTests(SimpleTestCase):
def test_no_operations(self):
class Migration(migrations.Migration):
operations = []
migration = Migration("some_migration", "test_app")
self.assertIs(migration.suggest_name().startswith("auto_"), True)
def test_no_operations_initial(self):
class Migration(migrations.Migration):
initial = True
operations = []
migration = Migration("some_migration", "test_app")
self.assertEqual(migration.suggest_name(), "initial")
def test_single_operation(self):
class Migration(migrations.Migration):
operations = [migrations.CreateModel("Person", fields=[])]
migration = Migration("0001_initial", "test_app")
self.assertEqual(migration.suggest_name(), "person")
class Migration(migrations.Migration):
operations = [migrations.DeleteModel("Person")]
migration = Migration("0002_initial", "test_app")
self.assertEqual(migration.suggest_name(), "delete_person")
def test_single_operation_long_name(self):
class Migration(migrations.Migration):
operations = [migrations.CreateModel("A" * 53, fields=[])]
migration = Migration("some_migration", "test_app")
self.assertEqual(migration.suggest_name(), "a" * 53)
def test_two_operations(self):
class Migration(migrations.Migration):
operations = [
migrations.CreateModel("Person", fields=[]),
migrations.DeleteModel("Animal"),
]
migration = Migration("some_migration", "test_app")
self.assertEqual(migration.suggest_name(), "person_delete_animal")
def test_two_create_models(self):
class Migration(migrations.Migration):
operations = [
migrations.CreateModel("Person", fields=[]),
migrations.CreateModel("Animal", fields=[]),
]
migration = Migration("0001_initial", "test_app")
self.assertEqual(migration.suggest_name(), "person_animal")
def test_two_create_models_with_initial_true(self):
class Migration(migrations.Migration):
initial = True
operations = [
migrations.CreateModel("Person", fields=[]),
migrations.CreateModel("Animal", fields=[]),
]
migration = Migration("0001_initial", "test_app")
self.assertEqual(migration.suggest_name(), "initial")
def test_many_operations_suffix(self):
class Migration(migrations.Migration):
operations = [
migrations.CreateModel("Person1", fields=[]),
migrations.CreateModel("Person2", fields=[]),
migrations.CreateModel("Person3", fields=[]),
migrations.DeleteModel("Person4"),
migrations.DeleteModel("Person5"),
]
migration = Migration("some_migration", "test_app")
self.assertEqual(
migration.suggest_name(),
"person1_person2_person3_delete_person4_and_more",
)
def test_operation_with_no_suggested_name(self):
class Migration(migrations.Migration):
operations = [
migrations.CreateModel("Person", fields=[]),
migrations.RunSQL("SELECT 1 FROM person;"),
]
migration = Migration("some_migration", "test_app")
self.assertIs(migration.suggest_name().startswith("auto_"), True)
def test_operation_with_invalid_chars_in_suggested_name(self):
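        # Runs of characters that aren't valid in a Python identifier are
        # collapsed into single underscores in the suggested name.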
class Migration(migrations.Migration):
operations = [
migrations.AddConstraint(
"Person",
models.UniqueConstraint(
fields=["name"], name="person.name-*~unique!"
),
),
]
migration = Migration("some_migration", "test_app")
self.assertEqual(migration.suggest_name(), "person_person_name_unique_")
def test_none_name(self):
class Migration(migrations.Migration):
operations = [migrations.RunSQL("SELECT 1 FROM person;")]
migration = Migration("0001_initial", "test_app")
suggest_name = migration.suggest_name()
self.assertIs(suggest_name.startswith("auto_"), True)
def test_none_name_with_initial_true(self):
class Migration(migrations.Migration):
initial = True
operations = [migrations.RunSQL("SELECT 1 FROM person;")]
migration = Migration("0001_initial", "test_app")
self.assertEqual(migration.suggest_name(), "initial")
def test_auto(self):
migration = migrations.Migration("0001_initial", "test_app")
suggest_name = migration.suggest_name()
self.assertIs(suggest_name.startswith("auto_"), True)
from unittest import mock
from asgiref.sync import sync_to_async
from django.conf.global_settings import PASSWORD_HASHERS
from django.contrib.auth import get_user_model
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.base_user import AbstractBaseUser
from django.contrib.auth.hashers import get_hasher
from django.contrib.auth.models import (
AnonymousUser,
Group,
Permission,
User,
UserManager,
)
from django.contrib.contenttypes.models import ContentType
from django.core import mail
from django.db import connection, migrations
from django.db.migrations.state import ModelState, ProjectState
from django.db.models.signals import post_save
from django.test import SimpleTestCase, TestCase, TransactionTestCase, override_settings
from django.test.utils import ignore_warnings
from django.utils.deprecation import RemovedInDjango51Warning
from .models import CustomEmailField, IntegerUsernameUser
class NaturalKeysTestCase(TestCase):
def test_user_natural_key(self):
staff_user = User.objects.create_user(username="staff")
self.assertEqual(User.objects.get_by_natural_key("staff"), staff_user)
self.assertEqual(staff_user.natural_key(), ("staff",))
def test_group_natural_key(self):
users_group = Group.objects.create(name="users")
self.assertEqual(Group.objects.get_by_natural_key("users"), users_group)
class LoadDataWithoutNaturalKeysTestCase(TestCase):
fixtures = ["regular.json"]
def test_user_is_created_and_added_to_group(self):
user = User.objects.get(username="my_username")
group = Group.objects.get(name="my_group")
self.assertEqual(group, user.groups.get())
class LoadDataWithNaturalKeysTestCase(TestCase):
fixtures = ["natural.json"]
def test_user_is_created_and_added_to_group(self):
user = User.objects.get(username="my_username")
group = Group.objects.get(name="my_group")
self.assertEqual(group, user.groups.get())
class LoadDataWithNaturalKeysAndMultipleDatabasesTestCase(TestCase):
databases = {"default", "other"}
def test_load_data_with_user_permissions(self):
# Create test contenttypes for both databases
default_objects = [
ContentType.objects.db_manager("default").create(
model="examplemodela",
app_label="app_a",
),
ContentType.objects.db_manager("default").create(
model="examplemodelb",
app_label="app_b",
),
]
other_objects = [
ContentType.objects.db_manager("other").create(
model="examplemodelb",
app_label="app_b",
),
ContentType.objects.db_manager("other").create(
model="examplemodela",
app_label="app_a",
),
]
        # Now create the test permissions.
Permission.objects.db_manager("default").create(
name="Can delete example model b",
codename="delete_examplemodelb",
content_type=default_objects[1],
)
Permission.objects.db_manager("other").create(
name="Can delete example model b",
codename="delete_examplemodelb",
content_type=other_objects[0],
)
perm_default = Permission.objects.get_by_natural_key(
"delete_examplemodelb",
"app_b",
"examplemodelb",
)
perm_other = Permission.objects.db_manager("other").get_by_natural_key(
"delete_examplemodelb",
"app_b",
"examplemodelb",
)
self.assertEqual(perm_default.content_type_id, default_objects[1].id)
self.assertEqual(perm_other.content_type_id, other_objects[0].id)
class UserManagerTestCase(TransactionTestCase):
available_apps = [
"auth_tests",
"django.contrib.auth",
"django.contrib.contenttypes",
]
def test_create_user(self):
        email_lowercase = "[email protected]"
user = User.objects.create_user("user", email_lowercase)
self.assertEqual(user.email, email_lowercase)
self.assertEqual(user.username, "user")
self.assertFalse(user.has_usable_password())
def test_create_user_email_domain_normalize_rfc3696(self):
        # According to RFC 3696 Section 3, the "@" symbol can be part of the
        # local part of an email address.
        returned = UserManager.normalize_email(r"Abc\@[email protected]")
        self.assertEqual(returned, r"Abc\@[email protected]")
def test_create_user_email_domain_normalize(self):
        returned = UserManager.normalize_email("[email protected]")
        self.assertEqual(returned, "[email protected]")
def test_create_user_email_domain_normalize_with_whitespace(self):
        returned = UserManager.normalize_email(r"email\ [email protected]")
        self.assertEqual(returned, r"email\ [email protected]")
def test_empty_username(self):
with self.assertRaisesMessage(ValueError, "The given username must be set"):
User.objects.create_user(username="")
def test_create_user_is_staff(self):
        email = "[email protected]"
user = User.objects.create_user("user", email, is_staff=True)
self.assertEqual(user.email, email)
self.assertEqual(user.username, "user")
self.assertTrue(user.is_staff)
def test_create_super_user_raises_error_on_false_is_superuser(self):
with self.assertRaisesMessage(
ValueError, "Superuser must have is_superuser=True."
):
User.objects.create_superuser(
username="test",
                email="[email protected]",
password="test",
is_superuser=False,
)
def test_create_superuser_raises_error_on_false_is_staff(self):
with self.assertRaisesMessage(ValueError, "Superuser must have is_staff=True."):
User.objects.create_superuser(
username="test",
                email="[email protected]",
password="test",
is_staff=False,
)
@ignore_warnings(category=RemovedInDjango51Warning)
def test_make_random_password(self):
allowed_chars = "abcdefg"
password = UserManager().make_random_password(5, allowed_chars)
self.assertEqual(len(password), 5)
for char in password:
self.assertIn(char, allowed_chars)
def test_make_random_password_warning(self):
msg = "BaseUserManager.make_random_password() is deprecated."
with self.assertWarnsMessage(RemovedInDjango51Warning, msg):
UserManager().make_random_password()
def test_runpython_manager_methods(self):
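        # Historical models rebuilt from migration state keep their custom
        # managers, so create_user() is available inside RunPython.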
def forwards(apps, schema_editor):
UserModel = apps.get_model("auth", "User")
user = UserModel.objects.create_user("user1", password="secure")
self.assertIsInstance(user, UserModel)
operation = migrations.RunPython(forwards, migrations.RunPython.noop)
project_state = ProjectState()
project_state.add_model(ModelState.from_model(User))
project_state.add_model(ModelState.from_model(Group))
project_state.add_model(ModelState.from_model(Permission))
project_state.add_model(ModelState.from_model(ContentType))
new_state = project_state.clone()
with connection.schema_editor() as editor:
operation.state_forwards("test_manager_methods", new_state)
operation.database_forwards(
"test_manager_methods",
editor,
project_state,
new_state,
)
user = User.objects.get(username="user1")
self.assertTrue(user.check_password("secure"))
class AbstractBaseUserTests(SimpleTestCase):
def test_has_usable_password(self):
"""
Passwords are usable even if they don't correspond to a hasher in
settings.PASSWORD_HASHERS.
"""
self.assertIs(User(password="some-gibbberish").has_usable_password(), True)
def test_normalize_username(self):
self.assertEqual(IntegerUsernameUser().normalize_username(123), 123)
def test_clean_normalize_username(self):
# The normalization happens in AbstractBaseUser.clean()
ohm_username = "iamtheΩ" # U+2126 OHM SIGN
for model in ("auth.User", "auth_tests.CustomUser"):
with self.subTest(model=model), self.settings(AUTH_USER_MODEL=model):
User = get_user_model()
user = User(**{User.USERNAME_FIELD: ohm_username, "password": "foo"})
user.clean()
username = user.get_username()
self.assertNotEqual(username, ohm_username)
self.assertEqual(
username, "iamtheΩ"
) # U+03A9 GREEK CAPITAL LETTER OMEGA
def test_default_email(self):
self.assertEqual(AbstractBaseUser.get_email_field_name(), "email")
def test_custom_email(self):
user = CustomEmailField()
self.assertEqual(user.get_email_field_name(), "email_address")
class AbstractUserTestCase(TestCase):
def test_email_user(self):
# valid send_mail parameters
kwargs = {
"fail_silently": False,
"auth_user": None,
"auth_password": None,
"connection": None,
"html_message": None,
}
        user = User(email="[email protected]")
user.email_user(
subject="Subject here",
message="This is a message",
            from_email="[email protected]",
**kwargs,
)
self.assertEqual(len(mail.outbox), 1)
message = mail.outbox[0]
self.assertEqual(message.subject, "Subject here")
self.assertEqual(message.body, "This is a message")
        self.assertEqual(message.from_email, "[email protected]")
self.assertEqual(message.to, [user.email])
def test_last_login_default(self):
user1 = User.objects.create(username="user1")
self.assertIsNone(user1.last_login)
user2 = User.objects.create_user(username="user2")
self.assertIsNone(user2.last_login)
def test_user_clean_normalize_email(self):
        user = User(username="user", password="foo", email="[email protected]")
user.clean()
        self.assertEqual(user.email, "[email protected]")
def test_user_double_save(self):
"""
Calling user.save() twice should trigger password_changed() once.
"""
user = User.objects.create_user(username="user", password="foo")
user.set_password("bar")
with mock.patch(
"django.contrib.auth.password_validation.password_changed"
) as pw_changed:
user.save()
self.assertEqual(pw_changed.call_count, 1)
user.save()
self.assertEqual(pw_changed.call_count, 1)
@override_settings(PASSWORD_HASHERS=PASSWORD_HASHERS)
def test_check_password_upgrade(self):
"""
password_changed() shouldn't be called if User.check_password()
triggers a hash iteration upgrade.
"""
user = User.objects.create_user(username="user", password="foo")
initial_password = user.password
self.assertTrue(user.check_password("foo"))
hasher = get_hasher("default")
self.assertEqual("pbkdf2_sha256", hasher.algorithm)
old_iterations = hasher.iterations
try:
# Upgrade the password iterations
hasher.iterations = old_iterations + 1
with mock.patch(
"django.contrib.auth.password_validation.password_changed"
) as pw_changed:
user.check_password("foo")
self.assertEqual(pw_changed.call_count, 0)
self.assertNotEqual(initial_password, user.password)
finally:
hasher.iterations = old_iterations
@override_settings(PASSWORD_HASHERS=PASSWORD_HASHERS)
async def test_acheck_password_upgrade(self):
user = await sync_to_async(User.objects.create_user)(
username="user", password="foo"
)
initial_password = user.password
self.assertIs(await user.acheck_password("foo"), True)
hasher = get_hasher("default")
self.assertEqual("pbkdf2_sha256", hasher.algorithm)
old_iterations = hasher.iterations
try:
# Upgrade the password iterations.
hasher.iterations = old_iterations + 1
with mock.patch(
"django.contrib.auth.password_validation.password_changed"
) as pw_changed:
self.assertIs(await user.acheck_password("foo"), True)
self.assertEqual(pw_changed.call_count, 0)
self.assertNotEqual(initial_password, user.password)
finally:
hasher.iterations = old_iterations
class CustomModelBackend(ModelBackend):
def with_perm(
self, perm, is_active=True, include_superusers=True, backend=None, obj=None
):
if obj is not None and obj.username == "charliebrown":
return User.objects.filter(pk=obj.pk)
return User.objects.filter(username__startswith="charlie")
class UserWithPermTestCase(TestCase):
@classmethod
def setUpTestData(cls):
content_type = ContentType.objects.get_for_model(Group)
cls.permission = Permission.objects.create(
name="test",
content_type=content_type,
codename="test",
)
# User with permission.
        cls.user1 = User.objects.create_user("user 1", "[email protected]")
cls.user1.user_permissions.add(cls.permission)
# User with group permission.
group1 = Group.objects.create(name="group 1")
group1.permissions.add(cls.permission)
group2 = Group.objects.create(name="group 2")
group2.permissions.add(cls.permission)
        cls.user2 = User.objects.create_user("user 2", "[email protected]")
cls.user2.groups.add(group1, group2)
# Users without permissions.
        cls.user_charlie = User.objects.create_user("charlie", "[email protected]")
        cls.user_charlie_b = User.objects.create_user(
            "charliebrown", "[email protected]"
        )
# Superuser.
cls.superuser = User.objects.create_superuser(
"superuser",
"[email protected]",
"superpassword",
)
# Inactive user with permission.
cls.inactive_user = User.objects.create_user(
"inactive_user",
"[email protected]",
is_active=False,
)
cls.inactive_user.user_permissions.add(cls.permission)
def test_invalid_permission_name(self):
msg = "Permission name should be in the form app_label.permission_codename."
for perm in ("nodots", "too.many.dots", "...", ""):
with self.subTest(perm), self.assertRaisesMessage(ValueError, msg):
User.objects.with_perm(perm)
def test_invalid_permission_type(self):
msg = "The `perm` argument must be a string or a permission instance."
for perm in (b"auth.test", object(), None):
with self.subTest(perm), self.assertRaisesMessage(TypeError, msg):
User.objects.with_perm(perm)
def test_invalid_backend_type(self):
msg = "backend must be a dotted import path string (got %r)."
for backend in (b"auth_tests.CustomModelBackend", object()):
with self.subTest(backend):
with self.assertRaisesMessage(TypeError, msg % backend):
User.objects.with_perm("auth.test", backend=backend)
def test_basic(self):
active_users = [self.user1, self.user2]
tests = [
({}, [*active_users, self.superuser]),
({"obj": self.user1}, []),
# Only inactive users.
({"is_active": False}, [self.inactive_user]),
# All users.
({"is_active": None}, [*active_users, self.superuser, self.inactive_user]),
# Exclude superusers.
({"include_superusers": False}, active_users),
(
{"include_superusers": False, "is_active": False},
[self.inactive_user],
),
(
{"include_superusers": False, "is_active": None},
[*active_users, self.inactive_user],
),
]
for kwargs, expected_users in tests:
for perm in ("auth.test", self.permission):
with self.subTest(perm=perm, **kwargs):
self.assertCountEqual(
User.objects.with_perm(perm, **kwargs),
expected_users,
)
@override_settings(
AUTHENTICATION_BACKENDS=["django.contrib.auth.backends.BaseBackend"]
)
def test_backend_without_with_perm(self):
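        # Backends that don't implement with_perm() contribute no users.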
self.assertSequenceEqual(User.objects.with_perm("auth.test"), [])
def test_nonexistent_permission(self):
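        # Superusers are granted every permission, even ones that don't
        # exist.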
self.assertSequenceEqual(User.objects.with_perm("auth.perm"), [self.superuser])
def test_nonexistent_backend(self):
with self.assertRaises(ImportError):
User.objects.with_perm(
"auth.test",
backend="invalid.backend.CustomModelBackend",
)
@override_settings(
AUTHENTICATION_BACKENDS=["auth_tests.test_models.CustomModelBackend"]
)
def test_custom_backend(self):
for perm in ("auth.test", self.permission):
with self.subTest(perm):
self.assertCountEqual(
User.objects.with_perm(perm),
[self.user_charlie, self.user_charlie_b],
)
@override_settings(
AUTHENTICATION_BACKENDS=["auth_tests.test_models.CustomModelBackend"]
)
def test_custom_backend_pass_obj(self):
for perm in ("auth.test", self.permission):
with self.subTest(perm):
self.assertSequenceEqual(
User.objects.with_perm(perm, obj=self.user_charlie_b),
[self.user_charlie_b],
)
@override_settings(
AUTHENTICATION_BACKENDS=[
"auth_tests.test_models.CustomModelBackend",
"django.contrib.auth.backends.ModelBackend",
]
)
def test_multiple_backends(self):
msg = (
"You have multiple authentication backends configured and "
"therefore must provide the `backend` argument."
)
with self.assertRaisesMessage(ValueError, msg):
User.objects.with_perm("auth.test")
backend = "auth_tests.test_models.CustomModelBackend"
self.assertCountEqual(
User.objects.with_perm("auth.test", backend=backend),
[self.user_charlie, self.user_charlie_b],
)
class IsActiveTestCase(TestCase):
"""
    Tests the behavior of the guaranteed is_active attribute.
"""
def test_builtin_user_isactive(self):
        user = User.objects.create(username="foo", email="[email protected]")
        # is_active is True by default.
self.assertIs(user.is_active, True)
user.is_active = False
user.save()
user_fetched = User.objects.get(pk=user.pk)
# the is_active flag is saved
self.assertFalse(user_fetched.is_active)
@override_settings(AUTH_USER_MODEL="auth_tests.IsActiveTestUser1")
def test_is_active_field_default(self):
"""
tests that the default value for is_active is provided
"""
UserModel = get_user_model()
user = UserModel(username="foo")
self.assertIs(user.is_active, True)
        # The attribute can be set, but it won't be saved.
user.is_active = False
        # Saving works fine, but the attribute isn't persisted.
user.save()
user_fetched = UserModel._default_manager.get(pk=user.pk)
        # The attribute is always True for a freshly retrieved instance.
self.assertIs(user_fetched.is_active, True)
class TestCreateSuperUserSignals(TestCase):
"""
Simple test case for ticket #20541
"""
def post_save_listener(self, *args, **kwargs):
self.signals_count += 1
def setUp(self):
self.signals_count = 0
post_save.connect(self.post_save_listener, sender=User)
def tearDown(self):
post_save.disconnect(self.post_save_listener, sender=User)
def test_create_user(self):
User.objects.create_user("JohnDoe")
self.assertEqual(self.signals_count, 1)
def test_create_superuser(self):
User.objects.create_superuser("JohnDoe", "[email protected]", "1")
self.assertEqual(self.signals_count, 1)
class AnonymousUserTests(SimpleTestCase):
no_repr_msg = "Django doesn't provide a DB representation for AnonymousUser."
def setUp(self):
self.user = AnonymousUser()
def test_properties(self):
self.assertIsNone(self.user.pk)
self.assertEqual(self.user.username, "")
self.assertEqual(self.user.get_username(), "")
self.assertIs(self.user.is_anonymous, True)
self.assertIs(self.user.is_authenticated, False)
self.assertIs(self.user.is_staff, False)
self.assertIs(self.user.is_active, False)
self.assertIs(self.user.is_superuser, False)
self.assertEqual(self.user.groups.count(), 0)
self.assertEqual(self.user.user_permissions.count(), 0)
self.assertEqual(self.user.get_user_permissions(), set())
self.assertEqual(self.user.get_group_permissions(), set())
def test_str(self):
self.assertEqual(str(self.user), "AnonymousUser")
def test_eq(self):
self.assertEqual(self.user, AnonymousUser())
        self.assertNotEqual(self.user, User("super", "[email protected]", "super"))
def test_hash(self):
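        # All AnonymousUser instances hash to the same constant, matching
        # their __eq__() behavior.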
self.assertEqual(hash(self.user), 1)
def test_int(self):
msg = (
"Cannot cast AnonymousUser to int. Are you trying to use it in "
"place of User?"
)
with self.assertRaisesMessage(TypeError, msg):
int(self.user)
def test_delete(self):
with self.assertRaisesMessage(NotImplementedError, self.no_repr_msg):
self.user.delete()
def test_save(self):
with self.assertRaisesMessage(NotImplementedError, self.no_repr_msg):
self.user.save()
def test_set_password(self):
with self.assertRaisesMessage(NotImplementedError, self.no_repr_msg):
self.user.set_password("password")
def test_check_password(self):
with self.assertRaisesMessage(NotImplementedError, self.no_repr_msg):
self.user.check_password("password")
class GroupTests(SimpleTestCase):
def test_str(self):
g = Group(name="Users")
self.assertEqual(str(g), "Users")
class PermissionTests(TestCase):
def test_str(self):
p = Permission.objects.get(codename="view_customemailfield")
self.assertEqual(
str(p), "Auth_Tests | custom email field | Can view custom email field"
)
from unittest import mock, skipUnless
from django.conf.global_settings import PASSWORD_HASHERS
from django.contrib.auth.hashers import (
UNUSABLE_PASSWORD_PREFIX,
UNUSABLE_PASSWORD_SUFFIX_LENGTH,
BasePasswordHasher,
BCryptPasswordHasher,
BCryptSHA256PasswordHasher,
MD5PasswordHasher,
PBKDF2PasswordHasher,
PBKDF2SHA1PasswordHasher,
ScryptPasswordHasher,
acheck_password,
check_password,
get_hasher,
identify_hasher,
is_password_usable,
make_password,
)
from django.test import SimpleTestCase, ignore_warnings
from django.test.utils import override_settings
from django.utils.deprecation import RemovedInDjango51Warning
try:
import bcrypt
except ImportError:
bcrypt = None
try:
import argon2
except ImportError:
argon2 = None
# scrypt requires OpenSSL 1.1+
try:
import hashlib
scrypt = hashlib.scrypt
except ImportError:
scrypt = None
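# Hasher with a deliberately weak work factor, used below to simulate
# outdated encoded passwords in upgrade tests.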
class PBKDF2SingleIterationHasher(PBKDF2PasswordHasher):
iterations = 1
@override_settings(PASSWORD_HASHERS=PASSWORD_HASHERS)
class TestUtilsHashPass(SimpleTestCase):
def test_simple(self):
encoded = make_password("lètmein")
self.assertTrue(encoded.startswith("pbkdf2_sha256$"))
self.assertTrue(is_password_usable(encoded))
self.assertTrue(check_password("lètmein", encoded))
self.assertFalse(check_password("lètmeinz", encoded))
# Blank passwords
blank_encoded = make_password("")
self.assertTrue(blank_encoded.startswith("pbkdf2_sha256$"))
self.assertTrue(is_password_usable(blank_encoded))
self.assertTrue(check_password("", blank_encoded))
self.assertFalse(check_password(" ", blank_encoded))
async def test_acheck_password(self):
encoded = make_password("lètmein")
self.assertIs(await acheck_password("lètmein", encoded), True)
self.assertIs(await acheck_password("lètmeinz", encoded), False)
# Blank passwords.
blank_encoded = make_password("")
self.assertIs(await acheck_password("", blank_encoded), True)
self.assertIs(await acheck_password(" ", blank_encoded), False)
def test_bytes(self):
encoded = make_password(b"bytes_password")
self.assertTrue(encoded.startswith("pbkdf2_sha256$"))
self.assertIs(is_password_usable(encoded), True)
self.assertIs(check_password(b"bytes_password", encoded), True)
def test_invalid_password(self):
msg = "Password must be a string or bytes, got int."
with self.assertRaisesMessage(TypeError, msg):
make_password(1)
def test_pbkdf2(self):
encoded = make_password("lètmein", "seasalt", "pbkdf2_sha256")
self.assertEqual(
encoded,
"pbkdf2_sha256$720000$seasalt$eDupbcisD1UuIiou3hMuMu8oe/XwnpDw45r6AA5iv0E=",
)
self.assertTrue(is_password_usable(encoded))
self.assertTrue(check_password("lètmein", encoded))
self.assertFalse(check_password("lètmeinz", encoded))
self.assertEqual(identify_hasher(encoded).algorithm, "pbkdf2_sha256")
# Blank passwords
blank_encoded = make_password("", "seasalt", "pbkdf2_sha256")
self.assertTrue(blank_encoded.startswith("pbkdf2_sha256$"))
self.assertTrue(is_password_usable(blank_encoded))
self.assertTrue(check_password("", blank_encoded))
self.assertFalse(check_password(" ", blank_encoded))
# Salt entropy check.
hasher = get_hasher("pbkdf2_sha256")
encoded_weak_salt = make_password("lètmein", "iodizedsalt", "pbkdf2_sha256")
encoded_strong_salt = make_password("lètmein", hasher.salt(), "pbkdf2_sha256")
self.assertIs(hasher.must_update(encoded_weak_salt), True)
self.assertIs(hasher.must_update(encoded_strong_salt), False)
@ignore_warnings(category=RemovedInDjango51Warning)
@override_settings(
PASSWORD_HASHERS=["django.contrib.auth.hashers.SHA1PasswordHasher"]
)
def test_sha1(self):
encoded = make_password("lètmein", "seasalt", "sha1")
self.assertEqual(
encoded, "sha1$seasalt$cff36ea83f5706ce9aa7454e63e431fc726b2dc8"
)
self.assertTrue(is_password_usable(encoded))
self.assertTrue(check_password("lètmein", encoded))
self.assertFalse(check_password("lètmeinz", encoded))
self.assertEqual(identify_hasher(encoded).algorithm, "sha1")
# Blank passwords
blank_encoded = make_password("", "seasalt", "sha1")
self.assertTrue(blank_encoded.startswith("sha1$"))
self.assertTrue(is_password_usable(blank_encoded))
self.assertTrue(check_password("", blank_encoded))
self.assertFalse(check_password(" ", blank_encoded))
# Salt entropy check.
hasher = get_hasher("sha1")
encoded_weak_salt = make_password("lètmein", "iodizedsalt", "sha1")
encoded_strong_salt = make_password("lètmein", hasher.salt(), "sha1")
self.assertIs(hasher.must_update(encoded_weak_salt), True)
self.assertIs(hasher.must_update(encoded_strong_salt), False)
@override_settings(
PASSWORD_HASHERS=["django.contrib.auth.hashers.SHA1PasswordHasher"]
)
def test_sha1_deprecation_warning(self):
msg = "django.contrib.auth.hashers.SHA1PasswordHasher is deprecated."
with self.assertRaisesMessage(RemovedInDjango51Warning, msg):
get_hasher("sha1")
@override_settings(
PASSWORD_HASHERS=["django.contrib.auth.hashers.MD5PasswordHasher"]
)
def test_md5(self):
encoded = make_password("lètmein", "seasalt", "md5")
self.assertEqual(encoded, "md5$seasalt$3f86d0d3d465b7b458c231bf3555c0e3")
self.assertTrue(is_password_usable(encoded))
self.assertTrue(check_password("lètmein", encoded))
self.assertFalse(check_password("lètmeinz", encoded))
self.assertEqual(identify_hasher(encoded).algorithm, "md5")
# Blank passwords
blank_encoded = make_password("", "seasalt", "md5")
self.assertTrue(blank_encoded.startswith("md5$"))
self.assertTrue(is_password_usable(blank_encoded))
self.assertTrue(check_password("", blank_encoded))
self.assertFalse(check_password(" ", blank_encoded))
# Salt entropy check.
hasher = get_hasher("md5")
encoded_weak_salt = make_password("lètmein", "iodizedsalt", "md5")
encoded_strong_salt = make_password("lètmein", hasher.salt(), "md5")
self.assertIs(hasher.must_update(encoded_weak_salt), True)
self.assertIs(hasher.must_update(encoded_strong_salt), False)
@ignore_warnings(category=RemovedInDjango51Warning)
@override_settings(
PASSWORD_HASHERS=["django.contrib.auth.hashers.UnsaltedMD5PasswordHasher"]
)
def test_unsalted_md5(self):
encoded = make_password("lètmein", "", "unsalted_md5")
self.assertEqual(encoded, "88a434c88cca4e900f7874cd98123f43")
self.assertTrue(is_password_usable(encoded))
self.assertTrue(check_password("lètmein", encoded))
self.assertFalse(check_password("lètmeinz", encoded))
self.assertEqual(identify_hasher(encoded).algorithm, "unsalted_md5")
# Alternate unsalted syntax
alt_encoded = "md5$$%s" % encoded
self.assertTrue(is_password_usable(alt_encoded))
self.assertTrue(check_password("lètmein", alt_encoded))
self.assertFalse(check_password("lètmeinz", alt_encoded))
# Blank passwords
blank_encoded = make_password("", "", "unsalted_md5")
self.assertTrue(is_password_usable(blank_encoded))
self.assertTrue(check_password("", blank_encoded))
self.assertFalse(check_password(" ", blank_encoded))
@ignore_warnings(category=RemovedInDjango51Warning)
@override_settings(
PASSWORD_HASHERS=["django.contrib.auth.hashers.UnsaltedMD5PasswordHasher"]
)
def test_unsalted_md5_encode_invalid_salt(self):
hasher = get_hasher("unsalted_md5")
msg = "salt must be empty."
with self.assertRaisesMessage(ValueError, msg):
hasher.encode("password", salt="salt")
@override_settings(
PASSWORD_HASHERS=["django.contrib.auth.hashers.UnsaltedMD5PasswordHasher"]
)
def test_unsalted_md5_deprecation_warning(self):
msg = "django.contrib.auth.hashers.UnsaltedMD5PasswordHasher is deprecated."
with self.assertRaisesMessage(RemovedInDjango51Warning, msg):
get_hasher("unsalted_md5")
@ignore_warnings(category=RemovedInDjango51Warning)
@override_settings(
PASSWORD_HASHERS=["django.contrib.auth.hashers.UnsaltedSHA1PasswordHasher"]
)
def test_unsalted_sha1(self):
encoded = make_password("lètmein", "", "unsalted_sha1")
self.assertEqual(encoded, "sha1$$6d138ca3ae545631b3abd71a4f076ce759c5700b")
self.assertTrue(is_password_usable(encoded))
self.assertTrue(check_password("lètmein", encoded))
self.assertFalse(check_password("lètmeinz", encoded))
self.assertEqual(identify_hasher(encoded).algorithm, "unsalted_sha1")
# Raw SHA1 isn't acceptable
alt_encoded = encoded[6:]
self.assertFalse(check_password("lètmein", alt_encoded))
# Blank passwords
blank_encoded = make_password("", "", "unsalted_sha1")
self.assertTrue(blank_encoded.startswith("sha1$"))
self.assertTrue(is_password_usable(blank_encoded))
self.assertTrue(check_password("", blank_encoded))
self.assertFalse(check_password(" ", blank_encoded))
@ignore_warnings(category=RemovedInDjango51Warning)
@override_settings(
PASSWORD_HASHERS=["django.contrib.auth.hashers.UnsaltedSHA1PasswordHasher"]
)
def test_unsalted_sha1_encode_invalid_salt(self):
hasher = get_hasher("unsalted_sha1")
msg = "salt must be empty."
with self.assertRaisesMessage(ValueError, msg):
hasher.encode("password", salt="salt")
@override_settings(
PASSWORD_HASHERS=["django.contrib.auth.hashers.UnsaltedSHA1PasswordHasher"]
)
def test_unsalted_sha1_deprecation_warning(self):
msg = "django.contrib.auth.hashers.UnsaltedSHA1PasswordHasher is deprecated."
with self.assertRaisesMessage(RemovedInDjango51Warning, msg):
get_hasher("unsalted_sha1")
@skipUnless(bcrypt, "bcrypt not installed")
def test_bcrypt_sha256(self):
encoded = make_password("lètmein", hasher="bcrypt_sha256")
self.assertTrue(is_password_usable(encoded))
self.assertTrue(encoded.startswith("bcrypt_sha256$"))
self.assertTrue(check_password("lètmein", encoded))
self.assertFalse(check_password("lètmeinz", encoded))
self.assertEqual(identify_hasher(encoded).algorithm, "bcrypt_sha256")
        # bcrypt_sha256 pre-hashes the password, so bcrypt's 72-character
        # truncation no longer applies.
password = (
"VSK0UYV6FFQVZ0KG88DYN9WADAADZO1CTSIVDJUNZSUML6IBX7LN7ZS3R5"
"JGB3RGZ7VI7G7DJQ9NI8BQFSRPTG6UWTTVESA5ZPUN"
)
encoded = make_password(password, hasher="bcrypt_sha256")
self.assertTrue(check_password(password, encoded))
self.assertFalse(check_password(password[:72], encoded))
# Blank passwords
blank_encoded = make_password("", hasher="bcrypt_sha256")
self.assertTrue(blank_encoded.startswith("bcrypt_sha256$"))
self.assertTrue(is_password_usable(blank_encoded))
self.assertTrue(check_password("", blank_encoded))
self.assertFalse(check_password(" ", blank_encoded))
@skipUnless(bcrypt, "bcrypt not installed")
@override_settings(
PASSWORD_HASHERS=["django.contrib.auth.hashers.BCryptPasswordHasher"]
)
def test_bcrypt(self):
encoded = make_password("lètmein", hasher="bcrypt")
self.assertTrue(is_password_usable(encoded))
self.assertTrue(encoded.startswith("bcrypt$"))
self.assertTrue(check_password("lètmein", encoded))
self.assertFalse(check_password("lètmeinz", encoded))
self.assertEqual(identify_hasher(encoded).algorithm, "bcrypt")
# Blank passwords
blank_encoded = make_password("", hasher="bcrypt")
self.assertTrue(blank_encoded.startswith("bcrypt$"))
self.assertTrue(is_password_usable(blank_encoded))
self.assertTrue(check_password("", blank_encoded))
self.assertFalse(check_password(" ", blank_encoded))
@skipUnless(bcrypt, "bcrypt not installed")
@override_settings(
PASSWORD_HASHERS=["django.contrib.auth.hashers.BCryptPasswordHasher"]
)
def test_bcrypt_upgrade(self):
hasher = get_hasher("bcrypt")
self.assertEqual("bcrypt", hasher.algorithm)
self.assertNotEqual(hasher.rounds, 4)
old_rounds = hasher.rounds
try:
# Generate a password with 4 rounds.
hasher.rounds = 4
encoded = make_password("letmein", hasher="bcrypt")
rounds = hasher.safe_summary(encoded)["work factor"]
self.assertEqual(rounds, 4)
state = {"upgraded": False}
def setter(password):
state["upgraded"] = True
# No upgrade is triggered.
self.assertTrue(check_password("letmein", encoded, setter, "bcrypt"))
self.assertFalse(state["upgraded"])
# Revert to the old rounds count and ...
hasher.rounds = old_rounds
# ... check if the password would get updated to the new count.
self.assertTrue(check_password("letmein", encoded, setter, "bcrypt"))
self.assertTrue(state["upgraded"])
finally:
hasher.rounds = old_rounds
@skipUnless(bcrypt, "bcrypt not installed")
@override_settings(
PASSWORD_HASHERS=["django.contrib.auth.hashers.BCryptPasswordHasher"]
)
def test_bcrypt_harden_runtime(self):
hasher = get_hasher("bcrypt")
self.assertEqual("bcrypt", hasher.algorithm)
with mock.patch.object(hasher, "rounds", 4):
encoded = make_password("letmein", hasher="bcrypt")
with mock.patch.object(hasher, "rounds", 6), mock.patch.object(
hasher, "encode", side_effect=hasher.encode
):
hasher.harden_runtime("wrong_password", encoded)
# Increasing rounds from 4 to 6 means an increase of 4 in workload,
# therefore hardening should run 3 times to make the timing the
# same (the original encode() call already ran once).
self.assertEqual(hasher.encode.call_count, 3)
# Get the original salt (includes the original workload factor)
algorithm, data = encoded.split("$", 1)
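            # data[:29] is the "$2b$04$" prefix plus the 22-character salt.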
expected_call = (("wrong_password", data[:29].encode()),)
self.assertEqual(hasher.encode.call_args_list, [expected_call] * 3)
def test_unusable(self):
encoded = make_password(None)
self.assertEqual(
len(encoded),
len(UNUSABLE_PASSWORD_PREFIX) + UNUSABLE_PASSWORD_SUFFIX_LENGTH,
)
self.assertFalse(is_password_usable(encoded))
self.assertFalse(check_password(None, encoded))
self.assertFalse(check_password(encoded, encoded))
self.assertFalse(check_password(UNUSABLE_PASSWORD_PREFIX, encoded))
self.assertFalse(check_password("", encoded))
self.assertFalse(check_password("lètmein", encoded))
self.assertFalse(check_password("lètmeinz", encoded))
with self.assertRaisesMessage(ValueError, "Unknown password hashing algorithm"):
identify_hasher(encoded)
# Assert that the unusable passwords actually contain a random part.
# This might fail one day due to a hash collision.
self.assertNotEqual(encoded, make_password(None), "Random password collision?")
def test_unspecified_password(self):
"""
Makes sure specifying no plain password with a valid encoded password
returns `False`.
"""
self.assertFalse(check_password(None, make_password("lètmein")))
def test_bad_algorithm(self):
msg = (
"Unknown password hashing algorithm '%s'. Did you specify it in "
"the PASSWORD_HASHERS setting?"
)
with self.assertRaisesMessage(ValueError, msg % "lolcat"):
make_password("lètmein", hasher="lolcat")
with self.assertRaisesMessage(ValueError, msg % "lolcat"):
identify_hasher("lolcat$salt$hash")
def test_is_password_usable(self):
passwords = ("lètmein_badencoded", "", None)
for password in passwords:
with self.subTest(password=password):
self.assertIs(is_password_usable(password), True)
def test_low_level_pbkdf2(self):
hasher = PBKDF2PasswordHasher()
encoded = hasher.encode("lètmein", "seasalt2")
self.assertEqual(
encoded,
"pbkdf2_sha256$720000$"
"seasalt2$e8hbsPnTo9qWhT3xYfKWoRth0h0J3360yb/tipPhPtY=",
)
self.assertTrue(hasher.verify("lètmein", encoded))
def test_low_level_pbkdf2_sha1(self):
hasher = PBKDF2SHA1PasswordHasher()
encoded = hasher.encode("lètmein", "seasalt2")
self.assertEqual(
encoded, "pbkdf2_sha1$720000$seasalt2$2DDbzziqCtfldrRSNAaF8oA9OMw="
)
self.assertTrue(hasher.verify("lètmein", encoded))
@skipUnless(bcrypt, "bcrypt not installed")
def test_bcrypt_salt_check(self):
hasher = BCryptPasswordHasher()
encoded = hasher.encode("lètmein", hasher.salt())
self.assertIs(hasher.must_update(encoded), False)
@skipUnless(bcrypt, "bcrypt not installed")
def test_bcryptsha256_salt_check(self):
hasher = BCryptSHA256PasswordHasher()
encoded = hasher.encode("lètmein", hasher.salt())
self.assertIs(hasher.must_update(encoded), False)
@override_settings(
PASSWORD_HASHERS=[
"django.contrib.auth.hashers.PBKDF2PasswordHasher",
"django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher",
"django.contrib.auth.hashers.MD5PasswordHasher",
],
)
def test_upgrade(self):
self.assertEqual("pbkdf2_sha256", get_hasher("default").algorithm)
for algo in ("pbkdf2_sha1", "md5"):
with self.subTest(algo=algo):
encoded = make_password("lètmein", hasher=algo)
state = {"upgraded": False}
def setter(password):
state["upgraded"] = True
self.assertTrue(check_password("lètmein", encoded, setter))
self.assertTrue(state["upgraded"])
def test_no_upgrade(self):
encoded = make_password("lètmein")
state = {"upgraded": False}
def setter():
state["upgraded"] = True
self.assertFalse(check_password("WRONG", encoded, setter))
self.assertFalse(state["upgraded"])
@override_settings(
PASSWORD_HASHERS=[
"django.contrib.auth.hashers.PBKDF2PasswordHasher",
"django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher",
"django.contrib.auth.hashers.MD5PasswordHasher",
],
)
def test_no_upgrade_on_incorrect_pass(self):
self.assertEqual("pbkdf2_sha256", get_hasher("default").algorithm)
for algo in ("pbkdf2_sha1", "md5"):
with self.subTest(algo=algo):
encoded = make_password("lètmein", hasher=algo)
state = {"upgraded": False}
def setter():
state["upgraded"] = True
self.assertFalse(check_password("WRONG", encoded, setter))
self.assertFalse(state["upgraded"])
def test_pbkdf2_upgrade(self):
hasher = get_hasher("default")
self.assertEqual("pbkdf2_sha256", hasher.algorithm)
self.assertNotEqual(hasher.iterations, 1)
old_iterations = hasher.iterations
try:
# Generate a password with 1 iteration.
hasher.iterations = 1
encoded = make_password("letmein")
algo, iterations, salt, hash = encoded.split("$", 3)
self.assertEqual(iterations, "1")
state = {"upgraded": False}
def setter(password):
state["upgraded"] = True
# No upgrade is triggered
self.assertTrue(check_password("letmein", encoded, setter))
self.assertFalse(state["upgraded"])
# Revert to the old iteration count and ...
hasher.iterations = old_iterations
# ... check if the password would get updated to the new iteration count.
self.assertTrue(check_password("letmein", encoded, setter))
self.assertTrue(state["upgraded"])
finally:
hasher.iterations = old_iterations
def test_pbkdf2_harden_runtime(self):
hasher = get_hasher("default")
self.assertEqual("pbkdf2_sha256", hasher.algorithm)
with mock.patch.object(hasher, "iterations", 1):
encoded = make_password("letmein")
with mock.patch.object(hasher, "iterations", 6), mock.patch.object(
hasher, "encode", side_effect=hasher.encode
):
hasher.harden_runtime("wrong_password", encoded)
# Encode should get called once ...
self.assertEqual(hasher.encode.call_count, 1)
# ... with the original salt and 5 iterations.
algorithm, iterations, salt, hash = encoded.split("$", 3)
expected_call = (("wrong_password", salt, 5),)
self.assertEqual(hasher.encode.call_args, expected_call)
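# Background for the assertion above: PBKDF2's harden_runtime() runs the
# *difference* in iterations (6 - 1 = 5 here) against the submitted
# password, so a wrong guess costs about as much time as verifying a hash
# stored at the current iteration count.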
def test_pbkdf2_upgrade_new_hasher(self):
hasher = get_hasher("default")
self.assertEqual("pbkdf2_sha256", hasher.algorithm)
self.assertNotEqual(hasher.iterations, 1)
state = {"upgraded": False}
def setter(password):
state["upgraded"] = True
with self.settings(
PASSWORD_HASHERS=["auth_tests.test_hashers.PBKDF2SingleIterationHasher"]
):
encoded = make_password("letmein")
algo, iterations, salt, hash = encoded.split("$", 3)
self.assertEqual(iterations, "1")
# No upgrade is triggered
self.assertTrue(check_password("letmein", encoded, setter))
self.assertFalse(state["upgraded"])
# Revert to the old iteration count and check if the password would get
# updated to the new iteration count.
with self.settings(
PASSWORD_HASHERS=[
"django.contrib.auth.hashers.PBKDF2PasswordHasher",
"auth_tests.test_hashers.PBKDF2SingleIterationHasher",
]
):
self.assertTrue(check_password("letmein", encoded, setter))
self.assertTrue(state["upgraded"])
def test_check_password_calls_harden_runtime(self):
hasher = get_hasher("default")
encoded = make_password("letmein")
with mock.patch.object(hasher, "harden_runtime"), mock.patch.object(
hasher, "must_update", return_value=True
):
# Correct password supplied, no hardening needed
check_password("letmein", encoded)
self.assertEqual(hasher.harden_runtime.call_count, 0)
# Wrong password supplied, hardening needed
check_password("wrong_password", encoded)
self.assertEqual(hasher.harden_runtime.call_count, 1)
def test_encode_invalid_salt(self):
hasher_classes = [
MD5PasswordHasher,
PBKDF2PasswordHasher,
PBKDF2SHA1PasswordHasher,
ScryptPasswordHasher,
]
msg = "salt must be provided and cannot contain $."
for hasher_class in hasher_classes:
hasher = hasher_class()
for salt in [None, "", "sea$salt"]:
with self.subTest(hasher_class.__name__, salt=salt):
with self.assertRaisesMessage(ValueError, msg):
hasher.encode("password", salt)
def test_encode_password_required(self):
hasher_classes = [
MD5PasswordHasher,
PBKDF2PasswordHasher,
PBKDF2SHA1PasswordHasher,
ScryptPasswordHasher,
]
msg = "password must be provided."
for hasher_class in hasher_classes:
hasher = hasher_class()
with self.subTest(hasher_class.__name__):
with self.assertRaisesMessage(TypeError, msg):
hasher.encode(None, "seasalt")
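# A minimal sketch of the subclass contract exercised below (hypothetical
# ToyHasher; hashlib and django.utils.crypto.constant_time_compare imports
# assumed, and only the documented BasePasswordHasher API is used):
#
#   class ToyHasher(BasePasswordHasher):
#       algorithm = "toy"
#
#       def encode(self, password, salt):
#           digest = hashlib.sha256((salt + password).encode()).hexdigest()
#           return "%s$%s$%s" % (self.algorithm, salt, digest)
#
#       def verify(self, password, encoded):
#           algorithm, salt, digest = encoded.split("$", 2)
#           return constant_time_compare(encoded, self.encode(password, salt))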
class BasePasswordHasherTests(SimpleTestCase):
not_implemented_msg = "subclasses of BasePasswordHasher must provide %s() method"
def setUp(self):
self.hasher = BasePasswordHasher()
def test_load_library_no_algorithm(self):
msg = "Hasher 'BasePasswordHasher' doesn't specify a library attribute"
with self.assertRaisesMessage(ValueError, msg):
self.hasher._load_library()
def test_load_library_importerror(self):
PlainHasher = type(
"PlainHasher",
(BasePasswordHasher,),
{"algorithm": "plain", "library": "plain"},
)
msg = "Couldn't load 'PlainHasher' algorithm library: No module named 'plain'"
with self.assertRaisesMessage(ValueError, msg):
PlainHasher()._load_library()
def test_attributes(self):
self.assertIsNone(self.hasher.algorithm)
self.assertIsNone(self.hasher.library)
def test_encode(self):
msg = self.not_implemented_msg % "an encode"
with self.assertRaisesMessage(NotImplementedError, msg):
self.hasher.encode("password", "salt")
def test_decode(self):
msg = self.not_implemented_msg % "a decode"
with self.assertRaisesMessage(NotImplementedError, msg):
self.hasher.decode("encoded")
def test_harden_runtime(self):
msg = (
"subclasses of BasePasswordHasher should provide a harden_runtime() method"
)
with self.assertWarnsMessage(Warning, msg):
self.hasher.harden_runtime("password", "encoded")
def test_must_update(self):
self.assertIs(self.hasher.must_update("encoded"), False)
def test_safe_summary(self):
msg = self.not_implemented_msg % "a safe_summary"
with self.assertRaisesMessage(NotImplementedError, msg):
self.hasher.safe_summary("encoded")
def test_verify(self):
msg = self.not_implemented_msg % "a verify"
with self.assertRaisesMessage(NotImplementedError, msg):
self.hasher.verify("password", "encoded")
@skipUnless(argon2, "argon2-cffi not installed")
@override_settings(PASSWORD_HASHERS=PASSWORD_HASHERS)
class TestUtilsHashPassArgon2(SimpleTestCase):
def test_argon2(self):
encoded = make_password("lètmein", hasher="argon2")
self.assertTrue(is_password_usable(encoded))
self.assertTrue(encoded.startswith("argon2$argon2id$"))
self.assertTrue(check_password("lètmein", encoded))
self.assertFalse(check_password("lètmeinz", encoded))
self.assertEqual(identify_hasher(encoded).algorithm, "argon2")
# Blank passwords
blank_encoded = make_password("", hasher="argon2")
self.assertTrue(blank_encoded.startswith("argon2$argon2id$"))
self.assertTrue(is_password_usable(blank_encoded))
self.assertTrue(check_password("", blank_encoded))
self.assertFalse(check_password(" ", blank_encoded))
# Old hashes without version attribute
encoded = (
"argon2$argon2i$m=8,t=1,p=1$c29tZXNhbHQ$gwQOXSNhxiOxPOA0+PY10P9QFO"
"4NAYysnqRt1GSQLE55m+2GYDt9FEjPMHhP2Cuf0nOEXXMocVrsJAtNSsKyfg"
)
self.assertTrue(check_password("secret", encoded))
self.assertFalse(check_password("wrong", encoded))
# Old hashes with version attribute.
encoded = "argon2$argon2i$v=19$m=8,t=1,p=1$c2FsdHNhbHQ$YC9+jJCrQhs5R6db7LlN8Q"
self.assertIs(check_password("secret", encoded), True)
self.assertIs(check_password("wrong", encoded), False)
# Salt entropy check.
hasher = get_hasher("argon2")
encoded_weak_salt = make_password("lètmein", "iodizedsalt", "argon2")
encoded_strong_salt = make_password("lètmein", hasher.salt(), "argon2")
self.assertIs(hasher.must_update(encoded_weak_salt), True)
self.assertIs(hasher.must_update(encoded_strong_salt), False)
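# The salt-entropy check above relies on must_update() flagging salts
# shorter than the hasher's entropy target: "iodizedsalt" is 11
# alphanumeric characters (roughly 65 bits), below the default 128-bit
# target that hasher.salt() generates.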
def test_argon2_decode(self):
salt = "abcdefghijk"
encoded = make_password("lètmein", salt=salt, hasher="argon2")
hasher = get_hasher("argon2")
decoded = hasher.decode(encoded)
self.assertEqual(decoded["memory_cost"], hasher.memory_cost)
self.assertEqual(decoded["parallelism"], hasher.parallelism)
self.assertEqual(decoded["salt"], salt)
self.assertEqual(decoded["time_cost"], hasher.time_cost)
def test_argon2_upgrade(self):
self._test_argon2_upgrade("time_cost", "time cost", 1)
self._test_argon2_upgrade("memory_cost", "memory cost", 64)
self._test_argon2_upgrade("parallelism", "parallelism", 1)
def test_argon2_version_upgrade(self):
hasher = get_hasher("argon2")
state = {"upgraded": False}
encoded = (
"argon2$argon2id$v=19$m=102400,t=2,p=8$Y041dExhNkljRUUy$TMa6A8fPJh"
"CAUXRhJXCXdw"
)
def setter(password):
state["upgraded"] = True
old_m = hasher.memory_cost
old_t = hasher.time_cost
old_p = hasher.parallelism
try:
hasher.memory_cost = 8
hasher.time_cost = 1
hasher.parallelism = 1
self.assertTrue(check_password("secret", encoded, setter, "argon2"))
self.assertTrue(state["upgraded"])
finally:
hasher.memory_cost = old_m
hasher.time_cost = old_t
hasher.parallelism = old_p
def _test_argon2_upgrade(self, attr, summary_key, new_value):
hasher = get_hasher("argon2")
self.assertEqual("argon2", hasher.algorithm)
self.assertNotEqual(getattr(hasher, attr), new_value)
old_value = getattr(hasher, attr)
try:
# Generate hash with attr set to 1
setattr(hasher, attr, new_value)
encoded = make_password("letmein", hasher="argon2")
attr_value = hasher.safe_summary(encoded)[summary_key]
self.assertEqual(attr_value, new_value)
state = {"upgraded": False}
def setter(password):
state["upgraded"] = True
# No upgrade is triggered.
self.assertTrue(check_password("letmein", encoded, setter, "argon2"))
self.assertFalse(state["upgraded"])
# Revert to the old rounds count and ...
setattr(hasher, attr, old_value)
# ... check if the password would get updated to the new count.
self.assertTrue(check_password("letmein", encoded, setter, "argon2"))
self.assertTrue(state["upgraded"])
finally:
setattr(hasher, attr, old_value)
@skipUnless(scrypt, "scrypt not available")
@override_settings(PASSWORD_HASHERS=PASSWORD_HASHERS)
class TestUtilsHashPassScrypt(SimpleTestCase):
def test_scrypt(self):
encoded = make_password("lètmein", "seasalt", "scrypt")
self.assertEqual(
encoded,
"scrypt$16384$seasalt$8$1$Qj3+9PPyRjSJIebHnG81TMjsqtaIGxNQG/aEB/NY"
"afTJ7tibgfYz71m0ldQESkXFRkdVCBhhY8mx7rQwite/Pw==",
)
self.assertIs(is_password_usable(encoded), True)
self.assertIs(check_password("lètmein", encoded), True)
self.assertIs(check_password("lètmeinz", encoded), False)
self.assertEqual(identify_hasher(encoded).algorithm, "scrypt")
# Blank passwords.
blank_encoded = make_password("", "seasalt", "scrypt")
self.assertIs(blank_encoded.startswith("scrypt$"), True)
self.assertIs(is_password_usable(blank_encoded), True)
self.assertIs(check_password("", blank_encoded), True)
self.assertIs(check_password(" ", blank_encoded), False)
def test_scrypt_decode(self):
encoded = make_password("lètmein", "seasalt", "scrypt")
hasher = get_hasher("scrypt")
decoded = hasher.decode(encoded)
tests = [
("block_size", hasher.block_size),
("parallelism", hasher.parallelism),
("salt", "seasalt"),
("work_factor", hasher.work_factor),
]
for key, expected in tests:
with self.subTest(key=key):
self.assertEqual(decoded[key], expected)
def _test_scrypt_upgrade(self, attr, summary_key, new_value):
hasher = get_hasher("scrypt")
self.assertEqual(hasher.algorithm, "scrypt")
self.assertNotEqual(getattr(hasher, attr), new_value)
old_value = getattr(hasher, attr)
try:
# Generate hash with attr set to the new value.
setattr(hasher, attr, new_value)
encoded = make_password("lètmein", "seasalt", "scrypt")
attr_value = hasher.safe_summary(encoded)[summary_key]
self.assertEqual(attr_value, new_value)
state = {"upgraded": False}
def setter(password):
state["upgraded"] = True
# No update is triggered.
self.assertIs(check_password("lètmein", encoded, setter, "scrypt"), True)
self.assertIs(state["upgraded"], False)
# Revert to the old value.
setattr(hasher, attr, old_value)
# Password is updated.
self.assertIs(check_password("lètmein", encoded, setter, "scrypt"), True)
self.assertIs(state["upgraded"], True)
finally:
setattr(hasher, attr, old_value)
def test_scrypt_upgrade(self):
tests = [
("work_factor", "work factor", 2**11),
("block_size", "block size", 10),
("parallelism", "parallelism", 2),
]
for attr, summary_key, new_value in tests:
with self.subTest(attr=attr):
self._test_scrypt_upgrade(attr, summary_key, new_value)
|
ce99a4d0c001b9bfa6cd8e3f2306d69a9f83c547b0c3a38c773b18e1030bb4d5 | import inspect
import threading
from datetime import datetime, timedelta
from unittest import mock
from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist
from django.db import DEFAULT_DB_ALIAS, DatabaseError, connections, models
from django.db.models.manager import BaseManager
from django.db.models.query import MAX_GET_RESULTS, EmptyQuerySet
from django.test import (
SimpleTestCase,
TestCase,
TransactionTestCase,
skipUnlessDBFeature,
)
from django.utils.translation import gettext_lazy
from .models import (
Article,
ArticleSelectOnSave,
ChildPrimaryKeyWithDefault,
FeaturedArticle,
PrimaryKeyWithDbDefault,
PrimaryKeyWithDefault,
SelfRef,
)
class ModelInstanceCreationTests(TestCase):
def test_object_is_not_written_to_database_until_save_was_called(self):
a = Article(
id=None,
headline="Parrot programs in Python",
pub_date=datetime(2005, 7, 28),
)
self.assertIsNone(a.id)
self.assertEqual(Article.objects.count(), 0)
# Save it into the database. You have to call save() explicitly.
a.save()
self.assertIsNotNone(a.id)
self.assertEqual(Article.objects.count(), 1)
def test_can_initialize_model_instance_using_positional_arguments(self):
"""
You can initialize a model instance using positional arguments,
which should match the field order as defined in the model.
"""
a = Article(None, "Second article", datetime(2005, 7, 29))
a.save()
self.assertEqual(a.headline, "Second article")
self.assertEqual(a.pub_date, datetime(2005, 7, 29, 0, 0))
def test_can_create_instance_using_kwargs(self):
a = Article(
id=None,
headline="Third article",
pub_date=datetime(2005, 7, 30),
)
a.save()
self.assertEqual(a.headline, "Third article")
self.assertEqual(a.pub_date, datetime(2005, 7, 30, 0, 0))
def test_autofields_generate_different_values_for_each_instance(self):
a1 = Article.objects.create(
headline="First", pub_date=datetime(2005, 7, 30, 0, 0)
)
a2 = Article.objects.create(
headline="First", pub_date=datetime(2005, 7, 30, 0, 0)
)
a3 = Article.objects.create(
headline="First", pub_date=datetime(2005, 7, 30, 0, 0)
)
self.assertNotEqual(a3.id, a1.id)
self.assertNotEqual(a3.id, a2.id)
def test_can_mix_and_match_position_and_kwargs(self):
# You can also mix and match position and keyword arguments, but
# be sure not to duplicate field information.
a = Article(None, "Fourth article", pub_date=datetime(2005, 7, 31))
a.save()
self.assertEqual(a.headline, "Fourth article")
def test_positional_and_keyword_args_for_the_same_field(self):
msg = "Article() got both positional and keyword arguments for field '%s'."
with self.assertRaisesMessage(TypeError, msg % "headline"):
Article(None, "Fifth article", headline="Other headline.")
with self.assertRaisesMessage(TypeError, msg % "headline"):
Article(None, "Sixth article", headline="")
with self.assertRaisesMessage(TypeError, msg % "pub_date"):
Article(None, "Seventh article", datetime(2021, 3, 1), pub_date=None)
def test_cannot_create_instance_with_invalid_kwargs(self):
msg = "Article() got unexpected keyword arguments: 'foo'"
with self.assertRaisesMessage(TypeError, msg):
Article(
id=None,
headline="Some headline",
pub_date=datetime(2005, 7, 31),
foo="bar",
)
msg = "Article() got unexpected keyword arguments: 'foo', 'bar'"
with self.assertRaisesMessage(TypeError, msg):
Article(
id=None,
headline="Some headline",
pub_date=datetime(2005, 7, 31),
foo="bar",
bar="baz",
)
def test_can_leave_off_value_for_autofield_and_it_gets_value_on_save(self):
"""
You can leave off the value for an AutoField when creating an
object, because it'll get filled in automatically when you save().
"""
a = Article(headline="Article 5", pub_date=datetime(2005, 7, 31))
a.save()
self.assertEqual(a.headline, "Article 5")
self.assertIsNotNone(a.id)
def test_leaving_off_a_field_with_default_set_the_default_will_be_saved(self):
a = Article(pub_date=datetime(2005, 7, 31))
a.save()
self.assertEqual(a.headline, "Default headline")
def test_for_datetimefields_saves_as_much_precision_as_was_given(self):
"""as much precision in *seconds*"""
a1 = Article(
headline="Article 7",
pub_date=datetime(2005, 7, 31, 12, 30),
)
a1.save()
self.assertEqual(
Article.objects.get(id__exact=a1.id).pub_date, datetime(2005, 7, 31, 12, 30)
)
a2 = Article(
headline="Article 8",
pub_date=datetime(2005, 7, 31, 12, 30, 45),
)
a2.save()
self.assertEqual(
Article.objects.get(id__exact=a2.id).pub_date,
datetime(2005, 7, 31, 12, 30, 45),
)
def test_saving_an_object_again_does_not_create_a_new_object(self):
a = Article(headline="original", pub_date=datetime(2014, 5, 16))
a.save()
current_id = a.id
a.save()
self.assertEqual(a.id, current_id)
a.headline = "Updated headline"
a.save()
self.assertEqual(a.id, current_id)
def test_querysets_checking_for_membership(self):
headlines = ["Parrot programs in Python", "Second article", "Third article"]
some_pub_date = datetime(2014, 5, 16, 12, 1)
for headline in headlines:
Article(headline=headline, pub_date=some_pub_date).save()
a = Article(headline="Some headline", pub_date=some_pub_date)
a.save()
# You can use 'in' to test for membership...
self.assertIn(a, Article.objects.all())
# ... but there will often be more efficient ways if that is all you need:
self.assertTrue(Article.objects.filter(id=a.id).exists())
def test_save_primary_with_default(self):
# An UPDATE attempt is skipped when a primary key has default.
with self.assertNumQueries(1):
PrimaryKeyWithDefault().save()
def test_save_primary_with_db_default(self):
# An UPDATE attempt is skipped when a primary key has db_default.
with self.assertNumQueries(1):
PrimaryKeyWithDbDefault().save()
def test_save_parent_primary_with_default(self):
# An UPDATE attempt is skipped when an inherited primary key has
# default.
with self.assertNumQueries(2):
ChildPrimaryKeyWithDefault().save()
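# Context for the three tests above: when the primary key value comes from
# a default rather than from the database, save() skips the usual
# UPDATE-then-INSERT attempt and issues INSERTs directly (one per table,
# hence two queries for the inherited case).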
class ModelTest(TestCase):
def test_objects_attribute_is_only_available_on_the_class_itself(self):
with self.assertRaisesMessage(
AttributeError, "Manager isn't accessible via Article instances"
):
getattr(
Article(),
"objects",
)
self.assertFalse(hasattr(Article(), "objects"))
self.assertTrue(hasattr(Article, "objects"))
def test_queryset_delete_removes_all_items_in_that_queryset(self):
headlines = ["An article", "Article One", "Amazing article", "Boring article"]
some_pub_date = datetime(2014, 5, 16, 12, 1)
for headline in headlines:
Article(headline=headline, pub_date=some_pub_date).save()
self.assertQuerySetEqual(
Article.objects.order_by("headline"),
sorted(headlines),
transform=lambda a: a.headline,
)
Article.objects.filter(headline__startswith="A").delete()
self.assertEqual(Article.objects.get().headline, "Boring article")
def test_not_equal_and_equal_operators_behave_as_expected_on_instances(self):
some_pub_date = datetime(2014, 5, 16, 12, 1)
a1 = Article.objects.create(headline="First", pub_date=some_pub_date)
a2 = Article.objects.create(headline="Second", pub_date=some_pub_date)
self.assertNotEqual(a1, a2)
self.assertEqual(a1, Article.objects.get(id__exact=a1.id))
self.assertNotEqual(
Article.objects.get(id__exact=a1.id), Article.objects.get(id__exact=a2.id)
)
def test_microsecond_precision(self):
a9 = Article(
headline="Article 9",
pub_date=datetime(2005, 7, 31, 12, 30, 45, 180),
)
a9.save()
self.assertEqual(
Article.objects.get(pk=a9.pk).pub_date,
datetime(2005, 7, 31, 12, 30, 45, 180),
)
def test_manually_specify_primary_key(self):
# You can manually specify the primary key when creating a new object.
a101 = Article(
id=101,
headline="Article 101",
pub_date=datetime(2005, 7, 31, 12, 30, 45),
)
a101.save()
a101 = Article.objects.get(pk=101)
self.assertEqual(a101.headline, "Article 101")
def test_create_method(self):
# You can create saved objects in a single step
a10 = Article.objects.create(
headline="Article 10",
pub_date=datetime(2005, 7, 31, 12, 30, 45),
)
self.assertEqual(Article.objects.get(headline="Article 10"), a10)
def test_year_lookup_edge_case(self):
# Edge-case test: A year lookup should retrieve all objects in
# the given year, including Jan. 1 and Dec. 31.
a11 = Article.objects.create(
headline="Article 11",
pub_date=datetime(2008, 1, 1),
)
a12 = Article.objects.create(
headline="Article 12",
pub_date=datetime(2008, 12, 31, 23, 59, 59, 999999),
)
self.assertSequenceEqual(
Article.objects.filter(pub_date__year=2008),
[a11, a12],
)
def test_unicode_data(self):
# Unicode data works, too.
a = Article(
headline="\u6797\u539f \u3081\u3050\u307f",
pub_date=datetime(2005, 7, 28),
)
a.save()
self.assertEqual(
Article.objects.get(pk=a.id).headline, "\u6797\u539f \u3081\u3050\u307f"
)
def test_hash_function(self):
# Model instances have a hash function, so they can be used in sets
# or as dictionary keys. Two models compare as equal if their primary
# keys are equal.
a10 = Article.objects.create(
headline="Article 10",
pub_date=datetime(2005, 7, 31, 12, 30, 45),
)
a11 = Article.objects.create(
headline="Article 11",
pub_date=datetime(2008, 1, 1),
)
a12 = Article.objects.create(
headline="Article 12",
pub_date=datetime(2008, 12, 31, 23, 59, 59, 999999),
)
s = {a10, a11, a12}
self.assertIn(Article.objects.get(headline="Article 11"), s)
def test_extra_method_select_argument_with_dashes_and_values(self):
# The 'select' argument to extra() supports names with dashes in
# them, as long as you use values().
Article.objects.bulk_create(
[
Article(
headline="Article 10", pub_date=datetime(2005, 7, 31, 12, 30, 45)
),
Article(headline="Article 11", pub_date=datetime(2008, 1, 1)),
Article(
headline="Article 12",
pub_date=datetime(2008, 12, 31, 23, 59, 59, 999999),
),
]
)
dicts = (
Article.objects.filter(pub_date__year=2008)
.extra(select={"dashed-value": "1"})
.values("headline", "dashed-value")
)
self.assertEqual(
[sorted(d.items()) for d in dicts],
[
[("dashed-value", 1), ("headline", "Article 11")],
[("dashed-value", 1), ("headline", "Article 12")],
],
)
def test_extra_method_select_argument_with_dashes(self):
# If you use 'select' with extra() and names containing dashes on a
# query that's *not* a values() query, those extra 'select' values
# will silently be ignored.
Article.objects.bulk_create(
[
Article(
headline="Article 10", pub_date=datetime(2005, 7, 31, 12, 30, 45)
),
Article(headline="Article 11", pub_date=datetime(2008, 1, 1)),
Article(
headline="Article 12",
pub_date=datetime(2008, 12, 31, 23, 59, 59, 999999),
),
]
)
articles = Article.objects.filter(pub_date__year=2008).extra(
select={"dashed-value": "1", "undashedvalue": "2"}
)
self.assertEqual(articles[0].undashedvalue, 2)
def test_create_relation_with_gettext_lazy(self):
"""
gettext_lazy objects work when saving model instances
through various methods. Refs #10498.
"""
notlazy = "test"
lazy = gettext_lazy(notlazy)
Article.objects.create(headline=lazy, pub_date=datetime.now())
article = Article.objects.get()
self.assertEqual(article.headline, notlazy)
# test that assign + save works with Promise objects
article.headline = lazy
article.save()
self.assertEqual(article.headline, notlazy)
# test .update()
Article.objects.update(headline=lazy)
article = Article.objects.get()
self.assertEqual(article.headline, notlazy)
# bulk_create() should also work with Promise objects.
Article.objects.all().delete()
Article.objects.bulk_create([Article(headline=lazy, pub_date=datetime.now())])
article = Article.objects.get()
self.assertEqual(article.headline, notlazy)
def test_emptyqs(self):
msg = "EmptyQuerySet can't be instantiated"
with self.assertRaisesMessage(TypeError, msg):
EmptyQuerySet()
self.assertIsInstance(Article.objects.none(), EmptyQuerySet)
self.assertNotIsInstance("", EmptyQuerySet)
def test_emptyqs_values(self):
# test for #15959
Article.objects.create(headline="foo", pub_date=datetime.now())
with self.assertNumQueries(0):
qs = Article.objects.none().values_list("pk")
self.assertIsInstance(qs, EmptyQuerySet)
self.assertEqual(len(qs), 0)
def test_emptyqs_customqs(self):
# A hacky test for custom QuerySet subclass - refs #17271
Article.objects.create(headline="foo", pub_date=datetime.now())
class CustomQuerySet(models.QuerySet):
def do_something(self):
return "did something"
qs = Article.objects.all()
qs.__class__ = CustomQuerySet
qs = qs.none()
with self.assertNumQueries(0):
self.assertEqual(len(qs), 0)
self.assertIsInstance(qs, EmptyQuerySet)
self.assertEqual(qs.do_something(), "did something")
def test_emptyqs_values_order(self):
# Tests for ticket #17712
Article.objects.create(headline="foo", pub_date=datetime.now())
with self.assertNumQueries(0):
self.assertEqual(
len(Article.objects.none().values_list("id").order_by("id")), 0
)
with self.assertNumQueries(0):
self.assertEqual(
len(
Article.objects.none().filter(
id__in=Article.objects.values_list("id", flat=True)
)
),
0,
)
@skipUnlessDBFeature("can_distinct_on_fields")
def test_emptyqs_distinct(self):
# Tests for #19426
Article.objects.create(headline="foo", pub_date=datetime.now())
with self.assertNumQueries(0):
self.assertEqual(
len(Article.objects.none().distinct("headline", "pub_date")), 0
)
def test_ticket_20278(self):
sr = SelfRef.objects.create()
with self.assertRaises(ObjectDoesNotExist):
SelfRef.objects.get(selfref=sr)
def test_eq(self):
self.assertEqual(Article(id=1), Article(id=1))
self.assertNotEqual(Article(id=1), object())
self.assertNotEqual(object(), Article(id=1))
a = Article()
self.assertEqual(a, a)
self.assertEqual(a, mock.ANY)
self.assertNotEqual(Article(), a)
def test_hash(self):
# Value based on PK
self.assertEqual(hash(Article(id=1)), hash(1))
msg = "Model instances without primary key value are unhashable"
with self.assertRaisesMessage(TypeError, msg):
# No PK value -> unhashable (because save() would then change
# hash)
hash(Article())
def test_missing_hash_not_inherited(self):
class NoHash(models.Model):
def __eq__(self, other):
return super().__eq__(other)
with self.assertRaisesMessage(TypeError, "unhashable type: 'NoHash'"):
hash(NoHash(id=1))
def test_specified_parent_hash_inherited(self):
class ParentHash(models.Model):
def __eq__(self, other):
return super().__eq__(other)
__hash__ = models.Model.__hash__
self.assertEqual(hash(ParentHash(id=1)), 1)
def test_delete_and_access_field(self):
# Accessing a field after it's deleted from a model reloads its value.
pub_date = datetime.now()
article = Article.objects.create(headline="foo", pub_date=pub_date)
new_pub_date = article.pub_date + timedelta(days=10)
article.headline = "bar"
article.pub_date = new_pub_date
del article.headline
with self.assertNumQueries(1):
self.assertEqual(article.headline, "foo")
# Fields that weren't deleted aren't reloaded.
self.assertEqual(article.pub_date, new_pub_date)
def test_multiple_objects_max_num_fetched(self):
max_results = MAX_GET_RESULTS - 1
Article.objects.bulk_create(
Article(headline="Area %s" % i, pub_date=datetime(2005, 7, 28))
for i in range(max_results)
)
self.assertRaisesMessage(
MultipleObjectsReturned,
"get() returned more than one Article -- it returned %d!" % max_results,
Article.objects.get,
headline__startswith="Area",
)
Article.objects.create(
headline="Area %s" % max_results, pub_date=datetime(2005, 7, 28)
)
self.assertRaisesMessage(
MultipleObjectsReturned,
"get() returned more than one Article -- it returned more than %d!"
% max_results,
Article.objects.get,
headline__startswith="Area",
)
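# get() fetches at most MAX_GET_RESULTS (21 by default) rows, which is why
# the error message switches from an exact count to "more than" once the
# limit is exceeded.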
class ModelLookupTest(TestCase):
@classmethod
def setUpTestData(cls):
# Create an Article.
cls.a = Article(
id=None,
headline="Swallow programs in Python",
pub_date=datetime(2005, 7, 28),
)
# Save it into the database. You have to call save() explicitly.
cls.a.save()
def test_all_lookup(self):
# Change values by changing the attributes, then calling save().
self.a.headline = "Parrot programs in Python"
self.a.save()
# Article.objects.all() returns all the articles in the database.
self.assertSequenceEqual(Article.objects.all(), [self.a])
def test_rich_lookup(self):
# Django provides a rich database lookup API.
self.assertEqual(Article.objects.get(id__exact=self.a.id), self.a)
self.assertEqual(Article.objects.get(headline__startswith="Swallow"), self.a)
self.assertEqual(Article.objects.get(pub_date__year=2005), self.a)
self.assertEqual(
Article.objects.get(pub_date__year=2005, pub_date__month=7), self.a
)
self.assertEqual(
Article.objects.get(
pub_date__year=2005, pub_date__month=7, pub_date__day=28
),
self.a,
)
self.assertEqual(Article.objects.get(pub_date__week_day=5), self.a)
def test_equal_lookup(self):
# The "__exact" lookup type can be omitted, as a shortcut.
self.assertEqual(Article.objects.get(id=self.a.id), self.a)
self.assertEqual(
Article.objects.get(headline="Swallow programs in Python"), self.a
)
self.assertSequenceEqual(
Article.objects.filter(pub_date__year=2005),
[self.a],
)
self.assertSequenceEqual(
Article.objects.filter(pub_date__year=2004),
[],
)
self.assertSequenceEqual(
Article.objects.filter(pub_date__year=2005, pub_date__month=7),
[self.a],
)
self.assertSequenceEqual(
Article.objects.filter(pub_date__week_day=5),
[self.a],
)
self.assertSequenceEqual(
Article.objects.filter(pub_date__week_day=6),
[],
)
def test_does_not_exist(self):
# Django raises an Article.DoesNotExist exception for get() if the
# parameters don't match any object.
with self.assertRaisesMessage(
ObjectDoesNotExist, "Article matching query does not exist."
):
Article.objects.get(
id__exact=2000,
)
# To avoid dict-ordering related errors, check only one lookup
# per assertion.
with self.assertRaises(ObjectDoesNotExist):
Article.objects.get(pub_date__year=2005, pub_date__month=8)
with self.assertRaisesMessage(
ObjectDoesNotExist, "Article matching query does not exist."
):
Article.objects.get(
pub_date__week_day=6,
)
def test_lookup_by_primary_key(self):
# Lookup by a primary key is the most common case, so Django
# provides a shortcut for primary-key exact lookups.
# The following is identical to articles.get(id=a.id).
self.assertEqual(Article.objects.get(pk=self.a.id), self.a)
# pk can be used as a shortcut for the primary key name in any query.
self.assertSequenceEqual(Article.objects.filter(pk__in=[self.a.id]), [self.a])
# Model instances of the same type and same ID are considered equal.
a = Article.objects.get(pk=self.a.id)
b = Article.objects.get(pk=self.a.id)
self.assertEqual(a, b)
def test_too_many(self):
# Create a very similar object
a = Article(
id=None,
headline="Swallow bites Python",
pub_date=datetime(2005, 7, 28),
)
a.save()
self.assertEqual(Article.objects.count(), 2)
# Django raises an Article.MultipleObjectsReturned exception if the
# lookup matches more than one object
msg = "get() returned more than one Article -- it returned 2!"
with self.assertRaisesMessage(MultipleObjectsReturned, msg):
Article.objects.get(
headline__startswith="Swallow",
)
with self.assertRaisesMessage(MultipleObjectsReturned, msg):
Article.objects.get(
pub_date__year=2005,
)
with self.assertRaisesMessage(MultipleObjectsReturned, msg):
Article.objects.get(pub_date__year=2005, pub_date__month=7)
class ConcurrentSaveTests(TransactionTestCase):
available_apps = ["basic"]
@skipUnlessDBFeature("test_db_allows_multiple_connections")
def test_concurrent_delete_with_save(self):
"""
Test fetching, deleting and finally saving an object - we should get
an insert in this case.
"""
a = Article.objects.create(headline="foo", pub_date=datetime.now())
exceptions = []
def deleter():
try:
# Do not delete a directly - doing so alters its state.
Article.objects.filter(pk=a.pk).delete()
except Exception as e:
exceptions.append(e)
finally:
connections[DEFAULT_DB_ALIAS].close()
self.assertEqual(len(exceptions), 0)
t = threading.Thread(target=deleter)
t.start()
t.join()
a.save()
self.assertEqual(Article.objects.get(pk=a.pk).headline, "foo")
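# save() first attempts an UPDATE; since the row was deleted in the other
# thread, zero rows match and Django falls back to an INSERT with the
# existing primary key, which is why the final lookup succeeds.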
class ManagerTest(SimpleTestCase):
QUERYSET_PROXY_METHODS = [
"none",
"count",
"dates",
"datetimes",
"distinct",
"extra",
"get",
"get_or_create",
"update_or_create",
"create",
"bulk_create",
"bulk_update",
"filter",
"aggregate",
"annotate",
"alias",
"complex_filter",
"exclude",
"in_bulk",
"iterator",
"earliest",
"latest",
"first",
"last",
"order_by",
"select_for_update",
"select_related",
"prefetch_related",
"values",
"values_list",
"update",
"reverse",
"defer",
"only",
"using",
"exists",
"contains",
"explain",
"_insert",
"_update",
"raw",
"union",
"intersection",
"difference",
"aaggregate",
"abulk_create",
"abulk_update",
"acontains",
"acount",
"acreate",
"aearliest",
"aexists",
"aexplain",
"afirst",
"aget",
"aget_or_create",
"ain_bulk",
"aiterator",
"alast",
"alatest",
"aupdate",
"aupdate_or_create",
]
def test_manager_methods(self):
"""
This test ensures that the correct set of methods from `QuerySet`
are copied onto `Manager`.
It's particularly useful to prevent accidentally leaking new methods
into `Manager`. New `QuerySet` methods that should also be copied onto
`Manager` will need to be added to `ManagerTest.QUERYSET_PROXY_METHODS`.
"""
self.assertEqual(
sorted(BaseManager._get_queryset_methods(models.QuerySet)),
sorted(self.QUERYSET_PROXY_METHODS),
)
def test_manager_method_attributes(self):
self.assertEqual(Article.objects.get.__doc__, models.QuerySet.get.__doc__)
self.assertEqual(Article.objects.count.__name__, models.QuerySet.count.__name__)
def test_manager_method_signature(self):
self.assertEqual(
str(inspect.signature(Article.objects.bulk_create)),
"(objs, batch_size=None, ignore_conflicts=False, update_conflicts=False, "
"update_fields=None, unique_fields=None)",
)
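# BaseManager._get_queryset_methods() builds these proxies by introspecting
# QuerySet; roughly (a sketch, not the exact source):
#
#   def create_method(name, method):
#       def manager_method(self, *args, **kwargs):
#           return getattr(self.get_queryset(), name)(*args, **kwargs)
#       manager_method.__name__ = method.__name__
#       manager_method.__doc__ = method.__doc__
#       return manager_method
#
# Copying __name__ and __doc__ is what test_manager_method_attributes
# checks above.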
class SelectOnSaveTests(TestCase):
def test_select_on_save(self):
a1 = Article.objects.create(pub_date=datetime.now())
with self.assertNumQueries(1):
a1.save()
asos = ArticleSelectOnSave.objects.create(pub_date=datetime.now())
with self.assertNumQueries(2):
asos.save()
with self.assertNumQueries(1):
asos.save(force_update=True)
Article.objects.all().delete()
with self.assertRaisesMessage(
DatabaseError, "Forced update did not affect any rows."
):
with self.assertNumQueries(1):
asos.save(force_update=True)
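# With select_on_save=True, the second save() issues a SELECT to check
# whether the row exists before choosing UPDATE or INSERT (two queries),
# instead of trusting the UPDATE's rows-matched count.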
def test_select_on_save_lying_update(self):
"""
select_on_save works correctly if the database doesn't return correct
information about matched rows from UPDATE.
"""
# Change the manager to not return "row matched" for update().
# We are going to change the Article's _base_manager class
# dynamically. This is a bit of a hack, but it seems hard to
# test this properly otherwise. We change the Article's manager
# (not ArticleSelectOnSave's) because proxy models use their
# parent model's _base_manager.
orig_class = Article._base_manager._queryset_class
class FakeQuerySet(models.QuerySet):
# Make sure the _update method below is in fact called.
called = False
def _update(self, *args, **kwargs):
FakeQuerySet.called = True
super()._update(*args, **kwargs)
return 0
try:
Article._base_manager._queryset_class = FakeQuerySet
asos = ArticleSelectOnSave.objects.create(pub_date=datetime.now())
with self.assertNumQueries(3):
asos.save()
self.assertTrue(FakeQuerySet.called)
# This is not wanted behavior, but this is how Django has always
# behaved for databases that do not return correct information
# about matched rows for UPDATE.
with self.assertRaisesMessage(
DatabaseError, "Forced update did not affect any rows."
):
asos.save(force_update=True)
msg = (
"An error occurred in the current transaction. You can't "
"execute queries until the end of the 'atomic' block."
)
with self.assertRaisesMessage(DatabaseError, msg) as cm:
asos.save(update_fields=["pub_date"])
self.assertIsInstance(cm.exception.__cause__, DatabaseError)
finally:
Article._base_manager._queryset_class = orig_class
class ModelRefreshTests(TestCase):
def test_refresh(self):
a = Article.objects.create(pub_date=datetime.now())
Article.objects.create(pub_date=datetime.now())
Article.objects.filter(pk=a.pk).update(headline="new headline")
with self.assertNumQueries(1):
a.refresh_from_db()
self.assertEqual(a.headline, "new headline")
orig_pub_date = a.pub_date
new_pub_date = a.pub_date + timedelta(10)
Article.objects.update(headline="new headline 2", pub_date=new_pub_date)
with self.assertNumQueries(1):
a.refresh_from_db(fields=["headline"])
self.assertEqual(a.headline, "new headline 2")
self.assertEqual(a.pub_date, orig_pub_date)
with self.assertNumQueries(1):
a.refresh_from_db()
self.assertEqual(a.pub_date, new_pub_date)
def test_unknown_kwarg(self):
s = SelfRef.objects.create()
msg = "refresh_from_db() got an unexpected keyword argument 'unknown_kwarg'"
with self.assertRaisesMessage(TypeError, msg):
s.refresh_from_db(unknown_kwarg=10)
def test_lookup_in_fields(self):
s = SelfRef.objects.create()
msg = (
'Found "__" in fields argument. Relations and transforms are not allowed '
"in fields."
)
with self.assertRaisesMessage(ValueError, msg):
s.refresh_from_db(fields=["foo__bar"])
def test_refresh_fk(self):
s1 = SelfRef.objects.create()
s2 = SelfRef.objects.create()
s3 = SelfRef.objects.create(selfref=s1)
s3_copy = SelfRef.objects.get(pk=s3.pk)
s3_copy.selfref.touched = True
s3.selfref = s2
s3.save()
with self.assertNumQueries(1):
s3_copy.refresh_from_db()
with self.assertNumQueries(1):
# The old related instance was thrown away (the selfref_id has
# changed). It needs to be reloaded on access, so one query is
# executed.
self.assertFalse(hasattr(s3_copy.selfref, "touched"))
self.assertEqual(s3_copy.selfref, s2)
def test_refresh_null_fk(self):
s1 = SelfRef.objects.create()
s2 = SelfRef.objects.create(selfref=s1)
s2.selfref = None
s2.refresh_from_db()
self.assertEqual(s2.selfref, s1)
def test_refresh_unsaved(self):
pub_date = datetime.now()
a = Article.objects.create(pub_date=pub_date)
a2 = Article(id=a.pk)
with self.assertNumQueries(1):
a2.refresh_from_db()
self.assertEqual(a2.pub_date, pub_date)
self.assertEqual(a2._state.db, "default")
def test_refresh_fk_on_delete_set_null(self):
a = Article.objects.create(
headline="Parrot programs in Python",
pub_date=datetime(2005, 7, 28),
)
s1 = SelfRef.objects.create(article=a)
a.delete()
s1.refresh_from_db()
self.assertIsNone(s1.article_id)
self.assertIsNone(s1.article)
def test_refresh_no_fields(self):
a = Article.objects.create(pub_date=datetime.now())
with self.assertNumQueries(0):
a.refresh_from_db(fields=[])
def test_refresh_clears_reverse_related(self):
"""refresh_from_db() clear cached reverse relations."""
article = Article.objects.create(
headline="Parrot programs in Python",
pub_date=datetime(2005, 7, 28),
)
self.assertFalse(hasattr(article, "featured"))
FeaturedArticle.objects.create(article_id=article.pk)
article.refresh_from_db()
self.assertTrue(hasattr(article, "featured"))
def test_refresh_clears_one_to_one_field(self):
article = Article.objects.create(
headline="Parrot programs in Python",
pub_date=datetime(2005, 7, 28),
)
featured = FeaturedArticle.objects.create(article_id=article.pk)
self.assertEqual(featured.article.headline, "Parrot programs in Python")
article.headline = "Parrot programs in Python 2.0"
article.save()
featured.refresh_from_db()
self.assertEqual(featured.article.headline, "Parrot programs in Python 2.0")
def test_prefetched_cache_cleared(self):
a = Article.objects.create(pub_date=datetime(2005, 7, 28))
s = SelfRef.objects.create(article=a)
# refresh_from_db() without fields=[...]
a1_prefetched = Article.objects.prefetch_related("selfref_set").first()
self.assertCountEqual(a1_prefetched.selfref_set.all(), [s])
s.article = None
s.save()
# Relation is cleared and prefetch cache is stale.
self.assertCountEqual(a1_prefetched.selfref_set.all(), [s])
a1_prefetched.refresh_from_db()
# Cache was cleared and new results are available.
self.assertCountEqual(a1_prefetched.selfref_set.all(), [])
# refresh_from_db() with fields=[...]
a2_prefetched = Article.objects.prefetch_related("selfref_set").first()
self.assertCountEqual(a2_prefetched.selfref_set.all(), [])
s.article = a
s.save()
# Relation is added and prefetch cache is stale.
self.assertCountEqual(a2_prefetched.selfref_set.all(), [])
a2_prefetched.refresh_from_db(fields=["selfref_set"])
# Cache was cleared and new results are available.
self.assertCountEqual(a2_prefetched.selfref_set.all(), [s])
|
79121c0ce1e4fe9cd0c17438406c93491bf6152edc467b4c59d67ad6eda79195 | """
Bare-bones model
This is a basic model with only two non-primary-key fields.
"""
import uuid
from django.db import models
class Article(models.Model):
headline = models.CharField(max_length=100, default="Default headline")
pub_date = models.DateTimeField()
class Meta:
ordering = ("pub_date", "headline")
def __str__(self):
return self.headline
class FeaturedArticle(models.Model):
article = models.OneToOneField(Article, models.CASCADE, related_name="featured")
class ArticleSelectOnSave(Article):
class Meta:
proxy = True
select_on_save = True
class SelfRef(models.Model):
selfref = models.ForeignKey(
"self",
models.SET_NULL,
null=True,
blank=True,
related_name="+",
)
article = models.ForeignKey(Article, models.SET_NULL, null=True, blank=True)
def __str__(self):
# This method intentionally doesn't work for all cases - part
# of the test for ticket #20278
return SelfRef.objects.get(selfref=self).pk
class PrimaryKeyWithDefault(models.Model):
uuid = models.UUIDField(primary_key=True, default=uuid.uuid4)
class PrimaryKeyWithDbDefault(models.Model):
uuid = models.IntegerField(primary_key=True, db_default=1)
class ChildPrimaryKeyWithDefault(PrimaryKeyWithDefault):
pass
|
688a4393bbfac35dabaf588fb2eb29eb75de4a1787fa175a6e08a20ad3053ce3 | import datetime
import itertools
import unittest
from copy import copy
from unittest import mock
from django.core.exceptions import FieldError
from django.core.management.color import no_style
from django.db import (
DatabaseError,
DataError,
IntegrityError,
OperationalError,
connection,
)
from django.db.backends.utils import truncate_name
from django.db.models import (
CASCADE,
PROTECT,
AutoField,
BigAutoField,
BigIntegerField,
BinaryField,
BooleanField,
CharField,
CheckConstraint,
DateField,
DateTimeField,
DecimalField,
DurationField,
F,
FloatField,
ForeignKey,
ForeignObject,
Index,
IntegerField,
JSONField,
ManyToManyField,
Model,
OneToOneField,
OrderBy,
PositiveIntegerField,
Q,
SlugField,
SmallAutoField,
SmallIntegerField,
TextField,
TimeField,
UniqueConstraint,
UUIDField,
Value,
)
from django.db.models.fields.json import KeyTextTransform
from django.db.models.functions import Abs, Cast, Collate, Lower, Random, Upper
from django.db.models.indexes import IndexExpression
from django.db.transaction import TransactionManagementError, atomic
from django.test import (
TransactionTestCase,
ignore_warnings,
skipIfDBFeature,
skipUnlessDBFeature,
)
from django.test.utils import CaptureQueriesContext, isolate_apps, register_lookup
from django.utils.deprecation import RemovedInDjango51Warning
from .fields import CustomManyToManyField, InheritedManyToManyField, MediumBlobField
from .models import (
Author,
AuthorCharFieldWithIndex,
AuthorTextFieldWithIndex,
AuthorWithDefaultHeight,
AuthorWithEvenLongerName,
AuthorWithIndexedName,
AuthorWithUniqueName,
AuthorWithUniqueNameAndBirthday,
Book,
BookForeignObj,
BookWeak,
BookWithLongName,
BookWithO2O,
BookWithoutAuthor,
BookWithSlug,
IntegerPK,
Node,
Note,
NoteRename,
Tag,
TagM2MTest,
TagUniqueRename,
Thing,
UniqueTest,
new_apps,
)
class SchemaTests(TransactionTestCase):
"""
Tests for the schema-alteration code.
Be aware that these tests are more liable than most to false results,
as sometimes the code to check if a test has worked is almost as complex
as the code it is testing.
"""
available_apps = []
models = [
Author,
AuthorCharFieldWithIndex,
AuthorTextFieldWithIndex,
AuthorWithDefaultHeight,
AuthorWithEvenLongerName,
Book,
BookWeak,
BookWithLongName,
BookWithO2O,
BookWithSlug,
IntegerPK,
Node,
Note,
Tag,
TagM2MTest,
TagUniqueRename,
Thing,
UniqueTest,
]
# Utility functions
def setUp(self):
# local_models should contain test-dependent model classes that will be
# automatically removed from the app cache on test tear down.
self.local_models = []
# isolated_local_models contains models that are in test methods
# decorated with @isolate_apps.
self.isolated_local_models = []
def tearDown(self):
# Delete any tables made for our models
self.delete_tables()
new_apps.clear_cache()
for model in new_apps.get_models():
model._meta._expire_cache()
if "schema" in new_apps.all_models:
for model in self.local_models:
for many_to_many in model._meta.many_to_many:
through = many_to_many.remote_field.through
if through and through._meta.auto_created:
del new_apps.all_models["schema"][through._meta.model_name]
del new_apps.all_models["schema"][model._meta.model_name]
if self.isolated_local_models:
with connection.schema_editor() as editor:
for model in self.isolated_local_models:
editor.delete_model(model)
def delete_tables(self):
"Deletes all model tables for our models for a clean test environment"
converter = connection.introspection.identifier_converter
with connection.schema_editor() as editor:
connection.disable_constraint_checking()
table_names = connection.introspection.table_names()
if connection.features.ignores_table_name_case:
table_names = [table_name.lower() for table_name in table_names]
for model in itertools.chain(SchemaTests.models, self.local_models):
tbl = converter(model._meta.db_table)
if connection.features.ignores_table_name_case:
tbl = tbl.lower()
if tbl in table_names:
editor.delete_model(model)
table_names.remove(tbl)
connection.enable_constraint_checking()
def column_classes(self, model):
with connection.cursor() as cursor:
columns = {
d[0]: (connection.introspection.get_field_type(d[1], d), d)
for d in connection.introspection.get_table_description(
cursor,
model._meta.db_table,
)
}
# SQLite has a different format for field_type
for name, (type, desc) in columns.items():
if isinstance(type, tuple):
columns[name] = (type[0], desc)
return columns
def get_primary_key(self, table):
with connection.cursor() as cursor:
return connection.introspection.get_primary_key_column(cursor, table)
def get_indexes(self, table):
"""
Get the indexes on the table using a new cursor.
"""
with connection.cursor() as cursor:
return [
c["columns"][0]
for c in connection.introspection.get_constraints(
cursor, table
).values()
if c["index"] and len(c["columns"]) == 1
]
def get_uniques(self, table):
with connection.cursor() as cursor:
return [
c["columns"][0]
for c in connection.introspection.get_constraints(
cursor, table
).values()
if c["unique"] and len(c["columns"]) == 1
]
def get_constraints(self, table):
"""
Get the constraints on a table using a new cursor.
"""
with connection.cursor() as cursor:
return connection.introspection.get_constraints(cursor, table)
def get_constraints_for_column(self, model, column_name):
constraints = self.get_constraints(model._meta.db_table)
constraints_for_column = []
for name, details in constraints.items():
if details["columns"] == [column_name]:
constraints_for_column.append(name)
return sorted(constraints_for_column)
def check_added_field_default(
self,
schema_editor,
model,
field,
field_name,
expected_default,
cast_function=None,
):
with connection.cursor() as cursor:
schema_editor.add_field(model, field)
cursor.execute(
"SELECT {} FROM {};".format(field_name, model._meta.db_table)
)
database_default = cursor.fetchall()[0][0]
if cast_function and type(database_default) != type(expected_default):
database_default = cast_function(database_default)
self.assertEqual(database_default, expected_default)
def get_constraints_count(self, table, column, fk_to):
"""
Return a dict with keys 'fks', 'uniques', and 'indexes' indicating the
number of foreign keys, unique constraints, and indexes on
`table`.`column`. The `fk_to` argument is a 2-tuple specifying the
expected foreign key relationship's (table, column).
"""
with connection.cursor() as cursor:
constraints = connection.introspection.get_constraints(cursor, table)
counts = {"fks": 0, "uniques": 0, "indexes": 0}
for c in constraints.values():
if c["columns"] == [column]:
if c["foreign_key"] == fk_to:
counts["fks"] += 1
if c["unique"]:
counts["uniques"] += 1
elif c["index"]:
counts["indexes"] += 1
return counts
def get_column_collation(self, table, column):
with connection.cursor() as cursor:
return next(
f.collation
for f in connection.introspection.get_table_description(cursor, table)
if f.name == column
)
def get_column_comment(self, table, column):
with connection.cursor() as cursor:
return next(
f.comment
for f in connection.introspection.get_table_description(cursor, table)
if f.name == column
)
def get_table_comment(self, table):
with connection.cursor() as cursor:
return next(
t.comment
for t in connection.introspection.get_table_list(cursor)
if t.name == table
)
def assert_column_comment_not_exists(self, table, column):
with connection.cursor() as cursor:
columns = connection.introspection.get_table_description(cursor, table)
self.assertFalse(any(c.name == column and c.comment for c in columns))
def assertIndexOrder(self, table, index, order):
constraints = self.get_constraints(table)
self.assertIn(index, constraints)
index_orders = constraints[index]["orders"]
self.assertTrue(
all(val == expected for val, expected in zip(index_orders, order))
)
def assertForeignKeyExists(self, model, column, expected_fk_table, field="id"):
"""
Fail if the FK constraint on `model.Meta.db_table`.`column` to
`expected_fk_table`.id doesn't exist.
"""
if not connection.features.can_introspect_foreign_keys:
return
constraints = self.get_constraints(model._meta.db_table)
constraint_fk = None
for details in constraints.values():
if details["columns"] == [column] and details["foreign_key"]:
constraint_fk = details["foreign_key"]
break
self.assertEqual(constraint_fk, (expected_fk_table, field))
def assertForeignKeyNotExists(self, model, column, expected_fk_table):
if not connection.features.can_introspect_foreign_keys:
return
with self.assertRaises(AssertionError):
self.assertForeignKeyExists(model, column, expected_fk_table)
# Tests
def test_creation_deletion(self):
"""
Tries creating a model's table, and then deleting it.
"""
with connection.schema_editor() as editor:
# Create the table
editor.create_model(Author)
# The table is there
list(Author.objects.all())
# Clean up that table
editor.delete_model(Author)
# No deferred SQL should be left over.
self.assertEqual(editor.deferred_sql, [])
# The table is gone
with self.assertRaises(DatabaseError):
list(Author.objects.all())
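# editor.deferred_sql holds statements (index creation and the like) that
# the schema editor flushes when its context exits; asserting it is empty
# after delete_model() guards against leftover DDL referencing the dropped
# table.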
@skipUnlessDBFeature("supports_foreign_keys")
def test_fk(self):
"Creating tables out of FK order, then repointing, works"
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Book)
editor.create_model(Author)
editor.create_model(Tag)
# Initial tables are there
list(Author.objects.all())
list(Book.objects.all())
# Make sure the FK constraint is present
with self.assertRaises(IntegrityError):
Book.objects.create(
author_id=1,
title="Much Ado About Foreign Keys",
pub_date=datetime.datetime.now(),
)
# Repoint the FK constraint
old_field = Book._meta.get_field("author")
new_field = ForeignKey(Tag, CASCADE)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(Book, old_field, new_field, strict=True)
self.assertForeignKeyExists(Book, "author_id", "schema_tag")
@skipUnlessDBFeature("can_create_inline_fk")
def test_inline_fk(self):
# Create some tables.
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
editor.create_model(Note)
self.assertForeignKeyNotExists(Note, "book_id", "schema_book")
# Add a foreign key from one to the other.
with connection.schema_editor() as editor:
new_field = ForeignKey(Book, CASCADE)
new_field.set_attributes_from_name("book")
editor.add_field(Note, new_field)
self.assertForeignKeyExists(Note, "book_id", "schema_book")
# Creating a FK field with a constraint uses a single statement without
# a deferred ALTER TABLE.
self.assertFalse(
[
sql
for sql in (str(statement) for statement in editor.deferred_sql)
if sql.startswith("ALTER TABLE") and "ADD CONSTRAINT" in sql
]
)
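# On backends with can_create_inline_fk, the REFERENCES clause is emitted
# inline with the ADD COLUMN statement itself, so no separate
# "ALTER TABLE ... ADD CONSTRAINT" needs to be deferred.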
@skipUnlessDBFeature("can_create_inline_fk")
def test_add_inline_fk_update_data(self):
with connection.schema_editor() as editor:
editor.create_model(Node)
# Add an inline foreign key and update data in the same transaction.
new_field = ForeignKey(Node, CASCADE, related_name="new_fk", null=True)
new_field.set_attributes_from_name("new_parent_fk")
parent = Node.objects.create()
with connection.schema_editor() as editor:
editor.add_field(Node, new_field)
editor.execute("UPDATE schema_node SET new_parent_fk_id = %s;", [parent.pk])
assertIndex = (
self.assertIn
if connection.features.indexes_foreign_keys
else self.assertNotIn
)
assertIndex("new_parent_fk_id", self.get_indexes(Node._meta.db_table))
@skipUnlessDBFeature(
"can_create_inline_fk",
"allows_multiple_constraints_on_same_fields",
)
@isolate_apps("schema")
def test_add_inline_fk_index_update_data(self):
class Node(Model):
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(Node)
# Add an inline foreign key, update data, and add an index, all in
# the same transaction.
new_field = ForeignKey(Node, CASCADE, related_name="new_fk", null=True)
new_field.set_attributes_from_name("new_parent_fk")
parent = Node.objects.create()
with connection.schema_editor() as editor:
editor.add_field(Node, new_field)
Node._meta.add_field(new_field)
editor.execute("UPDATE schema_node SET new_parent_fk_id = %s;", [parent.pk])
editor.add_index(
Node, Index(fields=["new_parent_fk"], name="new_parent_inline_fk_idx")
)
self.assertIn("new_parent_fk_id", self.get_indexes(Node._meta.db_table))
@skipUnlessDBFeature("supports_foreign_keys")
def test_char_field_with_db_index_to_fk(self):
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(AuthorCharFieldWithIndex)
# Change CharField to FK
old_field = AuthorCharFieldWithIndex._meta.get_field("char_field")
new_field = ForeignKey(Author, CASCADE, blank=True)
new_field.set_attributes_from_name("char_field")
with connection.schema_editor() as editor:
editor.alter_field(
AuthorCharFieldWithIndex, old_field, new_field, strict=True
)
self.assertForeignKeyExists(
AuthorCharFieldWithIndex, "char_field_id", "schema_author"
)
@skipUnlessDBFeature("supports_foreign_keys")
@skipUnlessDBFeature("supports_index_on_text_field")
def test_text_field_with_db_index_to_fk(self):
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(AuthorTextFieldWithIndex)
# Change TextField to FK
old_field = AuthorTextFieldWithIndex._meta.get_field("text_field")
new_field = ForeignKey(Author, CASCADE, blank=True)
new_field.set_attributes_from_name("text_field")
with connection.schema_editor() as editor:
editor.alter_field(
AuthorTextFieldWithIndex, old_field, new_field, strict=True
)
self.assertForeignKeyExists(
AuthorTextFieldWithIndex, "text_field_id", "schema_author"
)
@isolate_apps("schema")
def test_char_field_pk_to_auto_field(self):
class Foo(Model):
id = CharField(max_length=255, primary_key=True)
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(Foo)
self.isolated_local_models = [Foo]
old_field = Foo._meta.get_field("id")
new_field = AutoField(primary_key=True)
new_field.set_attributes_from_name("id")
new_field.model = Foo
with connection.schema_editor() as editor:
editor.alter_field(Foo, old_field, new_field, strict=True)
@skipUnlessDBFeature("supports_foreign_keys")
def test_fk_to_proxy(self):
"Creating a FK to a proxy model creates database constraints."
class AuthorProxy(Author):
class Meta:
app_label = "schema"
apps = new_apps
proxy = True
class AuthorRef(Model):
author = ForeignKey(AuthorProxy, on_delete=CASCADE)
class Meta:
app_label = "schema"
apps = new_apps
self.local_models = [AuthorProxy, AuthorRef]
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(AuthorRef)
self.assertForeignKeyExists(AuthorRef, "author_id", "schema_author")
@skipUnlessDBFeature("supports_foreign_keys", "can_introspect_foreign_keys")
def test_fk_db_constraint(self):
"The db_constraint parameter is respected"
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Tag)
editor.create_model(Author)
editor.create_model(BookWeak)
# Initial tables are there
list(Author.objects.all())
list(Tag.objects.all())
list(BookWeak.objects.all())
self.assertForeignKeyNotExists(BookWeak, "author_id", "schema_author")
# Make a db_constraint=False FK
new_field = ForeignKey(Tag, CASCADE, db_constraint=False)
new_field.set_attributes_from_name("tag")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
self.assertForeignKeyNotExists(Author, "tag_id", "schema_tag")
# Alter to one with a constraint
new_field2 = ForeignKey(Tag, CASCADE)
new_field2.set_attributes_from_name("tag")
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, new_field2, strict=True)
self.assertForeignKeyExists(Author, "tag_id", "schema_tag")
# Alter to one without a constraint again
new_field2 = ForeignKey(Tag, CASCADE)
new_field2.set_attributes_from_name("tag")
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field2, new_field, strict=True)
self.assertForeignKeyNotExists(Author, "tag_id", "schema_tag")
@isolate_apps("schema")
def test_no_db_constraint_added_during_primary_key_change(self):
"""
When a primary key that's pointed to by a ForeignKey with
db_constraint=False is altered, a foreign key constraint isn't added.
"""
class Author(Model):
class Meta:
app_label = "schema"
class BookWeak(Model):
author = ForeignKey(Author, CASCADE, db_constraint=False)
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWeak)
self.assertForeignKeyNotExists(BookWeak, "author_id", "schema_author")
old_field = Author._meta.get_field("id")
new_field = BigAutoField(primary_key=True)
new_field.model = Author
new_field.set_attributes_from_name("id")
# @isolate_apps() and inner models are needed to have the model
# relations populated, otherwise this doesn't act as a regression test.
self.assertEqual(len(new_field.model._meta.related_objects), 1)
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
self.assertForeignKeyNotExists(BookWeak, "author_id", "schema_author")
def _test_m2m_db_constraint(self, M2MFieldClass):
class LocalAuthorWithM2M(Model):
name = CharField(max_length=255)
class Meta:
app_label = "schema"
apps = new_apps
self.local_models = [LocalAuthorWithM2M]
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Tag)
editor.create_model(LocalAuthorWithM2M)
# Initial tables are there
list(LocalAuthorWithM2M.objects.all())
list(Tag.objects.all())
# Make a db_constraint=False FK
new_field = M2MFieldClass(Tag, related_name="authors", db_constraint=False)
new_field.contribute_to_class(LocalAuthorWithM2M, "tags")
# Add the field
with connection.schema_editor() as editor:
editor.add_field(LocalAuthorWithM2M, new_field)
self.assertForeignKeyNotExists(
new_field.remote_field.through, "tag_id", "schema_tag"
)
@skipUnlessDBFeature("supports_foreign_keys")
def test_m2m_db_constraint(self):
self._test_m2m_db_constraint(ManyToManyField)
@skipUnlessDBFeature("supports_foreign_keys")
def test_m2m_db_constraint_custom(self):
self._test_m2m_db_constraint(CustomManyToManyField)
@skipUnlessDBFeature("supports_foreign_keys")
def test_m2m_db_constraint_inherited(self):
self._test_m2m_db_constraint(InheritedManyToManyField)
def test_add_field(self):
"""
Tests adding fields to models
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure there's no age field
columns = self.column_classes(Author)
self.assertNotIn("age", columns)
# Add the new field
new_field = IntegerField(null=True)
new_field.set_attributes_from_name("age")
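        # CaptureQueriesContext records every statement executed on the
        # connection so the generated DDL can be inspected afterwards.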
with CaptureQueriesContext(
connection
) as ctx, connection.schema_editor() as editor:
editor.add_field(Author, new_field)
drop_default_sql = editor.sql_alter_column_no_default % {
"column": editor.quote_name(new_field.name),
}
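        # sql_alter_column_no_default is the backend's DROP DEFAULT template;
        # a nullable column should be added without the temporary-default
        # add-then-drop dance.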
self.assertFalse(
any(drop_default_sql in query["sql"] for query in ctx.captured_queries)
)
# Table is not rebuilt.
self.assertIs(
any("CREATE TABLE" in query["sql"] for query in ctx.captured_queries), False
)
self.assertIs(
any("DROP TABLE" in query["sql"] for query in ctx.captured_queries), False
)
columns = self.column_classes(Author)
self.assertEqual(
columns["age"][0],
connection.features.introspected_field_types["IntegerField"],
)
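        # Index 6 of the introspected column description is null_ok, so this
        # asserts the added column is nullable.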
self.assertTrue(columns["age"][1][6])
def test_add_field_remove_field(self):
"""
        Adding a field and removing it removes all deferred SQL referring to it.
"""
with connection.schema_editor() as editor:
# Create a table with a unique constraint on the slug field.
editor.create_model(Tag)
# Remove the slug column.
editor.remove_field(Tag, Tag._meta.get_field("slug"))
self.assertEqual(editor.deferred_sql, [])
def test_add_field_temp_default(self):
"""
Tests adding fields to models with a temporary default
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure there's no age field
columns = self.column_classes(Author)
self.assertNotIn("age", columns)
# Add some rows of data
Author.objects.create(name="Andrew", height=30)
Author.objects.create(name="Andrea")
# Add a not-null field
new_field = CharField(max_length=30, default="Godwin")
new_field.set_attributes_from_name("surname")
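        # The schema editor fills existing rows using the field's default and
        # then drops the database-level default, since Django applies defaults
        # in Python rather than in the schema.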
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
columns = self.column_classes(Author)
self.assertEqual(
columns["surname"][0],
connection.features.introspected_field_types["CharField"],
)
self.assertEqual(
columns["surname"][1][6],
connection.features.interprets_empty_strings_as_nulls,
)
def test_add_field_temp_default_boolean(self):
"""
Tests adding fields to models with a temporary default where
the default is False. (#21783)
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure there's no age field
columns = self.column_classes(Author)
self.assertNotIn("age", columns)
# Add some rows of data
Author.objects.create(name="Andrew", height=30)
Author.objects.create(name="Andrea")
# Add a not-null field
new_field = BooleanField(default=False)
new_field.set_attributes_from_name("awesome")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
columns = self.column_classes(Author)
        # BooleanFields are stored as TINYINT(1) on MySQL.
field_type = columns["awesome"][0]
self.assertEqual(
field_type, connection.features.introspected_field_types["BooleanField"]
)
def test_add_field_default_transform(self):
"""
Tests adding fields to models with a default that is not directly
valid in the database (#22581)
"""
class TestTransformField(IntegerField):
# Weird field that saves the count of items in its value
def get_default(self):
return self.default
def get_prep_value(self, value):
if value is None:
return 0
return len(value)
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Add some rows of data
Author.objects.create(name="Andrew", height=30)
Author.objects.create(name="Andrea")
        # Add the field with a default it needs to transform (to the item
        # count in this case)
new_field = TestTransformField(default={1: 2})
new_field.set_attributes_from_name("thing")
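        # add_field() runs the default through get_prep_value() when computing
        # the column default, so both existing rows end up storing 1.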
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
# Ensure the field is there
columns = self.column_classes(Author)
field_type, field_info = columns["thing"]
self.assertEqual(
field_type, connection.features.introspected_field_types["IntegerField"]
)
# Make sure the values were transformed correctly
self.assertEqual(Author.objects.extra(where=["thing = 1"]).count(), 2)
def test_add_field_o2o_nullable(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Note)
new_field = OneToOneField(Note, CASCADE, null=True)
new_field.set_attributes_from_name("note")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
columns = self.column_classes(Author)
self.assertIn("note_id", columns)
self.assertTrue(columns["note_id"][1][6])
def test_add_field_binary(self):
"""
Tests binary fields get a sane default (#22851)
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Add the new field
new_field = BinaryField(blank=True)
new_field.set_attributes_from_name("bits")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
columns = self.column_classes(Author)
        # MySQL introspection reports BLOB columns as TextField, so the type
        # comes back as one of these two.
self.assertIn(columns["bits"][0], ("BinaryField", "TextField"))
def test_add_field_durationfield_with_default(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
new_field = DurationField(default=datetime.timedelta(minutes=10))
new_field.set_attributes_from_name("duration")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
columns = self.column_classes(Author)
self.assertEqual(
columns["duration"][0],
connection.features.introspected_field_types["DurationField"],
)
@unittest.skipUnless(connection.vendor == "mysql", "MySQL specific")
def test_add_binaryfield_mediumblob(self):
"""
Test adding a custom-sized binary field on MySQL (#24846).
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Add the new field with default
new_field = MediumBlobField(blank=True, default=b"123")
new_field.set_attributes_from_name("bits")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
columns = self.column_classes(Author)
# Introspection treats BLOBs as TextFields
self.assertEqual(columns["bits"][0], "TextField")
@isolate_apps("schema")
def test_add_auto_field(self):
class AddAutoFieldModel(Model):
name = CharField(max_length=255, primary_key=True)
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(AddAutoFieldModel)
self.isolated_local_models = [AddAutoFieldModel]
old_field = AddAutoFieldModel._meta.get_field("name")
new_field = CharField(max_length=255)
new_field.set_attributes_from_name("name")
new_field.model = AddAutoFieldModel
with connection.schema_editor() as editor:
editor.alter_field(AddAutoFieldModel, old_field, new_field)
new_auto_field = AutoField(primary_key=True)
new_auto_field.set_attributes_from_name("id")
        new_auto_field.model = AddAutoFieldModel
with connection.schema_editor() as editor:
editor.add_field(AddAutoFieldModel, new_auto_field)
# Crashes on PostgreSQL when the GENERATED BY suffix is missing.
AddAutoFieldModel.objects.create(name="test")
def test_remove_field(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
with CaptureQueriesContext(connection) as ctx:
editor.remove_field(Author, Author._meta.get_field("name"))
columns = self.column_classes(Author)
self.assertNotIn("name", columns)
if getattr(connection.features, "can_alter_table_drop_column", True):
# Table is not rebuilt.
self.assertIs(
any("CREATE TABLE" in query["sql"] for query in ctx.captured_queries),
False,
)
self.assertIs(
any("DROP TABLE" in query["sql"] for query in ctx.captured_queries),
False,
)
def test_remove_indexed_field(self):
with connection.schema_editor() as editor:
editor.create_model(AuthorCharFieldWithIndex)
with connection.schema_editor() as editor:
editor.remove_field(
AuthorCharFieldWithIndex,
AuthorCharFieldWithIndex._meta.get_field("char_field"),
)
columns = self.column_classes(AuthorCharFieldWithIndex)
self.assertNotIn("char_field", columns)
def test_alter(self):
"""
Tests simple altering of fields
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure the field is right to begin with
columns = self.column_classes(Author)
self.assertEqual(
columns["name"][0],
connection.features.introspected_field_types["CharField"],
)
self.assertEqual(
bool(columns["name"][1][6]),
bool(connection.features.interprets_empty_strings_as_nulls),
)
# Alter the name field to a TextField
old_field = Author._meta.get_field("name")
new_field = TextField(null=True)
new_field.set_attributes_from_name("name")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
columns = self.column_classes(Author)
self.assertEqual(columns["name"][0], "TextField")
self.assertTrue(columns["name"][1][6])
# Change nullability again
new_field2 = TextField(null=False)
new_field2.set_attributes_from_name("name")
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, new_field2, strict=True)
columns = self.column_classes(Author)
self.assertEqual(columns["name"][0], "TextField")
self.assertEqual(
bool(columns["name"][1][6]),
bool(connection.features.interprets_empty_strings_as_nulls),
)
def test_alter_auto_field_to_integer_field(self):
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Change AutoField to IntegerField
old_field = Author._meta.get_field("id")
new_field = IntegerField(primary_key=True)
new_field.set_attributes_from_name("id")
new_field.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
# Now that ID is an IntegerField, the database raises an error if it
# isn't provided.
if not connection.features.supports_unspecified_pk:
with self.assertRaises(DatabaseError):
Author.objects.create()
def test_alter_auto_field_to_char_field(self):
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Change AutoField to CharField
old_field = Author._meta.get_field("id")
new_field = CharField(primary_key=True, max_length=50)
new_field.set_attributes_from_name("id")
new_field.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
@isolate_apps("schema")
def test_alter_auto_field_quoted_db_column(self):
class Foo(Model):
id = AutoField(primary_key=True, db_column='"quoted_id"')
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(Foo)
self.isolated_local_models = [Foo]
old_field = Foo._meta.get_field("id")
new_field = BigAutoField(primary_key=True)
new_field.model = Foo
new_field.db_column = '"quoted_id"'
new_field.set_attributes_from_name("id")
with connection.schema_editor() as editor:
editor.alter_field(Foo, old_field, new_field, strict=True)
Foo.objects.create()
def test_alter_not_unique_field_to_primary_key(self):
# Create the table.
with connection.schema_editor() as editor:
editor.create_model(Author)
# Change UUIDField to primary key.
old_field = Author._meta.get_field("uuid")
new_field = UUIDField(primary_key=True)
new_field.set_attributes_from_name("uuid")
new_field.model = Author
with connection.schema_editor() as editor:
editor.remove_field(Author, Author._meta.get_field("id"))
editor.alter_field(Author, old_field, new_field, strict=True)
# Redundant unique constraint is not added.
count = self.get_constraints_count(
Author._meta.db_table,
Author._meta.get_field("uuid").column,
None,
)
self.assertLessEqual(count["uniques"], 1)
@isolate_apps("schema")
def test_alter_primary_key_quoted_db_table(self):
class Foo(Model):
class Meta:
app_label = "schema"
db_table = '"foo"'
with connection.schema_editor() as editor:
editor.create_model(Foo)
self.isolated_local_models = [Foo]
old_field = Foo._meta.get_field("id")
new_field = BigAutoField(primary_key=True)
new_field.model = Foo
new_field.set_attributes_from_name("id")
with connection.schema_editor() as editor:
editor.alter_field(Foo, old_field, new_field, strict=True)
Foo.objects.create()
def test_alter_text_field(self):
        # Regression for "BLOB/TEXT column 'info' can't have a default value"
        # on MySQL.
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Note)
old_field = Note._meta.get_field("info")
new_field = TextField(blank=True)
new_field.set_attributes_from_name("info")
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
def test_alter_text_field_to_not_null_with_default_value(self):
with connection.schema_editor() as editor:
editor.create_model(Note)
old_field = Note._meta.get_field("address")
new_field = TextField(blank=True, default="", null=False)
new_field.set_attributes_from_name("address")
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
@skipUnlessDBFeature("can_defer_constraint_checks", "can_rollback_ddl")
def test_alter_fk_checks_deferred_constraints(self):
"""
#25492 - Altering a foreign key's structure and data in the same
transaction.
"""
with connection.schema_editor() as editor:
editor.create_model(Node)
old_field = Node._meta.get_field("parent")
new_field = ForeignKey(Node, CASCADE)
new_field.set_attributes_from_name("parent")
parent = Node.objects.create()
with connection.schema_editor() as editor:
# Update the parent FK to create a deferred constraint check.
Node.objects.update(parent=parent)
editor.alter_field(Node, old_field, new_field, strict=True)
@isolate_apps("schema")
def test_alter_null_with_default_value_deferred_constraints(self):
class Publisher(Model):
class Meta:
app_label = "schema"
class Article(Model):
publisher = ForeignKey(Publisher, CASCADE)
title = CharField(max_length=50, null=True)
description = CharField(max_length=100, null=True)
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(Publisher)
editor.create_model(Article)
self.isolated_local_models = [Article, Publisher]
publisher = Publisher.objects.create()
Article.objects.create(publisher=publisher)
old_title = Article._meta.get_field("title")
new_title = CharField(max_length=50, null=False, default="")
new_title.set_attributes_from_name("title")
old_description = Article._meta.get_field("description")
new_description = CharField(max_length=100, null=False, default="")
new_description.set_attributes_from_name("description")
with connection.schema_editor() as editor:
editor.alter_field(Article, old_title, new_title, strict=True)
editor.alter_field(Article, old_description, new_description, strict=True)
def test_alter_text_field_to_date_field(self):
"""
#25002 - Test conversion of text field to date field.
"""
with connection.schema_editor() as editor:
editor.create_model(Note)
Note.objects.create(info="1988-05-05")
old_field = Note._meta.get_field("info")
new_field = DateField(blank=True)
new_field.set_attributes_from_name("info")
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
# Make sure the field isn't nullable
columns = self.column_classes(Note)
self.assertFalse(columns["info"][1][6])
def test_alter_text_field_to_datetime_field(self):
"""
#25002 - Test conversion of text field to datetime field.
"""
with connection.schema_editor() as editor:
editor.create_model(Note)
Note.objects.create(info="1988-05-05 3:16:17.4567")
old_field = Note._meta.get_field("info")
new_field = DateTimeField(blank=True)
new_field.set_attributes_from_name("info")
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
# Make sure the field isn't nullable
columns = self.column_classes(Note)
self.assertFalse(columns["info"][1][6])
def test_alter_text_field_to_time_field(self):
"""
#25002 - Test conversion of text field to time field.
"""
with connection.schema_editor() as editor:
editor.create_model(Note)
Note.objects.create(info="3:16:17.4567")
old_field = Note._meta.get_field("info")
new_field = TimeField(blank=True)
new_field.set_attributes_from_name("info")
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
# Make sure the field isn't nullable
columns = self.column_classes(Note)
self.assertFalse(columns["info"][1][6])
@skipIfDBFeature("interprets_empty_strings_as_nulls")
def test_alter_textual_field_keep_null_status(self):
"""
Changing a field type shouldn't affect the not null status.
"""
with connection.schema_editor() as editor:
editor.create_model(Note)
with self.assertRaises(IntegrityError):
Note.objects.create(info=None)
old_field = Note._meta.get_field("info")
new_field = CharField(max_length=50)
new_field.set_attributes_from_name("info")
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
with self.assertRaises(IntegrityError):
Note.objects.create(info=None)
@skipUnlessDBFeature("interprets_empty_strings_as_nulls")
def test_alter_textual_field_not_null_to_null(self):
"""
Nullability for textual fields is preserved on databases that
interpret empty strings as NULLs.
"""
with connection.schema_editor() as editor:
editor.create_model(Author)
columns = self.column_classes(Author)
# Field is nullable.
self.assertTrue(columns["uuid"][1][6])
# Change to NOT NULL.
old_field = Author._meta.get_field("uuid")
new_field = SlugField(null=False, blank=True)
new_field.set_attributes_from_name("uuid")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
columns = self.column_classes(Author)
# Nullability is preserved.
self.assertTrue(columns["uuid"][1][6])
def test_alter_numeric_field_keep_null_status(self):
"""
Changing a field type shouldn't affect the not null status.
"""
with connection.schema_editor() as editor:
editor.create_model(UniqueTest)
with self.assertRaises(IntegrityError):
UniqueTest.objects.create(year=None, slug="aaa")
old_field = UniqueTest._meta.get_field("year")
new_field = BigIntegerField()
new_field.set_attributes_from_name("year")
with connection.schema_editor() as editor:
editor.alter_field(UniqueTest, old_field, new_field, strict=True)
with self.assertRaises(IntegrityError):
UniqueTest.objects.create(year=None, slug="bbb")
def test_alter_null_to_not_null(self):
"""
#23609 - Tests handling of default values when altering from NULL to NOT NULL.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure the field is right to begin with
columns = self.column_classes(Author)
self.assertTrue(columns["height"][1][6])
# Create some test data
Author.objects.create(name="Not null author", height=12)
Author.objects.create(name="Null author")
# Verify null value
self.assertEqual(Author.objects.get(name="Not null author").height, 12)
self.assertIsNone(Author.objects.get(name="Null author").height)
# Alter the height field to NOT NULL with default
old_field = Author._meta.get_field("height")
new_field = PositiveIntegerField(default=42)
new_field.set_attributes_from_name("height")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
columns = self.column_classes(Author)
self.assertFalse(columns["height"][1][6])
# Verify default value
self.assertEqual(Author.objects.get(name="Not null author").height, 12)
self.assertEqual(Author.objects.get(name="Null author").height, 42)
def test_alter_charfield_to_null(self):
"""
#24307 - Should skip an alter statement on databases with
interprets_empty_strings_as_nulls when changing a CharField to null.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Change the CharField to null
old_field = Author._meta.get_field("name")
new_field = copy(old_field)
new_field.null = True
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
def test_alter_char_field_decrease_length(self):
# Create the table.
with connection.schema_editor() as editor:
editor.create_model(Author)
Author.objects.create(name="x" * 255)
# Change max_length of CharField.
old_field = Author._meta.get_field("name")
new_field = CharField(max_length=254)
new_field.set_attributes_from_name("name")
with connection.schema_editor() as editor:
msg = "value too long for type character varying(254)"
with self.assertRaisesMessage(DataError, msg):
editor.alter_field(Author, old_field, new_field, strict=True)
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
def test_alter_field_with_custom_db_type(self):
from django.contrib.postgres.fields import ArrayField
class Foo(Model):
field = ArrayField(CharField(max_length=255))
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(Foo)
self.isolated_local_models = [Foo]
old_field = Foo._meta.get_field("field")
new_field = ArrayField(CharField(max_length=16))
new_field.set_attributes_from_name("field")
new_field.model = Foo
with connection.schema_editor() as editor:
editor.alter_field(Foo, old_field, new_field, strict=True)
@isolate_apps("schema")
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
def test_alter_array_field_decrease_base_field_length(self):
from django.contrib.postgres.fields import ArrayField
class ArrayModel(Model):
field = ArrayField(CharField(max_length=16))
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(ArrayModel)
self.isolated_local_models = [ArrayModel]
ArrayModel.objects.create(field=["x" * 16])
old_field = ArrayModel._meta.get_field("field")
new_field = ArrayField(CharField(max_length=15))
new_field.set_attributes_from_name("field")
new_field.model = ArrayModel
with connection.schema_editor() as editor:
msg = "value too long for type character varying(15)"
with self.assertRaisesMessage(DataError, msg):
editor.alter_field(ArrayModel, old_field, new_field, strict=True)
@isolate_apps("schema")
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
def test_alter_array_field_decrease_nested_base_field_length(self):
from django.contrib.postgres.fields import ArrayField
class ArrayModel(Model):
field = ArrayField(ArrayField(CharField(max_length=16)))
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(ArrayModel)
self.isolated_local_models = [ArrayModel]
ArrayModel.objects.create(field=[["x" * 16]])
old_field = ArrayModel._meta.get_field("field")
new_field = ArrayField(ArrayField(CharField(max_length=15)))
new_field.set_attributes_from_name("field")
new_field.model = ArrayModel
with connection.schema_editor() as editor:
msg = "value too long for type character varying(15)"
with self.assertRaisesMessage(DataError, msg):
editor.alter_field(ArrayModel, old_field, new_field, strict=True)
def _add_ci_collation(self):
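        # Create a non-deterministic, case-insensitive ICU collation
        # (secondary strength) and schedule it for removal after the test.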
ci_collation = "case_insensitive"
def drop_collation():
with connection.cursor() as cursor:
cursor.execute(f"DROP COLLATION IF EXISTS {ci_collation}")
with connection.cursor() as cursor:
cursor.execute(
f"CREATE COLLATION IF NOT EXISTS {ci_collation} (provider=icu, "
f"locale='und-u-ks-level2', deterministic=false)"
)
self.addCleanup(drop_collation)
return ci_collation
@isolate_apps("schema")
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
@skipUnlessDBFeature(
"supports_collation_on_charfield",
"supports_non_deterministic_collations",
)
def test_db_collation_arrayfield(self):
from django.contrib.postgres.fields import ArrayField
ci_collation = self._add_ci_collation()
cs_collation = "en-x-icu"
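        # "en-x-icu" is a deterministic ICU collation, so comparisons with it
        # are case-sensitive.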
class ArrayModel(Model):
field = ArrayField(CharField(max_length=16, db_collation=ci_collation))
class Meta:
app_label = "schema"
# Create the table.
with connection.schema_editor() as editor:
editor.create_model(ArrayModel)
self.isolated_local_models = [ArrayModel]
self.assertEqual(
self.get_column_collation(ArrayModel._meta.db_table, "field"),
ci_collation,
)
# Alter collation.
old_field = ArrayModel._meta.get_field("field")
new_field_cs = ArrayField(CharField(max_length=16, db_collation=cs_collation))
new_field_cs.set_attributes_from_name("field")
        new_field_cs.model = ArrayModel
with connection.schema_editor() as editor:
editor.alter_field(ArrayModel, old_field, new_field_cs, strict=True)
self.assertEqual(
self.get_column_collation(ArrayModel._meta.db_table, "field"),
cs_collation,
)
@isolate_apps("schema")
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
@skipUnlessDBFeature(
"supports_collation_on_charfield",
"supports_non_deterministic_collations",
)
def test_unique_with_collation_charfield(self):
ci_collation = self._add_ci_collation()
class CiCharModel(Model):
field = CharField(max_length=16, db_collation=ci_collation, unique=True)
class Meta:
app_label = "schema"
# Create the table.
with connection.schema_editor() as editor:
editor.create_model(CiCharModel)
self.isolated_local_models = [CiCharModel]
self.assertEqual(
self.get_column_collation(CiCharModel._meta.db_table, "field"),
ci_collation,
)
self.assertIn("field", self.get_uniques(CiCharModel._meta.db_table))
@isolate_apps("schema")
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
@skipUnlessDBFeature(
"supports_collation_on_charfield",
"supports_non_deterministic_collations",
)
def test_relation_to_collation_charfield(self):
ci_collation = self._add_ci_collation()
class CiCharModel(Model):
field = CharField(max_length=16, db_collation=ci_collation, unique=True)
class Meta:
app_label = "schema"
class RelationModel(Model):
field = OneToOneField(CiCharModel, CASCADE, to_field="field")
class Meta:
app_label = "schema"
# Create the table.
with connection.schema_editor() as editor:
editor.create_model(CiCharModel)
editor.create_model(RelationModel)
self.isolated_local_models = [CiCharModel, RelationModel]
self.assertEqual(
self.get_column_collation(RelationModel._meta.db_table, "field_id"),
ci_collation,
)
self.assertEqual(
self.get_column_collation(CiCharModel._meta.db_table, "field"),
ci_collation,
)
self.assertIn("field_id", self.get_uniques(RelationModel._meta.db_table))
def test_alter_textfield_to_null(self):
"""
#24307 - Should skip an alter statement on databases with
interprets_empty_strings_as_nulls when changing a TextField to null.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Note)
# Change the TextField to null
old_field = Note._meta.get_field("info")
new_field = copy(old_field)
new_field.null = True
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
def test_alter_null_to_not_null_keeping_default(self):
"""
#23738 - Can change a nullable field with default to non-nullable
with the same default.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(AuthorWithDefaultHeight)
# Ensure the field is right to begin with
columns = self.column_classes(AuthorWithDefaultHeight)
self.assertTrue(columns["height"][1][6])
# Alter the height field to NOT NULL keeping the previous default
old_field = AuthorWithDefaultHeight._meta.get_field("height")
new_field = PositiveIntegerField(default=42)
new_field.set_attributes_from_name("height")
with connection.schema_editor() as editor:
editor.alter_field(
AuthorWithDefaultHeight, old_field, new_field, strict=True
)
columns = self.column_classes(AuthorWithDefaultHeight)
self.assertFalse(columns["height"][1][6])
@skipUnlessDBFeature("supports_foreign_keys")
def test_alter_fk(self):
"""
Tests altering of FKs
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
# Ensure the field is right to begin with
columns = self.column_classes(Book)
self.assertEqual(
columns["author_id"][0],
connection.features.introspected_field_types["IntegerField"],
)
self.assertForeignKeyExists(Book, "author_id", "schema_author")
# Alter the FK
old_field = Book._meta.get_field("author")
new_field = ForeignKey(Author, CASCADE, editable=False)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(Book, old_field, new_field, strict=True)
columns = self.column_classes(Book)
self.assertEqual(
columns["author_id"][0],
connection.features.introspected_field_types["IntegerField"],
)
self.assertForeignKeyExists(Book, "author_id", "schema_author")
@skipUnlessDBFeature("supports_foreign_keys")
def test_alter_to_fk(self):
"""
#24447 - Tests adding a FK constraint for an existing column
"""
class LocalBook(Model):
author = IntegerField()
title = CharField(max_length=100, db_index=True)
pub_date = DateTimeField()
class Meta:
app_label = "schema"
apps = new_apps
self.local_models = [LocalBook]
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(LocalBook)
# Ensure no FK constraint exists
constraints = self.get_constraints(LocalBook._meta.db_table)
for details in constraints.values():
if details["foreign_key"]:
self.fail(
"Found an unexpected FK constraint to %s" % details["columns"]
)
old_field = LocalBook._meta.get_field("author")
new_field = ForeignKey(Author, CASCADE)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(LocalBook, old_field, new_field, strict=True)
self.assertForeignKeyExists(LocalBook, "author_id", "schema_author")
@skipUnlessDBFeature("supports_foreign_keys", "can_introspect_foreign_keys")
def test_alter_o2o_to_fk(self):
"""
#24163 - Tests altering of OneToOneField to ForeignKey
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWithO2O)
# Ensure the field is right to begin with
columns = self.column_classes(BookWithO2O)
self.assertEqual(
columns["author_id"][0],
connection.features.introspected_field_types["IntegerField"],
)
# Ensure the field is unique
author = Author.objects.create(name="Joe")
BookWithO2O.objects.create(
author=author, title="Django 1", pub_date=datetime.datetime.now()
)
with self.assertRaises(IntegrityError):
BookWithO2O.objects.create(
author=author, title="Django 2", pub_date=datetime.datetime.now()
)
BookWithO2O.objects.all().delete()
self.assertForeignKeyExists(BookWithO2O, "author_id", "schema_author")
# Alter the OneToOneField to ForeignKey
old_field = BookWithO2O._meta.get_field("author")
new_field = ForeignKey(Author, CASCADE)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(BookWithO2O, old_field, new_field, strict=True)
columns = self.column_classes(Book)
self.assertEqual(
columns["author_id"][0],
connection.features.introspected_field_types["IntegerField"],
)
# Ensure the field is not unique anymore
Book.objects.create(
author=author, title="Django 1", pub_date=datetime.datetime.now()
)
Book.objects.create(
author=author, title="Django 2", pub_date=datetime.datetime.now()
)
self.assertForeignKeyExists(Book, "author_id", "schema_author")
@skipUnlessDBFeature("supports_foreign_keys", "can_introspect_foreign_keys")
def test_alter_fk_to_o2o(self):
"""
#24163 - Tests altering of ForeignKey to OneToOneField
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
# Ensure the field is right to begin with
columns = self.column_classes(Book)
self.assertEqual(
columns["author_id"][0],
connection.features.introspected_field_types["IntegerField"],
)
# Ensure the field is not unique
author = Author.objects.create(name="Joe")
Book.objects.create(
author=author, title="Django 1", pub_date=datetime.datetime.now()
)
Book.objects.create(
author=author, title="Django 2", pub_date=datetime.datetime.now()
)
Book.objects.all().delete()
self.assertForeignKeyExists(Book, "author_id", "schema_author")
# Alter the ForeignKey to OneToOneField
old_field = Book._meta.get_field("author")
new_field = OneToOneField(Author, CASCADE)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(Book, old_field, new_field, strict=True)
columns = self.column_classes(BookWithO2O)
self.assertEqual(
columns["author_id"][0],
connection.features.introspected_field_types["IntegerField"],
)
# Ensure the field is unique now
BookWithO2O.objects.create(
author=author, title="Django 1", pub_date=datetime.datetime.now()
)
with self.assertRaises(IntegrityError):
BookWithO2O.objects.create(
author=author, title="Django 2", pub_date=datetime.datetime.now()
)
self.assertForeignKeyExists(BookWithO2O, "author_id", "schema_author")
def test_alter_field_fk_to_o2o(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
expected_fks = (
1
if connection.features.supports_foreign_keys
and connection.features.can_introspect_foreign_keys
else 0
)
expected_indexes = 1 if connection.features.indexes_foreign_keys else 0
# Check the index is right to begin with.
counts = self.get_constraints_count(
Book._meta.db_table,
Book._meta.get_field("author").column,
(Author._meta.db_table, Author._meta.pk.column),
)
self.assertEqual(
counts,
{"fks": expected_fks, "uniques": 0, "indexes": expected_indexes},
)
old_field = Book._meta.get_field("author")
new_field = OneToOneField(Author, CASCADE)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(Book, old_field, new_field)
counts = self.get_constraints_count(
Book._meta.db_table,
Book._meta.get_field("author").column,
(Author._meta.db_table, Author._meta.pk.column),
)
# The index on ForeignKey is replaced with a unique constraint for
# OneToOneField.
self.assertEqual(counts, {"fks": expected_fks, "uniques": 1, "indexes": 0})
def test_autofield_to_o2o(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Note)
# Rename the field.
old_field = Author._meta.get_field("id")
new_field = AutoField(primary_key=True)
new_field.set_attributes_from_name("note_ptr")
new_field.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
# Alter AutoField to OneToOneField.
new_field_o2o = OneToOneField(Note, CASCADE)
new_field_o2o.set_attributes_from_name("note_ptr")
new_field_o2o.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, new_field_o2o, strict=True)
columns = self.column_classes(Author)
field_type, _ = columns["note_ptr_id"]
self.assertEqual(
field_type, connection.features.introspected_field_types["IntegerField"]
)
def test_alter_field_fk_keeps_index(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
expected_fks = (
1
if connection.features.supports_foreign_keys
and connection.features.can_introspect_foreign_keys
else 0
)
expected_indexes = 1 if connection.features.indexes_foreign_keys else 0
# Check the index is right to begin with.
counts = self.get_constraints_count(
Book._meta.db_table,
Book._meta.get_field("author").column,
(Author._meta.db_table, Author._meta.pk.column),
)
self.assertEqual(
counts,
{"fks": expected_fks, "uniques": 0, "indexes": expected_indexes},
)
old_field = Book._meta.get_field("author")
# on_delete changed from CASCADE.
new_field = ForeignKey(Author, PROTECT)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(Book, old_field, new_field, strict=True)
counts = self.get_constraints_count(
Book._meta.db_table,
Book._meta.get_field("author").column,
(Author._meta.db_table, Author._meta.pk.column),
)
# The index remains.
self.assertEqual(
counts,
{"fks": expected_fks, "uniques": 0, "indexes": expected_indexes},
)
def test_alter_field_o2o_to_fk(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWithO2O)
expected_fks = (
1
if connection.features.supports_foreign_keys
and connection.features.can_introspect_foreign_keys
else 0
)
# Check the unique constraint is right to begin with.
counts = self.get_constraints_count(
BookWithO2O._meta.db_table,
BookWithO2O._meta.get_field("author").column,
(Author._meta.db_table, Author._meta.pk.column),
)
self.assertEqual(counts, {"fks": expected_fks, "uniques": 1, "indexes": 0})
old_field = BookWithO2O._meta.get_field("author")
new_field = ForeignKey(Author, CASCADE)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(BookWithO2O, old_field, new_field)
counts = self.get_constraints_count(
BookWithO2O._meta.db_table,
BookWithO2O._meta.get_field("author").column,
(Author._meta.db_table, Author._meta.pk.column),
)
# The unique constraint on OneToOneField is replaced with an index for
# ForeignKey.
self.assertEqual(counts, {"fks": expected_fks, "uniques": 0, "indexes": 1})
def test_alter_field_o2o_keeps_unique(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWithO2O)
expected_fks = (
1
if connection.features.supports_foreign_keys
and connection.features.can_introspect_foreign_keys
else 0
)
# Check the unique constraint is right to begin with.
counts = self.get_constraints_count(
BookWithO2O._meta.db_table,
BookWithO2O._meta.get_field("author").column,
(Author._meta.db_table, Author._meta.pk.column),
)
self.assertEqual(counts, {"fks": expected_fks, "uniques": 1, "indexes": 0})
old_field = BookWithO2O._meta.get_field("author")
# on_delete changed from CASCADE.
new_field = OneToOneField(Author, PROTECT)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(BookWithO2O, old_field, new_field, strict=True)
counts = self.get_constraints_count(
BookWithO2O._meta.db_table,
BookWithO2O._meta.get_field("author").column,
(Author._meta.db_table, Author._meta.pk.column),
)
# The unique constraint remains.
self.assertEqual(counts, {"fks": expected_fks, "uniques": 1, "indexes": 0})
@skipUnlessDBFeature("ignores_table_name_case")
def test_alter_db_table_case(self):
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Alter the case of the table
old_table_name = Author._meta.db_table
with connection.schema_editor() as editor:
editor.alter_db_table(Author, old_table_name, old_table_name.upper())
def test_alter_implicit_id_to_explicit(self):
"""
Should be able to convert an implicit "id" field to an explicit "id"
primary key field.
"""
with connection.schema_editor() as editor:
editor.create_model(Author)
old_field = Author._meta.get_field("id")
new_field = AutoField(primary_key=True)
new_field.set_attributes_from_name("id")
new_field.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
# This will fail if DROP DEFAULT is inadvertently executed on this
# field which drops the id sequence, at least on PostgreSQL.
Author.objects.create(name="Foo")
Author.objects.create(name="Bar")
def test_alter_autofield_pk_to_bigautofield_pk(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
old_field = Author._meta.get_field("id")
new_field = BigAutoField(primary_key=True)
new_field.set_attributes_from_name("id")
new_field.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
Author.objects.create(name="Foo", pk=1)
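        # Inserting an explicit pk doesn't advance the sequence, so reset it
        # before relying on an implicit pk for the next insert.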
with connection.cursor() as cursor:
sequence_reset_sqls = connection.ops.sequence_reset_sql(
no_style(), [Author]
)
if sequence_reset_sqls:
cursor.execute(sequence_reset_sqls[0])
self.assertIsNotNone(Author.objects.create(name="Bar"))
def test_alter_autofield_pk_to_smallautofield_pk(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
old_field = Author._meta.get_field("id")
new_field = SmallAutoField(primary_key=True)
new_field.set_attributes_from_name("id")
new_field.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
Author.objects.create(name="Foo", pk=1)
with connection.cursor() as cursor:
sequence_reset_sqls = connection.ops.sequence_reset_sql(
no_style(), [Author]
)
if sequence_reset_sqls:
cursor.execute(sequence_reset_sqls[0])
self.assertIsNotNone(Author.objects.create(name="Bar"))
def test_alter_int_pk_to_autofield_pk(self):
"""
        Should be able to alter an IntegerField(primary_key=True) to
        AutoField(primary_key=True).
"""
with connection.schema_editor() as editor:
editor.create_model(IntegerPK)
old_field = IntegerPK._meta.get_field("i")
new_field = AutoField(primary_key=True)
new_field.model = IntegerPK
new_field.set_attributes_from_name("i")
with connection.schema_editor() as editor:
editor.alter_field(IntegerPK, old_field, new_field, strict=True)
# A model representing the updated model.
class IntegerPKToAutoField(Model):
i = AutoField(primary_key=True)
j = IntegerField(unique=True)
class Meta:
app_label = "schema"
apps = new_apps
db_table = IntegerPK._meta.db_table
# An id (i) is generated by the database.
obj = IntegerPKToAutoField.objects.create(j=1)
self.assertIsNotNone(obj.i)
def test_alter_int_pk_to_bigautofield_pk(self):
"""
        Should be able to alter an IntegerField(primary_key=True) to
        BigAutoField(primary_key=True).
"""
with connection.schema_editor() as editor:
editor.create_model(IntegerPK)
old_field = IntegerPK._meta.get_field("i")
new_field = BigAutoField(primary_key=True)
new_field.model = IntegerPK
new_field.set_attributes_from_name("i")
with connection.schema_editor() as editor:
editor.alter_field(IntegerPK, old_field, new_field, strict=True)
# A model representing the updated model.
class IntegerPKToBigAutoField(Model):
i = BigAutoField(primary_key=True)
j = IntegerField(unique=True)
class Meta:
app_label = "schema"
apps = new_apps
db_table = IntegerPK._meta.db_table
# An id (i) is generated by the database.
obj = IntegerPKToBigAutoField.objects.create(j=1)
self.assertIsNotNone(obj.i)
@isolate_apps("schema")
def test_alter_smallint_pk_to_smallautofield_pk(self):
"""
        Should be able to alter a SmallIntegerField(primary_key=True) to
        SmallAutoField(primary_key=True).
"""
class SmallIntegerPK(Model):
i = SmallIntegerField(primary_key=True)
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(SmallIntegerPK)
self.isolated_local_models = [SmallIntegerPK]
old_field = SmallIntegerPK._meta.get_field("i")
new_field = SmallAutoField(primary_key=True)
new_field.model = SmallIntegerPK
new_field.set_attributes_from_name("i")
with connection.schema_editor() as editor:
editor.alter_field(SmallIntegerPK, old_field, new_field, strict=True)
@isolate_apps("schema")
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
def test_alter_serial_auto_field_to_bigautofield(self):
class SerialAutoField(Model):
id = SmallAutoField(primary_key=True)
class Meta:
app_label = "schema"
table = SerialAutoField._meta.db_table
column = SerialAutoField._meta.get_field("id").column
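        # Build the table by hand with a smallserial column, PostgreSQL's
        # pre-identity mechanism for attaching a sequence to a column.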
with connection.cursor() as cursor:
cursor.execute(
f'CREATE TABLE "{table}" '
f'("{column}" smallserial NOT NULL PRIMARY KEY)'
)
try:
old_field = SerialAutoField._meta.get_field("id")
new_field = BigAutoField(primary_key=True)
new_field.model = SerialAutoField
new_field.set_attributes_from_name("id")
with connection.schema_editor() as editor:
editor.alter_field(SerialAutoField, old_field, new_field, strict=True)
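            # PostgreSQL names a serial column's sequence <table>_<column>_seq;
            # altering the column type must also migrate the sequence's data
            # type.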
sequence_name = f"{table}_{column}_seq"
with connection.cursor() as cursor:
cursor.execute(
"SELECT data_type FROM pg_sequences WHERE sequencename = %s",
[sequence_name],
)
row = cursor.fetchone()
sequence_data_type = row[0] if row and row[0] else None
self.assertEqual(sequence_data_type, "bigint")
# Rename the column.
old_field = new_field
new_field = AutoField(primary_key=True)
new_field.model = SerialAutoField
new_field.set_attributes_from_name("renamed_id")
with connection.schema_editor() as editor:
editor.alter_field(SerialAutoField, old_field, new_field, strict=True)
with connection.cursor() as cursor:
cursor.execute(
"SELECT data_type FROM pg_sequences WHERE sequencename = %s",
[sequence_name],
)
row = cursor.fetchone()
sequence_data_type = row[0] if row and row[0] else None
self.assertEqual(sequence_data_type, "integer")
finally:
with connection.cursor() as cursor:
cursor.execute(f'DROP TABLE "{table}"')
def test_alter_int_pk_to_int_unique(self):
"""
        Should be able to alter an IntegerField(primary_key=True) to
        IntegerField(unique=True).
"""
with connection.schema_editor() as editor:
editor.create_model(IntegerPK)
# Delete the old PK
old_field = IntegerPK._meta.get_field("i")
new_field = IntegerField(unique=True)
new_field.model = IntegerPK
new_field.set_attributes_from_name("i")
with connection.schema_editor() as editor:
editor.alter_field(IntegerPK, old_field, new_field, strict=True)
# The primary key constraint is gone. Result depends on database:
# 'id' for SQLite, None for others (must not be 'i').
self.assertIn(self.get_primary_key(IntegerPK._meta.db_table), ("id", None))
# Set up a model class as it currently stands. The original IntegerPK
# class is now out of date and some backends make use of the whole
# model class when modifying a field (such as sqlite3 when remaking a
# table) so an outdated model class leads to incorrect results.
class Transitional(Model):
i = IntegerField(unique=True)
j = IntegerField(unique=True)
class Meta:
app_label = "schema"
apps = new_apps
db_table = "INTEGERPK"
# model requires a new PK
old_field = Transitional._meta.get_field("j")
new_field = IntegerField(primary_key=True)
new_field.model = Transitional
new_field.set_attributes_from_name("j")
with connection.schema_editor() as editor:
editor.alter_field(Transitional, old_field, new_field, strict=True)
# Create a model class representing the updated model.
class IntegerUnique(Model):
i = IntegerField(unique=True)
j = IntegerField(primary_key=True)
class Meta:
app_label = "schema"
apps = new_apps
db_table = "INTEGERPK"
# Ensure unique constraint works.
IntegerUnique.objects.create(i=1, j=1)
with self.assertRaises(IntegrityError):
IntegerUnique.objects.create(i=1, j=2)
def test_rename(self):
"""
        Tests simple renaming of fields
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure the field is right to begin with
columns = self.column_classes(Author)
self.assertEqual(
columns["name"][0],
connection.features.introspected_field_types["CharField"],
)
self.assertNotIn("display_name", columns)
# Alter the name field's name
old_field = Author._meta.get_field("name")
new_field = CharField(max_length=254)
new_field.set_attributes_from_name("display_name")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
columns = self.column_classes(Author)
self.assertEqual(
columns["display_name"][0],
connection.features.introspected_field_types["CharField"],
)
self.assertNotIn("name", columns)
@isolate_apps("schema")
def test_rename_referenced_field(self):
class Author(Model):
name = CharField(max_length=255, unique=True)
class Meta:
app_label = "schema"
class Book(Model):
author = ForeignKey(Author, CASCADE, to_field="name")
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
new_field = CharField(max_length=255, unique=True)
new_field.set_attributes_from_name("renamed")
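        # Renaming a column that other tables reference must run outside a
        # transaction on backends that can't rename references atomically.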
with connection.schema_editor(
atomic=connection.features.supports_atomic_references_rename
) as editor:
editor.alter_field(Author, Author._meta.get_field("name"), new_field)
# Ensure the foreign key reference was updated.
self.assertForeignKeyExists(Book, "author_id", "schema_author", "renamed")
@skipIfDBFeature("interprets_empty_strings_as_nulls")
def test_rename_keep_null_status(self):
"""
Renaming a field shouldn't affect the not null status.
"""
with connection.schema_editor() as editor:
editor.create_model(Note)
with self.assertRaises(IntegrityError):
Note.objects.create(info=None)
old_field = Note._meta.get_field("info")
new_field = TextField()
new_field.set_attributes_from_name("detail_info")
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
columns = self.column_classes(Note)
self.assertEqual(columns["detail_info"][0], "TextField")
self.assertNotIn("info", columns)
with self.assertRaises(IntegrityError):
NoteRename.objects.create(detail_info=None)
@isolate_apps("schema")
def test_rename_keep_db_default(self):
"""Renaming a field shouldn't affect a database default."""
class AuthorDbDefault(Model):
birth_year = IntegerField(db_default=1985)
class Meta:
app_label = "schema"
self.isolated_local_models = [AuthorDbDefault]
with connection.schema_editor() as editor:
editor.create_model(AuthorDbDefault)
columns = self.column_classes(AuthorDbDefault)
self.assertEqual(columns["birth_year"][1].default, "1985")
old_field = AuthorDbDefault._meta.get_field("birth_year")
new_field = IntegerField(db_default=1985)
new_field.set_attributes_from_name("renamed_year")
new_field.model = AuthorDbDefault
with connection.schema_editor(
atomic=connection.features.supports_atomic_references_rename
) as editor:
editor.alter_field(AuthorDbDefault, old_field, new_field, strict=True)
columns = self.column_classes(AuthorDbDefault)
self.assertEqual(columns["renamed_year"][1].default, "1985")
@skipUnlessDBFeature(
"supports_column_check_constraints", "can_introspect_check_constraints"
)
@isolate_apps("schema")
def test_rename_field_with_check_to_truncated_name(self):
class AuthorWithLongColumn(Model):
field_with_very_looooooong_name = PositiveIntegerField(null=True)
class Meta:
app_label = "schema"
self.isolated_local_models = [AuthorWithLongColumn]
with connection.schema_editor() as editor:
editor.create_model(AuthorWithLongColumn)
old_field = AuthorWithLongColumn._meta.get_field(
"field_with_very_looooooong_name"
)
new_field = PositiveIntegerField(null=True)
new_field.set_attributes_from_name("renamed_field_with_very_long_name")
with connection.schema_editor() as editor:
editor.alter_field(AuthorWithLongColumn, old_field, new_field, strict=True)
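        # The new column name may exceed the backend's identifier length, so
        # compare against the truncated name actually used.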
new_field_name = truncate_name(
new_field.column, connection.ops.max_name_length()
)
constraints = self.get_constraints(AuthorWithLongColumn._meta.db_table)
check_constraints = [
name
for name, details in constraints.items()
if details["columns"] == [new_field_name] and details["check"]
]
self.assertEqual(len(check_constraints), 1)
def _test_m2m_create(self, M2MFieldClass):
"""
Tests M2M fields on models during creation
"""
class LocalBookWithM2M(Model):
author = ForeignKey(Author, CASCADE)
title = CharField(max_length=100, db_index=True)
pub_date = DateTimeField()
tags = M2MFieldClass("TagM2MTest", related_name="books")
class Meta:
app_label = "schema"
apps = new_apps
self.local_models = [LocalBookWithM2M]
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(TagM2MTest)
editor.create_model(LocalBookWithM2M)
# Ensure there is now an m2m table there
columns = self.column_classes(
LocalBookWithM2M._meta.get_field("tags").remote_field.through
)
self.assertEqual(
columns["tagm2mtest_id"][0],
connection.features.introspected_field_types["IntegerField"],
)
def test_m2m_create(self):
self._test_m2m_create(ManyToManyField)
def test_m2m_create_custom(self):
self._test_m2m_create(CustomManyToManyField)
def test_m2m_create_inherited(self):
self._test_m2m_create(InheritedManyToManyField)
def _test_m2m_create_through(self, M2MFieldClass):
"""
Tests M2M fields on models during creation with through models
"""
class LocalTagThrough(Model):
book = ForeignKey("schema.LocalBookWithM2MThrough", CASCADE)
tag = ForeignKey("schema.TagM2MTest", CASCADE)
class Meta:
app_label = "schema"
apps = new_apps
class LocalBookWithM2MThrough(Model):
tags = M2MFieldClass(
"TagM2MTest", related_name="books", through=LocalTagThrough
)
class Meta:
app_label = "schema"
apps = new_apps
self.local_models = [LocalTagThrough, LocalBookWithM2MThrough]
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(LocalTagThrough)
editor.create_model(TagM2MTest)
editor.create_model(LocalBookWithM2MThrough)
# Ensure there is now an m2m table there
columns = self.column_classes(LocalTagThrough)
self.assertEqual(
columns["book_id"][0],
connection.features.introspected_field_types["IntegerField"],
)
self.assertEqual(
columns["tag_id"][0],
connection.features.introspected_field_types["IntegerField"],
)
def test_m2m_create_through(self):
self._test_m2m_create_through(ManyToManyField)
def test_m2m_create_through_custom(self):
self._test_m2m_create_through(CustomManyToManyField)
def test_m2m_create_through_inherited(self):
self._test_m2m_create_through(InheritedManyToManyField)
def test_m2m_through_remove(self):
class LocalAuthorNoteThrough(Model):
book = ForeignKey("schema.Author", CASCADE)
tag = ForeignKey("self", CASCADE)
class Meta:
app_label = "schema"
apps = new_apps
class LocalNoteWithM2MThrough(Model):
authors = ManyToManyField("schema.Author", through=LocalAuthorNoteThrough)
class Meta:
app_label = "schema"
apps = new_apps
self.local_models = [LocalAuthorNoteThrough, LocalNoteWithM2MThrough]
# Create the tables.
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(LocalAuthorNoteThrough)
editor.create_model(LocalNoteWithM2MThrough)
# Remove the through parameter.
old_field = LocalNoteWithM2MThrough._meta.get_field("authors")
new_field = ManyToManyField("Author")
new_field.set_attributes_from_name("authors")
msg = (
f"Cannot alter field {old_field} into {new_field} - they are not "
f"compatible types (you cannot alter to or from M2M fields, or add or "
f"remove through= on M2M fields)"
)
with connection.schema_editor() as editor:
with self.assertRaisesMessage(ValueError, msg):
editor.alter_field(LocalNoteWithM2MThrough, old_field, new_field)
def _test_m2m(self, M2MFieldClass):
"""
Tests adding/removing M2M fields on models
"""
class LocalAuthorWithM2M(Model):
name = CharField(max_length=255)
class Meta:
app_label = "schema"
apps = new_apps
self.local_models = [LocalAuthorWithM2M]
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(LocalAuthorWithM2M)
editor.create_model(TagM2MTest)
# Create an M2M field
new_field = M2MFieldClass("schema.TagM2MTest", related_name="authors")
new_field.contribute_to_class(LocalAuthorWithM2M, "tags")
# Ensure there's no m2m table there
with self.assertRaises(DatabaseError):
self.column_classes(new_field.remote_field.through)
# Add the field
with CaptureQueriesContext(
connection
) as ctx, connection.schema_editor() as editor:
editor.add_field(LocalAuthorWithM2M, new_field)
# Table is not rebuilt.
self.assertEqual(
len(
[
query["sql"]
for query in ctx.captured_queries
if "CREATE TABLE" in query["sql"]
]
),
1,
)
self.assertIs(
any("DROP TABLE" in query["sql"] for query in ctx.captured_queries),
False,
)
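# Counting CREATE TABLE and DROP TABLE statements guards against the
# author table being rebuilt: backends such as SQLite emulate many
# ALTERs by copying data into a new table, which would surface here as
# an extra CREATE TABLE plus a DROP TABLE.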
# Ensure there is now an m2m table there
columns = self.column_classes(new_field.remote_field.through)
self.assertEqual(
columns["tagm2mtest_id"][0],
connection.features.introspected_field_types["IntegerField"],
)
# "Alter" the field. This should not rename the DB table to itself.
with connection.schema_editor() as editor:
editor.alter_field(LocalAuthorWithM2M, new_field, new_field, strict=True)
# Remove the M2M table again
with connection.schema_editor() as editor:
editor.remove_field(LocalAuthorWithM2M, new_field)
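# For an M2M field, remove_field() drops the auto-created through
# table rather than removing a column from the author table.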
# Ensure there's no m2m table there
with self.assertRaises(DatabaseError):
self.column_classes(new_field.remote_field.through)
# Make sure the model state is coherent with the table one now that
# we've removed the tags field.
opts = LocalAuthorWithM2M._meta
opts.local_many_to_many.remove(new_field)
del new_apps.all_models["schema"][
new_field.remote_field.through._meta.model_name
]
opts._expire_cache()
def test_m2m(self):
self._test_m2m(ManyToManyField)
def test_m2m_custom(self):
self._test_m2m(CustomManyToManyField)
def test_m2m_inherited(self):
self._test_m2m(InheritedManyToManyField)
def _test_m2m_through_alter(self, M2MFieldClass):
"""
Tests altering M2Ms with explicit through models (should no-op).
"""
class LocalAuthorTag(Model):
author = ForeignKey("schema.LocalAuthorWithM2MThrough", CASCADE)
tag = ForeignKey("schema.TagM2MTest", CASCADE)
class Meta:
app_label = "schema"
apps = new_apps
class LocalAuthorWithM2MThrough(Model):
name = CharField(max_length=255)
tags = M2MFieldClass(
"schema.TagM2MTest", related_name="authors", through=LocalAuthorTag
)
class Meta:
app_label = "schema"
apps = new_apps
self.local_models = [LocalAuthorTag, LocalAuthorWithM2MThrough]
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(LocalAuthorTag)
editor.create_model(LocalAuthorWithM2MThrough)
editor.create_model(TagM2MTest)
# Ensure the m2m table is there
self.assertEqual(len(self.column_classes(LocalAuthorTag)), 3)
# "Alter" the field's blankness. This should not actually do anything.
old_field = LocalAuthorWithM2MThrough._meta.get_field("tags")
new_field = M2MFieldClass(
"schema.TagM2MTest", related_name="authors", through=LocalAuthorTag
)
new_field.contribute_to_class(LocalAuthorWithM2MThrough, "tags")
with connection.schema_editor() as editor:
editor.alter_field(
LocalAuthorWithM2MThrough, old_field, new_field, strict=True
)
# Ensure the m2m table is still there
self.assertEqual(len(self.column_classes(LocalAuthorTag)), 3)
def test_m2m_through_alter(self):
self._test_m2m_through_alter(ManyToManyField)
def test_m2m_through_alter_custom(self):
self._test_m2m_through_alter(CustomManyToManyField)
def test_m2m_through_alter_inherited(self):
self._test_m2m_through_alter(InheritedManyToManyField)
def _test_m2m_repoint(self, M2MFieldClass):
"""
Tests repointing M2M fields.
"""
class LocalBookWithM2M(Model):
author = ForeignKey(Author, CASCADE)
title = CharField(max_length=100, db_index=True)
pub_date = DateTimeField()
tags = M2MFieldClass("TagM2MTest", related_name="books")
class Meta:
app_label = "schema"
apps = new_apps
self.local_models = [LocalBookWithM2M]
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(LocalBookWithM2M)
editor.create_model(TagM2MTest)
editor.create_model(UniqueTest)
# Ensure the M2M exists and points to TagM2MTest
if connection.features.supports_foreign_keys:
self.assertForeignKeyExists(
LocalBookWithM2M._meta.get_field("tags").remote_field.through,
"tagm2mtest_id",
"schema_tagm2mtest",
)
# Repoint the M2M
old_field = LocalBookWithM2M._meta.get_field("tags")
new_field = M2MFieldClass(UniqueTest)
new_field.contribute_to_class(LocalBookWithM2M, "uniques")
with connection.schema_editor() as editor:
editor.alter_field(LocalBookWithM2M, old_field, new_field, strict=True)
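# Repointing drops the old auto-created through table and creates a
# new one whose FK column (uniquetest_id) targets the new model.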
# Ensure old M2M is gone
with self.assertRaises(DatabaseError):
self.column_classes(
LocalBookWithM2M._meta.get_field("tags").remote_field.through
)
# This model looks like the new model and is used for teardown.
opts = LocalBookWithM2M._meta
opts.local_many_to_many.remove(old_field)
# Ensure the new M2M exists and points to UniqueTest
if connection.features.supports_foreign_keys:
self.assertForeignKeyExists(
new_field.remote_field.through, "uniquetest_id", "schema_uniquetest"
)
def test_m2m_repoint(self):
self._test_m2m_repoint(ManyToManyField)
def test_m2m_repoint_custom(self):
self._test_m2m_repoint(CustomManyToManyField)
def test_m2m_repoint_inherited(self):
self._test_m2m_repoint(InheritedManyToManyField)
@isolate_apps("schema")
def test_m2m_rename_field_in_target_model(self):
class LocalTagM2MTest(Model):
title = CharField(max_length=255)
class Meta:
app_label = "schema"
class LocalM2M(Model):
tags = ManyToManyField(LocalTagM2MTest)
class Meta:
app_label = "schema"
# Create the tables.
with connection.schema_editor() as editor:
editor.create_model(LocalM2M)
editor.create_model(LocalTagM2MTest)
self.isolated_local_models = [LocalM2M, LocalTagM2MTest]
# Ensure the LocalM2M table is there.
self.assertEqual(len(self.column_classes(LocalM2M)), 1)
# Alter a field in LocalTagM2MTest.
old_field = LocalTagM2MTest._meta.get_field("title")
new_field = CharField(max_length=254)
new_field.contribute_to_class(LocalTagM2MTest, "title1")
# @isolate_apps() and inner models are needed to have the model
# relations populated, otherwise this doesn't act as a regression test.
self.assertEqual(len(new_field.model._meta.related_objects), 1)
with connection.schema_editor() as editor:
editor.alter_field(LocalTagM2MTest, old_field, new_field, strict=True)
# Ensure the LocalM2M table is still there.
self.assertEqual(len(self.column_classes(LocalM2M)), 1)
@skipUnlessDBFeature(
"supports_column_check_constraints", "can_introspect_check_constraints"
)
def test_check_constraints(self):
"""
Tests creating/deleting CHECK constraints.
"""
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(Author)
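# Author.height is a PositiveIntegerField, so backends supporting
# column check constraints get an implicit CHECK ("height" >= 0).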
# Ensure the constraint exists
constraints = self.get_constraints(Author._meta.db_table)
if not any(
details["columns"] == ["height"] and details["check"]
for details in constraints.values()
):
self.fail("No check constraint for height found")
# Alter the column to remove it
old_field = Author._meta.get_field("height")
new_field = IntegerField(null=True, blank=True)
new_field.set_attributes_from_name("height")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
constraints = self.get_constraints(Author._meta.db_table)
for details in constraints.values():
if details["columns"] == ["height"] and details["check"]:
self.fail("Check constraint for height found")
# Alter the column to re-add it
new_field2 = Author._meta.get_field("height")
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, new_field2, strict=True)
constraints = self.get_constraints(Author._meta.db_table)
if not any(
details["columns"] == ["height"] and details["check"]
for details in constraints.values()
):
self.fail("No check constraint for height found")
@skipUnlessDBFeature(
"supports_column_check_constraints", "can_introspect_check_constraints"
)
@isolate_apps("schema")
def test_check_constraint_timedelta_param(self):
class DurationModel(Model):
duration = DurationField()
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(DurationModel)
self.isolated_local_models = [DurationModel]
constraint_name = "duration_gte_5_minutes"
constraint = CheckConstraint(
check=Q(duration__gt=datetime.timedelta(minutes=5)),
name=constraint_name,
)
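# The timedelta parameter must be compiled into the CHECK clause as a
# duration the backend understands, typically microseconds on backends
# without a native interval type.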
DurationModel._meta.constraints = [constraint]
with connection.schema_editor() as editor:
editor.add_constraint(DurationModel, constraint)
constraints = self.get_constraints(DurationModel._meta.db_table)
self.assertIn(constraint_name, constraints)
with self.assertRaises(IntegrityError), atomic():
DurationModel.objects.create(duration=datetime.timedelta(minutes=4))
DurationModel.objects.create(duration=datetime.timedelta(minutes=10))
@skipUnlessDBFeature(
"supports_column_check_constraints", "can_introspect_check_constraints"
)
def test_remove_field_check_does_not_remove_meta_constraints(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
# Add the custom check constraint
constraint = CheckConstraint(
check=Q(height__gte=0), name="author_height_gte_0_check"
)
custom_constraint_name = constraint.name
Author._meta.constraints = [constraint]
with connection.schema_editor() as editor:
editor.add_constraint(Author, constraint)
# Ensure the constraints exist
constraints = self.get_constraints(Author._meta.db_table)
self.assertIn(custom_constraint_name, constraints)
other_constraints = [
name
for name, details in constraints.items()
if details["columns"] == ["height"]
and details["check"]
and name != custom_constraint_name
]
self.assertEqual(len(other_constraints), 1)
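# The comprehension isolates the implicit column CHECK (managed by
# alter_field) from the named Meta constraint; exactly one match means
# both constraints coexist independently.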
# Alter the column to remove field check
old_field = Author._meta.get_field("height")
new_field = IntegerField(null=True, blank=True)
new_field.set_attributes_from_name("height")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
constraints = self.get_constraints(Author._meta.db_table)
self.assertIn(custom_constraint_name, constraints)
other_constraints = [
name
for name, details in constraints.items()
if details["columns"] == ["height"]
and details["check"]
and name != custom_constraint_name
]
self.assertEqual(len(other_constraints), 0)
# Alter the column to re-add field check
new_field2 = Author._meta.get_field("height")
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, new_field2, strict=True)
constraints = self.get_constraints(Author._meta.db_table)
self.assertIn(custom_constraint_name, constraints)
other_constraints = [
name
for name, details in constraints.items()
if details["columns"] == ["height"]
and details["check"]
and name != custom_constraint_name
]
self.assertEqual(len(other_constraints), 1)
# Drop the check constraint
with connection.schema_editor() as editor:
Author._meta.constraints = []
editor.remove_constraint(Author, constraint)
def test_unique(self):
"""
Tests removing and adding unique constraints to a single column.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Tag)
# Ensure the field is unique to begin with
Tag.objects.create(title="foo", slug="foo")
with self.assertRaises(IntegrityError):
Tag.objects.create(title="bar", slug="foo")
Tag.objects.all().delete()
# Alter the slug field to be non-unique
old_field = Tag._meta.get_field("slug")
new_field = SlugField(unique=False)
new_field.set_attributes_from_name("slug")
with connection.schema_editor() as editor:
editor.alter_field(Tag, old_field, new_field, strict=True)
# Ensure the field is no longer unique
Tag.objects.create(title="foo", slug="foo")
Tag.objects.create(title="bar", slug="foo")
Tag.objects.all().delete()
# Alter the slug field to be unique
new_field2 = SlugField(unique=True)
new_field2.set_attributes_from_name("slug")
with connection.schema_editor() as editor:
editor.alter_field(Tag, new_field, new_field2, strict=True)
# Ensure the field is unique again
Tag.objects.create(title="foo", slug="foo")
with self.assertRaises(IntegrityError):
Tag.objects.create(title="bar", slug="foo")
Tag.objects.all().delete()
# Rename the field
new_field3 = SlugField(unique=True)
new_field3.set_attributes_from_name("slug2")
with connection.schema_editor() as editor:
editor.alter_field(Tag, new_field2, new_field3, strict=True)
# Ensure the field is still unique
TagUniqueRename.objects.create(title="foo", slug2="foo")
with self.assertRaises(IntegrityError):
TagUniqueRename.objects.create(title="bar", slug2="foo")
Tag.objects.all().delete()
def test_unique_name_quoting(self):
old_table_name = TagUniqueRename._meta.db_table
try:
with connection.schema_editor() as editor:
editor.create_model(TagUniqueRename)
editor.alter_db_table(TagUniqueRename, old_table_name, "unique-table")
TagUniqueRename._meta.db_table = "unique-table"
# This fails if the unique index name isn't quoted.
editor.alter_unique_together(TagUniqueRename, [], (("title", "slug2"),))
finally:
with connection.schema_editor() as editor:
editor.delete_model(TagUniqueRename)
TagUniqueRename._meta.db_table = old_table_name
@isolate_apps("schema")
@skipUnlessDBFeature("supports_foreign_keys")
def test_unique_no_unnecessary_fk_drops(self):
"""
If AlterField isn't selective about dropping foreign key constraints
when modifying a field with a unique constraint, the AlterField
incorrectly drops and recreates the Book.author foreign key even though
it doesn't restrict the field being changed (#29193).
"""
class Author(Model):
name = CharField(max_length=254, unique=True)
class Meta:
app_label = "schema"
class Book(Model):
author = ForeignKey(Author, CASCADE)
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
new_field = CharField(max_length=255, unique=True)
new_field.model = Author
new_field.set_attributes_from_name("name")
with self.assertLogs("django.db.backends.schema", "DEBUG") as cm:
with connection.schema_editor() as editor:
editor.alter_field(Author, Author._meta.get_field("name"), new_field)
# One SQL statement is executed to alter the field.
self.assertEqual(len(cm.records), 1)
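# The schema editor logs each statement it executes to the
# "django.db.backends.schema" logger, so one log record means exactly
# one ALTER statement ran.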
@isolate_apps("schema")
def test_unique_and_reverse_m2m(self):
"""
AlterField can modify a unique field when there's a reverse M2M
relation on the model.
"""
class Tag(Model):
title = CharField(max_length=255)
slug = SlugField(unique=True)
class Meta:
app_label = "schema"
class Book(Model):
tags = ManyToManyField(Tag, related_name="books")
class Meta:
app_label = "schema"
self.isolated_local_models = [Book._meta.get_field("tags").remote_field.through]
with connection.schema_editor() as editor:
editor.create_model(Tag)
editor.create_model(Book)
new_field = SlugField(max_length=75, unique=True)
new_field.model = Tag
new_field.set_attributes_from_name("slug")
with self.assertLogs("django.db.backends.schema", "DEBUG") as cm:
with connection.schema_editor() as editor:
editor.alter_field(Tag, Tag._meta.get_field("slug"), new_field)
# One SQL statement is executed to alter the field.
self.assertEqual(len(cm.records), 1)
# Ensure that the field is still unique.
Tag.objects.create(title="foo", slug="foo")
with self.assertRaises(IntegrityError):
Tag.objects.create(title="bar", slug="foo")
def test_remove_ignored_unique_constraint_not_create_fk_index(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
constraint = UniqueConstraint(
"author",
condition=Q(title__in=["tHGttG", "tRatEotU"]),
name="book_author_condition_uniq",
)
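# On backends without partial indexes the conditional constraint is
# never created; removing the "ignored" constraint must not recreate a
# redundant index on the FK column as a side effect.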
# Add unique constraint.
with connection.schema_editor() as editor:
editor.add_constraint(Book, constraint)
old_constraints = self.get_constraints_for_column(
Book,
Book._meta.get_field("author").column,
)
# Remove unique constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(Book, constraint)
new_constraints = self.get_constraints_for_column(
Book,
Book._meta.get_field("author").column,
)
# Redundant foreign key index is not added.
self.assertEqual(
len(old_constraints) - 1
if connection.features.supports_partial_indexes
else len(old_constraints),
len(new_constraints),
)
@skipUnlessDBFeature("allows_multiple_constraints_on_same_fields")
def test_remove_field_unique_does_not_remove_meta_constraints(self):
with connection.schema_editor() as editor:
editor.create_model(AuthorWithUniqueName)
self.local_models = [AuthorWithUniqueName]
# Add the custom unique constraint
constraint = UniqueConstraint(fields=["name"], name="author_name_uniq")
custom_constraint_name = constraint.name
AuthorWithUniqueName._meta.constraints = [constraint]
with connection.schema_editor() as editor:
editor.add_constraint(AuthorWithUniqueName, constraint)
# Ensure the constraints exist
constraints = self.get_constraints(AuthorWithUniqueName._meta.db_table)
self.assertIn(custom_constraint_name, constraints)
other_constraints = [
name
for name, details in constraints.items()
if details["columns"] == ["name"]
and details["unique"]
and name != custom_constraint_name
]
self.assertEqual(len(other_constraints), 1)
# Alter the column to remove field uniqueness
old_field = AuthorWithUniqueName._meta.get_field("name")
new_field = CharField(max_length=255)
new_field.set_attributes_from_name("name")
with connection.schema_editor() as editor:
editor.alter_field(AuthorWithUniqueName, old_field, new_field, strict=True)
constraints = self.get_constraints(AuthorWithUniqueName._meta.db_table)
self.assertIn(custom_constraint_name, constraints)
other_constraints = [
name
for name, details in constraints.items()
if details["columns"] == ["name"]
and details["unique"]
and name != custom_constraint_name
]
self.assertEqual(len(other_constraints), 0)
# Alter the column to re-add field uniqueness
new_field2 = AuthorWithUniqueName._meta.get_field("name")
with connection.schema_editor() as editor:
editor.alter_field(AuthorWithUniqueName, new_field, new_field2, strict=True)
constraints = self.get_constraints(AuthorWithUniqueName._meta.db_table)
self.assertIn(custom_constraint_name, constraints)
other_constraints = [
name
for name, details in constraints.items()
if details["columns"] == ["name"]
and details["unique"]
and name != custom_constraint_name
]
self.assertEqual(len(other_constraints), 1)
# Drop the unique constraint
with connection.schema_editor() as editor:
AuthorWithUniqueName._meta.constraints = []
editor.remove_constraint(AuthorWithUniqueName, constraint)
def test_unique_together(self):
"""
Tests removing and adding unique_together constraints on a model.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(UniqueTest)
# Ensure the fields are unique to begin with
UniqueTest.objects.create(year=2012, slug="foo")
UniqueTest.objects.create(year=2011, slug="foo")
UniqueTest.objects.create(year=2011, slug="bar")
with self.assertRaises(IntegrityError):
UniqueTest.objects.create(year=2012, slug="foo")
UniqueTest.objects.all().delete()
# Alter the model to its non-unique-together companion
with connection.schema_editor() as editor:
editor.alter_unique_together(
UniqueTest, UniqueTest._meta.unique_together, []
)
# Ensure the fields are no longer unique
UniqueTest.objects.create(year=2012, slug="foo")
UniqueTest.objects.create(year=2012, slug="foo")
UniqueTest.objects.all().delete()
# Alter it back
with connection.schema_editor() as editor:
editor.alter_unique_together(
UniqueTest, [], UniqueTest._meta.unique_together
)
# Ensure the fields are unique again
UniqueTest.objects.create(year=2012, slug="foo")
with self.assertRaises(IntegrityError):
UniqueTest.objects.create(year=2012, slug="foo")
UniqueTest.objects.all().delete()
def test_unique_together_with_fk(self):
"""
Tests removing and adding unique_together constraints that include
a foreign key.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
# Ensure the fields are unique to begin with
self.assertEqual(Book._meta.unique_together, ())
# Add the unique_together constraint
with connection.schema_editor() as editor:
editor.alter_unique_together(Book, [], [["author", "title"]])
# Alter it back
with connection.schema_editor() as editor:
editor.alter_unique_together(Book, [["author", "title"]], [])
def test_unique_together_with_fk_with_existing_index(self):
"""
Tests removing and adding unique_together constraints that include
a foreign key, where the foreign key is added after the model is
created.
"""
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWithoutAuthor)
new_field = ForeignKey(Author, CASCADE)
new_field.set_attributes_from_name("author")
editor.add_field(BookWithoutAuthor, new_field)
# Ensure the fields aren't unique to begin with
self.assertEqual(Book._meta.unique_together, ())
# Add the unique_together constraint
with connection.schema_editor() as editor:
editor.alter_unique_together(Book, [], [["author", "title"]])
# Alter it back
with connection.schema_editor() as editor:
editor.alter_unique_together(Book, [["author", "title"]], [])
def _test_composed_index_with_fk(self, index):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
table = Book._meta.db_table
self.assertEqual(Book._meta.indexes, [])
Book._meta.indexes = [index]
with connection.schema_editor() as editor:
editor.add_index(Book, index)
self.assertIn(index.name, self.get_constraints(table))
Book._meta.indexes = []
with connection.schema_editor() as editor:
editor.remove_index(Book, index)
self.assertNotIn(index.name, self.get_constraints(table))
def test_composed_index_with_fk(self):
index = Index(fields=["author", "title"], name="book_author_title_idx")
self._test_composed_index_with_fk(index)
def test_composed_desc_index_with_fk(self):
index = Index(fields=["-author", "title"], name="book_author_title_idx")
self._test_composed_index_with_fk(index)
@skipUnlessDBFeature("supports_expression_indexes")
def test_composed_func_index_with_fk(self):
index = Index(F("author"), F("title"), name="book_author_title_idx")
self._test_composed_index_with_fk(index)
@skipUnlessDBFeature("supports_expression_indexes")
def test_composed_desc_func_index_with_fk(self):
index = Index(F("author").desc(), F("title"), name="book_author_title_idx")
self._test_composed_index_with_fk(index)
@skipUnlessDBFeature("supports_expression_indexes")
def test_composed_func_transform_index_with_fk(self):
index = Index(F("title__lower"), name="book_title_lower_idx")
with register_lookup(CharField, Lower):
self._test_composed_index_with_fk(index)
def _test_composed_constraint_with_fk(self, constraint):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
table = Book._meta.db_table
self.assertEqual(Book._meta.constraints, [])
Book._meta.constraints = [constraint]
with connection.schema_editor() as editor:
editor.add_constraint(Book, constraint)
self.assertIn(constraint.name, self.get_constraints(table))
Book._meta.constraints = []
with connection.schema_editor() as editor:
editor.remove_constraint(Book, constraint)
self.assertNotIn(constraint.name, self.get_constraints(table))
def test_composed_constraint_with_fk(self):
constraint = UniqueConstraint(
fields=["author", "title"],
name="book_author_title_uniq",
)
self._test_composed_constraint_with_fk(constraint)
@skipUnlessDBFeature(
"supports_column_check_constraints", "can_introspect_check_constraints"
)
def test_composed_check_constraint_with_fk(self):
constraint = CheckConstraint(check=Q(author__gt=0), name="book_author_check")
self._test_composed_constraint_with_fk(constraint)
@skipUnlessDBFeature("allows_multiple_constraints_on_same_fields")
def test_remove_unique_together_does_not_remove_meta_constraints(self):
with connection.schema_editor() as editor:
editor.create_model(AuthorWithUniqueNameAndBirthday)
self.local_models = [AuthorWithUniqueNameAndBirthday]
# Add the custom unique constraint
constraint = UniqueConstraint(
fields=["name", "birthday"], name="author_name_birthday_uniq"
)
custom_constraint_name = constraint.name
AuthorWithUniqueNameAndBirthday._meta.constraints = [constraint]
with connection.schema_editor() as editor:
editor.add_constraint(AuthorWithUniqueNameAndBirthday, constraint)
# Ensure the constraints exist
constraints = self.get_constraints(
AuthorWithUniqueNameAndBirthday._meta.db_table
)
self.assertIn(custom_constraint_name, constraints)
other_constraints = [
name
for name, details in constraints.items()
if details["columns"] == ["name", "birthday"]
and details["unique"]
and name != custom_constraint_name
]
self.assertEqual(len(other_constraints), 1)
# Remove unique together
unique_together = AuthorWithUniqueNameAndBirthday._meta.unique_together
with connection.schema_editor() as editor:
editor.alter_unique_together(
AuthorWithUniqueNameAndBirthday, unique_together, []
)
constraints = self.get_constraints(
AuthorWithUniqueNameAndBirthday._meta.db_table
)
self.assertIn(custom_constraint_name, constraints)
other_constraints = [
name
for name, details in constraints.items()
if details["columns"] == ["name", "birthday"]
and details["unique"]
and name != custom_constraint_name
]
self.assertEqual(len(other_constraints), 0)
# Re-add unique together
with connection.schema_editor() as editor:
editor.alter_unique_together(
AuthorWithUniqueNameAndBirthday, [], unique_together
)
constraints = self.get_constraints(
AuthorWithUniqueNameAndBirthday._meta.db_table
)
self.assertIn(custom_constraint_name, constraints)
other_constraints = [
name
for name, details in constraints.items()
if details["columns"] == ["name", "birthday"]
and details["unique"]
and name != custom_constraint_name
]
self.assertEqual(len(other_constraints), 1)
# Drop the unique constraint
with connection.schema_editor() as editor:
AuthorWithUniqueNameAndBirthday._meta.constraints = []
editor.remove_constraint(AuthorWithUniqueNameAndBirthday, constraint)
def test_unique_constraint(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
constraint = UniqueConstraint(fields=["name"], name="name_uq")
# Add constraint.
with connection.schema_editor() as editor:
editor.add_constraint(Author, constraint)
sql = constraint.create_sql(Author, editor)
table = Author._meta.db_table
self.assertIs(sql.references_table(table), True)
self.assertIs(sql.references_column(table, "name"), True)
# Remove constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(Author, constraint)
self.assertNotIn(constraint.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_unique_constraint(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
constraint = UniqueConstraint(Upper("name").desc(), name="func_upper_uq")
# Add constraint.
with connection.schema_editor() as editor:
editor.add_constraint(Author, constraint)
sql = constraint.create_sql(Author, editor)
table = Author._meta.db_table
constraints = self.get_constraints(table)
if connection.features.supports_index_column_ordering:
self.assertIndexOrder(table, constraint.name, ["DESC"])
self.assertIn(constraint.name, constraints)
self.assertIs(constraints[constraint.name]["unique"], True)
# SQL contains a database function.
self.assertIs(sql.references_column(table, "name"), True)
self.assertIn("UPPER(%s)" % editor.quote_name("name"), str(sql))
# Remove constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(Author, constraint)
self.assertNotIn(constraint.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes")
def test_composite_func_unique_constraint(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWithSlug)
constraint = UniqueConstraint(
Upper("title"),
Lower("slug"),
name="func_upper_lower_unq",
)
# Add constraint.
with connection.schema_editor() as editor:
editor.add_constraint(BookWithSlug, constraint)
sql = constraint.create_sql(BookWithSlug, editor)
table = BookWithSlug._meta.db_table
constraints = self.get_constraints(table)
self.assertIn(constraint.name, constraints)
self.assertIs(constraints[constraint.name]["unique"], True)
# SQL contains database functions.
self.assertIs(sql.references_column(table, "title"), True)
self.assertIs(sql.references_column(table, "slug"), True)
sql = str(sql)
self.assertIn("UPPER(%s)" % editor.quote_name("title"), sql)
self.assertIn("LOWER(%s)" % editor.quote_name("slug"), sql)
self.assertLess(sql.index("UPPER"), sql.index("LOWER"))
# Remove constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(BookWithSlug, constraint)
self.assertNotIn(constraint.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes")
def test_unique_constraint_field_and_expression(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
constraint = UniqueConstraint(
F("height").desc(),
"uuid",
Lower("name").asc(),
name="func_f_lower_field_unq",
)
# Add constraint.
with connection.schema_editor() as editor:
editor.add_constraint(Author, constraint)
sql = constraint.create_sql(Author, editor)
table = Author._meta.db_table
if connection.features.supports_index_column_ordering:
self.assertIndexOrder(table, constraint.name, ["DESC", "ASC", "ASC"])
constraints = self.get_constraints(table)
self.assertIs(constraints[constraint.name]["unique"], True)
self.assertEqual(len(constraints[constraint.name]["columns"]), 3)
self.assertEqual(constraints[constraint.name]["columns"][1], "uuid")
# SQL contains database functions and columns.
self.assertIs(sql.references_column(table, "height"), True)
self.assertIs(sql.references_column(table, "name"), True)
self.assertIs(sql.references_column(table, "uuid"), True)
self.assertIn("LOWER(%s)" % editor.quote_name("name"), str(sql))
# Remove constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(Author, constraint)
self.assertNotIn(constraint.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes", "supports_partial_indexes")
def test_func_unique_constraint_partial(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
constraint = UniqueConstraint(
Upper("name"),
name="func_upper_cond_weight_uq",
condition=Q(weight__isnull=False),
)
# Add constraint.
with connection.schema_editor() as editor:
editor.add_constraint(Author, constraint)
sql = constraint.create_sql(Author, editor)
table = Author._meta.db_table
constraints = self.get_constraints(table)
self.assertIn(constraint.name, constraints)
self.assertIs(constraints[constraint.name]["unique"], True)
self.assertIs(sql.references_column(table, "name"), True)
self.assertIn("UPPER(%s)" % editor.quote_name("name"), str(sql))
self.assertIn(
"WHERE %s IS NOT NULL" % editor.quote_name("weight"),
str(sql),
)
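# Roughly, on PostgreSQL:
#   CREATE UNIQUE INDEX "func_upper_cond_weight_uq" ON "schema_author"
#   ((UPPER("name"))) WHERE "weight" IS NOT NULL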
# Remove constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(Author, constraint)
self.assertNotIn(constraint.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes", "supports_covering_indexes")
def test_func_unique_constraint_covering(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
constraint = UniqueConstraint(
Upper("name"),
name="func_upper_covering_uq",
include=["weight", "height"],
)
# Add constraint.
with connection.schema_editor() as editor:
editor.add_constraint(Author, constraint)
sql = constraint.create_sql(Author, editor)
table = Author._meta.db_table
constraints = self.get_constraints(table)
self.assertIn(constraint.name, constraints)
self.assertIs(constraints[constraint.name]["unique"], True)
self.assertEqual(
constraints[constraint.name]["columns"],
[None, "weight", "height"],
)
self.assertIs(sql.references_column(table, "name"), True)
self.assertIs(sql.references_column(table, "weight"), True)
self.assertIs(sql.references_column(table, "height"), True)
self.assertIn("UPPER(%s)" % editor.quote_name("name"), str(sql))
self.assertIn(
"INCLUDE (%s, %s)"
% (
editor.quote_name("weight"),
editor.quote_name("height"),
),
str(sql),
)
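# Roughly, on PostgreSQL:
#   CREATE UNIQUE INDEX "func_upper_covering_uq" ON "schema_author"
#   ((UPPER("name"))) INCLUDE ("weight", "height")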
# Remove constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(Author, constraint)
self.assertNotIn(constraint.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_unique_constraint_lookups(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
with register_lookup(CharField, Lower), register_lookup(IntegerField, Abs):
constraint = UniqueConstraint(
F("name__lower"),
F("weight__abs"),
name="func_lower_abs_lookup_uq",
)
# Add constraint.
with connection.schema_editor() as editor:
editor.add_constraint(Author, constraint)
sql = constraint.create_sql(Author, editor)
table = Author._meta.db_table
constraints = self.get_constraints(table)
self.assertIn(constraint.name, constraints)
self.assertIs(constraints[constraint.name]["unique"], True)
# SQL contains columns.
self.assertIs(sql.references_column(table, "name"), True)
self.assertIs(sql.references_column(table, "weight"), True)
# Remove constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(Author, constraint)
self.assertNotIn(constraint.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_unique_constraint_collate(self):
collation = connection.features.test_collations.get("non_default")
if not collation:
self.skipTest("This backend does not support case-insensitive collations.")
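# test_collations maps labels to backend-specific collation names;
# "non_default" is any available collation other than the database
# default.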
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWithSlug)
constraint = UniqueConstraint(
Collate(F("title"), collation=collation).desc(),
Collate("slug", collation=collation),
name="func_collate_uq",
)
# Add constraint.
with connection.schema_editor() as editor:
editor.add_constraint(BookWithSlug, constraint)
sql = constraint.create_sql(BookWithSlug, editor)
table = BookWithSlug._meta.db_table
constraints = self.get_constraints(table)
self.assertIn(constraint.name, constraints)
self.assertIs(constraints[constraint.name]["unique"], True)
if connection.features.supports_index_column_ordering:
self.assertIndexOrder(table, constraint.name, ["DESC", "ASC"])
# SQL contains columns and a collation.
self.assertIs(sql.references_column(table, "title"), True)
self.assertIs(sql.references_column(table, "slug"), True)
self.assertIn("COLLATE %s" % editor.quote_name(collation), str(sql))
# Remove constraint.
with connection.schema_editor() as editor:
editor.remove_constraint(BookWithSlug, constraint)
self.assertNotIn(constraint.name, self.get_constraints(table))
@skipIfDBFeature("supports_expression_indexes")
def test_func_unique_constraint_unsupported(self):
# UniqueConstraint is ignored on databases that don't support indexes on
# expressions.
with connection.schema_editor() as editor:
editor.create_model(Author)
constraint = UniqueConstraint(F("name"), name="func_name_uq")
with connection.schema_editor() as editor, self.assertNumQueries(0):
self.assertIsNone(editor.add_constraint(Author, constraint))
self.assertIsNone(editor.remove_constraint(Author, constraint))
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_unique_constraint_nonexistent_field(self):
constraint = UniqueConstraint(Lower("nonexistent"), name="func_nonexistent_uq")
msg = (
"Cannot resolve keyword 'nonexistent' into field. Choices are: "
"height, id, name, uuid, weight"
)
with self.assertRaisesMessage(FieldError, msg):
with connection.schema_editor() as editor:
editor.add_constraint(Author, constraint)
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_unique_constraint_nondeterministic(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
constraint = UniqueConstraint(Random(), name="func_random_uq")
with connection.schema_editor() as editor:
with self.assertRaises(DatabaseError):
editor.add_constraint(Author, constraint)
@ignore_warnings(category=RemovedInDjango51Warning)
def test_index_together(self):
"""
Tests removing and adding index_together constraints on a model.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Tag)
# Ensure there's no index on the slug/title columns first
self.assertIs(
any(
c["index"]
for c in self.get_constraints("schema_tag").values()
if c["columns"] == ["slug", "title"]
),
False,
)
# Alter the model to add an index
with connection.schema_editor() as editor:
editor.alter_index_together(Tag, [], [("slug", "title")])
# Ensure there is now an index
self.assertIs(
any(
c["index"]
for c in self.get_constraints("schema_tag").values()
if c["columns"] == ["slug", "title"]
),
True,
)
# Alter it back
with connection.schema_editor() as editor:
editor.alter_index_together(Tag, [("slug", "title")], [])
# Ensure there's no index
self.assertIs(
any(
c["index"]
for c in self.get_constraints("schema_tag").values()
if c["columns"] == ["slug", "title"]
),
False,
)
@ignore_warnings(category=RemovedInDjango51Warning)
def test_index_together_with_fk(self):
"""
Tests removing and adding index_together constraints that include
a foreign key.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
# Ensure the fields aren't indexed together to begin with
self.assertEqual(Book._meta.index_together, ())
# Add the index_together constraint
with connection.schema_editor() as editor:
editor.alter_index_together(Book, [], [["author", "title"]])
# Alter it back
with connection.schema_editor() as editor:
editor.alter_index_together(Book, [["author", "title"]], [])
@ignore_warnings(category=RemovedInDjango51Warning)
@isolate_apps("schema")
def test_create_index_together(self):
"""
Tests creating models with index_together already defined.
"""
class TagIndexed(Model):
title = CharField(max_length=255)
slug = SlugField(unique=True)
class Meta:
app_label = "schema"
index_together = [["slug", "title"]]
# Create the table
with connection.schema_editor() as editor:
editor.create_model(TagIndexed)
self.isolated_local_models = [TagIndexed]
# Ensure there is an index
self.assertIs(
any(
c["index"]
for c in self.get_constraints("schema_tagindexed").values()
if c["columns"] == ["slug", "title"]
),
True,
)
@skipUnlessDBFeature("allows_multiple_constraints_on_same_fields")
@ignore_warnings(category=RemovedInDjango51Warning)
@isolate_apps("schema")
def test_remove_index_together_does_not_remove_meta_indexes(self):
class AuthorWithIndexedNameAndBirthday(Model):
name = CharField(max_length=255)
birthday = DateField()
class Meta:
app_label = "schema"
index_together = [["name", "birthday"]]
with connection.schema_editor() as editor:
editor.create_model(AuthorWithIndexedNameAndBirthday)
self.isolated_local_models = [AuthorWithIndexedNameAndBirthday]
# Add the custom index
index = Index(fields=["name", "birthday"], name="author_name_birthday_idx")
custom_index_name = index.name
AuthorWithIndexedNameAndBirthday._meta.indexes = [index]
with connection.schema_editor() as editor:
editor.add_index(AuthorWithIndexedNameAndBirthday, index)
# Ensure the indexes exist
constraints = self.get_constraints(
AuthorWithIndexedNameAndBirthday._meta.db_table
)
self.assertIn(custom_index_name, constraints)
other_constraints = [
name
for name, details in constraints.items()
if details["columns"] == ["name", "birthday"]
and details["index"]
and name != custom_index_name
]
self.assertEqual(len(other_constraints), 1)
# Remove index together
index_together = AuthorWithIndexedNameAndBirthday._meta.index_together
with connection.schema_editor() as editor:
editor.alter_index_together(
AuthorWithIndexedNameAndBirthday, index_together, []
)
constraints = self.get_constraints(
AuthorWithIndexedNameAndBirthday._meta.db_table
)
self.assertIn(custom_index_name, constraints)
other_constraints = [
name
for name, details in constraints.items()
if details["columns"] == ["name", "birthday"]
and details["index"]
and name != custom_index_name
]
self.assertEqual(len(other_constraints), 0)
# Re-add index together
with connection.schema_editor() as editor:
editor.alter_index_together(
AuthorWithIndexedNameAndBirthday, [], index_together
)
constraints = self.get_constraints(
AuthorWithIndexedNameAndBirthday._meta.db_table
)
self.assertIn(custom_index_name, constraints)
other_constraints = [
name
for name, details in constraints.items()
if details["columns"] == ["name", "birthday"]
and details["index"]
and name != custom_index_name
]
self.assertEqual(len(other_constraints), 1)
# Drop the index
with connection.schema_editor() as editor:
AuthorWithIndexedNameAndBirthday._meta.indexes = []
editor.remove_index(AuthorWithIndexedNameAndBirthday, index)
@isolate_apps("schema")
def test_db_table(self):
"""
Tests renaming of the table.
"""
class Author(Model):
name = CharField(max_length=255)
class Meta:
app_label = "schema"
class Book(Model):
author = ForeignKey(Author, CASCADE)
class Meta:
app_label = "schema"
# Create the table and one referring it.
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
# Ensure the table is there to begin with
columns = self.column_classes(Author)
self.assertEqual(
columns["name"][0],
connection.features.introspected_field_types["CharField"],
)
# Alter the table
with connection.schema_editor(
atomic=connection.features.supports_atomic_references_rename
) as editor:
editor.alter_db_table(Author, "schema_author", "schema_otherauthor")
Author._meta.db_table = "schema_otherauthor"
columns = self.column_classes(Author)
self.assertEqual(
columns["name"][0],
connection.features.introspected_field_types["CharField"],
)
# Ensure the foreign key reference was updated
self.assertForeignKeyExists(Book, "author_id", "schema_otherauthor")
# Alter the table again
with connection.schema_editor(
atomic=connection.features.supports_atomic_references_rename
) as editor:
editor.alter_db_table(Author, "schema_otherauthor", "schema_author")
# Ensure the table is still there
Author._meta.db_table = "schema_author"
columns = self.column_classes(Author)
self.assertEqual(
columns["name"][0],
connection.features.introspected_field_types["CharField"],
)
def test_add_remove_index(self):
"""
Tests index addition and removal.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure the table is there and has no index
self.assertNotIn("title", self.get_indexes(Author._meta.db_table))
# Add the index
index = Index(fields=["name"], name="author_title_idx")
with connection.schema_editor() as editor:
editor.add_index(Author, index)
self.assertIn("name", self.get_indexes(Author._meta.db_table))
# Drop the index
with connection.schema_editor() as editor:
editor.remove_index(Author, index)
self.assertNotIn("name", self.get_indexes(Author._meta.db_table))
def test_remove_db_index_doesnt_remove_custom_indexes(self):
"""
Changing db_index to False doesn't remove indexes from Meta.indexes.
"""
with connection.schema_editor() as editor:
editor.create_model(AuthorWithIndexedName)
self.local_models = [AuthorWithIndexedName]
# Ensure the table has its index
self.assertIn("name", self.get_indexes(AuthorWithIndexedName._meta.db_table))
# Add the custom index
index = Index(fields=["-name"], name="author_name_idx")
author_index_name = index.name
with connection.schema_editor() as editor:
db_index_name = editor._create_index_name(
table_name=AuthorWithIndexedName._meta.db_table,
column_names=("name",),
)
try:
AuthorWithIndexedName._meta.indexes = [index]
with connection.schema_editor() as editor:
editor.add_index(AuthorWithIndexedName, index)
old_constraints = self.get_constraints(AuthorWithIndexedName._meta.db_table)
self.assertIn(author_index_name, old_constraints)
self.assertIn(db_index_name, old_constraints)
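# _create_index_name() is the editor's private naming helper; calling
# it reproduces the deterministic name Django generated for the
# db_index=True index so it can be asserted on directly.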
# Change name field to db_index=False
old_field = AuthorWithIndexedName._meta.get_field("name")
new_field = CharField(max_length=255)
new_field.set_attributes_from_name("name")
with connection.schema_editor() as editor:
editor.alter_field(
AuthorWithIndexedName, old_field, new_field, strict=True
)
new_constraints = self.get_constraints(AuthorWithIndexedName._meta.db_table)
self.assertNotIn(db_index_name, new_constraints)
# The index from Meta.indexes is still in the database.
self.assertIn(author_index_name, new_constraints)
# Drop the index
with connection.schema_editor() as editor:
editor.remove_index(AuthorWithIndexedName, index)
finally:
AuthorWithIndexedName._meta.indexes = []
def test_order_index(self):
"""
Tests indexes with ordering (ASC/DESC) defined on columns.
"""
with connection.schema_editor() as editor:
editor.create_model(Author)
# The table doesn't have an index
self.assertNotIn("title", self.get_indexes(Author._meta.db_table))
index_name = "author_name_idx"
# Add the index
index = Index(fields=["name", "-weight"], name=index_name)
with connection.schema_editor() as editor:
editor.add_index(Author, index)
if connection.features.supports_index_column_ordering:
self.assertIndexOrder(Author._meta.db_table, index_name, ["ASC", "DESC"])
# Drop the index
with connection.schema_editor() as editor:
editor.remove_index(Author, index)
def test_indexes(self):
"""
Tests creation/altering of indexes.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
# Ensure the table is there and has the right index
self.assertIn(
"title",
self.get_indexes(Book._meta.db_table),
)
# Alter to remove the index
old_field = Book._meta.get_field("title")
new_field = CharField(max_length=100, db_index=False)
new_field.set_attributes_from_name("title")
with connection.schema_editor() as editor:
editor.alter_field(Book, old_field, new_field, strict=True)
# Ensure the table is there and has no index
self.assertNotIn(
"title",
self.get_indexes(Book._meta.db_table),
)
# Alter to re-add the index
new_field2 = Book._meta.get_field("title")
with connection.schema_editor() as editor:
editor.alter_field(Book, new_field, new_field2, strict=True)
# Ensure the table is there and has the index again
self.assertIn(
"title",
self.get_indexes(Book._meta.db_table),
)
# Add a unique column and verify that it creates an implicit index
new_field3 = BookWithSlug._meta.get_field("slug")
with connection.schema_editor() as editor:
editor.add_field(Book, new_field3)
self.assertIn(
"slug",
self.get_uniques(Book._meta.db_table),
)
# Remove the unique, check the index goes with it
new_field4 = CharField(max_length=20, unique=False)
new_field4.set_attributes_from_name("slug")
with connection.schema_editor() as editor:
editor.alter_field(BookWithSlug, new_field3, new_field4, strict=True)
self.assertNotIn(
"slug",
self.get_uniques(Book._meta.db_table),
)
def test_text_field_with_db_index(self):
with connection.schema_editor() as editor:
editor.create_model(AuthorTextFieldWithIndex)
# The text_field index is present if the database supports it.
assertion = (
self.assertIn
if connection.features.supports_index_on_text_field
else self.assertNotIn
)
assertion(
"text_field", self.get_indexes(AuthorTextFieldWithIndex._meta.db_table)
)
def _index_expressions_wrappers(self):
index_expression = IndexExpression()
index_expression.set_wrapper_classes(connection)
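# set_wrapper_classes() trims the default wrappers (OrderBy, Collate)
# to those the current backend treats as index-expression wrappers,
# e.g. Collate is dropped where COLLATE compiles as part of the
# expression itself.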
return ", ".join(
[
wrapper_cls.__qualname__
for wrapper_cls in index_expression.wrapper_classes
]
)
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_index_multiple_wrapper_references(self):
index = Index(OrderBy(F("name").desc(), descending=True), name="name")
msg = (
"Multiple references to %s can't be used in an indexed expression."
% self._index_expressions_wrappers()
)
with connection.schema_editor() as editor:
with self.assertRaisesMessage(ValueError, msg):
editor.add_index(Author, index)
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_index_invalid_topmost_expressions(self):
index = Index(Upper(F("name").desc()), name="name")
msg = (
"%s must be topmost expressions in an indexed expression."
% self._index_expressions_wrappers()
)
with connection.schema_editor() as editor:
with self.assertRaisesMessage(ValueError, msg):
editor.add_index(Author, index)
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_index(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
index = Index(Lower("name").desc(), name="func_lower_idx")
# Add index.
with connection.schema_editor() as editor:
editor.add_index(Author, index)
sql = index.create_sql(Author, editor)
table = Author._meta.db_table
if connection.features.supports_index_column_ordering:
self.assertIndexOrder(table, index.name, ["DESC"])
# SQL contains a database function.
self.assertIs(sql.references_column(table, "name"), True)
self.assertIn("LOWER(%s)" % editor.quote_name("name"), str(sql))
# Remove index.
with connection.schema_editor() as editor:
editor.remove_index(Author, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_index_f(self):
with connection.schema_editor() as editor:
editor.create_model(Tag)
index = Index("slug", F("title").desc(), name="func_f_idx")
# Add index.
with connection.schema_editor() as editor:
editor.add_index(Tag, index)
sql = index.create_sql(Tag, editor)
table = Tag._meta.db_table
self.assertIn(index.name, self.get_constraints(table))
if connection.features.supports_index_column_ordering:
self.assertIndexOrder(Tag._meta.db_table, index.name, ["ASC", "DESC"])
# SQL contains columns.
self.assertIs(sql.references_column(table, "slug"), True)
self.assertIs(sql.references_column(table, "title"), True)
# Remove index.
with connection.schema_editor() as editor:
editor.remove_index(Tag, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_index_lookups(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
with register_lookup(CharField, Lower), register_lookup(IntegerField, Abs):
index = Index(
F("name__lower"),
F("weight__abs"),
name="func_lower_abs_lookup_idx",
)
# Add index.
with connection.schema_editor() as editor:
editor.add_index(Author, index)
sql = index.create_sql(Author, editor)
table = Author._meta.db_table
self.assertIn(index.name, self.get_constraints(table))
# SQL contains columns.
self.assertIs(sql.references_column(table, "name"), True)
self.assertIs(sql.references_column(table, "weight"), True)
# Remove index.
with connection.schema_editor() as editor:
editor.remove_index(Author, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes")
def test_composite_func_index(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
index = Index(Lower("name"), Upper("name"), name="func_lower_upper_idx")
# Add index.
with connection.schema_editor() as editor:
editor.add_index(Author, index)
sql = index.create_sql(Author, editor)
table = Author._meta.db_table
self.assertIn(index.name, self.get_constraints(table))
# SQL contains database functions.
self.assertIs(sql.references_column(table, "name"), True)
sql = str(sql)
self.assertIn("LOWER(%s)" % editor.quote_name("name"), sql)
self.assertIn("UPPER(%s)" % editor.quote_name("name"), sql)
self.assertLess(sql.index("LOWER"), sql.index("UPPER"))
# Remove index.
with connection.schema_editor() as editor:
editor.remove_index(Author, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes")
def test_composite_func_index_field_and_expression(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
index = Index(
F("author").desc(),
Lower("title").asc(),
"pub_date",
name="func_f_lower_field_idx",
)
# Add index.
with connection.schema_editor() as editor:
editor.add_index(Book, index)
sql = index.create_sql(Book, editor)
table = Book._meta.db_table
constraints = self.get_constraints(table)
if connection.features.supports_index_column_ordering:
self.assertIndexOrder(table, index.name, ["DESC", "ASC", "ASC"])
self.assertEqual(len(constraints[index.name]["columns"]), 3)
self.assertEqual(constraints[index.name]["columns"][2], "pub_date")
# SQL contains database functions and columns.
self.assertIs(sql.references_column(table, "author_id"), True)
self.assertIs(sql.references_column(table, "title"), True)
self.assertIs(sql.references_column(table, "pub_date"), True)
self.assertIn("LOWER(%s)" % editor.quote_name("title"), str(sql))
# Remove index.
with connection.schema_editor() as editor:
editor.remove_index(Book, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes")
@isolate_apps("schema")
def test_func_index_f_decimalfield(self):
class Node(Model):
value = DecimalField(max_digits=5, decimal_places=2)
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(Node)
index = Index(F("value"), name="func_f_decimalfield_idx")
# Add index.
with connection.schema_editor() as editor:
editor.add_index(Node, index)
sql = index.create_sql(Node, editor)
table = Node._meta.db_table
self.assertIn(index.name, self.get_constraints(table))
self.assertIs(sql.references_column(table, "value"), True)
# SQL doesn't contain casting.
self.assertNotIn("CAST", str(sql))
# Remove index.
with connection.schema_editor() as editor:
editor.remove_index(Node, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_index_cast(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
index = Index(Cast("weight", FloatField()), name="func_cast_idx")
# Add index.
with connection.schema_editor() as editor:
editor.add_index(Author, index)
sql = index.create_sql(Author, editor)
table = Author._meta.db_table
self.assertIn(index.name, self.get_constraints(table))
self.assertIs(sql.references_column(table, "weight"), True)
# Remove index.
with connection.schema_editor() as editor:
editor.remove_index(Author, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_index_collate(self):
collation = connection.features.test_collations.get("non_default")
if not collation:
self.skipTest("This backend does not support case-insensitive collations.")
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWithSlug)
index = Index(
Collate(F("title"), collation=collation).desc(),
Collate("slug", collation=collation),
name="func_collate_idx",
)
# Add index.
with connection.schema_editor() as editor:
editor.add_index(BookWithSlug, index)
sql = index.create_sql(BookWithSlug, editor)
table = Book._meta.db_table
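# BookWithSlug reuses Book's db_table ("schema_book"), so introspecting
# via Book hits the same table.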
self.assertIn(index.name, self.get_constraints(table))
if connection.features.supports_index_column_ordering:
self.assertIndexOrder(table, index.name, ["DESC", "ASC"])
# SQL contains columns and a collation.
self.assertIs(sql.references_column(table, "title"), True)
self.assertIs(sql.references_column(table, "slug"), True)
self.assertIn("COLLATE %s" % editor.quote_name(collation), str(sql))
# Remove index.
with connection.schema_editor() as editor:
editor.remove_index(Book, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes")
@skipIfDBFeature("collate_as_index_expression")
def test_func_index_collate_f_ordered(self):
collation = connection.features.test_collations.get("non_default")
if not collation:
self.skipTest("This backend does not support case-insensitive collations.")
with connection.schema_editor() as editor:
editor.create_model(Author)
index = Index(
Collate(F("name").desc(), collation=collation),
name="func_collate_f_desc_idx",
)
# Add index.
with connection.schema_editor() as editor:
editor.add_index(Author, index)
sql = index.create_sql(Author, editor)
table = Author._meta.db_table
self.assertIn(index.name, self.get_constraints(table))
if connection.features.supports_index_column_ordering:
self.assertIndexOrder(table, index.name, ["DESC"])
# SQL contains columns and a collation.
self.assertIs(sql.references_column(table, "name"), True)
self.assertIn("COLLATE %s" % editor.quote_name(collation), str(sql))
# Remove index.
with connection.schema_editor() as editor:
editor.remove_index(Author, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_index_calc(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
index = Index(F("height") / (F("weight") + Value(5)), name="func_calc_idx")
# Add index.
with connection.schema_editor() as editor:
editor.add_index(Author, index)
sql = index.create_sql(Author, editor)
table = Author._meta.db_table
self.assertIn(index.name, self.get_constraints(table))
# SQL contains columns and expressions.
self.assertIs(sql.references_column(table, "height"), True)
self.assertIs(sql.references_column(table, "weight"), True)
sql = str(sql)
self.assertIs(
sql.index(editor.quote_name("height"))
< sql.index("/")
< sql.index(editor.quote_name("weight"))
< sql.index("+")
< sql.index("5"),
True,
)
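# i.e. the rendered expression is roughly ("height" / ("weight" + 5)),
# which the position checks above verify without pinning exact quoting.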
# Remove index.
with connection.schema_editor() as editor:
editor.remove_index(Author, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes", "supports_json_field")
@isolate_apps("schema")
def test_func_index_json_key_transform(self):
class JSONModel(Model):
field = JSONField()
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(JSONModel)
self.isolated_local_models = [JSONModel]
index = Index("field__some_key", name="func_json_key_idx")
with connection.schema_editor() as editor:
editor.add_index(JSONModel, index)
sql = index.create_sql(JSONModel, editor)
table = JSONModel._meta.db_table
self.assertIn(index.name, self.get_constraints(table))
self.assertIs(sql.references_column(table, "field"), True)
with connection.schema_editor() as editor:
editor.remove_index(JSONModel, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipUnlessDBFeature("supports_expression_indexes", "supports_json_field")
@isolate_apps("schema")
def test_func_index_json_key_transform_cast(self):
class JSONModel(Model):
field = JSONField()
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(JSONModel)
self.isolated_local_models = [JSONModel]
index = Index(
Cast(KeyTextTransform("some_key", "field"), IntegerField()),
name="func_json_key_cast_idx",
)
with connection.schema_editor() as editor:
editor.add_index(JSONModel, index)
sql = index.create_sql(JSONModel, editor)
table = JSONModel._meta.db_table
self.assertIn(index.name, self.get_constraints(table))
self.assertIs(sql.references_column(table, "field"), True)
with connection.schema_editor() as editor:
editor.remove_index(JSONModel, index)
self.assertNotIn(index.name, self.get_constraints(table))
@skipIfDBFeature("supports_expression_indexes")
def test_func_index_unsupported(self):
# Index is ignored on databases that don't support indexes on
# expressions.
with connection.schema_editor() as editor:
editor.create_model(Author)
index = Index(F("name"), name="random_idx")
with connection.schema_editor() as editor, self.assertNumQueries(0):
self.assertIsNone(editor.add_index(Author, index))
self.assertIsNone(editor.remove_index(Author, index))
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_index_nonexistent_field(self):
index = Index(Lower("nonexistent"), name="func_nonexistent_idx")
msg = (
"Cannot resolve keyword 'nonexistent' into field. Choices are: "
"height, id, name, uuid, weight"
)
with self.assertRaisesMessage(FieldError, msg):
with connection.schema_editor() as editor:
editor.add_index(Author, index)
@skipUnlessDBFeature("supports_expression_indexes")
def test_func_index_nondeterministic(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
index = Index(Random(), name="func_random_idx")
with connection.schema_editor() as editor:
with self.assertRaises(DatabaseError):
editor.add_index(Author, index)
def test_primary_key(self):
"""
Tests altering of the primary key
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Tag)
# Ensure the table is there and has the right PK
self.assertEqual(self.get_primary_key(Tag._meta.db_table), "id")
# Alter to change the PK
id_field = Tag._meta.get_field("id")
old_field = Tag._meta.get_field("slug")
new_field = SlugField(primary_key=True)
new_field.set_attributes_from_name("slug")
new_field.model = Tag
with connection.schema_editor() as editor:
editor.remove_field(Tag, id_field)
editor.alter_field(Tag, old_field, new_field)
# Ensure the PK changed
self.assertNotIn(
"id",
self.get_indexes(Tag._meta.db_table),
)
self.assertEqual(self.get_primary_key(Tag._meta.db_table), "slug")
def test_alter_primary_key_the_same_name(self):
with connection.schema_editor() as editor:
editor.create_model(Thing)
old_field = Thing._meta.get_field("when")
new_field = CharField(max_length=2, primary_key=True)
new_field.set_attributes_from_name("when")
new_field.model = Thing
with connection.schema_editor() as editor:
editor.alter_field(Thing, old_field, new_field, strict=True)
self.assertEqual(self.get_primary_key(Thing._meta.db_table), "when")
with connection.schema_editor() as editor:
editor.alter_field(Thing, new_field, old_field, strict=True)
self.assertEqual(self.get_primary_key(Thing._meta.db_table), "when")
def test_context_manager_exit(self):
"""
Ensures transaction is correctly closed when an error occurs
inside a SchemaEditor context.
"""
class SomeError(Exception):
pass
try:
with connection.schema_editor():
raise SomeError
except SomeError:
self.assertFalse(connection.in_atomic_block)
@skipIfDBFeature("can_rollback_ddl")
def test_unsupported_transactional_ddl_disallowed(self):
message = (
"Executing DDL statements while in a transaction on databases "
"that can't perform a rollback is prohibited."
)
with atomic(), connection.schema_editor() as editor:
with self.assertRaisesMessage(TransactionManagementError, message):
editor.execute(
editor.sql_create_table % {"table": "foo", "definition": ""}
)
@skipUnlessDBFeature("supports_foreign_keys", "indexes_foreign_keys")
def test_foreign_key_index_long_names_regression(self):
"""
Regression test for #21497.
Only affects databases that support foreign keys.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(AuthorWithEvenLongerName)
editor.create_model(BookWithLongName)
# Find the properly shortened column name
column_name = connection.ops.quote_name(
"author_foreign_key_with_really_long_field_name_id"
)
column_name = column_name[1:-1].lower() # unquote, and, for Oracle, un-upcase
# Ensure the table is there and has an index on the column
self.assertIn(
column_name,
self.get_indexes(BookWithLongName._meta.db_table),
)
@skipUnlessDBFeature("supports_foreign_keys")
def test_add_foreign_key_long_names(self):
"""
Regression test for #23009.
Only affects databases that support foreign keys.
"""
# Create the initial tables
with connection.schema_editor() as editor:
editor.create_model(AuthorWithEvenLongerName)
editor.create_model(BookWithLongName)
# Add a second FK; this would fail due to a long reference name before the fix
new_field = ForeignKey(
AuthorWithEvenLongerName, CASCADE, related_name="something"
)
new_field.set_attributes_from_name(
"author_other_really_long_named_i_mean_so_long_fk"
)
with connection.schema_editor() as editor:
editor.add_field(BookWithLongName, new_field)
@isolate_apps("schema")
@skipUnlessDBFeature("supports_foreign_keys")
def test_add_foreign_key_quoted_db_table(self):
class Author(Model):
class Meta:
db_table = '"table_author_double_quoted"'
app_label = "schema"
class Book(Model):
author = ForeignKey(Author, CASCADE)
class Meta:
app_label = "schema"
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
self.isolated_local_models = [Author]
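# MySQL quotes identifiers with backticks, so the double quotes are
# treated as part of the table name itself.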
if connection.vendor == "mysql":
self.assertForeignKeyExists(
Book, "author_id", '"table_author_double_quoted"'
)
else:
self.assertForeignKeyExists(Book, "author_id", "table_author_double_quoted")
def test_add_foreign_object(self):
with connection.schema_editor() as editor:
editor.create_model(BookForeignObj)
self.local_models = [BookForeignObj]
new_field = ForeignObject(
Author, on_delete=CASCADE, from_fields=["author_id"], to_fields=["id"]
)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.add_field(BookForeignObj, new_field)
def test_creation_deletion_reserved_names(self):
"""
Tries creating a model's table, and then deleting it when it has a
SQL reserved name.
"""
# Create the table
with connection.schema_editor() as editor:
try:
editor.create_model(Thing)
except OperationalError as e:
self.fail(
"Errors when applying initial migration for a model "
"with a table named after an SQL reserved word: %s" % e
)
# The table is there
list(Thing.objects.all())
# Clean up that table
with connection.schema_editor() as editor:
editor.delete_model(Thing)
# The table is gone
with self.assertRaises(DatabaseError):
list(Thing.objects.all())
def test_remove_constraints_capital_letters(self):
"""
#23065 - Constraint names must be quoted if they contain capital letters.
"""
def get_field(*args, field_class=IntegerField, **kwargs):
kwargs["db_column"] = "CamelCase"
field = field_class(*args, **kwargs)
field.set_attributes_from_name("CamelCase")
return field
model = Author
field = get_field()
table = model._meta.db_table
column = field.column
identifier_converter = connection.introspection.identifier_converter
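# identifier_converter() normalizes names to the form the backend stores
# them in (e.g. Oracle upper-cases unquoted identifiers).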
with connection.schema_editor() as editor:
editor.create_model(model)
editor.add_field(model, field)
constraint_name = "CamelCaseIndex"
expected_constraint_name = identifier_converter(constraint_name)
editor.execute(
editor.sql_create_index
% {
"table": editor.quote_name(table),
"name": editor.quote_name(constraint_name),
"using": "",
"columns": editor.quote_name(column),
"extra": "",
"condition": "",
"include": "",
}
)
self.assertIn(
expected_constraint_name, self.get_constraints(model._meta.db_table)
)
editor.alter_field(model, get_field(db_index=True), field, strict=True)
self.assertNotIn(
expected_constraint_name, self.get_constraints(model._meta.db_table)
)
constraint_name = "CamelCaseUniqConstraint"
expected_constraint_name = identifier_converter(constraint_name)
editor.execute(editor._create_unique_sql(model, [field], constraint_name))
self.assertIn(
expected_constraint_name, self.get_constraints(model._meta.db_table)
)
editor.alter_field(model, get_field(unique=True), field, strict=True)
self.assertNotIn(
expected_constraint_name, self.get_constraints(model._meta.db_table)
)
if editor.sql_create_fk and connection.features.can_introspect_foreign_keys:
constraint_name = "CamelCaseFKConstraint"
expected_constraint_name = identifier_converter(constraint_name)
editor.execute(
editor.sql_create_fk
% {
"table": editor.quote_name(table),
"name": editor.quote_name(constraint_name),
"column": editor.quote_name(column),
"to_table": editor.quote_name(table),
"to_column": editor.quote_name(model._meta.auto_field.column),
"deferrable": connection.ops.deferrable_sql(),
}
)
self.assertIn(
expected_constraint_name, self.get_constraints(model._meta.db_table)
)
editor.alter_field(
model,
get_field(Author, CASCADE, field_class=ForeignKey),
field,
strict=True,
)
self.assertNotIn(
expected_constraint_name, self.get_constraints(model._meta.db_table)
)
def test_add_field_use_effective_default(self):
"""
#23987 - effective_default() should be used as the field default when
adding a new field.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure there's no surname field
columns = self.column_classes(Author)
self.assertNotIn("surname", columns)
# Create a row
Author.objects.create(name="Anonymous1")
# Add new CharField to ensure default will be used from effective_default
new_field = CharField(max_length=15, blank=True)
new_field.set_attributes_from_name("surname")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
# Ensure field was added with the right default
with connection.cursor() as cursor:
cursor.execute("SELECT surname FROM schema_author;")
item = cursor.fetchall()[0]
self.assertEqual(
item[0],
None if connection.features.interprets_empty_strings_as_nulls else "",
)
def test_add_field_default_dropped(self):
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure there's no surname field
columns = self.column_classes(Author)
self.assertNotIn("surname", columns)
# Create a row
Author.objects.create(name="Anonymous1")
# Add new CharField with a default
new_field = CharField(max_length=15, blank=True, default="surname default")
new_field.set_attributes_from_name("surname")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
# Ensure field was added with the right default
with connection.cursor() as cursor:
cursor.execute("SELECT surname FROM schema_author;")
item = cursor.fetchall()[0]
self.assertEqual(item[0], "surname default")
# And that the default is no longer set in the database.
field = next(
f
for f in connection.introspection.get_table_description(
cursor, "schema_author"
)
if f.name == "surname"
)
if connection.features.can_introspect_default:
self.assertIsNone(field.default)
def test_add_field_default_nullable(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
# Add new nullable CharField with a default.
new_field = CharField(max_length=15, blank=True, null=True, default="surname")
new_field.set_attributes_from_name("surname")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
Author.objects.create(name="Anonymous1")
with connection.cursor() as cursor:
cursor.execute("SELECT surname FROM schema_author;")
item = cursor.fetchall()[0]
self.assertIsNone(item[0])
field = next(
f
for f in connection.introspection.get_table_description(
cursor,
"schema_author",
)
if f.name == "surname"
)
# Field is still nullable.
self.assertTrue(field.null_ok)
# The database default is no longer set.
if connection.features.can_introspect_default:
self.assertIn(field.default, ["NULL", None])
def test_add_textfield_default_nullable(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
# Add new nullable TextField with a default.
new_field = TextField(blank=True, null=True, default="text")
new_field.set_attributes_from_name("description")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
Author.objects.create(name="Anonymous1")
with connection.cursor() as cursor:
cursor.execute("SELECT description FROM schema_author;")
item = cursor.fetchall()[0]
self.assertIsNone(item[0])
field = next(
f
for f in connection.introspection.get_table_description(
cursor,
"schema_author",
)
if f.name == "description"
)
# Field is still nullable.
self.assertTrue(field.null_ok)
# The database default is no longer set.
if connection.features.can_introspect_default:
self.assertIn(field.default, ["NULL", None])
def test_alter_field_default_dropped(self):
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Create a row
Author.objects.create(name="Anonymous1")
self.assertIsNone(Author.objects.get().height)
old_field = Author._meta.get_field("height")
# The default from the new field is used in updating existing rows.
new_field = IntegerField(blank=True, default=42)
new_field.set_attributes_from_name("height")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
self.assertEqual(Author.objects.get().height, 42)
# The database default should be removed.
with connection.cursor() as cursor:
field = next(
f
for f in connection.introspection.get_table_description(
cursor, "schema_author"
)
if f.name == "height"
)
if connection.features.can_introspect_default:
self.assertIsNone(field.default)
def test_alter_field_default_doesnt_perform_queries(self):
"""
No queries are performed if a field default changes and the field isn't
changing from null to non-null.
"""
with connection.schema_editor() as editor:
editor.create_model(AuthorWithDefaultHeight)
old_field = AuthorWithDefaultHeight._meta.get_field("height")
new_default = old_field.default * 2
new_field = PositiveIntegerField(null=True, blank=True, default=new_default)
new_field.set_attributes_from_name("height")
with connection.schema_editor() as editor, self.assertNumQueries(0):
editor.alter_field(
AuthorWithDefaultHeight, old_field, new_field, strict=True
)
@skipUnlessDBFeature("supports_foreign_keys")
def test_alter_field_fk_attributes_noop(self):
"""
No queries are performed when changing field attributes that don't
affect the schema.
"""
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
old_field = Book._meta.get_field("author")
new_field = ForeignKey(
Author,
blank=True,
editable=False,
error_messages={"invalid": "error message"},
help_text="help text",
limit_choices_to={"limit": "choice"},
on_delete=PROTECT,
related_name="related_name",
related_query_name="related_query_name",
validators=[lambda x: x],
verbose_name="verbose name",
)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor, self.assertNumQueries(0):
editor.alter_field(Book, old_field, new_field, strict=True)
with connection.schema_editor() as editor, self.assertNumQueries(0):
editor.alter_field(Book, new_field, old_field, strict=True)
def test_alter_field_choices_noop(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
old_field = Author._meta.get_field("name")
new_field = CharField(
choices=(("Jane", "Jane"), ("Joe", "Joe")),
max_length=255,
)
new_field.set_attributes_from_name("name")
with connection.schema_editor() as editor, self.assertNumQueries(0):
editor.alter_field(Author, old_field, new_field, strict=True)
with connection.schema_editor() as editor, self.assertNumQueries(0):
editor.alter_field(Author, new_field, old_field, strict=True)
def test_add_textfield_unhashable_default(self):
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Create a row
Author.objects.create(name="Anonymous1")
# Create a field that has an unhashable default
new_field = TextField(default={})
new_field.set_attributes_from_name("info")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
def test_add_indexed_charfield(self):
field = CharField(max_length=255, db_index=True)
field.set_attributes_from_name("nom_de_plume")
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.add_field(Author, field)
# Should create two indexes; one for the LIKE operator.
self.assertEqual(
self.get_constraints_for_column(Author, "nom_de_plume"),
[
"schema_author_nom_de_plume_7570a851",
"schema_author_nom_de_plume_7570a851_like",
],
)
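# The extra "_like" index uses varchar_pattern_ops so that LIKE-based
# lookups (contains, startswith, ...) can use an index on PostgreSQL.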
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
def test_add_unique_charfield(self):
field = CharField(max_length=255, unique=True)
field.set_attributes_from_name("nom_de_plume")
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.add_field(Author, field)
# Should create two indexes; one for the LIKE operator.
self.assertEqual(
self.get_constraints_for_column(Author, "nom_de_plume"),
[
"schema_author_nom_de_plume_7570a851_like",
"schema_author_nom_de_plume_key",
],
)
@skipUnlessDBFeature("supports_comments")
def test_add_db_comment_charfield(self):
comment = "Custom comment"
field = CharField(max_length=255, db_comment=comment)
field.set_attributes_from_name("name_with_comment")
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.add_field(Author, field)
self.assertEqual(
self.get_column_comment(Author._meta.db_table, "name_with_comment"),
comment,
)
@skipUnlessDBFeature("supports_comments")
def test_add_db_comment_and_default_charfield(self):
comment = "Custom comment with default"
field = CharField(max_length=255, default="Joe Doe", db_comment=comment)
field.set_attributes_from_name("name_with_comment_default")
with connection.schema_editor() as editor:
editor.create_model(Author)
Author.objects.create(name="Before adding a new field")
editor.add_field(Author, field)
self.assertEqual(
self.get_column_comment(Author._meta.db_table, "name_with_comment_default"),
comment,
)
with connection.cursor() as cursor:
cursor.execute(
f"SELECT name_with_comment_default FROM {Author._meta.db_table};"
)
for row in cursor.fetchall():
self.assertEqual(row[0], "Joe Doe")
@skipUnlessDBFeature("supports_comments")
def test_alter_db_comment(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
# Add comment.
old_field = Author._meta.get_field("name")
new_field = CharField(max_length=255, db_comment="Custom comment")
new_field.set_attributes_from_name("name")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
self.assertEqual(
self.get_column_comment(Author._meta.db_table, "name"),
"Custom comment",
)
# Alter comment.
old_field = new_field
new_field = CharField(max_length=255, db_comment="New custom comment")
new_field.set_attributes_from_name("name")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
self.assertEqual(
self.get_column_comment(Author._meta.db_table, "name"),
"New custom comment",
)
# Remove comment.
old_field = new_field
new_field = CharField(max_length=255)
new_field.set_attributes_from_name("name")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
self.assertIn(
self.get_column_comment(Author._meta.db_table, "name"),
[None, ""],
)
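# On backends such as PostgreSQL and Oracle this maps to statements like
# COMMENT ON COLUMN "schema_author"."name" IS 'Custom comment'; MySQL
# instead rebuilds the column definition with a COMMENT clause.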
@skipUnlessDBFeature("supports_comments", "supports_foreign_keys")
def test_alter_db_comment_foreign_key(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
comment = "FK custom comment"
old_field = Book._meta.get_field("author")
new_field = ForeignKey(Author, CASCADE, db_comment=comment)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(Book, old_field, new_field, strict=True)
self.assertEqual(
self.get_column_comment(Book._meta.db_table, "author_id"),
comment,
)
@skipUnlessDBFeature("supports_comments")
def test_alter_field_type_preserve_comment(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
comment = "This is the name."
old_field = Author._meta.get_field("name")
new_field = CharField(max_length=255, db_comment=comment)
new_field.set_attributes_from_name("name")
new_field.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
self.assertEqual(
self.get_column_comment(Author._meta.db_table, "name"),
comment,
)
# Changing a field type should preserve the comment.
old_field = new_field
new_field = CharField(max_length=511, db_comment=comment)
new_field.set_attributes_from_name("name")
new_field.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, old_field, strict=True)
# Comment is preserved.
self.assertEqual(
self.get_column_comment(Author._meta.db_table, "name"),
comment,
)
@isolate_apps("schema")
@skipUnlessDBFeature("supports_comments")
def test_db_comment_table(self):
class ModelWithDbTableComment(Model):
class Meta:
app_label = "schema"
db_table_comment = "Custom table comment"
with connection.schema_editor() as editor:
editor.create_model(ModelWithDbTableComment)
self.isolated_local_models = [ModelWithDbTableComment]
self.assertEqual(
self.get_table_comment(ModelWithDbTableComment._meta.db_table),
"Custom table comment",
)
# Alter table comment.
old_db_table_comment = ModelWithDbTableComment._meta.db_table_comment
with connection.schema_editor() as editor:
editor.alter_db_table_comment(
ModelWithDbTableComment, old_db_table_comment, "New table comment"
)
self.assertEqual(
self.get_table_comment(ModelWithDbTableComment._meta.db_table),
"New table comment",
)
# Remove table comment.
old_db_table_comment = ModelWithDbTableComment._meta.db_table_comment
with connection.schema_editor() as editor:
editor.alter_db_table_comment(
ModelWithDbTableComment, old_db_table_comment, None
)
self.assertIn(
self.get_table_comment(ModelWithDbTableComment._meta.db_table),
[None, ""],
)
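# Passing None clears the comment (e.g. COMMENT ON TABLE ... IS NULL on
# PostgreSQL/Oracle), which is why the assertion accepts None or "".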
@isolate_apps("schema")
@skipUnlessDBFeature("supports_comments", "supports_foreign_keys")
def test_db_comments_from_abstract_model(self):
class AbstractModelWithDbComments(Model):
name = CharField(
max_length=255, db_comment="Custom comment", null=True, blank=True
)
class Meta:
app_label = "schema"
abstract = True
db_table_comment = "Custom table comment"
class ModelWithDbComments(AbstractModelWithDbComments):
pass
with connection.schema_editor() as editor:
editor.create_model(ModelWithDbComments)
self.isolated_local_models = [ModelWithDbComments]
self.assertEqual(
self.get_column_comment(ModelWithDbComments._meta.db_table, "name"),
"Custom comment",
)
self.assertEqual(
self.get_table_comment(ModelWithDbComments._meta.db_table),
"Custom table comment",
)
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
def test_alter_field_add_index_to_charfield(self):
# Create the table and verify no initial indexes.
with connection.schema_editor() as editor:
editor.create_model(Author)
self.assertEqual(self.get_constraints_for_column(Author, "name"), [])
# Alter to add db_index=True and create 2 indexes.
old_field = Author._meta.get_field("name")
new_field = CharField(max_length=255, db_index=True)
new_field.set_attributes_from_name("name")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(Author, "name"),
["schema_author_name_1fbc5617", "schema_author_name_1fbc5617_like"],
)
# Remove db_index=True to drop both indexes.
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, old_field, strict=True)
self.assertEqual(self.get_constraints_for_column(Author, "name"), [])
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
def test_alter_field_add_unique_to_charfield(self):
# Create the table and verify no initial indexes.
with connection.schema_editor() as editor:
editor.create_model(Author)
self.assertEqual(self.get_constraints_for_column(Author, "name"), [])
# Alter to add unique=True and create 2 indexes.
old_field = Author._meta.get_field("name")
new_field = CharField(max_length=255, unique=True)
new_field.set_attributes_from_name("name")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(Author, "name"),
["schema_author_name_1fbc5617_like", "schema_author_name_1fbc5617_uniq"],
)
# Remove unique=True to drop both indexes.
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, old_field, strict=True)
self.assertEqual(self.get_constraints_for_column(Author, "name"), [])
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
def test_alter_field_add_index_to_textfield(self):
# Create the table and verify no initial indexes.
with connection.schema_editor() as editor:
editor.create_model(Note)
self.assertEqual(self.get_constraints_for_column(Note, "info"), [])
# Alter to add db_index=True and create 2 indexes.
old_field = Note._meta.get_field("info")
new_field = TextField(db_index=True)
new_field.set_attributes_from_name("info")
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(Note, "info"),
["schema_note_info_4b0ea695", "schema_note_info_4b0ea695_like"],
)
# Remove db_index=True to drop both indexes.
with connection.schema_editor() as editor:
editor.alter_field(Note, new_field, old_field, strict=True)
self.assertEqual(self.get_constraints_for_column(Note, "info"), [])
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
def test_alter_field_add_unique_to_charfield_with_db_index(self):
# Create the table and verify initial indexes.
with connection.schema_editor() as editor:
editor.create_model(BookWithoutAuthor)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, "title"),
["schema_book_title_2dfb2dff", "schema_book_title_2dfb2dff_like"],
)
# Alter to add unique=True (should replace the index)
old_field = BookWithoutAuthor._meta.get_field("title")
new_field = CharField(max_length=100, db_index=True, unique=True)
new_field.set_attributes_from_name("title")
with connection.schema_editor() as editor:
editor.alter_field(BookWithoutAuthor, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, "title"),
["schema_book_title_2dfb2dff_like", "schema_book_title_2dfb2dff_uniq"],
)
# Alter to remove unique=True (should drop unique index)
new_field2 = CharField(max_length=100, db_index=True)
new_field2.set_attributes_from_name("title")
with connection.schema_editor() as editor:
editor.alter_field(BookWithoutAuthor, new_field, new_field2, strict=True)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, "title"),
["schema_book_title_2dfb2dff", "schema_book_title_2dfb2dff_like"],
)
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
def test_alter_field_remove_unique_and_db_index_from_charfield(self):
# Create the table and verify initial indexes.
with connection.schema_editor() as editor:
editor.create_model(BookWithoutAuthor)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, "title"),
["schema_book_title_2dfb2dff", "schema_book_title_2dfb2dff_like"],
)
# Alter to add unique=True (should replace the index)
old_field = BookWithoutAuthor._meta.get_field("title")
new_field = CharField(max_length=100, db_index=True, unique=True)
new_field.set_attributes_from_name("title")
with connection.schema_editor() as editor:
editor.alter_field(BookWithoutAuthor, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, "title"),
["schema_book_title_2dfb2dff_like", "schema_book_title_2dfb2dff_uniq"],
)
# Alter to remove both unique=True and db_index=True (should drop all indexes)
new_field2 = CharField(max_length=100)
new_field2.set_attributes_from_name("title")
with connection.schema_editor() as editor:
editor.alter_field(BookWithoutAuthor, new_field, new_field2, strict=True)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, "title"), []
)
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
def test_alter_field_swap_unique_and_db_index_with_charfield(self):
# Create the table and verify initial indexes.
with connection.schema_editor() as editor:
editor.create_model(BookWithoutAuthor)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, "title"),
["schema_book_title_2dfb2dff", "schema_book_title_2dfb2dff_like"],
)
# Alter to set unique=True and remove db_index=True (should replace the index)
old_field = BookWithoutAuthor._meta.get_field("title")
new_field = CharField(max_length=100, unique=True)
new_field.set_attributes_from_name("title")
with connection.schema_editor() as editor:
editor.alter_field(BookWithoutAuthor, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, "title"),
["schema_book_title_2dfb2dff_like", "schema_book_title_2dfb2dff_uniq"],
)
# Alter to set db_index=True and remove unique=True (should restore index)
new_field2 = CharField(max_length=100, db_index=True)
new_field2.set_attributes_from_name("title")
with connection.schema_editor() as editor:
editor.alter_field(BookWithoutAuthor, new_field, new_field2, strict=True)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, "title"),
["schema_book_title_2dfb2dff", "schema_book_title_2dfb2dff_like"],
)
@unittest.skipUnless(connection.vendor == "postgresql", "PostgreSQL specific")
def test_alter_field_add_db_index_to_charfield_with_unique(self):
# Create the table and verify initial indexes.
with connection.schema_editor() as editor:
editor.create_model(Tag)
self.assertEqual(
self.get_constraints_for_column(Tag, "slug"),
["schema_tag_slug_2c418ba3_like", "schema_tag_slug_key"],
)
# Alter to add db_index=True
old_field = Tag._meta.get_field("slug")
new_field = SlugField(db_index=True, unique=True)
new_field.set_attributes_from_name("slug")
with connection.schema_editor() as editor:
editor.alter_field(Tag, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(Tag, "slug"),
["schema_tag_slug_2c418ba3_like", "schema_tag_slug_key"],
)
# Alter to remove db_index=True
new_field2 = SlugField(unique=True)
new_field2.set_attributes_from_name("slug")
with connection.schema_editor() as editor:
editor.alter_field(Tag, new_field, new_field2, strict=True)
self.assertEqual(
self.get_constraints_for_column(Tag, "slug"),
["schema_tag_slug_2c418ba3_like", "schema_tag_slug_key"],
)
def test_alter_field_add_index_to_integerfield(self):
# Create the table and verify no initial indexes.
with connection.schema_editor() as editor:
editor.create_model(Author)
self.assertEqual(self.get_constraints_for_column(Author, "weight"), [])
# Alter to add db_index=True and create index.
old_field = Author._meta.get_field("weight")
new_field = IntegerField(null=True, db_index=True)
new_field.set_attributes_from_name("weight")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(Author, "weight"),
["schema_author_weight_587740f9"],
)
# Remove db_index=True to drop index.
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, old_field, strict=True)
self.assertEqual(self.get_constraints_for_column(Author, "weight"), [])
def test_alter_pk_with_self_referential_field(self):
"""
Changing the primary key field name of a model with a self-referential
foreign key (#26384).
"""
with connection.schema_editor() as editor:
editor.create_model(Node)
old_field = Node._meta.get_field("node_id")
new_field = AutoField(primary_key=True)
new_field.set_attributes_from_name("id")
with connection.schema_editor() as editor:
editor.alter_field(Node, old_field, new_field, strict=True)
self.assertForeignKeyExists(Node, "parent_id", Node._meta.db_table)
@mock.patch("django.db.backends.base.schema.datetime")
@mock.patch("django.db.backends.base.schema.timezone")
def test_add_datefield_and_datetimefield_use_effective_default(
self, mocked_datetime, mocked_tz
):
"""
effective_default() should be used for DateField, DateTimeField, and
TimeField if auto_now or auto_now_add is set (#25005).
"""
now = datetime.datetime(month=1, day=1, year=2000, hour=1, minute=1)
now_tz = datetime.datetime(
month=1, day=1, year=2000, hour=1, minute=1, tzinfo=datetime.timezone.utc
)
mocked_datetime.now = mock.MagicMock(return_value=now)
mocked_tz.now = mock.MagicMock(return_value=now_tz)
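# Note: stacked @mock.patch decorators pass mocks bottom-up, so
# `mocked_datetime` receives the timezone patch and `mocked_tz` the
# datetime patch; the return values are chosen so that all of the derived
# date, time, and datetime defaults below agree with `now`.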
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Check the auto_now/auto_now_add columns are not yet present
columns = self.column_classes(Author)
self.assertNotIn("dob_auto_now", columns)
self.assertNotIn("dob_auto_now_add", columns)
self.assertNotIn("dtob_auto_now", columns)
self.assertNotIn("dtob_auto_now_add", columns)
self.assertNotIn("tob_auto_now", columns)
self.assertNotIn("tob_auto_now_add", columns)
# Create a row
Author.objects.create(name="Anonymous1")
# Ensure fields were added with the correct defaults
dob_auto_now = DateField(auto_now=True)
dob_auto_now.set_attributes_from_name("dob_auto_now")
self.check_added_field_default(
editor,
Author,
dob_auto_now,
"dob_auto_now",
now.date(),
cast_function=lambda x: x.date(),
)
dob_auto_now_add = DateField(auto_now_add=True)
dob_auto_now_add.set_attributes_from_name("dob_auto_now_add")
self.check_added_field_default(
editor,
Author,
dob_auto_now_add,
"dob_auto_now_add",
now.date(),
cast_function=lambda x: x.date(),
)
dtob_auto_now = DateTimeField(auto_now=True)
dtob_auto_now.set_attributes_from_name("dtob_auto_now")
self.check_added_field_default(
editor,
Author,
dtob_auto_now,
"dtob_auto_now",
now,
)
dt_tm_of_birth_auto_now_add = DateTimeField(auto_now_add=True)
dt_tm_of_birth_auto_now_add.set_attributes_from_name("dtob_auto_now_add")
self.check_added_field_default(
editor,
Author,
dt_tm_of_birth_auto_now_add,
"dtob_auto_now_add",
now,
)
tob_auto_now = TimeField(auto_now=True)
tob_auto_now.set_attributes_from_name("tob_auto_now")
self.check_added_field_default(
editor,
Author,
tob_auto_now,
"tob_auto_now",
now.time(),
cast_function=lambda x: x.time(),
)
tob_auto_now_add = TimeField(auto_now_add=True)
tob_auto_now_add.set_attributes_from_name("tob_auto_now_add")
self.check_added_field_default(
editor,
Author,
tob_auto_now_add,
"tob_auto_now_add",
now.time(),
cast_function=lambda x: x.time(),
)
def test_namespaced_db_table_create_index_name(self):
"""
Table names are stripped of their namespace/schema before being used to
generate index names.
"""
with connection.schema_editor() as editor:
max_name_length = connection.ops.max_name_length() or 200
namespace = "n" * max_name_length
table_name = "t" * max_name_length
namespaced_table_name = '"%s"."%s"' % (namespace, table_name)
self.assertEqual(
editor._create_index_name(table_name, []),
editor._create_index_name(namespaced_table_name, []),
)
@unittest.skipUnless(
connection.vendor == "oracle", "Oracle specific db_table syntax"
)
def test_creation_with_db_table_double_quotes(self):
oracle_user = connection.creation._test_database_user()
class Student(Model):
name = CharField(max_length=30)
class Meta:
app_label = "schema"
apps = new_apps
db_table = '"%s"."DJANGO_STUDENT_TABLE"' % oracle_user
class Document(Model):
name = CharField(max_length=30)
students = ManyToManyField(Student)
class Meta:
app_label = "schema"
apps = new_apps
db_table = '"%s"."DJANGO_DOCUMENT_TABLE"' % oracle_user
self.isolated_local_models = [Student, Document]
with connection.schema_editor() as editor:
editor.create_model(Student)
editor.create_model(Document)
doc = Document.objects.create(name="Test Name")
student = Student.objects.create(name="Some man")
doc.students.add(student)
@isolate_apps("schema")
@unittest.skipUnless(
connection.vendor == "postgresql", "PostgreSQL specific db_table syntax."
)
def test_namespaced_db_table_foreign_key_reference(self):
with connection.cursor() as cursor:
cursor.execute("CREATE SCHEMA django_schema_tests")
def delete_schema():
with connection.cursor() as cursor:
cursor.execute("DROP SCHEMA django_schema_tests CASCADE")
self.addCleanup(delete_schema)
class Author(Model):
class Meta:
app_label = "schema"
class Book(Model):
class Meta:
app_label = "schema"
db_table = '"django_schema_tests"."schema_book"'
author = ForeignKey(Author, CASCADE)
author.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
editor.add_field(Book, author)
def test_rename_table_renames_deferred_sql_references(self):
atomic_rename = connection.features.supports_atomic_references_rename
with connection.schema_editor(atomic=atomic_rename) as editor:
editor.create_model(Author)
editor.create_model(Book)
editor.alter_db_table(Author, "schema_author", "schema_renamed_author")
editor.alter_db_table(Author, "schema_book", "schema_renamed_book")
try:
self.assertGreater(len(editor.deferred_sql), 0)
for statement in editor.deferred_sql:
self.assertIs(statement.references_table("schema_author"), False)
self.assertIs(statement.references_table("schema_book"), False)
finally:
editor.alter_db_table(Author, "schema_renamed_author", "schema_author")
editor.alter_db_table(Author, "schema_renamed_book", "schema_book")
def test_rename_column_renames_deferred_sql_references(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
old_title = Book._meta.get_field("title")
new_title = CharField(max_length=100, db_index=True)
new_title.set_attributes_from_name("renamed_title")
editor.alter_field(Book, old_title, new_title)
old_author = Book._meta.get_field("author")
new_author = ForeignKey(Author, CASCADE)
new_author.set_attributes_from_name("renamed_author")
editor.alter_field(Book, old_author, new_author)
self.assertGreater(len(editor.deferred_sql), 0)
for statement in editor.deferred_sql:
self.assertIs(statement.references_column("book", "title"), False)
self.assertIs(statement.references_column("book", "author_id"), False)
@isolate_apps("schema")
def test_referenced_field_without_constraint_rename_inside_atomic_block(self):
"""
Foreign keys without database level constraint don't prevent the field
they reference from being renamed in an atomic block.
"""
class Foo(Model):
field = CharField(max_length=255, unique=True)
class Meta:
app_label = "schema"
class Bar(Model):
foo = ForeignKey(Foo, CASCADE, to_field="field", db_constraint=False)
class Meta:
app_label = "schema"
self.isolated_local_models = [Foo, Bar]
with connection.schema_editor() as editor:
editor.create_model(Foo)
editor.create_model(Bar)
new_field = CharField(max_length=255, unique=True)
new_field.set_attributes_from_name("renamed")
with connection.schema_editor(atomic=True) as editor:
editor.alter_field(Foo, Foo._meta.get_field("field"), new_field)
@isolate_apps("schema")
def test_referenced_table_without_constraint_rename_inside_atomic_block(self):
"""
Foreign keys without database level constraint don't prevent the table
they reference from being renamed in an atomic block.
"""
class Foo(Model):
field = CharField(max_length=255, unique=True)
class Meta:
app_label = "schema"
class Bar(Model):
foo = ForeignKey(Foo, CASCADE, to_field="field", db_constraint=False)
class Meta:
app_label = "schema"
self.isolated_local_models = [Foo, Bar]
with connection.schema_editor() as editor:
editor.create_model(Foo)
editor.create_model(Bar)
new_field = CharField(max_length=255, unique=True)
new_field.set_attributes_from_name("renamed")
with connection.schema_editor(atomic=True) as editor:
editor.alter_db_table(Foo, Foo._meta.db_table, "renamed_table")
Foo._meta.db_table = "renamed_table"
@isolate_apps("schema")
@skipUnlessDBFeature("supports_collation_on_charfield")
def test_db_collation_charfield(self):
collation = connection.features.test_collations.get("non_default")
if not collation:
self.skipTest("Language collations are not supported.")
class Foo(Model):
field = CharField(max_length=255, db_collation=collation)
class Meta:
app_label = "schema"
self.isolated_local_models = [Foo]
with connection.schema_editor() as editor:
editor.create_model(Foo)
self.assertEqual(
self.get_column_collation(Foo._meta.db_table, "field"),
collation,
)
@isolate_apps("schema")
@skipUnlessDBFeature("supports_collation_on_textfield")
def test_db_collation_textfield(self):
collation = connection.features.test_collations.get("non_default")
if not collation:
self.skipTest("Language collations are not supported.")
class Foo(Model):
field = TextField(db_collation=collation)
class Meta:
app_label = "schema"
self.isolated_local_models = [Foo]
with connection.schema_editor() as editor:
editor.create_model(Foo)
self.assertEqual(
self.get_column_collation(Foo._meta.db_table, "field"),
collation,
)
@skipUnlessDBFeature("supports_collation_on_charfield")
def test_add_field_db_collation(self):
collation = connection.features.test_collations.get("non_default")
if not collation:
self.skipTest("Language collations are not supported.")
with connection.schema_editor() as editor:
editor.create_model(Author)
new_field = CharField(max_length=255, db_collation=collation)
new_field.set_attributes_from_name("alias")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
columns = self.column_classes(Author)
self.assertEqual(
columns["alias"][0],
connection.features.introspected_field_types["CharField"],
)
self.assertEqual(columns["alias"][1][8], collation)
@skipUnlessDBFeature("supports_collation_on_charfield")
def test_alter_field_db_collation(self):
collation = connection.features.test_collations.get("non_default")
if not collation:
self.skipTest("Language collations are not supported.")
with connection.schema_editor() as editor:
editor.create_model(Author)
old_field = Author._meta.get_field("name")
new_field = CharField(max_length=255, db_collation=collation)
new_field.set_attributes_from_name("name")
new_field.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
self.assertEqual(
self.get_column_collation(Author._meta.db_table, "name"),
collation,
)
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, old_field, strict=True)
self.assertIsNone(self.get_column_collation(Author._meta.db_table, "name"))
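# On PostgreSQL this is roughly:
#   ALTER TABLE "schema_author" ALTER COLUMN "name" TYPE varchar(255) COLLATE "<collation>"
# (a sketch; the exact statement varies by backend).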
@skipUnlessDBFeature("supports_collation_on_charfield")
def test_alter_field_type_preserve_db_collation(self):
collation = connection.features.test_collations.get("non_default")
if not collation:
self.skipTest("Language collations are not supported.")
with connection.schema_editor() as editor:
editor.create_model(Author)
old_field = Author._meta.get_field("name")
new_field = CharField(max_length=255, db_collation=collation)
new_field.set_attributes_from_name("name")
new_field.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
self.assertEqual(
self.get_column_collation(Author._meta.db_table, "name"),
collation,
)
# Changing a field type should preserve the collation.
old_field = new_field
new_field = CharField(max_length=511, db_collation=collation)
new_field.set_attributes_from_name("name")
new_field.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, old_field, strict=True)
# Collation is preserved.
self.assertEqual(
self.get_column_collation(Author._meta.db_table, "name"),
collation,
)
@skipUnlessDBFeature("supports_collation_on_charfield")
def test_alter_primary_key_db_collation(self):
collation = connection.features.test_collations.get("non_default")
if not collation:
self.skipTest("Language collations are not supported.")
with connection.schema_editor() as editor:
editor.create_model(Thing)
old_field = Thing._meta.get_field("when")
new_field = CharField(max_length=1, db_collation=collation, primary_key=True)
new_field.set_attributes_from_name("when")
new_field.model = Thing
with connection.schema_editor() as editor:
editor.alter_field(Thing, old_field, new_field, strict=True)
self.assertEqual(self.get_primary_key(Thing._meta.db_table), "when")
self.assertEqual(
self.get_column_collation(Thing._meta.db_table, "when"),
collation,
)
with connection.schema_editor() as editor:
editor.alter_field(Thing, new_field, old_field, strict=True)
self.assertEqual(self.get_primary_key(Thing._meta.db_table), "when")
self.assertIsNone(self.get_column_collation(Thing._meta.db_table, "when"))
@skipUnlessDBFeature(
"supports_collation_on_charfield", "supports_collation_on_textfield"
)
def test_alter_field_type_and_db_collation(self):
collation = connection.features.test_collations.get("non_default")
if not collation:
self.skipTest("Language collations are not supported.")
with connection.schema_editor() as editor:
editor.create_model(Note)
old_field = Note._meta.get_field("info")
new_field = CharField(max_length=255, db_collation=collation)
new_field.set_attributes_from_name("info")
new_field.model = Note
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
columns = self.column_classes(Note)
self.assertEqual(
columns["info"][0],
connection.features.introspected_field_types["CharField"],
)
self.assertEqual(columns["info"][1][8], collation)
with connection.schema_editor() as editor:
editor.alter_field(Note, new_field, old_field, strict=True)
columns = self.column_classes(Note)
self.assertEqual(columns["info"][0], "TextField")
self.assertIsNone(columns["info"][1][8])
@skipUnlessDBFeature(
"supports_collation_on_charfield",
"supports_non_deterministic_collations",
)
def test_ci_cs_db_collation(self):
cs_collation = connection.features.test_collations.get("cs")
ci_collation = connection.features.test_collations.get("ci")
try:
if connection.vendor == "mysql":
cs_collation = "latin1_general_cs"
elif connection.vendor == "postgresql":
cs_collation = "en-x-icu"
with connection.cursor() as cursor:
cursor.execute(
"CREATE COLLATION IF NOT EXISTS case_insensitive "
"(provider = icu, locale = 'und-u-ks-level2', "
"deterministic = false)"
)
ci_collation = "case_insensitive"
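# "und-u-ks-level2" compares at ICU strength 2, which ignores case
# differences; deterministic = false is required for PostgreSQL to honor
# the collation in equality comparisons.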
# Create the table.
with connection.schema_editor() as editor:
editor.create_model(Author)
# Case-insensitive collation.
old_field = Author._meta.get_field("name")
new_field_ci = CharField(max_length=255, db_collation=ci_collation)
new_field_ci.set_attributes_from_name("name")
new_field_ci.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field_ci, strict=True)
Author.objects.create(name="ANDREW")
self.assertIs(Author.objects.filter(name="Andrew").exists(), True)
# Case-sensitive collation.
new_field_cs = CharField(max_length=255, db_collation=cs_collation)
new_field_cs.set_attributes_from_name("name")
new_field_cs.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field_ci, new_field_cs, strict=True)
self.assertIs(Author.objects.filter(name="Andrew").exists(), False)
finally:
if connection.vendor == "postgresql":
with connection.cursor() as cursor:
cursor.execute("DROP COLLATION IF EXISTS case_insensitive")
from functools import update_wrapper, wraps
from unittest import TestCase
from django.contrib.admin.views.decorators import staff_member_required
from django.contrib.auth.decorators import (
login_required,
permission_required,
user_passes_test,
)
from django.http import HttpRequest, HttpResponse, HttpResponseNotAllowed
from django.test import SimpleTestCase
from django.utils.decorators import method_decorator
from django.utils.functional import keep_lazy, keep_lazy_text, lazy
from django.utils.safestring import mark_safe
from django.views.decorators.cache import cache_control, cache_page, never_cache
from django.views.decorators.http import (
condition,
require_GET,
require_http_methods,
require_POST,
require_safe,
)
from django.views.decorators.vary import vary_on_cookie, vary_on_headers
def fully_decorated(request):
"""Expected __doc__"""
return HttpResponse("<html><body>dummy</body></html>")
fully_decorated.anything = "Expected __dict__"
def compose(*functions):
# compose(f, g)(*args, **kwargs) == f(g(*args, **kwargs))
functions = list(reversed(functions))
def _inner(*args, **kwargs):
result = functions[0](*args, **kwargs)
for f in functions[1:]:
result = f(result)
return result
return _inner
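# For example, compose(str.upper, str.strip)("  hi  ") == "HI": the
# rightmost function runs first, mirroring ordinary decorator stacking.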
full_decorator = compose(
# django.views.decorators.http
require_http_methods(["GET"]),
require_GET,
require_POST,
require_safe,
condition(lambda r: None, lambda r: None),
# django.views.decorators.vary
vary_on_headers("Accept-language"),
vary_on_cookie,
# django.views.decorators.cache
cache_page(60 * 15),
cache_control(private=True),
never_cache,
# django.contrib.auth.decorators
# Apply user_passes_test twice to check #9474
user_passes_test(lambda u: True),
login_required,
permission_required("change_world"),
# django.contrib.admin.views.decorators
staff_member_required,
# django.utils.functional
keep_lazy(HttpResponse),
keep_lazy_text,
lazy,
# django.utils.safestring
mark_safe,
)
fully_decorated = full_decorator(fully_decorated)
class DecoratorsTest(TestCase):
def test_attributes(self):
"""
Built-in decorators set certain attributes of the wrapped function.
"""
self.assertEqual(fully_decorated.__name__, "fully_decorated")
self.assertEqual(fully_decorated.__doc__, "Expected __doc__")
self.assertEqual(fully_decorated.__dict__["anything"], "Expected __dict__")
def test_user_passes_test_composition(self):
"""
The user_passes_test decorator can be applied multiple times (#9474).
"""
def test1(user):
user.decorators_applied.append("test1")
return True
def test2(user):
user.decorators_applied.append("test2")
return True
def callback(request):
return request.user.decorators_applied
callback = user_passes_test(test1)(callback)
callback = user_passes_test(test2)(callback)
class DummyUser:
pass
class DummyRequest:
pass
request = DummyRequest()
request.user = DummyUser()
request.user.decorators_applied = []
response = callback(request)
self.assertEqual(response, ["test2", "test1"])
def test_require_safe_accepts_only_safe_methods(self):
"""
require_safe allows only the safe GET and HEAD methods; any other
method receives an HttpResponseNotAllowed response.
Refs #15637.
"""
def my_view(request):
return HttpResponse("OK")
my_safe_view = require_safe(my_view)
request = HttpRequest()
request.method = "GET"
self.assertIsInstance(my_safe_view(request), HttpResponse)
request.method = "HEAD"
self.assertIsInstance(my_safe_view(request), HttpResponse)
request.method = "POST"
self.assertIsInstance(my_safe_view(request), HttpResponseNotAllowed)
request.method = "PUT"
self.assertIsInstance(my_safe_view(request), HttpResponseNotAllowed)
request.method = "DELETE"
self.assertIsInstance(my_safe_view(request), HttpResponseNotAllowed)
# For testing method_decorator, a decorator that assumes a single argument.
# A TypeError is raised if there is a mismatch in the number of arguments.
def simple_dec(func):
@wraps(func)
def wrapper(arg):
return func("test:" + arg)
return wrapper
simple_dec_m = method_decorator(simple_dec)
# For testing method_decorator, two decorators that add an attribute to the function
def myattr_dec(func):
def wrapper(*args, **kwargs):
return func(*args, **kwargs)
wrapper.myattr = True
return wrapper
myattr_dec_m = method_decorator(myattr_dec)
def myattr2_dec(func):
def wrapper(*args, **kwargs):
return func(*args, **kwargs)
wrapper.myattr2 = True
return wrapper
myattr2_dec_m = method_decorator(myattr2_dec)
class ClsDec:
def __init__(self, myattr):
self.myattr = myattr
def __call__(self, f):
def wrapper():
return f() and self.myattr
return update_wrapper(wrapper, f)
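# e.g. ClsDec(False) wraps a zero-argument callable so that it returns
# f() and False; test_argumented below relies on this short-circuit.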
class MethodDecoratorTests(SimpleTestCase):
"""
Tests for method_decorator
"""
def test_preserve_signature(self):
class Test:
@simple_dec_m
def say(self, arg):
return arg
self.assertEqual("test:hello", Test().say("hello"))
def test_preserve_attributes(self):
# Sanity check myattr_dec and myattr2_dec
@myattr_dec
def func():
pass
self.assertIs(getattr(func, "myattr", False), True)
@myattr2_dec
def func():
pass
self.assertIs(getattr(func, "myattr2", False), True)
@myattr_dec
@myattr2_dec
def func():
pass
self.assertIs(getattr(func, "myattr", False), True)
self.assertIs(getattr(func, "myattr2", False), False)
# Decorate using method_decorator() on the method.
class TestPlain:
@myattr_dec_m
@myattr2_dec_m
def method(self):
"A method"
pass
# Decorate using method_decorator() on both the class and the method.
# The decorators applied to the methods are applied before the ones
# applied to the class.
@method_decorator(myattr_dec_m, "method")
class TestMethodAndClass:
@method_decorator(myattr2_dec_m)
def method(self):
"A method"
pass
# Decorate using an iterable of function decorators.
@method_decorator((myattr_dec, myattr2_dec), "method")
class TestFunctionIterable:
def method(self):
"A method"
pass
# Decorate using an iterable of method decorators.
decorators = (myattr_dec_m, myattr2_dec_m)
@method_decorator(decorators, "method")
class TestMethodIterable:
def method(self):
"A method"
pass
tests = (
TestPlain,
TestMethodAndClass,
TestFunctionIterable,
TestMethodIterable,
)
for Test in tests:
with self.subTest(Test=Test):
self.assertIs(getattr(Test().method, "myattr", False), True)
self.assertIs(getattr(Test().method, "myattr2", False), True)
self.assertIs(getattr(Test.method, "myattr", False), True)
self.assertIs(getattr(Test.method, "myattr2", False), True)
self.assertEqual(Test.method.__doc__, "A method")
self.assertEqual(Test.method.__name__, "method")
def test_new_attribute(self):
"""A decorator that sets a new attribute on the method."""
def decorate(func):
func.x = 1
return func
class MyClass:
@method_decorator(decorate)
def method(self):
return True
obj = MyClass()
self.assertEqual(obj.method.x, 1)
self.assertIs(obj.method(), True)
def test_bad_iterable(self):
decorators = {myattr_dec_m, myattr2_dec_m}
msg = "'set' object is not subscriptable"
with self.assertRaisesMessage(TypeError, msg):
@method_decorator(decorators, "method")
class TestIterable:
def method(self):
"A method"
pass
# Test for a decorator that takes arguments.
def test_argumented(self):
class Test:
@method_decorator(ClsDec(False))
def method(self):
return True
self.assertIs(Test().method(), False)
def test_descriptors(self):
def original_dec(wrapped):
def _wrapped(arg):
return wrapped(arg)
return _wrapped
method_dec = method_decorator(original_dec)
class bound_wrapper:
def __init__(self, wrapped):
self.wrapped = wrapped
self.__name__ = wrapped.__name__
def __call__(self, arg):
return self.wrapped(arg)
def __get__(self, instance, cls=None):
return self
class descriptor_wrapper:
def __init__(self, wrapped):
self.wrapped = wrapped
self.__name__ = wrapped.__name__
def __get__(self, instance, cls=None):
return bound_wrapper(self.wrapped.__get__(instance, cls))
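# descriptor_wrapper mimics a non-function descriptor (like classmethod),
# so method_decorator must bind via __get__ instead of assuming a plain
# function attribute.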
class Test:
@method_dec
@descriptor_wrapper
def method(self, arg):
return arg
self.assertEqual(Test().method(1), 1)
def test_class_decoration(self):
"""
@method_decorator can be used to decorate a class and its methods.
"""
def deco(func):
def _wrapper(*args, **kwargs):
return True
return _wrapper
@method_decorator(deco, name="method")
class Test:
def method(self):
return False
self.assertTrue(Test().method())
def test_tuple_of_decorators(self):
"""
@method_decorator can accept a tuple of decorators.
"""
def add_question_mark(func):
def _wrapper(*args, **kwargs):
return func(*args, **kwargs) + "?"
return _wrapper
def add_exclamation_mark(func):
def _wrapper(*args, **kwargs):
return func(*args, **kwargs) + "!"
return _wrapper
# The order should be consistent with the usual order in which
# decorators are applied, e.g.
# @add_exclamation_mark
# @add_question_mark
# def func():
# ...
decorators = (add_exclamation_mark, add_question_mark)
@method_decorator(decorators, name="method")
class TestFirst:
def method(self):
return "hello world"
class TestSecond:
@method_decorator(decorators)
def method(self):
return "hello world"
self.assertEqual(TestFirst().method(), "hello world?!")
self.assertEqual(TestSecond().method(), "hello world?!")
def test_invalid_non_callable_attribute_decoration(self):
"""
@method_decorator on a non-callable attribute raises an error.
"""
msg = (
"Cannot decorate 'prop' as it isn't a callable attribute of "
"<class 'Test'> (1)"
)
with self.assertRaisesMessage(TypeError, msg):
@method_decorator(lambda: None, name="prop")
class Test:
prop = 1
@classmethod
def __module__(cls):
return "tests"
def test_invalid_method_name_to_decorate(self):
"""
@method_decorator on a nonexistent method raises an error.
"""
msg = (
"The keyword argument `name` must be the name of a method of the "
"decorated class: <class 'Test'>. Got 'nonexistent_method' instead"
)
with self.assertRaisesMessage(ValueError, msg):
@method_decorator(lambda: None, name="nonexistent_method")
class Test:
@classmethod
def __module__(cls):
return "tests"
def test_wrapper_assignments(self):
"""@method_decorator preserves wrapper assignments."""
func_name = None
func_module = None
def decorator(func):
@wraps(func)
def inner(*args, **kwargs):
nonlocal func_name, func_module
func_name = getattr(func, "__name__", None)
func_module = getattr(func, "__module__", None)
return func(*args, **kwargs)
return inner
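        # method_decorator() should hand the decorator a callable that still
        # carries the original method's __name__ and __module__.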
class Test:
@method_decorator(decorator)
def method(self):
return "tests"
Test().method()
self.assertEqual(func_name, "method")
self.assertIsNotNone(func_module)
|
028f5805516d62d1c848400869c8e313213b4b4ce2deacb38ef763f4092f1fe9 | from asgiref.sync import iscoroutinefunction
from django.http import HttpRequest, HttpResponse
from django.middleware.clickjacking import XFrameOptionsMiddleware
from django.test import SimpleTestCase
from django.views.decorators.clickjacking import (
xframe_options_deny,
xframe_options_exempt,
xframe_options_sameorigin,
)
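# The clickjacking decorators must preserve whether the wrapped view is a
# synchronous function or a coroutine function.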
class XFrameOptionsDenyTests(SimpleTestCase):
def test_wrapped_sync_function_is_not_coroutine_function(self):
def sync_view(request):
return HttpResponse()
wrapped_view = xframe_options_deny(sync_view)
self.assertIs(iscoroutinefunction(wrapped_view), False)
def test_wrapped_async_function_is_coroutine_function(self):
async def async_view(request):
return HttpResponse()
wrapped_view = xframe_options_deny(async_view)
self.assertIs(iscoroutinefunction(wrapped_view), True)
def test_decorator_sets_x_frame_options_to_deny(self):
@xframe_options_deny
def a_view(request):
return HttpResponse()
response = a_view(HttpRequest())
self.assertEqual(response.headers["X-Frame-Options"], "DENY")
async def test_decorator_sets_x_frame_options_to_deny_async_view(self):
@xframe_options_deny
async def an_async_view(request):
return HttpResponse()
response = await an_async_view(HttpRequest())
self.assertEqual(response.headers["X-Frame-Options"], "DENY")
class XFrameOptionsSameoriginTests(SimpleTestCase):
def test_wrapped_sync_function_is_not_coroutine_function(self):
def sync_view(request):
return HttpResponse()
wrapped_view = xframe_options_sameorigin(sync_view)
self.assertIs(iscoroutinefunction(wrapped_view), False)
def test_wrapped_async_function_is_coroutine_function(self):
async def async_view(request):
return HttpResponse()
wrapped_view = xframe_options_sameorigin(async_view)
self.assertIs(iscoroutinefunction(wrapped_view), True)
def test_decorator_sets_x_frame_options_to_sameorigin(self):
@xframe_options_sameorigin
def a_view(request):
return HttpResponse()
response = a_view(HttpRequest())
self.assertEqual(response.headers["X-Frame-Options"], "SAMEORIGIN")
async def test_decorator_sets_x_frame_options_to_sameorigin_async_view(self):
@xframe_options_sameorigin
async def an_async_view(request):
return HttpResponse()
response = await an_async_view(HttpRequest())
self.assertEqual(response.headers["X-Frame-Options"], "SAMEORIGIN")
class XFrameOptionsExemptTests(SimpleTestCase):
def test_wrapped_sync_function_is_not_coroutine_function(self):
def sync_view(request):
return HttpResponse()
wrapped_view = xframe_options_exempt(sync_view)
self.assertIs(iscoroutinefunction(wrapped_view), False)
def test_wrapped_async_function_is_coroutine_function(self):
async def async_view(request):
return HttpResponse()
wrapped_view = xframe_options_exempt(async_view)
self.assertIs(iscoroutinefunction(wrapped_view), True)
def test_decorator_stops_x_frame_options_being_set(self):
"""
@xframe_options_exempt instructs the XFrameOptionsMiddleware to NOT set
the header.
"""
@xframe_options_exempt
def a_view(request):
return HttpResponse()
request = HttpRequest()
response = a_view(request)
self.assertIsNone(response.get("X-Frame-Options", None))
self.assertIs(response.xframe_options_exempt, True)
# The real purpose of the exempt decorator is to suppress the
# middleware's functionality.
middleware_response = XFrameOptionsMiddleware(a_view)(request)
self.assertIsNone(middleware_response.get("X-Frame-Options"))
async def test_exempt_decorator_async_view(self):
@xframe_options_exempt
async def an_async_view(request):
return HttpResponse()
request = HttpRequest()
response = await an_async_view(request)
self.assertIsNone(response.get("X-Frame-Options"))
self.assertIs(response.xframe_options_exempt, True)
# The real purpose of the exempt decorator is to suppress the
# middleware's functionality.
middleware_response = await XFrameOptionsMiddleware(an_async_view)(request)
self.assertIsNone(middleware_response.get("X-Frame-Options"))
|
5329464bfc58ffb1f5060b801dc0ecb02c3079a3dbee29a2cbd0b89d1570f2ef | from datetime import datetime
from math import pi
from django.db import connection
from django.db.models import Case, F, FloatField, Value, When
from django.db.models.expressions import (
Expression,
ExpressionList,
ExpressionWrapper,
Func,
OrderByList,
RawSQL,
)
from django.db.models.functions import Collate
from django.db.models.lookups import GreaterThan
from django.test import SimpleTestCase, TestCase, skipIfDBFeature, skipUnlessDBFeature
from .models import (
Article,
DBArticle,
DBDefaults,
DBDefaultsFK,
DBDefaultsFunction,
DBDefaultsPK,
)
class DefaultTests(TestCase):
def test_field_defaults(self):
a = Article()
now = datetime.now()
a.save()
self.assertIsInstance(a.id, int)
self.assertEqual(a.headline, "Default headline")
self.assertLess((now - a.pub_date).seconds, 5)
@skipUnlessDBFeature(
"can_return_columns_from_insert", "supports_expression_defaults"
)
def test_field_db_defaults_returning(self):
a = DBArticle()
a.save()
self.assertIsInstance(a.id, int)
self.assertEqual(a.headline, "Default headline")
self.assertIsInstance(a.pub_date, datetime)
@skipIfDBFeature("can_return_columns_from_insert")
@skipUnlessDBFeature("supports_expression_defaults")
def test_field_db_defaults_refresh(self):
a = DBArticle()
a.save()
a.refresh_from_db()
self.assertIsInstance(a.id, int)
self.assertEqual(a.headline, "Default headline")
self.assertIsInstance(a.pub_date, datetime)
def test_null_db_default(self):
obj1 = DBDefaults.objects.create()
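        # Backends that can't return inserted columns (e.g. MySQL) only see
        # the database-computed default after reloading from the database.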
if not connection.features.can_return_columns_from_insert:
obj1.refresh_from_db()
self.assertEqual(obj1.null, 1.1)
obj2 = DBDefaults.objects.create(null=None)
self.assertIsNone(obj2.null)
@skipUnlessDBFeature("supports_expression_defaults")
def test_db_default_function(self):
m = DBDefaultsFunction.objects.create()
if not connection.features.can_return_columns_from_insert:
m.refresh_from_db()
self.assertAlmostEqual(m.number, pi)
self.assertEqual(m.year, datetime.now().year)
self.assertAlmostEqual(m.added, pi + 4.5)
self.assertEqual(m.multiple_subfunctions, 4.5)
@skipUnlessDBFeature("insert_test_table_with_defaults")
def test_both_default(self):
create_sql = connection.features.insert_test_table_with_defaults
with connection.cursor() as cursor:
cursor.execute(create_sql.format(DBDefaults._meta.db_table))
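        # The row inserted via raw SQL receives the database default (2); the
        # ORM insert below sends the Python-level default (1) explicitly.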
obj1 = DBDefaults.objects.get()
self.assertEqual(obj1.both, 2)
obj2 = DBDefaults.objects.create()
self.assertEqual(obj2.both, 1)
def test_pk_db_default(self):
obj1 = DBDefaultsPK.objects.create()
if not connection.features.can_return_columns_from_insert:
            # refresh_from_db() cannot be used because it needs the pk to
            # already be known to Django.
obj1 = DBDefaultsPK.objects.get(pk="en")
self.assertEqual(obj1.pk, "en")
self.assertEqual(obj1.language_code, "en")
obj2 = DBDefaultsPK.objects.create(language_code="de")
self.assertEqual(obj2.pk, "de")
self.assertEqual(obj2.language_code, "de")
def test_foreign_key_db_default(self):
parent1 = DBDefaultsPK.objects.create(language_code="fr")
child1 = DBDefaultsFK.objects.create()
if not connection.features.can_return_columns_from_insert:
child1.refresh_from_db()
self.assertEqual(child1.language_code, parent1)
parent2 = DBDefaultsPK.objects.create()
if not connection.features.can_return_columns_from_insert:
            # refresh_from_db() cannot be used because it needs the pk to
            # already be known to Django.
parent2 = DBDefaultsPK.objects.get(pk="en")
child2 = DBDefaultsFK.objects.create(language_code=parent2)
self.assertEqual(child2.language_code, parent2)
@skipUnlessDBFeature(
"can_return_columns_from_insert", "supports_expression_defaults"
)
def test_case_when_db_default_returning(self):
m = DBDefaultsFunction.objects.create()
self.assertEqual(m.case_when, 3)
@skipIfDBFeature("can_return_columns_from_insert")
@skipUnlessDBFeature("supports_expression_defaults")
def test_case_when_db_default_no_returning(self):
m = DBDefaultsFunction.objects.create()
m.refresh_from_db()
self.assertEqual(m.case_when, 3)
@skipUnlessDBFeature("supports_expression_defaults")
def test_bulk_create_all_db_defaults(self):
articles = [DBArticle(), DBArticle()]
DBArticle.objects.bulk_create(articles)
headlines = DBArticle.objects.values_list("headline", flat=True)
self.assertSequenceEqual(headlines, ["Default headline", "Default headline"])
@skipUnlessDBFeature("supports_expression_defaults")
def test_bulk_create_all_db_defaults_one_field(self):
pub_date = datetime.now()
articles = [DBArticle(pub_date=pub_date), DBArticle(pub_date=pub_date)]
DBArticle.objects.bulk_create(articles)
headlines = DBArticle.objects.values_list("headline", "pub_date")
self.assertSequenceEqual(
headlines,
[
("Default headline", pub_date),
("Default headline", pub_date),
],
)
@skipUnlessDBFeature("supports_expression_defaults")
def test_bulk_create_mixed_db_defaults(self):
articles = [DBArticle(), DBArticle(headline="Something else")]
DBArticle.objects.bulk_create(articles)
headlines = DBArticle.objects.values_list("headline", flat=True)
self.assertCountEqual(headlines, ["Default headline", "Something else"])
@skipUnlessDBFeature("supports_expression_defaults")
def test_bulk_create_mixed_db_defaults_function(self):
instances = [DBDefaultsFunction(), DBDefaultsFunction(year=2000)]
DBDefaultsFunction.objects.bulk_create(instances)
years = DBDefaultsFunction.objects.values_list("year", flat=True)
self.assertCountEqual(years, [2000, datetime.now().year])
class AllowedDefaultTests(SimpleTestCase):
def test_allowed(self):
class Max(Func):
function = "MAX"
tests = [
Value(10),
Max(1, 2),
RawSQL("Now()", ()),
Value(10) + Value(7), # Combined expression.
ExpressionList(Value(1), Value(2)),
ExpressionWrapper(Value(1), output_field=FloatField()),
Case(When(GreaterThan(2, 1), then=3), default=4),
]
for expression in tests:
with self.subTest(expression=expression):
self.assertIs(expression.allowed_default, True)
def test_disallowed(self):
class Max(Func):
function = "MAX"
tests = [
Expression(),
F("field"),
Max(F("count"), 1),
Value(10) + F("count"), # Combined expression.
ExpressionList(F("count"), Value(2)),
ExpressionWrapper(F("count"), output_field=FloatField()),
Collate(Value("John"), "nocase"),
OrderByList("field"),
]
for expression in tests:
with self.subTest(expression=expression):
self.assertIs(expression.allowed_default, False)
|
6afcaf67cb7b7d595ae2c38c16613057414bba934964476df75659363aab9631 | """
Callable defaults
You can pass callable objects as the ``default`` parameter to a field. When
an object is created without an explicit value for that field, Django calls
the callable to determine the default value.
This example uses ``datetime.datetime.now`` as the default for the ``pub_date``
field.
"""
from datetime import datetime
from django.db import models
from django.db.models.functions import Coalesce, ExtractYear, Now, Pi
from django.db.models.lookups import GreaterThan
class Article(models.Model):
headline = models.CharField(max_length=100, default="Default headline")
pub_date = models.DateTimeField(default=datetime.now)
def __str__(self):
return self.headline
class DBArticle(models.Model):
"""
Values or expressions can be passed as the db_default parameter to a field.
When the object is created without an explicit value passed in, the
database will insert the default value automatically.
"""
headline = models.CharField(max_length=100, db_default="Default headline")
pub_date = models.DateTimeField(db_default=Now())
class Meta:
required_db_features = {"supports_expression_defaults"}
class DBDefaults(models.Model):
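    # Django applies `default` when it builds the INSERT, so `db_default`
    # only takes effect when the column is omitted, e.g. in raw SQL.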
both = models.IntegerField(default=1, db_default=2)
null = models.FloatField(null=True, db_default=1.1)
class DBDefaultsFunction(models.Model):
number = models.FloatField(db_default=Pi())
year = models.IntegerField(db_default=ExtractYear(Now()))
added = models.FloatField(db_default=Pi() + 4.5)
multiple_subfunctions = models.FloatField(db_default=Coalesce(4.5, Pi()))
case_when = models.IntegerField(
db_default=models.Case(models.When(GreaterThan(2, 1), then=3), default=4)
)
class Meta:
required_db_features = {"supports_expression_defaults"}
class DBDefaultsPK(models.Model):
language_code = models.CharField(primary_key=True, max_length=2, db_default="en")
class DBDefaultsFK(models.Model):
language_code = models.ForeignKey(
DBDefaultsPK, db_default="fr", on_delete=models.CASCADE
)
|
a8cd70c85536f07d7bb501bb2ef9e05d0651ae8f77a1574ded1f454494754d52 | from math import ceil
from operator import attrgetter
from django.core.exceptions import FieldDoesNotExist
from django.db import (
IntegrityError,
NotSupportedError,
OperationalError,
ProgrammingError,
connection,
)
from django.db.models import FileField, Value
from django.db.models.functions import Lower, Now
from django.test import (
TestCase,
override_settings,
skipIfDBFeature,
skipUnlessDBFeature,
)
from .models import (
BigAutoFieldModel,
Country,
FieldsWithDbColumns,
NoFields,
NullableFields,
Pizzeria,
ProxyCountry,
ProxyMultiCountry,
ProxyMultiProxyCountry,
ProxyProxyCountry,
RelatedModel,
Restaurant,
SmallAutoFieldModel,
State,
TwoFields,
UpsertConflict,
)
class BulkCreateTests(TestCase):
def setUp(self):
self.data = [
Country(name="United States of America", iso_two_letter="US"),
Country(name="The Netherlands", iso_two_letter="NL"),
Country(name="Germany", iso_two_letter="DE"),
Country(name="Czech Republic", iso_two_letter="CZ"),
]
def test_simple(self):
created = Country.objects.bulk_create(self.data)
self.assertEqual(created, self.data)
self.assertQuerySetEqual(
Country.objects.order_by("-name"),
[
"United States of America",
"The Netherlands",
"Germany",
"Czech Republic",
],
attrgetter("name"),
)
created = Country.objects.bulk_create([])
self.assertEqual(created, [])
self.assertEqual(Country.objects.count(), 4)
@skipUnlessDBFeature("has_bulk_insert")
def test_efficiency(self):
with self.assertNumQueries(1):
Country.objects.bulk_create(self.data)
@skipUnlessDBFeature("has_bulk_insert")
def test_long_non_ascii_text(self):
"""
Inserting non-ASCII values with a length in the range 2001 to 4000
characters, i.e. 4002 to 8000 bytes, must be set as a CLOB on Oracle
(#22144).
"""
Country.objects.bulk_create([Country(description="Ж" * 3000)])
self.assertEqual(Country.objects.count(), 1)
@skipUnlessDBFeature("has_bulk_insert")
def test_long_and_short_text(self):
Country.objects.bulk_create(
[
Country(description="a" * 4001, iso_two_letter="A"),
Country(description="a", iso_two_letter="B"),
Country(description="Ж" * 2001, iso_two_letter="C"),
Country(description="Ж", iso_two_letter="D"),
]
)
self.assertEqual(Country.objects.count(), 4)
def test_multi_table_inheritance_unsupported(self):
expected_message = "Can't bulk create a multi-table inherited model"
with self.assertRaisesMessage(ValueError, expected_message):
Pizzeria.objects.bulk_create(
[
Pizzeria(name="The Art of Pizza"),
]
)
with self.assertRaisesMessage(ValueError, expected_message):
ProxyMultiCountry.objects.bulk_create(
[
ProxyMultiCountry(name="Fillory", iso_two_letter="FL"),
]
)
with self.assertRaisesMessage(ValueError, expected_message):
ProxyMultiProxyCountry.objects.bulk_create(
[
ProxyMultiProxyCountry(name="Fillory", iso_two_letter="FL"),
]
)
def test_proxy_inheritance_supported(self):
ProxyCountry.objects.bulk_create(
[
ProxyCountry(name="Qwghlm", iso_two_letter="QW"),
Country(name="Tortall", iso_two_letter="TA"),
]
)
self.assertQuerySetEqual(
ProxyCountry.objects.all(),
{"Qwghlm", "Tortall"},
attrgetter("name"),
ordered=False,
)
ProxyProxyCountry.objects.bulk_create(
[
ProxyProxyCountry(name="Netherlands", iso_two_letter="NT"),
]
)
self.assertQuerySetEqual(
ProxyProxyCountry.objects.all(),
{
"Qwghlm",
"Tortall",
"Netherlands",
},
attrgetter("name"),
ordered=False,
)
def test_non_auto_increment_pk(self):
State.objects.bulk_create(
[State(two_letter_code=s) for s in ["IL", "NY", "CA", "ME"]]
)
self.assertQuerySetEqual(
State.objects.order_by("two_letter_code"),
[
"CA",
"IL",
"ME",
"NY",
],
attrgetter("two_letter_code"),
)
@skipUnlessDBFeature("has_bulk_insert")
def test_non_auto_increment_pk_efficiency(self):
with self.assertNumQueries(1):
State.objects.bulk_create(
[State(two_letter_code=s) for s in ["IL", "NY", "CA", "ME"]]
)
self.assertQuerySetEqual(
State.objects.order_by("two_letter_code"),
[
"CA",
"IL",
"ME",
"NY",
],
attrgetter("two_letter_code"),
)
@skipIfDBFeature("allows_auto_pk_0")
def test_zero_as_autoval(self):
"""
        Zero as the id for an AutoField should raise an exception on MySQL,
        because MySQL does not allow zero as an automatic primary key value
        unless the NO_AUTO_VALUE_ON_ZERO SQL mode is enabled.
"""
valid_country = Country(name="Germany", iso_two_letter="DE")
invalid_country = Country(id=0, name="Poland", iso_two_letter="PL")
msg = "The database backend does not accept 0 as a value for AutoField."
with self.assertRaisesMessage(ValueError, msg):
Country.objects.bulk_create([valid_country, invalid_country])
def test_batch_same_vals(self):
# SQLite had a problem where all the same-valued models were
# collapsed to one insert.
Restaurant.objects.bulk_create([Restaurant(name="foo") for i in range(0, 2)])
self.assertEqual(Restaurant.objects.count(), 2)
def test_large_batch(self):
TwoFields.objects.bulk_create(
[TwoFields(f1=i, f2=i + 1) for i in range(0, 1001)]
)
self.assertEqual(TwoFields.objects.count(), 1001)
self.assertEqual(
TwoFields.objects.filter(f1__gte=450, f1__lte=550).count(), 101
)
self.assertEqual(TwoFields.objects.filter(f2__gte=901).count(), 101)
@skipUnlessDBFeature("has_bulk_insert")
def test_large_single_field_batch(self):
        # SQLite had a problem with more than 500 UNIONed selects in a single
        # query.
Restaurant.objects.bulk_create([Restaurant() for i in range(0, 501)])
@skipUnlessDBFeature("has_bulk_insert")
def test_large_batch_efficiency(self):
with override_settings(DEBUG=True):
connection.queries_log.clear()
TwoFields.objects.bulk_create(
[TwoFields(f1=i, f2=i + 1) for i in range(0, 1001)]
)
self.assertLess(len(connection.queries), 10)
def test_large_batch_mixed(self):
"""
        Insert a large batch mixing objects that have a primary key set with
        objects that don't.
"""
TwoFields.objects.bulk_create(
[
TwoFields(id=i if i % 2 == 0 else None, f1=i, f2=i + 1)
for i in range(100000, 101000)
]
)
self.assertEqual(TwoFields.objects.count(), 1000)
        # We can't assume much about the IDs created, except that the ones
        # created above must exist.
id_range = range(100000, 101000, 2)
self.assertEqual(TwoFields.objects.filter(id__in=id_range).count(), 500)
self.assertEqual(TwoFields.objects.exclude(id__in=id_range).count(), 500)
@skipUnlessDBFeature("has_bulk_insert")
def test_large_batch_mixed_efficiency(self):
"""
        Insert a large batch mixing objects that have a primary key set with
        objects that don't.
"""
with override_settings(DEBUG=True):
connection.queries_log.clear()
TwoFields.objects.bulk_create(
[
TwoFields(id=i if i % 2 == 0 else None, f1=i, f2=i + 1)
for i in range(100000, 101000)
]
)
self.assertLess(len(connection.queries), 10)
def test_explicit_batch_size(self):
objs = [TwoFields(f1=i, f2=i) for i in range(0, 4)]
num_objs = len(objs)
TwoFields.objects.bulk_create(objs, batch_size=1)
self.assertEqual(TwoFields.objects.count(), num_objs)
TwoFields.objects.all().delete()
TwoFields.objects.bulk_create(objs, batch_size=2)
self.assertEqual(TwoFields.objects.count(), num_objs)
TwoFields.objects.all().delete()
TwoFields.objects.bulk_create(objs, batch_size=3)
self.assertEqual(TwoFields.objects.count(), num_objs)
TwoFields.objects.all().delete()
TwoFields.objects.bulk_create(objs, batch_size=num_objs)
self.assertEqual(TwoFields.objects.count(), num_objs)
def test_empty_model(self):
NoFields.objects.bulk_create([NoFields() for i in range(2)])
self.assertEqual(NoFields.objects.count(), 2)
@skipUnlessDBFeature("has_bulk_insert")
def test_explicit_batch_size_efficiency(self):
objs = [TwoFields(f1=i, f2=i) for i in range(0, 100)]
with self.assertNumQueries(2):
TwoFields.objects.bulk_create(objs, 50)
TwoFields.objects.all().delete()
with self.assertNumQueries(1):
TwoFields.objects.bulk_create(objs, len(objs))
@skipUnlessDBFeature("has_bulk_insert")
def test_explicit_batch_size_respects_max_batch_size(self):
objs = [Country(name=f"Country {i}") for i in range(1000)]
fields = ["name", "iso_two_letter", "description"]
max_batch_size = max(connection.ops.bulk_batch_size(fields, objs), 1)
with self.assertNumQueries(ceil(len(objs) / max_batch_size)):
Country.objects.bulk_create(objs, batch_size=max_batch_size + 1)
@skipUnlessDBFeature("has_bulk_insert")
def test_bulk_insert_expressions(self):
Restaurant.objects.bulk_create(
[
Restaurant(name="Sam's Shake Shack"),
Restaurant(name=Lower(Value("Betty's Beetroot Bar"))),
]
)
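        # The Lower() expression was evaluated by the database during the
        # insert, so the stored name is lowercase.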
bbb = Restaurant.objects.filter(name="betty's beetroot bar")
self.assertEqual(bbb.count(), 1)
@skipUnlessDBFeature("has_bulk_insert")
def test_bulk_insert_now(self):
NullableFields.objects.bulk_create(
[
NullableFields(datetime_field=Now()),
NullableFields(datetime_field=Now()),
]
)
self.assertEqual(
NullableFields.objects.filter(datetime_field__isnull=False).count(),
2,
)
@skipUnlessDBFeature("has_bulk_insert")
def test_bulk_insert_nullable_fields(self):
fk_to_auto_fields = {
"auto_field": NoFields.objects.create(),
"small_auto_field": SmallAutoFieldModel.objects.create(),
"big_auto_field": BigAutoFieldModel.objects.create(),
}
# NULL can be mixed with other values in nullable fields
nullable_fields = [
field for field in NullableFields._meta.get_fields() if field.name != "id"
]
NullableFields.objects.bulk_create(
[
NullableFields(**{**fk_to_auto_fields, field.name: None})
for field in nullable_fields
]
)
self.assertEqual(NullableFields.objects.count(), len(nullable_fields))
for field in nullable_fields:
with self.subTest(field=field):
field_value = "" if isinstance(field, FileField) else None
self.assertEqual(
NullableFields.objects.filter(**{field.name: field_value}).count(),
1,
)
@skipUnlessDBFeature("can_return_rows_from_bulk_insert")
def test_set_pk_and_insert_single_item(self):
with self.assertNumQueries(1):
countries = Country.objects.bulk_create([self.data[0]])
self.assertEqual(len(countries), 1)
self.assertEqual(Country.objects.get(pk=countries[0].pk), countries[0])
@skipUnlessDBFeature("can_return_rows_from_bulk_insert")
def test_set_pk_and_query_efficiency(self):
with self.assertNumQueries(1):
countries = Country.objects.bulk_create(self.data)
self.assertEqual(len(countries), 4)
self.assertEqual(Country.objects.get(pk=countries[0].pk), countries[0])
self.assertEqual(Country.objects.get(pk=countries[1].pk), countries[1])
self.assertEqual(Country.objects.get(pk=countries[2].pk), countries[2])
self.assertEqual(Country.objects.get(pk=countries[3].pk), countries[3])
@skipUnlessDBFeature("can_return_rows_from_bulk_insert")
def test_set_state(self):
country_nl = Country(name="Netherlands", iso_two_letter="NL")
country_be = Country(name="Belgium", iso_two_letter="BE")
Country.objects.bulk_create([country_nl])
country_be.save()
        # Objects saved via bulk_create() and save() should have equal state.
self.assertEqual(country_nl._state.adding, country_be._state.adding)
self.assertEqual(country_nl._state.db, country_be._state.db)
def test_set_state_with_pk_specified(self):
state_ca = State(two_letter_code="CA")
state_ny = State(two_letter_code="NY")
State.objects.bulk_create([state_ca])
state_ny.save()
        # Objects saved via bulk_create() and save() should have equal state.
self.assertEqual(state_ca._state.adding, state_ny._state.adding)
self.assertEqual(state_ca._state.db, state_ny._state.db)
@skipIfDBFeature("supports_ignore_conflicts")
def test_ignore_conflicts_value_error(self):
message = "This database backend does not support ignoring conflicts."
with self.assertRaisesMessage(NotSupportedError, message):
TwoFields.objects.bulk_create(self.data, ignore_conflicts=True)
@skipUnlessDBFeature("supports_ignore_conflicts")
def test_ignore_conflicts_ignore(self):
data = [
TwoFields(f1=1, f2=1),
TwoFields(f1=2, f2=2),
TwoFields(f1=3, f2=3),
]
TwoFields.objects.bulk_create(data)
self.assertEqual(TwoFields.objects.count(), 3)
# With ignore_conflicts=True, conflicts are ignored.
conflicting_objects = [
TwoFields(f1=2, f2=2),
TwoFields(f1=3, f2=3),
]
TwoFields.objects.bulk_create([conflicting_objects[0]], ignore_conflicts=True)
TwoFields.objects.bulk_create(conflicting_objects, ignore_conflicts=True)
self.assertEqual(TwoFields.objects.count(), 3)
self.assertIsNone(conflicting_objects[0].pk)
self.assertIsNone(conflicting_objects[1].pk)
# New objects are created and conflicts are ignored.
new_object = TwoFields(f1=4, f2=4)
TwoFields.objects.bulk_create(
conflicting_objects + [new_object], ignore_conflicts=True
)
self.assertEqual(TwoFields.objects.count(), 4)
self.assertIsNone(new_object.pk)
        # Without ignore_conflicts=True, the conflicting rows raise an
        # IntegrityError.
with self.assertRaises(IntegrityError):
TwoFields.objects.bulk_create(conflicting_objects)
def test_nullable_fk_after_parent(self):
parent = NoFields()
child = NullableFields(auto_field=parent, integer_field=88)
parent.save()
NullableFields.objects.bulk_create([child])
child = NullableFields.objects.get(integer_field=88)
self.assertEqual(child.auto_field, parent)
@skipUnlessDBFeature("can_return_rows_from_bulk_insert")
def test_nullable_fk_after_parent_bulk_create(self):
parent = NoFields()
child = NullableFields(auto_field=parent, integer_field=88)
NoFields.objects.bulk_create([parent])
NullableFields.objects.bulk_create([child])
child = NullableFields.objects.get(integer_field=88)
self.assertEqual(child.auto_field, parent)
def test_unsaved_parent(self):
parent = NoFields()
msg = (
"bulk_create() prohibited to prevent data loss due to unsaved "
"related object 'auto_field'."
)
with self.assertRaisesMessage(ValueError, msg):
NullableFields.objects.bulk_create([NullableFields(auto_field=parent)])
def test_invalid_batch_size_exception(self):
msg = "Batch size must be a positive integer."
with self.assertRaisesMessage(ValueError, msg):
Country.objects.bulk_create([], batch_size=-1)
@skipIfDBFeature("supports_update_conflicts")
def test_update_conflicts_unsupported(self):
msg = "This database backend does not support updating conflicts."
with self.assertRaisesMessage(NotSupportedError, msg):
Country.objects.bulk_create(self.data, update_conflicts=True)
@skipUnlessDBFeature("supports_ignore_conflicts", "supports_update_conflicts")
def test_ignore_update_conflicts_exclusive(self):
msg = "ignore_conflicts and update_conflicts are mutually exclusive"
with self.assertRaisesMessage(ValueError, msg):
Country.objects.bulk_create(
self.data,
ignore_conflicts=True,
update_conflicts=True,
)
@skipUnlessDBFeature("supports_update_conflicts")
def test_update_conflicts_no_update_fields(self):
msg = (
"Fields that will be updated when a row insertion fails on "
"conflicts must be provided."
)
with self.assertRaisesMessage(ValueError, msg):
Country.objects.bulk_create(self.data, update_conflicts=True)
@skipUnlessDBFeature("supports_update_conflicts")
@skipIfDBFeature("supports_update_conflicts_with_target")
def test_update_conflicts_unique_field_unsupported(self):
msg = (
"This database backend does not support updating conflicts with "
"specifying unique fields that can trigger the upsert."
)
with self.assertRaisesMessage(NotSupportedError, msg):
TwoFields.objects.bulk_create(
[TwoFields(f1=1, f2=1), TwoFields(f1=2, f2=2)],
update_conflicts=True,
update_fields=["f2"],
unique_fields=["f1"],
)
@skipUnlessDBFeature("supports_update_conflicts")
def test_update_conflicts_nonexistent_update_fields(self):
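        # A conflict target is only passed on backends that support
        # specifying the unique fields that trigger the upsert.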
unique_fields = None
if connection.features.supports_update_conflicts_with_target:
unique_fields = ["f1"]
msg = "TwoFields has no field named 'nonexistent'"
with self.assertRaisesMessage(FieldDoesNotExist, msg):
TwoFields.objects.bulk_create(
[TwoFields(f1=1, f2=1), TwoFields(f1=2, f2=2)],
update_conflicts=True,
update_fields=["nonexistent"],
unique_fields=unique_fields,
)
@skipUnlessDBFeature(
"supports_update_conflicts",
"supports_update_conflicts_with_target",
)
def test_update_conflicts_unique_fields_required(self):
msg = "Unique fields that can trigger the upsert must be provided."
with self.assertRaisesMessage(ValueError, msg):
TwoFields.objects.bulk_create(
[TwoFields(f1=1, f2=1), TwoFields(f1=2, f2=2)],
update_conflicts=True,
update_fields=["f1"],
)
@skipUnlessDBFeature(
"supports_update_conflicts",
"supports_update_conflicts_with_target",
)
def test_update_conflicts_invalid_update_fields(self):
msg = "bulk_create() can only be used with concrete fields in update_fields."
# Reverse one-to-one relationship.
with self.assertRaisesMessage(ValueError, msg):
Country.objects.bulk_create(
self.data,
update_conflicts=True,
update_fields=["relatedmodel"],
unique_fields=["pk"],
)
# Many-to-many relationship.
with self.assertRaisesMessage(ValueError, msg):
RelatedModel.objects.bulk_create(
[RelatedModel(country=self.data[0])],
update_conflicts=True,
update_fields=["big_auto_fields"],
unique_fields=["country"],
)
@skipUnlessDBFeature(
"supports_update_conflicts",
"supports_update_conflicts_with_target",
)
def test_update_conflicts_pk_in_update_fields(self):
msg = "bulk_create() cannot be used with primary keys in update_fields."
with self.assertRaisesMessage(ValueError, msg):
BigAutoFieldModel.objects.bulk_create(
[BigAutoFieldModel()],
update_conflicts=True,
update_fields=["id"],
unique_fields=["id"],
)
@skipUnlessDBFeature(
"supports_update_conflicts",
"supports_update_conflicts_with_target",
)
def test_update_conflicts_invalid_unique_fields(self):
msg = "bulk_create() can only be used with concrete fields in unique_fields."
# Reverse one-to-one relationship.
with self.assertRaisesMessage(ValueError, msg):
Country.objects.bulk_create(
self.data,
update_conflicts=True,
update_fields=["name"],
unique_fields=["relatedmodel"],
)
# Many-to-many relationship.
with self.assertRaisesMessage(ValueError, msg):
RelatedModel.objects.bulk_create(
[RelatedModel(country=self.data[0])],
update_conflicts=True,
update_fields=["name"],
unique_fields=["big_auto_fields"],
)
def _test_update_conflicts_two_fields(self, unique_fields):
TwoFields.objects.bulk_create(
[
TwoFields(f1=1, f2=1, name="a"),
TwoFields(f1=2, f2=2, name="b"),
]
)
self.assertEqual(TwoFields.objects.count(), 2)
conflicting_objects = [
TwoFields(f1=1, f2=1, name="c"),
TwoFields(f1=2, f2=2, name="d"),
]
TwoFields.objects.bulk_create(
conflicting_objects,
update_conflicts=True,
unique_fields=unique_fields,
update_fields=["name"],
)
self.assertEqual(TwoFields.objects.count(), 2)
self.assertCountEqual(
TwoFields.objects.values("f1", "f2", "name"),
[
{"f1": 1, "f2": 1, "name": "c"},
{"f1": 2, "f2": 2, "name": "d"},
],
)
@skipUnlessDBFeature(
"supports_update_conflicts", "supports_update_conflicts_with_target"
)
def test_update_conflicts_two_fields_unique_fields_first(self):
self._test_update_conflicts_two_fields(["f1"])
@skipUnlessDBFeature(
"supports_update_conflicts", "supports_update_conflicts_with_target"
)
def test_update_conflicts_two_fields_unique_fields_second(self):
self._test_update_conflicts_two_fields(["f2"])
@skipUnlessDBFeature(
"supports_update_conflicts", "supports_update_conflicts_with_target"
)
def test_update_conflicts_unique_fields_pk(self):
TwoFields.objects.bulk_create(
[
TwoFields(f1=1, f2=1, name="a"),
TwoFields(f1=2, f2=2, name="b"),
]
)
self.assertEqual(TwoFields.objects.count(), 2)
obj1 = TwoFields.objects.get(f1=1)
obj2 = TwoFields.objects.get(f1=2)
conflicting_objects = [
TwoFields(pk=obj1.pk, f1=3, f2=3, name="c"),
TwoFields(pk=obj2.pk, f1=4, f2=4, name="d"),
]
TwoFields.objects.bulk_create(
conflicting_objects,
update_conflicts=True,
unique_fields=["pk"],
update_fields=["name"],
)
self.assertEqual(TwoFields.objects.count(), 2)
self.assertCountEqual(
TwoFields.objects.values("f1", "f2", "name"),
[
{"f1": 1, "f2": 1, "name": "c"},
{"f1": 2, "f2": 2, "name": "d"},
],
)
@skipUnlessDBFeature(
"supports_update_conflicts", "supports_update_conflicts_with_target"
)
def test_update_conflicts_two_fields_unique_fields_both(self):
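        # f1 and f2 are each individually unique; no single unique constraint
        # covers both columns, so the conflict target is rejected.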
with self.assertRaises((OperationalError, ProgrammingError)):
self._test_update_conflicts_two_fields(["f1", "f2"])
@skipUnlessDBFeature("supports_update_conflicts")
@skipIfDBFeature("supports_update_conflicts_with_target")
def test_update_conflicts_two_fields_no_unique_fields(self):
self._test_update_conflicts_two_fields([])
def _test_update_conflicts_unique_two_fields(self, unique_fields):
Country.objects.bulk_create(self.data)
self.assertEqual(Country.objects.count(), 4)
new_data = [
# Conflicting countries.
Country(
name="Germany",
iso_two_letter="DE",
description=("Germany is a country in Central Europe."),
),
Country(
name="Czech Republic",
iso_two_letter="CZ",
description=(
"The Czech Republic is a landlocked country in Central Europe."
),
),
# New countries.
Country(name="Australia", iso_two_letter="AU"),
Country(
name="Japan",
iso_two_letter="JP",
description=("Japan is an island country in East Asia."),
),
]
Country.objects.bulk_create(
new_data,
update_conflicts=True,
update_fields=["description"],
unique_fields=unique_fields,
)
self.assertEqual(Country.objects.count(), 6)
self.assertCountEqual(
Country.objects.values("iso_two_letter", "description"),
[
{"iso_two_letter": "US", "description": ""},
{"iso_two_letter": "NL", "description": ""},
{
"iso_two_letter": "DE",
"description": ("Germany is a country in Central Europe."),
},
{
"iso_two_letter": "CZ",
"description": (
"The Czech Republic is a landlocked country in Central Europe."
),
},
{"iso_two_letter": "AU", "description": ""},
{
"iso_two_letter": "JP",
"description": ("Japan is an island country in East Asia."),
},
],
)
@skipUnlessDBFeature(
"supports_update_conflicts", "supports_update_conflicts_with_target"
)
def test_update_conflicts_unique_two_fields_unique_fields_both(self):
self._test_update_conflicts_unique_two_fields(["iso_two_letter", "name"])
@skipUnlessDBFeature(
"supports_update_conflicts", "supports_update_conflicts_with_target"
)
def test_update_conflicts_unique_two_fields_unique_fields_one(self):
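        # The unique constraint spans both name and iso_two_letter, so
        # targeting iso_two_letter alone isn't a valid conflict target.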
with self.assertRaises((OperationalError, ProgrammingError)):
self._test_update_conflicts_unique_two_fields(["iso_two_letter"])
@skipUnlessDBFeature("supports_update_conflicts")
@skipIfDBFeature("supports_update_conflicts_with_target")
def test_update_conflicts_unique_two_fields_unique_no_unique_fields(self):
self._test_update_conflicts_unique_two_fields([])
def _test_update_conflicts(self, unique_fields):
UpsertConflict.objects.bulk_create(
[
UpsertConflict(number=1, rank=1, name="John"),
UpsertConflict(number=2, rank=2, name="Mary"),
UpsertConflict(number=3, rank=3, name="Hannah"),
]
)
self.assertEqual(UpsertConflict.objects.count(), 3)
conflicting_objects = [
UpsertConflict(number=1, rank=4, name="Steve"),
UpsertConflict(number=2, rank=2, name="Olivia"),
UpsertConflict(number=3, rank=1, name="Hannah"),
]
UpsertConflict.objects.bulk_create(
conflicting_objects,
update_conflicts=True,
update_fields=["name", "rank"],
unique_fields=unique_fields,
)
self.assertEqual(UpsertConflict.objects.count(), 3)
self.assertCountEqual(
UpsertConflict.objects.values("number", "rank", "name"),
[
{"number": 1, "rank": 4, "name": "Steve"},
{"number": 2, "rank": 2, "name": "Olivia"},
{"number": 3, "rank": 1, "name": "Hannah"},
],
)
UpsertConflict.objects.bulk_create(
conflicting_objects + [UpsertConflict(number=4, rank=4, name="Mark")],
update_conflicts=True,
update_fields=["name", "rank"],
unique_fields=unique_fields,
)
self.assertEqual(UpsertConflict.objects.count(), 4)
self.assertCountEqual(
UpsertConflict.objects.values("number", "rank", "name"),
[
{"number": 1, "rank": 4, "name": "Steve"},
{"number": 2, "rank": 2, "name": "Olivia"},
{"number": 3, "rank": 1, "name": "Hannah"},
{"number": 4, "rank": 4, "name": "Mark"},
],
)
@skipUnlessDBFeature(
"supports_update_conflicts", "supports_update_conflicts_with_target"
)
def test_update_conflicts_unique_fields(self):
self._test_update_conflicts(unique_fields=["number"])
@skipUnlessDBFeature("supports_update_conflicts")
@skipIfDBFeature("supports_update_conflicts_with_target")
def test_update_conflicts_no_unique_fields(self):
self._test_update_conflicts([])
@skipUnlessDBFeature(
"supports_update_conflicts", "supports_update_conflicts_with_target"
)
def test_update_conflicts_unique_fields_update_fields_db_column(self):
FieldsWithDbColumns.objects.bulk_create(
[
FieldsWithDbColumns(rank=1, name="a"),
FieldsWithDbColumns(rank=2, name="b"),
]
)
self.assertEqual(FieldsWithDbColumns.objects.count(), 2)
conflicting_objects = [
FieldsWithDbColumns(rank=1, name="c"),
FieldsWithDbColumns(rank=2, name="d"),
]
FieldsWithDbColumns.objects.bulk_create(
conflicting_objects,
update_conflicts=True,
unique_fields=["rank"],
update_fields=["name"],
)
self.assertEqual(FieldsWithDbColumns.objects.count(), 2)
self.assertCountEqual(
FieldsWithDbColumns.objects.values("rank", "name"),
[
{"rank": 1, "name": "c"},
{"rank": 2, "name": "d"},
],
)
|
b5e9f4271159f96daad950f4910bb6b182e4f925bb5931a15ede35e4d223dea6 | import unittest
import uuid
from django.core.checks import Error
from django.core.checks import Warning as DjangoWarning
from django.db import connection, models
from django.db.models.functions import Coalesce, Pi
from django.test import SimpleTestCase, TestCase, skipIfDBFeature, skipUnlessDBFeature
from django.test.utils import isolate_apps, override_settings
from django.utils.functional import lazy
from django.utils.timezone import now
from django.utils.translation import gettext_lazy as _
from django.utils.version import get_docs_version
@isolate_apps("invalid_models_tests")
class AutoFieldTests(SimpleTestCase):
def test_valid_case(self):
class Model(models.Model):
id = models.AutoField(primary_key=True)
field = Model._meta.get_field("id")
self.assertEqual(field.check(), [])
def test_primary_key(self):
# primary_key must be True. Refs #12467.
class Model(models.Model):
field = models.AutoField(primary_key=False)
            # Prevent Django from autocreating an `id` AutoField, which would
            # result in an error, because a model can't have more than one
            # AutoField.
another = models.IntegerField(primary_key=True)
field = Model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
"AutoFields must set primary_key=True.",
obj=field,
id="fields.E100",
),
],
)
def test_max_length_warning(self):
class Model(models.Model):
auto = models.AutoField(primary_key=True, max_length=2)
field = Model._meta.get_field("auto")
self.assertEqual(
field.check(),
[
DjangoWarning(
"'max_length' is ignored when used with %s."
% field.__class__.__name__,
hint="Remove 'max_length' from field",
obj=field,
id="fields.W122",
),
],
)
@isolate_apps("invalid_models_tests")
class BinaryFieldTests(SimpleTestCase):
def test_valid_default_value(self):
class Model(models.Model):
field1 = models.BinaryField(default=b"test")
field2 = models.BinaryField(default=None)
for field_name in ("field1", "field2"):
field = Model._meta.get_field(field_name)
self.assertEqual(field.check(), [])
def test_str_default_value(self):
class Model(models.Model):
field = models.BinaryField(default="test")
field = Model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
"BinaryField's default cannot be a string. Use bytes content "
"instead.",
obj=field,
id="fields.E170",
),
],
)
@isolate_apps("invalid_models_tests")
class CharFieldTests(TestCase):
def test_valid_field(self):
class Model(models.Model):
field = models.CharField(
max_length=255,
choices=[
("1", "item1"),
("2", "item2"),
],
db_index=True,
)
field = Model._meta.get_field("field")
self.assertEqual(field.check(), [])
def test_missing_max_length(self):
class Model(models.Model):
field = models.CharField()
field = Model._meta.get_field("field")
expected = (
[]
if connection.features.supports_unlimited_charfield
else [
Error(
"CharFields must define a 'max_length' attribute.",
obj=field,
id="fields.E120",
),
]
)
self.assertEqual(field.check(), expected)
def test_negative_max_length(self):
class Model(models.Model):
field = models.CharField(max_length=-1)
field = Model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
"'max_length' must be a positive integer.",
obj=field,
id="fields.E121",
),
],
)
def test_bad_max_length_value(self):
class Model(models.Model):
field = models.CharField(max_length="bad")
field = Model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
"'max_length' must be a positive integer.",
obj=field,
id="fields.E121",
),
],
)
def test_str_max_length_value(self):
class Model(models.Model):
field = models.CharField(max_length="20")
field = Model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
"'max_length' must be a positive integer.",
obj=field,
id="fields.E121",
),
],
)
def test_str_max_length_type(self):
class Model(models.Model):
field = models.CharField(max_length=True)
field = Model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
"'max_length' must be a positive integer.",
obj=field,
id="fields.E121",
),
],
)
def test_non_iterable_choices(self):
class Model(models.Model):
field = models.CharField(max_length=10, choices="bad")
field = Model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
"'choices' must be an iterable (e.g., a list or tuple).",
obj=field,
id="fields.E004",
),
],
)
def test_non_iterable_choices_two_letters(self):
"""Two letters isn't a valid choice pair."""
class Model(models.Model):
field = models.CharField(max_length=10, choices=["ab"])
field = Model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
"'choices' must be an iterable containing (actual value, "
"human readable name) tuples.",
obj=field,
id="fields.E005",
),
],
)
def test_iterable_of_iterable_choices(self):
class ThingItem:
def __init__(self, value, display):
self.value = value
self.display = display
def __iter__(self):
return iter((self.value, self.display))
def __len__(self):
return 2
class Things:
def __iter__(self):
return iter((ThingItem(1, 2), ThingItem(3, 4)))
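        # Each yielded item unpacks to a (value, label) pair, so the check
        # accepts these custom iterables in place of tuples.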
class ThingWithIterableChoices(models.Model):
thing = models.CharField(max_length=100, blank=True, choices=Things())
self.assertEqual(ThingWithIterableChoices._meta.get_field("thing").check(), [])
def test_choices_containing_non_pairs(self):
class Model(models.Model):
field = models.CharField(max_length=10, choices=[(1, 2, 3), (1, 2, 3)])
class Model2(models.Model):
field = models.IntegerField(choices=[0])
for model in (Model, Model2):
with self.subTest(model.__name__):
field = model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
"'choices' must be an iterable containing (actual "
"value, human readable name) tuples.",
obj=field,
id="fields.E005",
),
],
)
def test_choices_containing_lazy(self):
class Model(models.Model):
field = models.CharField(
max_length=10, choices=[["1", _("1")], ["2", _("2")]]
)
self.assertEqual(Model._meta.get_field("field").check(), [])
def test_lazy_choices(self):
class Model(models.Model):
field = models.CharField(
max_length=10, choices=lazy(lambda: [[1, "1"], [2, "2"]], tuple)()
)
self.assertEqual(Model._meta.get_field("field").check(), [])
def test_choices_named_group(self):
class Model(models.Model):
field = models.CharField(
max_length=10,
choices=[
["knights", [["L", "Lancelot"], ["G", "Galahad"]]],
["wizards", [["T", "Tim the Enchanter"]]],
["R", "Random character"],
],
)
self.assertEqual(Model._meta.get_field("field").check(), [])
def test_choices_named_group_non_pairs(self):
class Model(models.Model):
field = models.CharField(
max_length=10,
choices=[["knights", [["L", "Lancelot", "Du Lac"]]]],
)
field = Model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
"'choices' must be an iterable containing (actual value, "
"human readable name) tuples.",
obj=field,
id="fields.E005",
),
],
)
def test_choices_named_group_bad_structure(self):
class Model(models.Model):
field = models.CharField(
max_length=10,
choices=[
[
"knights",
[
["Noble", [["G", "Galahad"]]],
["Combative", [["L", "Lancelot"]]],
],
],
],
)
field = Model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
"'choices' must be an iterable containing (actual value, "
"human readable name) tuples.",
obj=field,
id="fields.E005",
),
],
)
def test_choices_named_group_lazy(self):
class Model(models.Model):
field = models.CharField(
max_length=10,
choices=[
[_("knights"), [["L", _("Lancelot")], ["G", _("Galahad")]]],
["R", _("Random character")],
],
)
self.assertEqual(Model._meta.get_field("field").check(), [])
def test_choices_in_max_length(self):
class Model(models.Model):
field = models.CharField(
max_length=2,
choices=[("ABC", "Value Too Long!"), ("OK", "Good")],
)
group = models.CharField(
max_length=2,
choices=[
("Nested", [("OK", "Good"), ("Longer", "Longer")]),
("Grouped", [("Bad", "Bad")]),
],
)
for name, choice_max_length in (("field", 3), ("group", 6)):
with self.subTest(name):
field = Model._meta.get_field(name)
self.assertEqual(
field.check(),
[
Error(
"'max_length' is too small to fit the longest value "
"in 'choices' (%d characters)." % choice_max_length,
obj=field,
id="fields.E009",
),
],
)
def test_bad_db_index_value(self):
class Model(models.Model):
field = models.CharField(max_length=10, db_index="bad")
field = Model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
"'db_index' must be None, True or False.",
obj=field,
id="fields.E006",
),
],
)
def test_bad_validators(self):
class Model(models.Model):
field = models.CharField(max_length=10, validators=[True])
field = Model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
"All 'validators' must be callable.",
hint=(
"validators[0] (True) isn't a function or instance of a "
"validator class."
),
obj=field,
id="fields.E008",
),
],
)
@unittest.skipUnless(connection.vendor == "mysql", "Test valid only for MySQL")
def test_too_long_char_field_under_mysql(self):
from django.db.backends.mysql.validation import DatabaseValidation
class Model(models.Model):
field = models.CharField(unique=True, max_length=256)
field = Model._meta.get_field("field")
validator = DatabaseValidation(connection=connection)
self.assertEqual(
validator.check_field(field),
[
DjangoWarning(
"%s may not allow unique CharFields to have a max_length > "
"255." % connection.display_name,
hint=(
"See: https://docs.djangoproject.com/en/%s/ref/databases/"
"#mysql-character-fields" % get_docs_version()
),
obj=field,
id="mysql.W003",
)
],
)
def test_db_collation(self):
class Model(models.Model):
field = models.CharField(max_length=100, db_collation="anything")
field = Model._meta.get_field("field")
error = Error(
"%s does not support a database collation on CharFields."
% connection.display_name,
id="fields.E190",
obj=field,
)
expected = (
[] if connection.features.supports_collation_on_charfield else [error]
)
self.assertEqual(field.check(databases=self.databases), expected)
def test_db_collation_required_db_features(self):
class Model(models.Model):
field = models.CharField(max_length=100, db_collation="anything")
class Meta:
required_db_features = {"supports_collation_on_charfield"}
field = Model._meta.get_field("field")
self.assertEqual(field.check(databases=self.databases), [])
@isolate_apps("invalid_models_tests")
class DateFieldTests(SimpleTestCase):
maxDiff = None
def test_auto_now_and_auto_now_add_raise_error(self):
class Model(models.Model):
field0 = models.DateTimeField(auto_now=True, auto_now_add=True, default=now)
field1 = models.DateTimeField(
auto_now=True, auto_now_add=False, default=now
)
field2 = models.DateTimeField(
auto_now=False, auto_now_add=True, default=now
)
field3 = models.DateTimeField(
auto_now=True, auto_now_add=True, default=None
)
expected = []
checks = []
for i in range(4):
field = Model._meta.get_field("field%d" % i)
expected.append(
Error(
"The options auto_now, auto_now_add, and default "
"are mutually exclusive. Only one of these options "
"may be present.",
obj=field,
id="fields.E160",
)
)
checks.extend(field.check())
self.assertEqual(checks, expected)
def test_fix_default_value(self):
class Model(models.Model):
field_dt = models.DateField(default=now())
field_d = models.DateField(default=now().date())
field_now = models.DateField(default=now)
field_dt = Model._meta.get_field("field_dt")
field_d = Model._meta.get_field("field_d")
field_now = Model._meta.get_field("field_now")
errors = field_dt.check()
errors.extend(field_d.check())
errors.extend(field_now.check()) # doesn't raise a warning
self.assertEqual(
errors,
[
DjangoWarning(
"Fixed default value provided.",
hint="It seems you set a fixed date / time / datetime "
"value as default for this field. This may not be "
"what you want. If you want to have the current date "
"as default, use `django.utils.timezone.now`",
obj=field_dt,
id="fields.W161",
),
DjangoWarning(
"Fixed default value provided.",
hint="It seems you set a fixed date / time / datetime "
"value as default for this field. This may not be "
"what you want. If you want to have the current date "
"as default, use `django.utils.timezone.now`",
obj=field_d,
id="fields.W161",
),
],
)
@override_settings(USE_TZ=True)
def test_fix_default_value_tz(self):
self.test_fix_default_value()
@isolate_apps("invalid_models_tests")
class DateTimeFieldTests(SimpleTestCase):
maxDiff = None
def test_fix_default_value(self):
class Model(models.Model):
field_dt = models.DateTimeField(default=now())
field_d = models.DateTimeField(default=now().date())
field_now = models.DateTimeField(default=now)
field_dt = Model._meta.get_field("field_dt")
field_d = Model._meta.get_field("field_d")
field_now = Model._meta.get_field("field_now")
errors = field_dt.check()
errors.extend(field_d.check())
errors.extend(field_now.check()) # doesn't raise a warning
self.assertEqual(
errors,
[
DjangoWarning(
"Fixed default value provided.",
hint="It seems you set a fixed date / time / datetime "
"value as default for this field. This may not be "
"what you want. If you want to have the current date "
"as default, use `django.utils.timezone.now`",
obj=field_dt,
id="fields.W161",
),
DjangoWarning(
"Fixed default value provided.",
hint="It seems you set a fixed date / time / datetime "
"value as default for this field. This may not be "
"what you want. If you want to have the current date "
"as default, use `django.utils.timezone.now`",
obj=field_d,
id="fields.W161",
),
],
)
@override_settings(USE_TZ=True)
def test_fix_default_value_tz(self):
self.test_fix_default_value()
@isolate_apps("invalid_models_tests")
class DecimalFieldTests(SimpleTestCase):
def test_required_attributes(self):
class Model(models.Model):
field = models.DecimalField()
field = Model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
"DecimalFields must define a 'decimal_places' attribute.",
obj=field,
id="fields.E130",
),
Error(
"DecimalFields must define a 'max_digits' attribute.",
obj=field,
id="fields.E132",
),
],
)
def test_negative_max_digits_and_decimal_places(self):
class Model(models.Model):
field = models.DecimalField(max_digits=-1, decimal_places=-1)
field = Model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
"'decimal_places' must be a non-negative integer.",
obj=field,
id="fields.E131",
),
Error(
"'max_digits' must be a positive integer.",
obj=field,
id="fields.E133",
),
],
)
def test_bad_values_of_max_digits_and_decimal_places(self):
class Model(models.Model):
field = models.DecimalField(max_digits="bad", decimal_places="bad")
field = Model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
"'decimal_places' must be a non-negative integer.",
obj=field,
id="fields.E131",
),
Error(
"'max_digits' must be a positive integer.",
obj=field,
id="fields.E133",
),
],
)
def test_decimal_places_greater_than_max_digits(self):
class Model(models.Model):
field = models.DecimalField(max_digits=9, decimal_places=10)
field = Model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
"'max_digits' must be greater or equal to 'decimal_places'.",
obj=field,
id="fields.E134",
),
],
)
def test_valid_field(self):
class Model(models.Model):
field = models.DecimalField(max_digits=10, decimal_places=10)
field = Model._meta.get_field("field")
self.assertEqual(field.check(), [])
@isolate_apps("invalid_models_tests")
class FileFieldTests(SimpleTestCase):
def test_valid_default_case(self):
class Model(models.Model):
field = models.FileField()
self.assertEqual(Model._meta.get_field("field").check(), [])
def test_valid_case(self):
class Model(models.Model):
field = models.FileField(upload_to="somewhere")
field = Model._meta.get_field("field")
self.assertEqual(field.check(), [])
def test_primary_key(self):
class Model(models.Model):
field = models.FileField(primary_key=False, upload_to="somewhere")
field = Model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
"'primary_key' is not a valid argument for a FileField.",
obj=field,
id="fields.E201",
)
],
)
def test_upload_to_starts_with_slash(self):
class Model(models.Model):
field = models.FileField(upload_to="/somewhere")
field = Model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
"FileField's 'upload_to' argument must be a relative path, not "
"an absolute path.",
obj=field,
id="fields.E202",
hint="Remove the leading slash.",
)
],
)
def test_upload_to_callable_not_checked(self):
def callable(instance, filename):
return "/" + filename
class Model(models.Model):
field = models.FileField(upload_to=callable)
field = Model._meta.get_field("field")
self.assertEqual(field.check(), [])
@isolate_apps("invalid_models_tests")
class FilePathFieldTests(SimpleTestCase):
def test_forbidden_files_and_folders(self):
class Model(models.Model):
field = models.FilePathField(allow_files=False, allow_folders=False)
field = Model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
"FilePathFields must have either 'allow_files' or 'allow_folders' "
"set to True.",
obj=field,
id="fields.E140",
),
],
)
@isolate_apps("invalid_models_tests")
class GenericIPAddressFieldTests(SimpleTestCase):
def test_non_nullable_blank(self):
class Model(models.Model):
field = models.GenericIPAddressField(null=False, blank=True)
field = Model._meta.get_field("field")
self.assertEqual(
field.check(),
[
Error(
(
"GenericIPAddressFields cannot have blank=True if null=False, "
"as blank values are stored as nulls."
),
obj=field,
id="fields.E150",
),
],
)
@isolate_apps("invalid_models_tests")
class ImageFieldTests(SimpleTestCase):
def test_pillow_installed(self):
try:
from PIL import Image # NOQA
except ImportError:
pillow_installed = False
else:
pillow_installed = True
class Model(models.Model):
field = models.ImageField(upload_to="somewhere")
field = Model._meta.get_field("field")
errors = field.check()
expected = (
[]
if pillow_installed
else [
Error(
"Cannot use ImageField because Pillow is not installed.",
hint=(
"Get Pillow at https://pypi.org/project/Pillow/ "
'or run command "python -m pip install Pillow".'
),
obj=field,
id="fields.E210",
),
]
)
self.assertEqual(errors, expected)
@isolate_apps("invalid_models_tests")
class IntegerFieldTests(SimpleTestCase):
def test_max_length_warning(self):
class Model(models.Model):
integer = models.IntegerField(max_length=2)
biginteger = models.BigIntegerField(max_length=2)
smallinteger = models.SmallIntegerField(max_length=2)
positiveinteger = models.PositiveIntegerField(max_length=2)
positivebiginteger = models.PositiveBigIntegerField(max_length=2)
positivesmallinteger = models.PositiveSmallIntegerField(max_length=2)
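        # max_length is accepted by Field.__init__ on every class above,
        # which is why the misuse surfaces as check warning fields.W122
        # rather than as a TypeError at definition time.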
for field in Model._meta.get_fields():
if field.auto_created:
continue
with self.subTest(name=field.name):
self.assertEqual(
field.check(),
[
DjangoWarning(
"'max_length' is ignored when used with %s."
% field.__class__.__name__,
hint="Remove 'max_length' from field",
obj=field,
id="fields.W122",
)
],
)
@isolate_apps("invalid_models_tests")
class TimeFieldTests(SimpleTestCase):
maxDiff = None
def test_fix_default_value(self):
class Model(models.Model):
field_dt = models.TimeField(default=now())
field_t = models.TimeField(default=now().time())
# Timezone-aware time object (when USE_TZ=True).
field_tz = models.TimeField(default=now().timetz())
field_now = models.DateField(default=now)
names = ["field_dt", "field_t", "field_tz", "field_now"]
fields = [Model._meta.get_field(name) for name in names]
errors = []
for field in fields:
errors.extend(field.check())
self.assertEqual(
errors,
[
DjangoWarning(
"Fixed default value provided.",
hint="It seems you set a fixed date / time / datetime "
"value as default for this field. This may not be "
"what you want. If you want to have the current date "
"as default, use `django.utils.timezone.now`",
obj=fields[0],
id="fields.W161",
),
DjangoWarning(
"Fixed default value provided.",
hint="It seems you set a fixed date / time / datetime "
"value as default for this field. This may not be "
"what you want. If you want to have the current date "
"as default, use `django.utils.timezone.now`",
obj=fields[1],
id="fields.W161",
),
DjangoWarning(
"Fixed default value provided.",
hint=(
"It seems you set a fixed date / time / datetime value as "
"default for this field. This may not be what you want. "
"If you want to have the current date as default, use "
"`django.utils.timezone.now`"
),
obj=fields[2],
id="fields.W161",
),
# field_now doesn't raise a warning.
],
)
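        # The three warnings above fire because now() was *called* at
        # class-definition time, freezing a single value; field_now passes
        # the callable itself, so it is re-evaluated on every save and is
        # not flagged.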
@override_settings(USE_TZ=True)
def test_fix_default_value_tz(self):
self.test_fix_default_value()
@isolate_apps("invalid_models_tests")
class TextFieldTests(TestCase):
@skipIfDBFeature("supports_index_on_text_field")
def test_max_length_warning(self):
class Model(models.Model):
value = models.TextField(db_index=True)
field = Model._meta.get_field("value")
field_type = field.db_type(connection)
self.assertEqual(
field.check(databases=self.databases),
[
DjangoWarning(
"%s does not support a database index on %s columns."
% (connection.display_name, field_type),
hint=(
"An index won't be created. Silence this warning if you "
"don't care about it."
),
obj=field,
id="fields.W162",
)
],
)
def test_db_collation(self):
class Model(models.Model):
field = models.TextField(db_collation="anything")
field = Model._meta.get_field("field")
error = Error(
"%s does not support a database collation on TextFields."
% connection.display_name,
id="fields.E190",
obj=field,
)
expected = (
[] if connection.features.supports_collation_on_textfield else [error]
)
self.assertEqual(field.check(databases=self.databases), expected)
def test_db_collation_required_db_features(self):
class Model(models.Model):
field = models.TextField(db_collation="anything")
class Meta:
required_db_features = {"supports_collation_on_textfield"}
field = Model._meta.get_field("field")
self.assertEqual(field.check(databases=self.databases), [])
@isolate_apps("invalid_models_tests")
class UUIDFieldTests(TestCase):
def test_choices_named_group(self):
class Model(models.Model):
field = models.UUIDField(
choices=[
[
"knights",
[
[
uuid.UUID("5c859437-d061-4847-b3f7-e6b78852f8c8"),
"Lancelot",
],
[
uuid.UUID("c7853ec1-2ea3-4359-b02d-b54e8f1bcee2"),
"Galahad",
],
],
],
[uuid.UUID("25d405be-4895-4d50-9b2e-d6695359ce47"), "Other"],
],
)
self.assertEqual(Model._meta.get_field("field").check(), [])
@isolate_apps("invalid_models_tests")
@skipUnlessDBFeature("supports_json_field")
class JSONFieldTests(TestCase):
def test_invalid_default(self):
class Model(models.Model):
field = models.JSONField(default={})
self.assertEqual(
Model._meta.get_field("field").check(),
[
DjangoWarning(
msg=(
"JSONField default should be a callable instead of an "
"instance so that it's not shared between all field "
"instances."
),
hint=("Use a callable instead, e.g., use `dict` instead of `{}`."),
obj=Model._meta.get_field("field"),
id="fields.E010",
)
],
)
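        # For illustration (a sketch, not part of the test data): a literal
        # default is one object shared by every instance, while a callable
        # default is evaluated per instance.
        #
        #     models.JSONField(default={})    # one shared dict object
        #     models.JSONField(default=dict)  # fresh dict per instance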
def test_valid_default(self):
class Model(models.Model):
field = models.JSONField(default=dict)
self.assertEqual(Model._meta.get_field("field").check(), [])
def test_valid_default_none(self):
class Model(models.Model):
field = models.JSONField(default=None)
self.assertEqual(Model._meta.get_field("field").check(), [])
def test_valid_callable_default(self):
def callable_default():
return {"it": "works"}
class Model(models.Model):
field = models.JSONField(default=callable_default)
self.assertEqual(Model._meta.get_field("field").check(), [])
@isolate_apps("invalid_models_tests")
class DbCommentTests(TestCase):
def test_db_comment(self):
class Model(models.Model):
field = models.IntegerField(db_comment="Column comment")
errors = Model._meta.get_field("field").check(databases=self.databases)
expected = (
[]
if connection.features.supports_comments
else [
DjangoWarning(
f"{connection.display_name} does not support comments on columns "
f"(db_comment).",
obj=Model._meta.get_field("field"),
id="fields.W163",
),
]
)
self.assertEqual(errors, expected)
def test_db_comment_required_db_features(self):
class Model(models.Model):
field = models.IntegerField(db_comment="Column comment")
class Meta:
required_db_features = {"supports_comments"}
errors = Model._meta.get_field("field").check(databases=self.databases)
self.assertEqual(errors, [])
@isolate_apps("invalid_models_tests")
class InvalidDBDefaultTests(TestCase):
def test_db_default(self):
class Model(models.Model):
field = models.FloatField(db_default=Pi())
field = Model._meta.get_field("field")
errors = field.check(databases=self.databases)
if connection.features.supports_expression_defaults:
expected_errors = []
else:
msg = (
f"{connection.display_name} does not support default database values "
"with expressions (db_default)."
)
expected_errors = [Error(msg=msg, obj=field, id="fields.E011")]
self.assertEqual(errors, expected_errors)
def test_db_default_literal(self):
class Model(models.Model):
field = models.IntegerField(db_default=1)
field = Model._meta.get_field("field")
errors = field.check(databases=self.databases)
self.assertEqual(errors, [])
def test_db_default_required_db_features(self):
class Model(models.Model):
field = models.FloatField(db_default=Pi())
class Meta:
required_db_features = {"supports_expression_defaults"}
field = Model._meta.get_field("field")
errors = field.check(databases=self.databases)
self.assertEqual(errors, [])
def test_db_default_expression_invalid(self):
expression = models.F("field_name")
class Model(models.Model):
field = models.FloatField(db_default=expression)
field = Model._meta.get_field("field")
errors = field.check(databases=self.databases)
if connection.features.supports_expression_defaults:
msg = f"{expression} cannot be used in db_default."
expected_errors = [Error(msg=msg, obj=field, id="fields.E012")]
else:
msg = (
f"{connection.display_name} does not support default database values "
"with expressions (db_default)."
)
expected_errors = [Error(msg=msg, obj=field, id="fields.E011")]
self.assertEqual(errors, expected_errors)
def test_db_default_expression_required_db_features(self):
expression = models.F("field_name")
class Model(models.Model):
field = models.FloatField(db_default=expression)
class Meta:
required_db_features = {"supports_expression_defaults"}
field = Model._meta.get_field("field")
errors = field.check(databases=self.databases)
if connection.features.supports_expression_defaults:
msg = f"{expression} cannot be used in db_default."
expected_errors = [Error(msg=msg, obj=field, id="fields.E012")]
else:
expected_errors = []
self.assertEqual(errors, expected_errors)
@skipUnlessDBFeature("supports_expression_defaults")
def test_db_default_combined_invalid(self):
expression = models.Value(4.5) + models.F("field_name")
class Model(models.Model):
field = models.FloatField(db_default=expression)
field = Model._meta.get_field("field")
errors = field.check(databases=self.databases)
msg = f"{expression} cannot be used in db_default."
expected_error = Error(msg=msg, obj=field, id="fields.E012")
self.assertEqual(errors, [expected_error])
@skipUnlessDBFeature("supports_expression_defaults")
def test_db_default_function_arguments_invalid(self):
expression = Coalesce(models.Value(4.5), models.F("field_name"))
class Model(models.Model):
field = models.FloatField(db_default=expression)
field = Model._meta.get_field("field")
errors = field.check(databases=self.databases)
msg = f"{expression} cannot be used in db_default."
expected_error = Error(msg=msg, obj=field, id="fields.E012")
self.assertEqual(errors, [expected_error])
import operator
import uuid
from unittest import mock
from django import forms
from django.core import serializers
from django.core.exceptions import ValidationError
from django.core.serializers.json import DjangoJSONEncoder
from django.db import (
DataError,
IntegrityError,
NotSupportedError,
OperationalError,
connection,
models,
)
from django.db.models import (
Count,
ExpressionWrapper,
F,
IntegerField,
JSONField,
OuterRef,
Q,
Subquery,
Transform,
Value,
)
from django.db.models.expressions import RawSQL
from django.db.models.fields.json import (
KT,
KeyTextTransform,
KeyTransform,
KeyTransformFactory,
KeyTransformTextLookupMixin,
)
from django.db.models.functions import Cast
from django.test import SimpleTestCase, TestCase, skipIfDBFeature, skipUnlessDBFeature
from django.test.utils import CaptureQueriesContext
from django.utils.deprecation import RemovedInDjango51Warning
from .models import CustomJSONDecoder, JSONModel, NullableJSONModel, RelatedJSONModel
@skipUnlessDBFeature("supports_json_field")
class JSONFieldTests(TestCase):
def test_invalid_value(self):
msg = "is not JSON serializable"
with self.assertRaisesMessage(TypeError, msg):
NullableJSONModel.objects.create(
value={
"uuid": uuid.UUID("d85e2076-b67c-4ee7-8c3a-2bf5a2cc2475"),
}
)
def test_custom_encoder_decoder(self):
value = {"uuid": uuid.UUID("{d85e2076-b67c-4ee7-8c3a-2bf5a2cc2475}")}
obj = NullableJSONModel(value_custom=value)
obj.clean_fields()
obj.save()
obj.refresh_from_db()
self.assertEqual(obj.value_custom, value)
def test_db_check_constraints(self):
value = "{@!invalid json value 123 $!@#"
with mock.patch.object(DjangoJSONEncoder, "encode", return_value=value):
with self.assertRaises((IntegrityError, DataError, OperationalError)):
NullableJSONModel.objects.create(value_custom=value)
class TestMethods(SimpleTestCase):
def test_deconstruct(self):
field = models.JSONField()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, "django.db.models.JSONField")
self.assertEqual(args, [])
self.assertEqual(kwargs, {})
def test_deconstruct_custom_encoder_decoder(self):
field = models.JSONField(encoder=DjangoJSONEncoder, decoder=CustomJSONDecoder)
name, path, args, kwargs = field.deconstruct()
self.assertEqual(kwargs["encoder"], DjangoJSONEncoder)
self.assertEqual(kwargs["decoder"], CustomJSONDecoder)
def test_get_transforms(self):
@models.JSONField.register_lookup
class MyTransform(Transform):
lookup_name = "my_transform"
field = models.JSONField()
transform = field.get_transform("my_transform")
self.assertIs(transform, MyTransform)
models.JSONField._unregister_lookup(MyTransform)
transform = field.get_transform("my_transform")
self.assertIsInstance(transform, KeyTransformFactory)
def test_key_transform_text_lookup_mixin_non_key_transform(self):
transform = Transform("test")
msg = (
"Transform should be an instance of KeyTransform in order to use "
"this lookup."
)
with self.assertRaisesMessage(TypeError, msg):
KeyTransformTextLookupMixin(transform)
def test_get_prep_value(self):
class JSONFieldGetPrepValue(models.JSONField):
def get_prep_value(self, value):
if value is True:
return {"value": True}
return value
def noop_adapt_json_value(value, encoder):
return value
field = JSONFieldGetPrepValue()
with mock.patch.object(
connection.ops, "adapt_json_value", noop_adapt_json_value
):
self.assertEqual(
field.get_db_prep_value(True, connection, prepared=False),
{"value": True},
)
self.assertIs(
field.get_db_prep_value(True, connection, prepared=True), True
)
self.assertEqual(field.get_db_prep_value(1, connection, prepared=False), 1)
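        # Patching connection.ops.adapt_json_value with a noop exposes
        # get_prep_value()'s result directly; normally the backend would
        # serialize the value (e.g. via json.dumps) at that point.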
class TestValidation(SimpleTestCase):
def test_invalid_encoder(self):
msg = "The encoder parameter must be a callable object."
with self.assertRaisesMessage(ValueError, msg):
models.JSONField(encoder=DjangoJSONEncoder())
def test_invalid_decoder(self):
msg = "The decoder parameter must be a callable object."
with self.assertRaisesMessage(ValueError, msg):
models.JSONField(decoder=CustomJSONDecoder())
def test_validation_error(self):
field = models.JSONField()
msg = "Value must be valid JSON."
value = uuid.UUID("{d85e2076-b67c-4ee7-8c3a-2bf5a2cc2475}")
with self.assertRaisesMessage(ValidationError, msg):
field.clean({"uuid": value}, None)
def test_custom_encoder(self):
field = models.JSONField(encoder=DjangoJSONEncoder)
value = uuid.UUID("{d85e2076-b67c-4ee7-8c3a-2bf5a2cc2475}")
field.clean({"uuid": value}, None)
class TestFormField(SimpleTestCase):
def test_formfield(self):
model_field = models.JSONField()
form_field = model_field.formfield()
self.assertIsInstance(form_field, forms.JSONField)
def test_formfield_custom_encoder_decoder(self):
model_field = models.JSONField(
encoder=DjangoJSONEncoder, decoder=CustomJSONDecoder
)
form_field = model_field.formfield()
self.assertIs(form_field.encoder, DjangoJSONEncoder)
self.assertIs(form_field.decoder, CustomJSONDecoder)
class TestSerialization(SimpleTestCase):
test_data = (
'[{"fields": {"value": %s}, "model": "model_fields.jsonmodel", "pk": null}]'
)
test_values = (
# (Python value, serialized value),
({"a": "b", "c": None}, '{"a": "b", "c": null}'),
("abc", '"abc"'),
('{"a": "a"}', '"{\\"a\\": \\"a\\"}"'),
)
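    # Note the third pair above: a Python str that merely looks like JSON
    # round-trips as a quoted JSON string, not as a parsed object.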
def test_dumping(self):
for value, serialized in self.test_values:
with self.subTest(value=value):
instance = JSONModel(value=value)
data = serializers.serialize("json", [instance])
self.assertJSONEqual(data, self.test_data % serialized)
def test_loading(self):
for value, serialized in self.test_values:
with self.subTest(value=value):
instance = list(
serializers.deserialize("json", self.test_data % serialized)
)[0].object
self.assertEqual(instance.value, value)
def test_xml_serialization(self):
test_xml_data = (
'<django-objects version="1.0">'
'<object model="model_fields.nullablejsonmodel">'
'<field name="value" type="JSONField">%s'
"</field></object></django-objects>"
)
for value, serialized in self.test_values:
with self.subTest(value=value):
instance = NullableJSONModel(value=value)
data = serializers.serialize("xml", [instance], fields=["value"])
self.assertXMLEqual(data, test_xml_data % serialized)
new_instance = list(serializers.deserialize("xml", data))[0].object
self.assertEqual(new_instance.value, instance.value)
@skipUnlessDBFeature("supports_json_field")
class TestSaveLoad(TestCase):
def test_null(self):
obj = NullableJSONModel(value=None)
obj.save()
obj.refresh_from_db()
self.assertIsNone(obj.value)
def test_ambiguous_str_value_deprecation(self):
msg = (
"Providing an encoded JSON string via Value() is deprecated. Use Value([], "
"output_field=JSONField()) instead."
)
with self.assertWarnsMessage(RemovedInDjango51Warning, msg):
obj = NullableJSONModel.objects.create(value=Value("[]"))
obj.refresh_from_db()
self.assertEqual(obj.value, [])
@skipUnlessDBFeature("supports_primitives_in_json_field")
def test_value_str_primitives_deprecation(self):
msg = (
"Providing an encoded JSON string via Value() is deprecated. Use "
"Value(None, output_field=JSONField()) instead."
)
with self.assertWarnsMessage(RemovedInDjango51Warning, msg):
obj = NullableJSONModel.objects.create(value=Value("null"))
obj.refresh_from_db()
self.assertIsNone(obj.value)
obj = NullableJSONModel.objects.create(value=Value("invalid-json"))
obj.refresh_from_db()
self.assertEqual(obj.value, "invalid-json")
@skipUnlessDBFeature("supports_primitives_in_json_field")
def test_json_null_different_from_sql_null(self):
json_null = NullableJSONModel.objects.create(value=Value(None, JSONField()))
NullableJSONModel.objects.update(value=Value(None, JSONField()))
json_null.refresh_from_db()
sql_null = NullableJSONModel.objects.create(value=None)
sql_null.refresh_from_db()
# 'null' is not equal to NULL in the database.
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value=Value(None, JSONField())),
[json_null],
)
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value=None),
[json_null],
)
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value__isnull=True),
[sql_null],
)
# 'null' is equal to NULL in Python (None).
self.assertEqual(json_null.value, sql_null.value)
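        # To summarize the queries above: value=None and
        # value=Value(None, JSONField()) both match the row storing the JSON
        # literal null, while value__isnull=True matches the SQL NULL row.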
@skipUnlessDBFeature("supports_primitives_in_json_field")
def test_primitives(self):
values = [
True,
1,
1.45,
"String",
"",
]
for value in values:
with self.subTest(value=value):
obj = JSONModel(value=value)
obj.save()
obj.refresh_from_db()
self.assertEqual(obj.value, value)
def test_dict(self):
values = [
{},
{"name": "John", "age": 20, "height": 180.3},
{"a": True, "b": {"b1": False, "b2": None}},
]
for value in values:
with self.subTest(value=value):
obj = JSONModel.objects.create(value=value)
obj.refresh_from_db()
self.assertEqual(obj.value, value)
def test_list(self):
values = [
[],
["John", 20, 180.3],
[True, [False, None]],
]
for value in values:
with self.subTest(value=value):
obj = JSONModel.objects.create(value=value)
obj.refresh_from_db()
self.assertEqual(obj.value, value)
def test_realistic_object(self):
value = {
"name": "John",
"age": 20,
"pets": [
{"name": "Kit", "type": "cat", "age": 2},
{"name": "Max", "type": "dog", "age": 1},
],
"courses": [
["A1", "A2", "A3"],
["B1", "B2"],
["C1"],
],
}
obj = JSONModel.objects.create(value=value)
obj.refresh_from_db()
self.assertEqual(obj.value, value)
@skipUnlessDBFeature("supports_json_field")
class TestQuerying(TestCase):
@classmethod
def setUpTestData(cls):
cls.primitives = [True, False, "yes", 7, 9.6]
values = [
None,
[],
{},
{"a": "b", "c": 14},
{
"a": "b",
"c": 14,
"d": ["e", {"f": "g"}],
"h": True,
"i": False,
"j": None,
"k": {"l": "m"},
"n": [None, True, False],
"o": '"quoted"',
"p": 4.2,
"r": {"s": True, "t": False},
},
[1, [2]],
{"k": True, "l": False, "foo": "bax"},
{
"foo": "bar",
"baz": {"a": "b", "c": "d"},
"bar": ["foo", "bar"],
"bax": {"foo": "bar"},
},
]
cls.objs = [NullableJSONModel.objects.create(value=value) for value in values]
if connection.features.supports_primitives_in_json_field:
cls.objs.extend(
[
NullableJSONModel.objects.create(value=value)
for value in cls.primitives
]
)
cls.raw_sql = "%s::jsonb" if connection.vendor == "postgresql" else "%s"
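        # Index map for the assertions below: objs[0] is SQL NULL, objs[1]
        # is [], objs[2] is {}, objs[3] is the small dict, objs[4] is the
        # large nested dict, objs[5] is [1, [2]], objs[6] has keys k/l/foo,
        # and objs[7] has keys foo/baz/bar/bax; primitive rows, if the
        # backend supports them, follow.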
def test_exact(self):
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value__exact={}),
[self.objs[2]],
)
def test_exact_complex(self):
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value__exact={"a": "b", "c": 14}),
[self.objs[3]],
)
def test_icontains(self):
self.assertCountEqual(
NullableJSONModel.objects.filter(value__icontains="BaX"),
self.objs[6:8],
)
def test_isnull(self):
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value__isnull=True),
[self.objs[0]],
)
def test_ordering_by_transform(self):
mariadb = connection.vendor == "mysql" and connection.mysql_is_mariadb
values = [
{"ord": 93, "name": "bar"},
{"ord": 22.1, "name": "foo"},
{"ord": -1, "name": "baz"},
{"ord": 21.931902, "name": "spam"},
{"ord": -100291029, "name": "eggs"},
]
for field_name in ["value", "value_custom"]:
with self.subTest(field=field_name):
objs = [
NullableJSONModel.objects.create(**{field_name: value})
for value in values
]
query = NullableJSONModel.objects.filter(
**{"%s__name__isnull" % field_name: False},
).order_by("%s__ord" % field_name)
expected = [objs[4], objs[2], objs[3], objs[1], objs[0]]
if mariadb or connection.vendor == "oracle":
# MariaDB and Oracle return JSON values as strings.
expected = [objs[2], objs[4], objs[3], objs[1], objs[0]]
self.assertSequenceEqual(query, expected)
def test_ordering_grouping_by_key_transform(self):
base_qs = NullableJSONModel.objects.filter(value__d__0__isnull=False)
for qs in (
base_qs.order_by("value__d__0"),
base_qs.annotate(
key=KeyTransform("0", KeyTransform("d", "value"))
).order_by("key"),
):
self.assertSequenceEqual(qs, [self.objs[4]])
none_val = "" if connection.features.interprets_empty_strings_as_nulls else None
qs = NullableJSONModel.objects.filter(value__isnull=False)
self.assertQuerySetEqual(
qs.filter(value__isnull=False)
.annotate(key=KT("value__d__1__f"))
.values("key")
.annotate(count=Count("key"))
.order_by("count"),
[(none_val, 0), ("g", 1)],
operator.itemgetter("key", "count"),
)
def test_ordering_grouping_by_count(self):
qs = (
NullableJSONModel.objects.filter(
value__isnull=False,
)
.values("value__d__0")
.annotate(count=Count("value__d__0"))
.order_by("count")
)
self.assertQuerySetEqual(qs, [0, 1], operator.itemgetter("count"))
def test_order_grouping_custom_decoder(self):
NullableJSONModel.objects.create(value_custom={"a": "b"})
qs = NullableJSONModel.objects.filter(value_custom__isnull=False)
self.assertSequenceEqual(
qs.values(
"value_custom__a",
)
.annotate(
count=Count("id"),
)
.order_by("value_custom__a"),
[{"value_custom__a": "b", "count": 1}],
)
def test_key_transform_raw_expression(self):
expr = RawSQL(self.raw_sql, ['{"x": "bar"}'])
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value__foo=KeyTransform("x", expr)),
[self.objs[7]],
)
def test_nested_key_transform_raw_expression(self):
expr = RawSQL(self.raw_sql, ['{"x": {"y": "bar"}}'])
self.assertSequenceEqual(
NullableJSONModel.objects.filter(
value__foo=KeyTransform("y", KeyTransform("x", expr))
),
[self.objs[7]],
)
def test_key_transform_expression(self):
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value__d__0__isnull=False)
.annotate(
key=KeyTransform("d", "value"),
chain=KeyTransform("0", "key"),
expr=KeyTransform("0", Cast("key", models.JSONField())),
)
.filter(chain=F("expr")),
[self.objs[4]],
)
def test_key_transform_annotation_expression(self):
obj = NullableJSONModel.objects.create(value={"d": ["e", "e"]})
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value__d__0__isnull=False)
.annotate(
key=F("value__d"),
chain=F("key__0"),
expr=Cast("key", models.JSONField()),
)
.filter(chain=F("expr__1")),
[obj],
)
def test_nested_key_transform_expression(self):
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value__d__0__isnull=False)
.annotate(
key=KeyTransform("d", "value"),
chain=KeyTransform("f", KeyTransform("1", "key")),
expr=KeyTransform(
"f", KeyTransform("1", Cast("key", models.JSONField()))
),
)
.filter(chain=F("expr")),
[self.objs[4]],
)
def test_nested_key_transform_annotation_expression(self):
obj = NullableJSONModel.objects.create(
value={"d": ["e", {"f": "g"}, {"f": "g"}]},
)
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value__d__0__isnull=False)
.annotate(
key=F("value__d"),
chain=F("key__1__f"),
expr=Cast("key", models.JSONField()),
)
.filter(chain=F("expr__2__f")),
[obj],
)
def test_nested_key_transform_on_subquery(self):
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value__d__0__isnull=False)
.annotate(
subquery_value=Subquery(
NullableJSONModel.objects.filter(pk=OuterRef("pk")).values("value")
),
key=KeyTransform("d", "subquery_value"),
chain=KeyTransform("f", KeyTransform("1", "key")),
)
.filter(chain="g"),
[self.objs[4]],
)
def test_key_text_transform_char_lookup(self):
qs = NullableJSONModel.objects.annotate(
char_value=KeyTextTransform("foo", "value"),
).filter(char_value__startswith="bar")
self.assertSequenceEqual(qs, [self.objs[7]])
qs = NullableJSONModel.objects.annotate(
char_value=KeyTextTransform(1, KeyTextTransform("bar", "value")),
).filter(char_value__startswith="bar")
self.assertSequenceEqual(qs, [self.objs[7]])
def test_expression_wrapper_key_transform(self):
self.assertCountEqual(
NullableJSONModel.objects.annotate(
expr=ExpressionWrapper(
KeyTransform("c", "value"),
output_field=IntegerField(),
),
).filter(expr__isnull=False),
self.objs[3:5],
)
def test_has_key(self):
self.assertCountEqual(
NullableJSONModel.objects.filter(value__has_key="a"),
[self.objs[3], self.objs[4]],
)
def test_has_key_null_value(self):
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value__has_key="j"),
[self.objs[4]],
)
def test_has_key_deep(self):
tests = [
(Q(value__baz__has_key="a"), self.objs[7]),
(
Q(value__has_key=KeyTransform("a", KeyTransform("baz", "value"))),
self.objs[7],
),
(Q(value__has_key=F("value__baz__a")), self.objs[7]),
(
Q(value__has_key=KeyTransform("c", KeyTransform("baz", "value"))),
self.objs[7],
),
(Q(value__has_key=F("value__baz__c")), self.objs[7]),
(Q(value__d__1__has_key="f"), self.objs[4]),
(
Q(
value__has_key=KeyTransform(
"f", KeyTransform("1", KeyTransform("d", "value"))
)
),
self.objs[4],
),
(Q(value__has_key=F("value__d__1__f")), self.objs[4]),
]
for condition, expected in tests:
with self.subTest(condition=condition):
self.assertSequenceEqual(
NullableJSONModel.objects.filter(condition),
[expected],
)
def test_has_key_list(self):
obj = NullableJSONModel.objects.create(value=[{"a": 1}, {"b": "x"}])
tests = [
Q(value__1__has_key="b"),
Q(value__has_key=KeyTransform("b", KeyTransform(1, "value"))),
Q(value__has_key=KeyTransform("b", KeyTransform("1", "value"))),
Q(value__has_key=F("value__1__b")),
]
for condition in tests:
with self.subTest(condition=condition):
self.assertSequenceEqual(
NullableJSONModel.objects.filter(condition),
[obj],
)
def test_has_keys(self):
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value__has_keys=["a", "c", "h"]),
[self.objs[4]],
)
def test_has_any_keys(self):
self.assertCountEqual(
NullableJSONModel.objects.filter(value__has_any_keys=["c", "l"]),
[self.objs[3], self.objs[4], self.objs[6]],
)
def test_has_key_number(self):
obj = NullableJSONModel.objects.create(
value={
"123": "value",
"nested": {"456": "bar", "lorem": "abc", "999": True},
"array": [{"789": "baz", "777": "def", "ipsum": 200}],
"000": "val",
}
)
tests = [
Q(value__has_key="123"),
Q(value__nested__has_key="456"),
Q(value__array__0__has_key="789"),
Q(value__has_keys=["nested", "123", "array", "000"]),
Q(value__nested__has_keys=["lorem", "999", "456"]),
Q(value__array__0__has_keys=["789", "ipsum", "777"]),
Q(value__has_any_keys=["000", "nonexistent"]),
Q(value__nested__has_any_keys=["999", "nonexistent"]),
Q(value__array__0__has_any_keys=["777", "nonexistent"]),
]
for condition in tests:
with self.subTest(condition=condition):
self.assertSequenceEqual(
NullableJSONModel.objects.filter(condition),
[obj],
)
@skipUnlessDBFeature("supports_json_field_contains")
def test_contains(self):
tests = [
({}, self.objs[2:5] + self.objs[6:8]),
({"baz": {"a": "b", "c": "d"}}, [self.objs[7]]),
({"baz": {"a": "b"}}, [self.objs[7]]),
({"baz": {"c": "d"}}, [self.objs[7]]),
({"k": True, "l": False}, [self.objs[6]]),
({"d": ["e", {"f": "g"}]}, [self.objs[4]]),
({"d": ["e"]}, [self.objs[4]]),
({"d": [{"f": "g"}]}, [self.objs[4]]),
([1, [2]], [self.objs[5]]),
([1], [self.objs[5]]),
([[2]], [self.objs[5]]),
({"n": [None, True, False]}, [self.objs[4]]),
({"j": None}, [self.objs[4]]),
]
for value, expected in tests:
with self.subTest(value=value):
qs = NullableJSONModel.objects.filter(value__contains=value)
self.assertCountEqual(qs, expected)
@skipIfDBFeature("supports_json_field_contains")
def test_contains_unsupported(self):
msg = "contains lookup is not supported on this database backend."
with self.assertRaisesMessage(NotSupportedError, msg):
NullableJSONModel.objects.filter(
value__contains={"baz": {"a": "b", "c": "d"}},
).get()
@skipUnlessDBFeature(
"supports_primitives_in_json_field",
"supports_json_field_contains",
)
def test_contains_primitives(self):
for value in self.primitives:
with self.subTest(value=value):
qs = NullableJSONModel.objects.filter(value__contains=value)
self.assertIs(qs.exists(), True)
@skipUnlessDBFeature("supports_json_field_contains")
def test_contained_by(self):
qs = NullableJSONModel.objects.filter(
value__contained_by={"a": "b", "c": 14, "h": True}
)
self.assertCountEqual(qs, self.objs[2:4])
@skipIfDBFeature("supports_json_field_contains")
def test_contained_by_unsupported(self):
msg = "contained_by lookup is not supported on this database backend."
with self.assertRaisesMessage(NotSupportedError, msg):
NullableJSONModel.objects.filter(value__contained_by={"a": "b"}).get()
def test_deep_values(self):
qs = NullableJSONModel.objects.values_list("value__k__l").order_by("pk")
expected_objs = [(None,)] * len(self.objs)
expected_objs[4] = ("m",)
self.assertSequenceEqual(qs, expected_objs)
@skipUnlessDBFeature("can_distinct_on_fields")
def test_deep_distinct(self):
query = NullableJSONModel.objects.distinct("value__k__l").values_list(
"value__k__l"
)
self.assertSequenceEqual(query, [("m",), (None,)])
def test_isnull_key(self):
# key__isnull=False works the same as has_key='key'.
self.assertCountEqual(
NullableJSONModel.objects.filter(value__a__isnull=True),
self.objs[:3] + self.objs[5:],
)
self.assertCountEqual(
NullableJSONModel.objects.filter(value__j__isnull=True),
self.objs[:4] + self.objs[5:],
)
self.assertCountEqual(
NullableJSONModel.objects.filter(value__a__isnull=False),
[self.objs[3], self.objs[4]],
)
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value__j__isnull=False),
[self.objs[4]],
)
def test_isnull_key_or_none(self):
obj = NullableJSONModel.objects.create(value={"a": None})
self.assertCountEqual(
NullableJSONModel.objects.filter(
Q(value__a__isnull=True) | Q(value__a=None)
),
self.objs[:3] + self.objs[5:] + [obj],
)
def test_none_key(self):
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value__j=None),
[self.objs[4]],
)
def test_none_key_exclude(self):
obj = NullableJSONModel.objects.create(value={"j": 1})
if connection.vendor == "oracle":
# Oracle supports filtering JSON objects with NULL keys, but the
# current implementation doesn't support it.
self.assertSequenceEqual(
NullableJSONModel.objects.exclude(value__j=None),
self.objs[1:4] + self.objs[5:] + [obj],
)
else:
self.assertSequenceEqual(
NullableJSONModel.objects.exclude(value__j=None), [obj]
)
def test_shallow_list_lookup(self):
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value__0=1),
[self.objs[5]],
)
def test_shallow_obj_lookup(self):
self.assertCountEqual(
NullableJSONModel.objects.filter(value__a="b"),
[self.objs[3], self.objs[4]],
)
def test_obj_subquery_lookup(self):
qs = NullableJSONModel.objects.annotate(
field=Subquery(
NullableJSONModel.objects.filter(pk=OuterRef("pk")).values("value")
),
).filter(field__a="b")
self.assertCountEqual(qs, [self.objs[3], self.objs[4]])
def test_deep_lookup_objs(self):
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value__k__l="m"),
[self.objs[4]],
)
def test_shallow_lookup_obj_target(self):
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value__k={"l": "m"}),
[self.objs[4]],
)
def test_deep_lookup_array(self):
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value__1__0=2),
[self.objs[5]],
)
def test_deep_lookup_mixed(self):
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value__d__1__f="g"),
[self.objs[4]],
)
def test_deep_lookup_transform(self):
self.assertCountEqual(
NullableJSONModel.objects.filter(value__c__gt=2),
[self.objs[3], self.objs[4]],
)
self.assertCountEqual(
NullableJSONModel.objects.filter(value__c__gt=2.33),
[self.objs[3], self.objs[4]],
)
self.assertIs(NullableJSONModel.objects.filter(value__c__lt=5).exists(), False)
def test_lookup_exclude(self):
tests = [
(Q(value__a="b"), [self.objs[0]]),
(Q(value__foo="bax"), [self.objs[0], self.objs[7]]),
]
for condition, expected in tests:
self.assertCountEqual(
NullableJSONModel.objects.exclude(condition),
expected,
)
self.assertCountEqual(
NullableJSONModel.objects.filter(~condition),
expected,
)
def test_lookup_exclude_nonexistent_key(self):
# Values without the key are ignored.
condition = Q(value__foo="bax")
objs_with_value = [self.objs[6]]
objs_with_different_value = [self.objs[0], self.objs[7]]
self.assertCountEqual(
NullableJSONModel.objects.exclude(condition),
objs_with_different_value,
)
self.assertSequenceEqual(
NullableJSONModel.objects.exclude(~condition),
objs_with_value,
)
self.assertCountEqual(
NullableJSONModel.objects.filter(condition | ~condition),
objs_with_value + objs_with_different_value,
)
self.assertCountEqual(
NullableJSONModel.objects.exclude(condition & ~condition),
objs_with_value + objs_with_different_value,
)
# Add the __isnull lookup to get an exhaustive set.
self.assertCountEqual(
NullableJSONModel.objects.exclude(condition & Q(value__foo__isnull=False)),
self.objs[0:6] + self.objs[7:],
)
self.assertSequenceEqual(
NullableJSONModel.objects.filter(condition & Q(value__foo__isnull=False)),
objs_with_value,
)
def test_usage_in_subquery(self):
self.assertCountEqual(
NullableJSONModel.objects.filter(
id__in=NullableJSONModel.objects.filter(value__c=14),
),
self.objs[3:5],
)
@skipUnlessDBFeature("supports_json_field_contains")
def test_array_key_contains(self):
tests = [
([], [self.objs[7]]),
("bar", [self.objs[7]]),
(["bar"], [self.objs[7]]),
("ar", []),
]
for value, expected in tests:
with self.subTest(value=value):
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value__bar__contains=value),
expected,
)
def test_key_iexact(self):
self.assertIs(
NullableJSONModel.objects.filter(value__foo__iexact="BaR").exists(), True
)
self.assertIs(
NullableJSONModel.objects.filter(value__foo__iexact='"BaR"').exists(), False
)
def test_key_in(self):
tests = [
("value__c__in", [14], self.objs[3:5]),
("value__c__in", [14, 15], self.objs[3:5]),
("value__0__in", [1], [self.objs[5]]),
("value__0__in", [1, 3], [self.objs[5]]),
("value__foo__in", ["bar"], [self.objs[7]]),
(
"value__foo__in",
[KeyTransform("foo", KeyTransform("bax", "value"))],
[self.objs[7]],
),
("value__foo__in", [F("value__bax__foo")], [self.objs[7]]),
(
"value__foo__in",
[KeyTransform("foo", KeyTransform("bax", "value")), "baz"],
[self.objs[7]],
),
("value__foo__in", [F("value__bax__foo"), "baz"], [self.objs[7]]),
("value__foo__in", ["bar", "baz"], [self.objs[7]]),
("value__bar__in", [["foo", "bar"]], [self.objs[7]]),
("value__bar__in", [["foo", "bar"], ["a"]], [self.objs[7]]),
("value__bax__in", [{"foo": "bar"}, {"a": "b"}], [self.objs[7]]),
("value__h__in", [True, "foo"], [self.objs[4]]),
("value__i__in", [False, "foo"], [self.objs[4]]),
]
for lookup, value, expected in tests:
with self.subTest(lookup=lookup, value=value):
self.assertCountEqual(
NullableJSONModel.objects.filter(**{lookup: value}),
expected,
)
def test_key_values(self):
qs = NullableJSONModel.objects.filter(value__h=True)
tests = [
("value__a", "b"),
("value__c", 14),
("value__d", ["e", {"f": "g"}]),
("value__h", True),
("value__i", False),
("value__j", None),
("value__k", {"l": "m"}),
("value__n", [None, True, False]),
("value__p", 4.2),
("value__r", {"s": True, "t": False}),
]
for lookup, expected in tests:
with self.subTest(lookup=lookup):
self.assertEqual(qs.values_list(lookup, flat=True).get(), expected)
def test_key_values_boolean(self):
qs = NullableJSONModel.objects.filter(value__h=True, value__i=False)
tests = [
("value__h", True),
("value__i", False),
]
for lookup, expected in tests:
with self.subTest(lookup=lookup):
self.assertIs(qs.values_list(lookup, flat=True).get(), expected)
@skipUnlessDBFeature("supports_json_field_contains")
def test_key_contains(self):
self.assertIs(
NullableJSONModel.objects.filter(value__foo__contains="ar").exists(), False
)
self.assertIs(
NullableJSONModel.objects.filter(value__foo__contains="bar").exists(), True
)
def test_key_icontains(self):
self.assertIs(
NullableJSONModel.objects.filter(value__foo__icontains="Ar").exists(), True
)
def test_key_startswith(self):
self.assertIs(
NullableJSONModel.objects.filter(value__foo__startswith="b").exists(), True
)
def test_key_istartswith(self):
self.assertIs(
NullableJSONModel.objects.filter(value__foo__istartswith="B").exists(), True
)
def test_key_endswith(self):
self.assertIs(
NullableJSONModel.objects.filter(value__foo__endswith="r").exists(), True
)
def test_key_iendswith(self):
self.assertIs(
NullableJSONModel.objects.filter(value__foo__iendswith="R").exists(), True
)
def test_key_regex(self):
self.assertIs(
NullableJSONModel.objects.filter(value__foo__regex=r"^bar$").exists(), True
)
def test_key_iregex(self):
self.assertIs(
NullableJSONModel.objects.filter(value__foo__iregex=r"^bAr$").exists(), True
)
def test_key_quoted_string(self):
self.assertEqual(
NullableJSONModel.objects.filter(value__o='"quoted"').get(),
self.objs[4],
)
@skipUnlessDBFeature("has_json_operators")
def test_key_sql_injection(self):
with CaptureQueriesContext(connection) as queries:
self.assertIs(
NullableJSONModel.objects.filter(
**{
"""value__test' = '"a"') OR 1 = 1 OR ('d""": "x",
}
).exists(),
False,
)
self.assertIn(
"""."value" -> 'test'' = ''"a"'') OR 1 = 1 OR (''d') = '"x"'""",
queries[0]["sql"],
)
@skipIfDBFeature("has_json_operators")
def test_key_sql_injection_escape(self):
query = str(
JSONModel.objects.filter(
**{
"""value__test") = '"a"' OR 1 = 1 OR ("d""": "x",
}
).query
)
self.assertIn('"test\\"', query)
self.assertIn('\\"d', query)
def test_key_escape(self):
obj = NullableJSONModel.objects.create(value={"%total": 10})
self.assertEqual(
NullableJSONModel.objects.filter(**{"value__%total": 10}).get(), obj
)
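        # The "%total" key exercises escaping of printf-style placeholders in
        # the generated JSON path, so the database driver does not mistake
        # the key for a parameter marker.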
def test_none_key_and_exact_lookup(self):
self.assertSequenceEqual(
NullableJSONModel.objects.filter(value__a="b", value__j=None),
[self.objs[4]],
)
def test_lookups_with_key_transform(self):
tests = (
("value__baz__has_key", "c"),
("value__baz__has_keys", ["a", "c"]),
("value__baz__has_any_keys", ["a", "x"]),
("value__has_key", KeyTextTransform("foo", "value")),
)
for lookup, value in tests:
with self.subTest(lookup=lookup):
self.assertIs(
NullableJSONModel.objects.filter(
**{lookup: value},
).exists(),
True,
)
@skipUnlessDBFeature("supports_json_field_contains")
def test_contains_contained_by_with_key_transform(self):
tests = [
("value__d__contains", "e"),
("value__d__contains", [{"f": "g"}]),
("value__contains", KeyTransform("bax", "value")),
("value__contains", F("value__bax")),
("value__baz__contains", {"a": "b"}),
("value__baz__contained_by", {"a": "b", "c": "d", "e": "f"}),
(
"value__contained_by",
KeyTransform(
"x",
RawSQL(
self.raw_sql,
['{"x": {"a": "b", "c": 1, "d": "e"}}'],
),
),
),
]
# For databases where {'f': 'g'} (without surrounding []) matches
# [{'f': 'g'}].
if not connection.features.json_key_contains_list_matching_requires_list:
tests.append(("value__d__contains", {"f": "g"}))
for lookup, value in tests:
with self.subTest(lookup=lookup, value=value):
self.assertIs(
NullableJSONModel.objects.filter(
**{lookup: value},
).exists(),
True,
)
def test_join_key_transform_annotation_expression(self):
related_obj = RelatedJSONModel.objects.create(
value={"d": ["f", "e"]},
json_model=self.objs[4],
)
RelatedJSONModel.objects.create(
value={"d": ["e", "f"]},
json_model=self.objs[4],
)
self.assertSequenceEqual(
RelatedJSONModel.objects.annotate(
key=F("value__d"),
related_key=F("json_model__value__d"),
chain=F("key__1"),
expr=Cast("key", models.JSONField()),
).filter(chain=F("related_key__0")),
[related_obj],
)
def test_key_text_transform_from_lookup(self):
qs = NullableJSONModel.objects.annotate(b=KT("value__bax__foo")).filter(
b__contains="ar",
)
self.assertSequenceEqual(qs, [self.objs[7]])
qs = NullableJSONModel.objects.annotate(c=KT("value__o")).filter(
c__contains="uot",
)
self.assertSequenceEqual(qs, [self.objs[4]])
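        # KT() expands a lookup path into nested key text transforms, so the
        # annotations above extract value["bax"]["foo"] and value["o"] as
        # text, which is what enables the string __contains lookups.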
def test_key_text_transform_from_lookup_invalid(self):
msg = "Lookup must contain key or index transforms."
with self.assertRaisesMessage(ValueError, msg):
KT("value")
with self.assertRaisesMessage(ValueError, msg):
KT("")
import os
import shutil
from unittest import skipIf
from django.core.exceptions import ImproperlyConfigured
from django.core.files import File
from django.core.files.images import ImageFile
from django.db.models import signals
from django.test import TestCase
from django.test.testcases import SerializeMixin
try:
from .models import Image
except ImproperlyConfigured:
Image = None
if Image:
from .models import (
Person,
PersonDimensionsFirst,
PersonTwoImages,
PersonWithHeight,
PersonWithHeightAndWidth,
TestImageFieldFile,
temp_storage_dir,
)
else:
# Pillow not available, create dummy classes (tests will be skipped anyway)
class Person:
pass
PersonWithHeight = PersonWithHeightAndWidth = PersonDimensionsFirst = Person
PersonTwoImages = Person
class ImageFieldTestMixin(SerializeMixin):
"""
Mixin class to provide common functionality to ImageField test classes.
"""
lockfile = __file__
# Person model to use for tests.
PersonModel = PersonWithHeightAndWidth
# File class to use for file instances.
File = ImageFile
def setUp(self):
"""
Creates a pristine temp directory (or deletes and recreates if it
already exists) that the model uses as its storage directory.
Sets up two ImageFile instances for use in tests.
"""
if os.path.exists(temp_storage_dir):
shutil.rmtree(temp_storage_dir)
os.mkdir(temp_storage_dir)
file_path1 = os.path.join(os.path.dirname(__file__), "4x8.png")
self.file1 = self.File(open(file_path1, "rb"), name="4x8.png")
file_path2 = os.path.join(os.path.dirname(__file__), "8x4.png")
self.file2 = self.File(open(file_path2, "rb"), name="8x4.png")
def tearDown(self):
"""
Removes temp directory and all its contents.
"""
self.file1.close()
self.file2.close()
shutil.rmtree(temp_storage_dir)
def check_dimensions(self, instance, width, height, field_name="mugshot"):
"""
Asserts that the given width and height values match both the
field's height and width attributes and the height and width fields
(if defined) the image field is caching to.
Note, this method will check for dimension fields named by adding
"_width" or "_height" to the name of the ImageField. So, the
models used in these tests must have their fields named
accordingly.
By default, we check the field named "mugshot", but this can be
specified by passing the field_name parameter.
"""
field = getattr(instance, field_name)
# Check height/width attributes of field.
if width is None and height is None:
with self.assertRaises(ValueError):
getattr(field, "width")
with self.assertRaises(ValueError):
getattr(field, "height")
else:
self.assertEqual(field.width, width)
self.assertEqual(field.height, height)
# Check height/width fields of model, if defined.
width_field_name = field_name + "_width"
if hasattr(instance, width_field_name):
self.assertEqual(getattr(instance, width_field_name), width)
height_field_name = field_name + "_height"
if hasattr(instance, height_field_name):
self.assertEqual(getattr(instance, height_field_name), height)
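    # For orientation, the models exercised by this mixin (defined in
    # .models) pair an ImageField with optional cache columns, roughly along
    # these lines (a sketch, not the exact definitions):
    #
    #     class PersonWithHeightAndWidth(models.Model):
    #         name = models.CharField(max_length=50)
    #         mugshot = TestImageField(
    #             upload_to="tests",
    #             height_field="mugshot_height",
    #             width_field="mugshot_width",
    #         )
    #         mugshot_height = models.PositiveSmallIntegerField()
    #         mugshot_width = models.PositiveSmallIntegerField()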
@skipIf(Image is None, "Pillow is required to test ImageField")
class ImageFieldTests(ImageFieldTestMixin, TestCase):
"""
Tests for ImageField that don't need to be run with each of the
different test model classes.
"""
def test_equal_notequal_hash(self):
"""
Bug #9786: Ensure '==' and '!=' work correctly.
Bug #9508: make sure hash() works as expected (equal items must
hash to the same value).
"""
# Create two Persons with different mugshots.
p1 = self.PersonModel(name="Joe")
p1.mugshot.save("mug", self.file1)
p2 = self.PersonModel(name="Bob")
p2.mugshot.save("mug", self.file2)
self.assertIs(p1.mugshot == p2.mugshot, False)
self.assertIs(p1.mugshot != p2.mugshot, True)
# Test again with an instance fetched from the db.
p1_db = self.PersonModel.objects.get(name="Joe")
self.assertIs(p1_db.mugshot == p2.mugshot, False)
self.assertIs(p1_db.mugshot != p2.mugshot, True)
# Instance from db should match the local instance.
self.assertIs(p1_db.mugshot == p1.mugshot, True)
self.assertEqual(hash(p1_db.mugshot), hash(p1.mugshot))
self.assertIs(p1_db.mugshot != p1.mugshot, False)
def test_instantiate_missing(self):
"""
        If the underlying file is unavailable, the object can still be
        instantiated without error.
"""
p = self.PersonModel(name="Joan")
p.mugshot.save("shot", self.file1)
p = self.PersonModel.objects.get(name="Joan")
path = p.mugshot.path
shutil.move(path, path + ".moved")
self.PersonModel.objects.get(name="Joan")
def test_delete_when_missing(self):
"""
Bug #8175: correctly delete an object where the file no longer
exists on the file system.
"""
p = self.PersonModel(name="Fred")
p.mugshot.save("shot", self.file1)
os.remove(p.mugshot.path)
p.delete()
def test_size_method(self):
"""
Bug #8534: FileField.size should not leave the file open.
"""
p = self.PersonModel(name="Joan")
p.mugshot.save("shot", self.file1)
# Get a "clean" model instance
p = self.PersonModel.objects.get(name="Joan")
# It won't have an opened file.
self.assertIs(p.mugshot.closed, True)
# After asking for the size, the file should still be closed.
p.mugshot.size
self.assertIs(p.mugshot.closed, True)
def test_pickle(self):
"""
        ImageField can be pickled and unpickled, and the image of the
        unpickled version is the same as the original.
"""
import pickle
p = Person(name="Joe")
p.mugshot.save("mug", self.file1)
dump = pickle.dumps(p)
loaded_p = pickle.loads(dump)
self.assertEqual(p.mugshot, loaded_p.mugshot)
self.assertEqual(p.mugshot.url, loaded_p.mugshot.url)
self.assertEqual(p.mugshot.storage, loaded_p.mugshot.storage)
self.assertEqual(p.mugshot.instance, loaded_p.mugshot.instance)
self.assertEqual(p.mugshot.field, loaded_p.mugshot.field)
mugshot_dump = pickle.dumps(p.mugshot)
loaded_mugshot = pickle.loads(mugshot_dump)
self.assertEqual(p.mugshot, loaded_mugshot)
self.assertEqual(p.mugshot.url, loaded_mugshot.url)
self.assertEqual(p.mugshot.storage, loaded_mugshot.storage)
self.assertEqual(p.mugshot.instance, loaded_mugshot.instance)
self.assertEqual(p.mugshot.field, loaded_mugshot.field)
def test_defer(self):
self.PersonModel.objects.create(name="Joe", mugshot=self.file1)
with self.assertNumQueries(1):
qs = list(self.PersonModel.objects.defer("mugshot"))
with self.assertNumQueries(0):
self.assertEqual(qs[0].name, "Joe")
@skipIf(Image is None, "Pillow is required to test ImageField")
class ImageFieldTwoDimensionsTests(ImageFieldTestMixin, TestCase):
"""
Tests behavior of an ImageField and its dimensions fields.
"""
def test_constructor(self):
"""
Tests assigning an image field through the model's constructor.
"""
p = self.PersonModel(name="Joe", mugshot=self.file1)
self.check_dimensions(p, 4, 8)
p.save()
self.check_dimensions(p, 4, 8)
def test_image_after_constructor(self):
"""
Tests behavior when image is not passed in constructor.
"""
p = self.PersonModel(name="Joe")
# TestImageField value will default to being an instance of its
# attr_class, a TestImageFieldFile, with name == None, which will
# cause it to evaluate as False.
self.assertIsInstance(p.mugshot, TestImageFieldFile)
self.assertFalse(p.mugshot)
        # Test setting the image on a freshly created model instance.
p = self.PersonModel(name="Joe")
p.mugshot = self.file1
self.check_dimensions(p, 4, 8)
def test_create(self):
"""
Tests assigning an image in Manager.create().
"""
p = self.PersonModel.objects.create(name="Joe", mugshot=self.file1)
self.check_dimensions(p, 4, 8)
def test_default_value(self):
"""
The default value for an ImageField is an instance of
the field's attr_class (TestImageFieldFile in this case) with no
name (name set to None).
"""
p = self.PersonModel()
self.assertIsInstance(p.mugshot, TestImageFieldFile)
self.assertFalse(p.mugshot)
def test_assignment_to_None(self):
"""
Assigning ImageField to None clears dimensions.
"""
p = self.PersonModel(name="Joe", mugshot=self.file1)
self.check_dimensions(p, 4, 8)
# If image assigned to None, dimension fields should be cleared.
p.mugshot = None
self.check_dimensions(p, None, None)
p.mugshot = self.file2
self.check_dimensions(p, 8, 4)
def test_field_save_and_delete_methods(self):
"""
Tests assignment using the field's save method and deletion using
the field's delete method.
"""
p = self.PersonModel(name="Joe")
p.mugshot.save("mug", self.file1)
self.check_dimensions(p, 4, 8)
# A new file should update dimensions.
p.mugshot.save("mug", self.file2)
self.check_dimensions(p, 8, 4)
# Field and dimensions should be cleared after a delete.
p.mugshot.delete(save=False)
self.assertIsNone(p.mugshot.name)
self.check_dimensions(p, None, None)
def test_dimensions(self):
"""
Dimensions are updated correctly in various situations.
"""
p = self.PersonModel(name="Joe")
# Dimensions should get set if file is saved.
p.mugshot.save("mug", self.file1)
self.check_dimensions(p, 4, 8)
# Test dimensions after fetching from database.
p = self.PersonModel.objects.get(name="Joe")
# Bug 11084: Dimensions should not get recalculated if file is
# coming from the database. We test this by checking if the file
# was opened.
self.assertIs(p.mugshot.was_opened, False)
self.check_dimensions(p, 4, 8)
        # After checking dimensions on the image field, the file will have
        # been opened.
self.assertIs(p.mugshot.was_opened, True)
# Dimensions should now be cached, and if we reset was_opened and
# check dimensions again, the file should not have opened.
p.mugshot.was_opened = False
self.check_dimensions(p, 4, 8)
self.assertIs(p.mugshot.was_opened, False)
# If we assign a new image to the instance, the dimensions should
# update.
p.mugshot = self.file2
self.check_dimensions(p, 8, 4)
        # Dimensions were recalculated, so the file should have been opened.
self.assertIs(p.mugshot.was_opened, True)
@skipIf(Image is None, "Pillow is required to test ImageField")
class ImageFieldNoDimensionsTests(ImageFieldTwoDimensionsTests):
"""
Tests behavior of an ImageField with no dimension fields.
"""
PersonModel = Person
def test_post_init_not_connected(self):
person_model_id = id(self.PersonModel)
self.assertNotIn(
person_model_id,
[sender_id for (_, sender_id), *_ in signals.post_init.receivers],
)
@skipIf(Image is None, "Pillow is required to test ImageField")
class ImageFieldOneDimensionTests(ImageFieldTwoDimensionsTests):
"""
Tests behavior of an ImageField with one dimensions field.
"""
PersonModel = PersonWithHeight
@skipIf(Image is None, "Pillow is required to test ImageField")
class ImageFieldDimensionsFirstTests(ImageFieldTwoDimensionsTests):
"""
Tests behavior of an ImageField where the dimensions fields are
defined before the ImageField.
"""
PersonModel = PersonDimensionsFirst
@skipIf(Image is None, "Pillow is required to test ImageField")
class ImageFieldUsingFileTests(ImageFieldTwoDimensionsTests):
"""
Tests behavior of an ImageField when assigning it a File instance
rather than an ImageFile instance.
"""
PersonModel = PersonDimensionsFirst
File = File
@skipIf(Image is None, "Pillow is required to test ImageField")
class TwoImageFieldTests(ImageFieldTestMixin, TestCase):
"""
Tests a model with two ImageFields.
"""
PersonModel = PersonTwoImages
def test_constructor(self):
p = self.PersonModel(mugshot=self.file1, headshot=self.file2)
self.check_dimensions(p, 4, 8, "mugshot")
self.check_dimensions(p, 8, 4, "headshot")
p.save()
self.check_dimensions(p, 4, 8, "mugshot")
self.check_dimensions(p, 8, 4, "headshot")
def test_create(self):
p = self.PersonModel.objects.create(mugshot=self.file1, headshot=self.file2)
self.check_dimensions(p, 4, 8)
self.check_dimensions(p, 8, 4, "headshot")
def test_assignment(self):
p = self.PersonModel()
self.check_dimensions(p, None, None, "mugshot")
self.check_dimensions(p, None, None, "headshot")
p.mugshot = self.file1
self.check_dimensions(p, 4, 8, "mugshot")
self.check_dimensions(p, None, None, "headshot")
p.headshot = self.file2
self.check_dimensions(p, 4, 8, "mugshot")
self.check_dimensions(p, 8, 4, "headshot")
# Clear the ImageFields one at a time.
p.mugshot = None
self.check_dimensions(p, None, None, "mugshot")
self.check_dimensions(p, 8, 4, "headshot")
p.headshot = None
self.check_dimensions(p, None, None, "mugshot")
self.check_dimensions(p, None, None, "headshot")
def test_field_save_and_delete_methods(self):
p = self.PersonModel(name="Joe")
p.mugshot.save("mug", self.file1)
self.check_dimensions(p, 4, 8, "mugshot")
self.check_dimensions(p, None, None, "headshot")
p.headshot.save("head", self.file2)
self.check_dimensions(p, 4, 8, "mugshot")
self.check_dimensions(p, 8, 4, "headshot")
# We can use save=True when deleting the image field with null=True
# dimension fields and the other field has an image.
p.headshot.delete(save=True)
self.check_dimensions(p, 4, 8, "mugshot")
self.check_dimensions(p, None, None, "headshot")
p.mugshot.delete(save=False)
self.check_dimensions(p, None, None, "mugshot")
self.check_dimensions(p, None, None, "headshot")
def test_dimensions(self):
"""
Dimensions are updated correctly in various situations.
"""
p = self.PersonModel(name="Joe")
# Dimensions should get set for the saved file.
p.mugshot.save("mug", self.file1)
p.headshot.save("head", self.file2)
self.check_dimensions(p, 4, 8, "mugshot")
self.check_dimensions(p, 8, 4, "headshot")
# Test dimensions after fetching from database.
p = self.PersonModel.objects.get(name="Joe")
# Bug 11084: Dimensions should not get recalculated if file is
# coming from the database. We test this by checking if the file
# was opened.
self.assertIs(p.mugshot.was_opened, False)
self.assertIs(p.headshot.was_opened, False)
self.check_dimensions(p, 4, 8, "mugshot")
self.check_dimensions(p, 8, 4, "headshot")
# After checking dimensions on the image fields, the files will
# have been opened.
self.assertIs(p.mugshot.was_opened, True)
self.assertIs(p.headshot.was_opened, True)
# Dimensions should now be cached, and if we reset was_opened and
# check dimensions again, the file should not have opened.
p.mugshot.was_opened = False
p.headshot.was_opened = False
self.check_dimensions(p, 4, 8, "mugshot")
self.check_dimensions(p, 8, 4, "headshot")
self.assertIs(p.mugshot.was_opened, False)
self.assertIs(p.headshot.was_opened, False)
# If we assign a new image to the instance, the dimensions should
# update.
p.mugshot = self.file2
p.headshot = self.file1
self.check_dimensions(p, 8, 4, "mugshot")
self.check_dimensions(p, 4, 8, "headshot")
        # Dimensions were recalculated, so the files should have been opened.
self.assertIs(p.mugshot.was_opened, True)
self.assertIs(p.headshot.was_opened, True)
from operator import attrgetter
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.db.models import Count
from django.test import TestCase
from .models import (
Base,
Child,
Derived,
Feature,
Item,
ItemAndSimpleItem,
Leaf,
Location,
OneToOneItem,
Proxy,
ProxyRelated,
RelatedItem,
Request,
ResolveThis,
SimpleItem,
SpecialFeature,
)
class DeferRegressionTest(TestCase):
def test_basic(self):
# Deferred fields should really be deferred and not accidentally use
        # the field's default value just because they aren't passed to __init__.
Item.objects.create(name="first", value=42)
obj = Item.objects.only("name", "other_value").get(name="first")
# Accessing "name" doesn't trigger a new database query. Accessing
# "value" or "text" should.
with self.assertNumQueries(0):
self.assertEqual(obj.name, "first")
self.assertEqual(obj.other_value, 0)
with self.assertNumQueries(1):
self.assertEqual(obj.value, 42)
with self.assertNumQueries(1):
self.assertEqual(obj.text, "xyzzy")
with self.assertNumQueries(0):
self.assertEqual(obj.text, "xyzzy")
# Regression test for #10695. Make sure different instances don't
# inadvertently share data in the deferred descriptor objects.
i = Item.objects.create(name="no I'm first", value=37)
items = Item.objects.only("value").order_by("-value")
self.assertEqual(items[0].name, "first")
self.assertEqual(items[1].name, "no I'm first")
RelatedItem.objects.create(item=i)
r = RelatedItem.objects.defer("item").get()
self.assertEqual(r.item_id, i.id)
self.assertEqual(r.item, i)
# Some further checks for select_related() and inherited model
# behavior (regression for #10710).
c1 = Child.objects.create(name="c1", value=42)
c2 = Child.objects.create(name="c2", value=37)
Leaf.objects.create(name="l1", child=c1, second_child=c2)
obj = Leaf.objects.only("name", "child").select_related()[0]
self.assertEqual(obj.child.name, "c1")
self.assertQuerySetEqual(
Leaf.objects.select_related().only("child__name", "second_child__name"),
[
"l1",
],
attrgetter("name"),
)
# Models instances with deferred fields should still return the same
# content types as their non-deferred versions (bug #10738).
ctype = ContentType.objects.get_for_model
c1 = ctype(Item.objects.all()[0])
c2 = ctype(Item.objects.defer("name")[0])
c3 = ctype(Item.objects.only("name")[0])
self.assertTrue(c1 is c2 is c3)
# Regression for #10733 - only() can be used on a model with two
# foreign keys.
results = Leaf.objects.only("name", "child", "second_child").select_related()
self.assertEqual(results[0].child.name, "c1")
self.assertEqual(results[0].second_child.name, "c2")
results = Leaf.objects.only(
"name", "child", "second_child", "child__name", "second_child__name"
).select_related()
self.assertEqual(results[0].child.name, "c1")
self.assertEqual(results[0].second_child.name, "c2")
# Regression for #16409 - make sure defer() and only() work with annotate()
self.assertIsInstance(
list(SimpleItem.objects.annotate(Count("feature")).defer("name")), list
)
self.assertIsInstance(
list(SimpleItem.objects.annotate(Count("feature")).only("name")), list
)
def test_ticket_16409(self):
# Regression for #16409 - make sure defer() and only() work with annotate()
self.assertIsInstance(
list(SimpleItem.objects.annotate(Count("feature")).defer("name")), list
)
self.assertIsInstance(
list(SimpleItem.objects.annotate(Count("feature")).only("name")), list
)
def test_ticket_23270(self):
d = Derived.objects.create(text="foo", other_text="bar")
with self.assertNumQueries(1):
obj = Base.objects.select_related("derived").defer("text")[0]
self.assertIsInstance(obj.derived, Derived)
self.assertEqual("bar", obj.derived.other_text)
self.assertNotIn("text", obj.__dict__)
self.assertEqual(d.pk, obj.derived.base_ptr_id)
def test_only_and_defer_usage_on_proxy_models(self):
# Regression for #15790 - only() broken for proxy models
proxy = Proxy.objects.create(name="proxy", value=42)
msg = "QuerySet.only() returns bogus results with proxy models"
dp = Proxy.objects.only("other_value").get(pk=proxy.pk)
self.assertEqual(dp.name, proxy.name, msg=msg)
self.assertEqual(dp.value, proxy.value, msg=msg)
# also test things with .defer()
msg = "QuerySet.defer() returns bogus results with proxy models"
dp = Proxy.objects.defer("name", "text", "value").get(pk=proxy.pk)
self.assertEqual(dp.name, proxy.name, msg=msg)
self.assertEqual(dp.value, proxy.value, msg=msg)
def test_resolve_columns(self):
ResolveThis.objects.create(num=5.0, name="Foobar")
qs = ResolveThis.objects.defer("num")
self.assertEqual(1, qs.count())
self.assertEqual("Foobar", qs[0].name)
def test_reverse_one_to_one_relations(self):
# Refs #14694. Test reverse relations which are known to be unique
# (the reverse side has a OneToOneField or unique FK) - the o2o case.
item = Item.objects.create(name="first", value=42)
o2o = OneToOneItem.objects.create(item=item, name="second")
self.assertEqual(len(Item.objects.defer("one_to_one_item__name")), 1)
self.assertEqual(len(Item.objects.select_related("one_to_one_item")), 1)
self.assertEqual(
len(
Item.objects.select_related("one_to_one_item").defer(
"one_to_one_item__name"
)
),
1,
)
self.assertEqual(
len(Item.objects.select_related("one_to_one_item").defer("value")), 1
)
# Make sure that `only()` doesn't break when we pass in a unique relation,
# rather than a field on the relation.
self.assertEqual(len(Item.objects.only("one_to_one_item")), 1)
with self.assertNumQueries(1):
i = Item.objects.select_related("one_to_one_item")[0]
self.assertEqual(i.one_to_one_item.pk, o2o.pk)
self.assertEqual(i.one_to_one_item.name, "second")
with self.assertNumQueries(1):
i = Item.objects.select_related("one_to_one_item").defer(
"value", "one_to_one_item__name"
)[0]
self.assertEqual(i.one_to_one_item.pk, o2o.pk)
self.assertEqual(i.name, "first")
with self.assertNumQueries(1):
self.assertEqual(i.one_to_one_item.name, "second")
with self.assertNumQueries(1):
self.assertEqual(i.value, 42)
def test_defer_with_select_related(self):
item1 = Item.objects.create(name="first", value=47)
item2 = Item.objects.create(name="second", value=42)
simple = SimpleItem.objects.create(name="simple", value="23")
ItemAndSimpleItem.objects.create(item=item1, simple=simple)
obj = ItemAndSimpleItem.objects.defer("item").select_related("simple").get()
self.assertEqual(obj.item, item1)
self.assertEqual(obj.item_id, item1.id)
obj.item = item2
obj.save()
obj = ItemAndSimpleItem.objects.defer("item").select_related("simple").get()
self.assertEqual(obj.item, item2)
self.assertEqual(obj.item_id, item2.id)
def test_proxy_model_defer_with_select_related(self):
# Regression for #22050
item = Item.objects.create(name="first", value=47)
RelatedItem.objects.create(item=item)
# Defer fields with only()
obj = ProxyRelated.objects.select_related().only("item__name")[0]
with self.assertNumQueries(0):
self.assertEqual(obj.item.name, "first")
with self.assertNumQueries(1):
self.assertEqual(obj.item.value, 47)
def test_only_with_select_related(self):
# Test for #17485.
item = SimpleItem.objects.create(name="first", value=47)
feature = Feature.objects.create(item=item)
SpecialFeature.objects.create(feature=feature)
qs = Feature.objects.only("item__name").select_related("item")
self.assertEqual(len(qs), 1)
qs = SpecialFeature.objects.only("feature__item__name").select_related(
"feature__item"
)
self.assertEqual(len(qs), 1)
def test_defer_annotate_select_related(self):
location = Location.objects.create()
Request.objects.create(location=location)
self.assertIsInstance(
list(
Request.objects.annotate(Count("items"))
.select_related("profile", "location")
.only("profile", "location")
),
list,
)
self.assertIsInstance(
list(
Request.objects.annotate(Count("items"))
.select_related("profile", "location")
.only("profile__profile1", "location__location1")
),
list,
)
self.assertIsInstance(
list(
Request.objects.annotate(Count("items"))
.select_related("profile", "location")
.defer("request1", "request2", "request3", "request4")
),
list,
)
def test_common_model_different_mask(self):
child = Child.objects.create(name="Child", value=42)
second_child = Child.objects.create(name="Second", value=64)
Leaf.objects.create(child=child, second_child=second_child)
with self.assertNumQueries(1):
leaf = (
Leaf.objects.select_related("child", "second_child")
.defer("child__name", "second_child__value")
.get()
)
self.assertEqual(leaf.child, child)
self.assertEqual(leaf.second_child, second_child)
self.assertEqual(leaf.child.get_deferred_fields(), {"name"})
self.assertEqual(leaf.second_child.get_deferred_fields(), {"value"})
with self.assertNumQueries(0):
self.assertEqual(leaf.child.value, 42)
self.assertEqual(leaf.second_child.name, "Second")
with self.assertNumQueries(1):
self.assertEqual(leaf.child.name, "Child")
with self.assertNumQueries(1):
self.assertEqual(leaf.second_child.value, 64)
def test_defer_many_to_many_ignored(self):
location = Location.objects.create()
request = Request.objects.create(location=location)
with self.assertNumQueries(1):
self.assertEqual(Request.objects.defer("items").get(), request)
class DeferDeletionSignalsTests(TestCase):
senders = [Item, Proxy]
@classmethod
def setUpTestData(cls):
cls.item_pk = Item.objects.create(value=1).pk
def setUp(self):
self.pre_delete_senders = []
self.post_delete_senders = []
for sender in self.senders:
models.signals.pre_delete.connect(self.pre_delete_receiver, sender)
models.signals.post_delete.connect(self.post_delete_receiver, sender)
def tearDown(self):
for sender in self.senders:
models.signals.pre_delete.disconnect(self.pre_delete_receiver, sender)
models.signals.post_delete.disconnect(self.post_delete_receiver, sender)
def pre_delete_receiver(self, sender, **kwargs):
self.pre_delete_senders.append(sender)
def post_delete_receiver(self, sender, **kwargs):
self.post_delete_senders.append(sender)
def test_delete_deferred_model(self):
Item.objects.only("value").get(pk=self.item_pk).delete()
self.assertEqual(self.pre_delete_senders, [Item])
self.assertEqual(self.post_delete_senders, [Item])
def test_delete_deferred_proxy_model(self):
Proxy.objects.only("value").get(pk=self.item_pk).delete()
self.assertEqual(self.pre_delete_senders, [Proxy])
self.assertEqual(self.post_delete_senders, [Proxy])
|
91293beaa924977fa88d4e915b72477b80dd5ce0ce3c21682809108fc280825f | import collections.abc
from datetime import datetime
from math import ceil
from operator import attrgetter
from unittest import skipUnless
from django.core.exceptions import FieldError
from django.db import connection, models
from django.db.models import (
BooleanField,
Case,
Exists,
ExpressionWrapper,
F,
Max,
OuterRef,
Q,
Subquery,
Value,
When,
)
from django.db.models.functions import Abs, Cast, Length, Substr
from django.db.models.lookups import (
Exact,
GreaterThan,
GreaterThanOrEqual,
IsNull,
LessThan,
LessThanOrEqual,
)
from django.test import TestCase, skipUnlessDBFeature
from django.test.utils import isolate_apps, register_lookup
from .models import (
Article,
Author,
Freebie,
Game,
IsNullWithNoneAsRHS,
Player,
Product,
Season,
Stock,
Tag,
)
class LookupTests(TestCase):
@classmethod
def setUpTestData(cls):
# Create a few Authors.
cls.au1 = Author.objects.create(name="Author 1", alias="a1", bio="x" * 4001)
cls.au2 = Author.objects.create(name="Author 2", alias="a2")
# Create a few Articles.
cls.a1 = Article.objects.create(
headline="Article 1",
pub_date=datetime(2005, 7, 26),
author=cls.au1,
slug="a1",
)
cls.a2 = Article.objects.create(
headline="Article 2",
pub_date=datetime(2005, 7, 27),
author=cls.au1,
slug="a2",
)
cls.a3 = Article.objects.create(
headline="Article 3",
pub_date=datetime(2005, 7, 27),
author=cls.au1,
slug="a3",
)
cls.a4 = Article.objects.create(
headline="Article 4",
pub_date=datetime(2005, 7, 28),
author=cls.au1,
slug="a4",
)
cls.a5 = Article.objects.create(
headline="Article 5",
pub_date=datetime(2005, 8, 1, 9, 0),
author=cls.au2,
slug="a5",
)
cls.a6 = Article.objects.create(
headline="Article 6",
pub_date=datetime(2005, 8, 1, 8, 0),
author=cls.au2,
slug="a6",
)
cls.a7 = Article.objects.create(
headline="Article 7",
pub_date=datetime(2005, 7, 27),
author=cls.au2,
slug="a7",
)
# Create a few Tags.
cls.t1 = Tag.objects.create(name="Tag 1")
cls.t1.articles.add(cls.a1, cls.a2, cls.a3)
cls.t2 = Tag.objects.create(name="Tag 2")
cls.t2.articles.add(cls.a3, cls.a4, cls.a5)
cls.t3 = Tag.objects.create(name="Tag 3")
cls.t3.articles.add(cls.a5, cls.a6, cls.a7)
def test_exists(self):
# We can use .exists() to check that there are some articles.
self.assertTrue(Article.objects.exists())
for a in Article.objects.all():
a.delete()
# There should be none now!
self.assertFalse(Article.objects.exists())
def test_lookup_int_as_str(self):
# Integer value can be queried using string
self.assertSequenceEqual(
Article.objects.filter(id__iexact=str(self.a1.id)),
[self.a1],
)
@skipUnlessDBFeature("supports_date_lookup_using_string")
def test_lookup_date_as_str(self):
# A date lookup can be performed using a string search
self.assertSequenceEqual(
Article.objects.filter(pub_date__startswith="2005"),
[self.a5, self.a6, self.a4, self.a2, self.a3, self.a7, self.a1],
)
def test_iterator(self):
# Each QuerySet gets iterator(), which is a generator that "lazily"
# returns results using database-level iteration.
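# Hedged usage sketch (process() is a hypothetical callback):
#   for article in Article.objects.iterator(chunk_size=2000):
#       process(article)
# iterator() skips the QuerySet result cache, so rows are streamed in
# chunks rather than held in memory all at once.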
self.assertIsInstance(Article.objects.iterator(), collections.abc.Iterator)
self.assertQuerySetEqual(
Article.objects.iterator(),
[
"Article 5",
"Article 6",
"Article 4",
"Article 2",
"Article 3",
"Article 7",
"Article 1",
],
transform=attrgetter("headline"),
)
# iterator() can be used on any QuerySet.
self.assertQuerySetEqual(
Article.objects.filter(headline__endswith="4").iterator(),
["Article 4"],
transform=attrgetter("headline"),
)
def test_count(self):
# count() returns the number of objects matching search criteria.
self.assertEqual(Article.objects.count(), 7)
self.assertEqual(
Article.objects.filter(pub_date__exact=datetime(2005, 7, 27)).count(), 3
)
self.assertEqual(
Article.objects.filter(headline__startswith="Blah blah").count(), 0
)
# count() should respect sliced query sets.
articles = Article.objects.all()
self.assertEqual(articles.count(), 7)
self.assertEqual(articles[:4].count(), 4)
self.assertEqual(articles[1:100].count(), 6)
self.assertEqual(articles[10:100].count(), 0)
# Date and date/time lookups can also be done with strings.
self.assertEqual(
Article.objects.filter(pub_date__exact="2005-07-27 00:00:00").count(), 3
)
def test_in_bulk(self):
# in_bulk() takes a list of IDs and returns a dictionary mapping IDs to objects.
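# Shape of the result, illustratively (ids invented for the example):
#   Article.objects.in_bulk([1, 2]) -> {1: <Article: ...>, 2: <Article: ...>}
# IDs with no matching row are simply omitted from the dictionary.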
arts = Article.objects.in_bulk([self.a1.id, self.a2.id])
self.assertEqual(arts[self.a1.id], self.a1)
self.assertEqual(arts[self.a2.id], self.a2)
self.assertEqual(
Article.objects.in_bulk(),
{
self.a1.id: self.a1,
self.a2.id: self.a2,
self.a3.id: self.a3,
self.a4.id: self.a4,
self.a5.id: self.a5,
self.a6.id: self.a6,
self.a7.id: self.a7,
},
)
self.assertEqual(Article.objects.in_bulk([self.a3.id]), {self.a3.id: self.a3})
self.assertEqual(Article.objects.in_bulk({self.a3.id}), {self.a3.id: self.a3})
self.assertEqual(
Article.objects.in_bulk(frozenset([self.a3.id])), {self.a3.id: self.a3}
)
self.assertEqual(Article.objects.in_bulk((self.a3.id,)), {self.a3.id: self.a3})
self.assertEqual(Article.objects.in_bulk([1000]), {})
self.assertEqual(Article.objects.in_bulk([]), {})
self.assertEqual(
Article.objects.in_bulk(iter([self.a1.id])), {self.a1.id: self.a1}
)
self.assertEqual(Article.objects.in_bulk(iter([])), {})
with self.assertRaises(TypeError):
Article.objects.in_bulk(headline__startswith="Blah")
def test_in_bulk_lots_of_ids(self):
test_range = 2000
max_query_params = connection.features.max_query_params
expected_num_queries = (
ceil(test_range / max_query_params) if max_query_params else 1
)
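# Worked example (backend value illustrative): with max_query_params=999,
# ceil(2000 / 999) == 3, so the 2000 pks are batched into 3 queries.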
Author.objects.bulk_create(
[Author() for i in range(test_range - Author.objects.count())]
)
authors = {author.pk: author for author in Author.objects.all()}
with self.assertNumQueries(expected_num_queries):
self.assertEqual(Author.objects.in_bulk(authors), authors)
def test_in_bulk_with_field(self):
self.assertEqual(
Article.objects.in_bulk(
[self.a1.slug, self.a2.slug, self.a3.slug], field_name="slug"
),
{
self.a1.slug: self.a1,
self.a2.slug: self.a2,
self.a3.slug: self.a3,
},
)
def test_in_bulk_meta_constraint(self):
season_2011 = Season.objects.create(year=2011)
season_2012 = Season.objects.create(year=2012)
Season.objects.create(year=2013)
self.assertEqual(
Season.objects.in_bulk(
[season_2011.year, season_2012.year],
field_name="year",
),
{season_2011.year: season_2011, season_2012.year: season_2012},
)
def test_in_bulk_non_unique_field(self):
msg = "in_bulk()'s field_name must be a unique field but 'author' isn't."
with self.assertRaisesMessage(ValueError, msg):
Article.objects.in_bulk([self.au1], field_name="author")
@skipUnlessDBFeature("can_distinct_on_fields")
def test_in_bulk_preserve_ordering(self):
articles = (
Article.objects.order_by("author_id", "-pub_date")
.distinct("author_id")
.in_bulk([self.au1.id, self.au2.id], field_name="author_id")
)
self.assertEqual(
articles,
{self.au1.id: self.a4, self.au2.id: self.a5},
)
@skipUnlessDBFeature("can_distinct_on_fields")
def test_in_bulk_preserve_ordering_with_batch_size(self):
old_max_query_params = connection.features.max_query_params
connection.features.max_query_params = 1
try:
articles = (
Article.objects.order_by("author_id", "-pub_date")
.distinct("author_id")
.in_bulk([self.au1.id, self.au2.id], field_name="author_id")
)
self.assertEqual(
articles,
{self.au1.id: self.a4, self.au2.id: self.a5},
)
finally:
connection.features.max_query_params = old_max_query_params
@skipUnlessDBFeature("can_distinct_on_fields")
def test_in_bulk_distinct_field(self):
self.assertEqual(
Article.objects.order_by("headline")
.distinct("headline")
.in_bulk(
[self.a1.headline, self.a5.headline],
field_name="headline",
),
{self.a1.headline: self.a1, self.a5.headline: self.a5},
)
@skipUnlessDBFeature("can_distinct_on_fields")
def test_in_bulk_multiple_distinct_field(self):
msg = "in_bulk()'s field_name must be a unique field but 'pub_date' isn't."
with self.assertRaisesMessage(ValueError, msg):
Article.objects.order_by("headline", "pub_date").distinct(
"headline",
"pub_date",
).in_bulk(field_name="pub_date")
@isolate_apps("lookup")
def test_in_bulk_non_unique_meta_constraint(self):
class Model(models.Model):
ean = models.CharField(max_length=100)
brand = models.CharField(max_length=100)
name = models.CharField(max_length=80)
class Meta:
constraints = [
models.UniqueConstraint(
fields=["ean"],
name="partial_ean_unique",
condition=models.Q(is_active=True),
),
models.UniqueConstraint(
fields=["brand", "name"],
name="together_brand_name_unique",
),
]
msg = "in_bulk()'s field_name must be a unique field but '%s' isn't."
for field_name in ["brand", "ean"]:
with self.subTest(field_name=field_name):
with self.assertRaisesMessage(ValueError, msg % field_name):
Model.objects.in_bulk(field_name=field_name)
def test_in_bulk_sliced_queryset(self):
msg = "Cannot use 'limit' or 'offset' with in_bulk()."
with self.assertRaisesMessage(TypeError, msg):
Article.objects.all()[0:5].in_bulk([self.a1.id, self.a2.id])
def test_values(self):
# values() returns a list of dictionaries instead of object instances --
# and you can specify which fields you want to retrieve.
self.assertSequenceEqual(
Article.objects.values("headline"),
[
{"headline": "Article 5"},
{"headline": "Article 6"},
{"headline": "Article 4"},
{"headline": "Article 2"},
{"headline": "Article 3"},
{"headline": "Article 7"},
{"headline": "Article 1"},
],
)
self.assertSequenceEqual(
Article.objects.filter(pub_date__exact=datetime(2005, 7, 27)).values("id"),
[{"id": self.a2.id}, {"id": self.a3.id}, {"id": self.a7.id}],
)
self.assertSequenceEqual(
Article.objects.values("id", "headline"),
[
{"id": self.a5.id, "headline": "Article 5"},
{"id": self.a6.id, "headline": "Article 6"},
{"id": self.a4.id, "headline": "Article 4"},
{"id": self.a2.id, "headline": "Article 2"},
{"id": self.a3.id, "headline": "Article 3"},
{"id": self.a7.id, "headline": "Article 7"},
{"id": self.a1.id, "headline": "Article 1"},
],
)
# You can use values() with iterator() for memory savings,
# because iterator() uses database-level iteration.
self.assertSequenceEqual(
list(Article.objects.values("id", "headline").iterator()),
[
{"headline": "Article 5", "id": self.a5.id},
{"headline": "Article 6", "id": self.a6.id},
{"headline": "Article 4", "id": self.a4.id},
{"headline": "Article 2", "id": self.a2.id},
{"headline": "Article 3", "id": self.a3.id},
{"headline": "Article 7", "id": self.a7.id},
{"headline": "Article 1", "id": self.a1.id},
],
)
# The values() method works with "extra" fields specified in extra(select).
self.assertSequenceEqual(
Article.objects.extra(select={"id_plus_one": "id + 1"}).values(
"id", "id_plus_one"
),
[
{"id": self.a5.id, "id_plus_one": self.a5.id + 1},
{"id": self.a6.id, "id_plus_one": self.a6.id + 1},
{"id": self.a4.id, "id_plus_one": self.a4.id + 1},
{"id": self.a2.id, "id_plus_one": self.a2.id + 1},
{"id": self.a3.id, "id_plus_one": self.a3.id + 1},
{"id": self.a7.id, "id_plus_one": self.a7.id + 1},
{"id": self.a1.id, "id_plus_one": self.a1.id + 1},
],
)
data = {
"id_plus_one": "id+1",
"id_plus_two": "id+2",
"id_plus_three": "id+3",
"id_plus_four": "id+4",
"id_plus_five": "id+5",
"id_plus_six": "id+6",
"id_plus_seven": "id+7",
"id_plus_eight": "id+8",
}
self.assertSequenceEqual(
Article.objects.filter(id=self.a1.id).extra(select=data).values(*data),
[
{
"id_plus_one": self.a1.id + 1,
"id_plus_two": self.a1.id + 2,
"id_plus_three": self.a1.id + 3,
"id_plus_four": self.a1.id + 4,
"id_plus_five": self.a1.id + 5,
"id_plus_six": self.a1.id + 6,
"id_plus_seven": self.a1.id + 7,
"id_plus_eight": self.a1.id + 8,
}
],
)
# You can specify fields from forward and reverse relations, just like filter().
self.assertSequenceEqual(
Article.objects.values("headline", "author__name"),
[
{"headline": self.a5.headline, "author__name": self.au2.name},
{"headline": self.a6.headline, "author__name": self.au2.name},
{"headline": self.a4.headline, "author__name": self.au1.name},
{"headline": self.a2.headline, "author__name": self.au1.name},
{"headline": self.a3.headline, "author__name": self.au1.name},
{"headline": self.a7.headline, "author__name": self.au2.name},
{"headline": self.a1.headline, "author__name": self.au1.name},
],
)
self.assertSequenceEqual(
Author.objects.values("name", "article__headline").order_by(
"name", "article__headline"
),
[
{"name": self.au1.name, "article__headline": self.a1.headline},
{"name": self.au1.name, "article__headline": self.a2.headline},
{"name": self.au1.name, "article__headline": self.a3.headline},
{"name": self.au1.name, "article__headline": self.a4.headline},
{"name": self.au2.name, "article__headline": self.a5.headline},
{"name": self.au2.name, "article__headline": self.a6.headline},
{"name": self.au2.name, "article__headline": self.a7.headline},
],
)
self.assertSequenceEqual(
(
Author.objects.values(
"name", "article__headline", "article__tag__name"
).order_by("name", "article__headline", "article__tag__name")
),
[
{
"name": self.au1.name,
"article__headline": self.a1.headline,
"article__tag__name": self.t1.name,
},
{
"name": self.au1.name,
"article__headline": self.a2.headline,
"article__tag__name": self.t1.name,
},
{
"name": self.au1.name,
"article__headline": self.a3.headline,
"article__tag__name": self.t1.name,
},
{
"name": self.au1.name,
"article__headline": self.a3.headline,
"article__tag__name": self.t2.name,
},
{
"name": self.au1.name,
"article__headline": self.a4.headline,
"article__tag__name": self.t2.name,
},
{
"name": self.au2.name,
"article__headline": self.a5.headline,
"article__tag__name": self.t2.name,
},
{
"name": self.au2.name,
"article__headline": self.a5.headline,
"article__tag__name": self.t3.name,
},
{
"name": self.au2.name,
"article__headline": self.a6.headline,
"article__tag__name": self.t3.name,
},
{
"name": self.au2.name,
"article__headline": self.a7.headline,
"article__tag__name": self.t3.name,
},
],
)
# However, a FieldError will be raised if you specify a nonexistent
# field name in values() (a field that is neither in the model nor in
# extra(select)).
msg = (
"Cannot resolve keyword 'id_plus_two' into field. Choices are: "
"author, author_id, headline, id, id_plus_one, pub_date, slug, tag"
)
with self.assertRaisesMessage(FieldError, msg):
Article.objects.extra(select={"id_plus_one": "id + 1"}).values(
"id", "id_plus_two"
)
# If you don't specify field names to values(), all are returned.
self.assertSequenceEqual(
Article.objects.filter(id=self.a5.id).values(),
[
{
"id": self.a5.id,
"author_id": self.au2.id,
"headline": "Article 5",
"pub_date": datetime(2005, 8, 1, 9, 0),
"slug": "a5",
}
],
)
def test_values_list(self):
# values_list() is similar to values(), except that the results are
# returned as a list of tuples, rather than a list of dictionaries.
# Within each tuple, the order of the elements is the same as the order
# of fields in the values_list() call.
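# Hedged illustration of the row shapes (values invented):
#   .values("id", "headline")      -> {"id": 1, "headline": "Article 1"}
#   .values_list("id", "headline") -> (1, "Article 1")
#   .values_list("id", flat=True)  -> 1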
self.assertSequenceEqual(
Article.objects.values_list("headline"),
[
("Article 5",),
("Article 6",),
("Article 4",),
("Article 2",),
("Article 3",),
("Article 7",),
("Article 1",),
],
)
self.assertSequenceEqual(
Article.objects.values_list("id").order_by("id"),
[
(self.a1.id,),
(self.a2.id,),
(self.a3.id,),
(self.a4.id,),
(self.a5.id,),
(self.a6.id,),
(self.a7.id,),
],
)
self.assertSequenceEqual(
Article.objects.values_list("id", flat=True).order_by("id"),
[
self.a1.id,
self.a2.id,
self.a3.id,
self.a4.id,
self.a5.id,
self.a6.id,
self.a7.id,
],
)
self.assertSequenceEqual(
Article.objects.extra(select={"id_plus_one": "id+1"})
.order_by("id")
.values_list("id"),
[
(self.a1.id,),
(self.a2.id,),
(self.a3.id,),
(self.a4.id,),
(self.a5.id,),
(self.a6.id,),
(self.a7.id,),
],
)
self.assertSequenceEqual(
Article.objects.extra(select={"id_plus_one": "id+1"})
.order_by("id")
.values_list("id_plus_one", "id"),
[
(self.a1.id + 1, self.a1.id),
(self.a2.id + 1, self.a2.id),
(self.a3.id + 1, self.a3.id),
(self.a4.id + 1, self.a4.id),
(self.a5.id + 1, self.a5.id),
(self.a6.id + 1, self.a6.id),
(self.a7.id + 1, self.a7.id),
],
)
self.assertSequenceEqual(
Article.objects.extra(select={"id_plus_one": "id+1"})
.order_by("id")
.values_list("id", "id_plus_one"),
[
(self.a1.id, self.a1.id + 1),
(self.a2.id, self.a2.id + 1),
(self.a3.id, self.a3.id + 1),
(self.a4.id, self.a4.id + 1),
(self.a5.id, self.a5.id + 1),
(self.a6.id, self.a6.id + 1),
(self.a7.id, self.a7.id + 1),
],
)
args = ("name", "article__headline", "article__tag__name")
self.assertSequenceEqual(
Author.objects.values_list(*args).order_by(*args),
[
(self.au1.name, self.a1.headline, self.t1.name),
(self.au1.name, self.a2.headline, self.t1.name),
(self.au1.name, self.a3.headline, self.t1.name),
(self.au1.name, self.a3.headline, self.t2.name),
(self.au1.name, self.a4.headline, self.t2.name),
(self.au2.name, self.a5.headline, self.t2.name),
(self.au2.name, self.a5.headline, self.t3.name),
(self.au2.name, self.a6.headline, self.t3.name),
(self.au2.name, self.a7.headline, self.t3.name),
],
)
with self.assertRaises(TypeError):
Article.objects.values_list("id", "headline", flat=True)
def test_get_next_previous_by(self):
# Every DateField and DateTimeField creates get_next_by_FOO() and
# get_previous_by_FOO() methods. In the case of identical date values,
# these methods will use the ID as a fallback check. This guarantees
# that no records are skipped or duplicated.
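# Conceptual sketch of the fallback, not the exact implementation: for
# get_next_by_pub_date() Django effectively filters with
#   Q(pub_date__gt=d) | Q(pub_date=d, pk__gt=obj.pk)
# and orders by (pub_date, pk), so ties on pub_date are broken by pk.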
self.assertEqual(repr(self.a1.get_next_by_pub_date()), "<Article: Article 2>")
self.assertEqual(repr(self.a2.get_next_by_pub_date()), "<Article: Article 3>")
self.assertEqual(
repr(self.a2.get_next_by_pub_date(headline__endswith="6")),
"<Article: Article 6>",
)
self.assertEqual(repr(self.a3.get_next_by_pub_date()), "<Article: Article 7>")
self.assertEqual(repr(self.a4.get_next_by_pub_date()), "<Article: Article 6>")
with self.assertRaises(Article.DoesNotExist):
self.a5.get_next_by_pub_date()
self.assertEqual(repr(self.a6.get_next_by_pub_date()), "<Article: Article 5>")
self.assertEqual(repr(self.a7.get_next_by_pub_date()), "<Article: Article 4>")
self.assertEqual(
repr(self.a7.get_previous_by_pub_date()), "<Article: Article 3>"
)
self.assertEqual(
repr(self.a6.get_previous_by_pub_date()), "<Article: Article 4>"
)
self.assertEqual(
repr(self.a5.get_previous_by_pub_date()), "<Article: Article 6>"
)
self.assertEqual(
repr(self.a4.get_previous_by_pub_date()), "<Article: Article 7>"
)
self.assertEqual(
repr(self.a3.get_previous_by_pub_date()), "<Article: Article 2>"
)
self.assertEqual(
repr(self.a2.get_previous_by_pub_date()), "<Article: Article 1>"
)
def test_escaping(self):
# Underscores, percent signs and backslashes have special meaning in the
# underlying SQL code, but Django handles the quoting of them automatically.
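# Rough illustration of the escaping (exact quoting varies by backend):
#   headline__startswith="Article_"  ->  LIKE 'Article\_%'
# so the underscore is matched literally rather than acting as the SQL
# single-character wildcard.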
a8 = Article.objects.create(
headline="Article_ with underscore", pub_date=datetime(2005, 11, 20)
)
self.assertSequenceEqual(
Article.objects.filter(headline__startswith="Article"),
[a8, self.a5, self.a6, self.a4, self.a2, self.a3, self.a7, self.a1],
)
self.assertSequenceEqual(
Article.objects.filter(headline__startswith="Article_"),
[a8],
)
a9 = Article.objects.create(
headline="Article% with percent sign", pub_date=datetime(2005, 11, 21)
)
self.assertSequenceEqual(
Article.objects.filter(headline__startswith="Article"),
[a9, a8, self.a5, self.a6, self.a4, self.a2, self.a3, self.a7, self.a1],
)
self.assertSequenceEqual(
Article.objects.filter(headline__startswith="Article%"),
[a9],
)
a10 = Article.objects.create(
headline="Article with \\ backslash", pub_date=datetime(2005, 11, 22)
)
self.assertSequenceEqual(
Article.objects.filter(headline__contains="\\"),
[a10],
)
def test_exclude(self):
pub_date = datetime(2005, 11, 20)
a8 = Article.objects.create(
headline="Article_ with underscore", pub_date=pub_date
)
a9 = Article.objects.create(
headline="Article% with percent sign", pub_date=pub_date
)
a10 = Article.objects.create(
headline="Article with \\ backslash", pub_date=pub_date
)
# exclude() is the opposite of filter() when doing lookups:
self.assertSequenceEqual(
Article.objects.filter(headline__contains="Article").exclude(
headline__contains="with"
),
[self.a5, self.a6, self.a4, self.a2, self.a3, self.a7, self.a1],
)
self.assertSequenceEqual(
Article.objects.exclude(headline__startswith="Article_"),
[a10, a9, self.a5, self.a6, self.a4, self.a2, self.a3, self.a7, self.a1],
)
self.assertSequenceEqual(
Article.objects.exclude(headline="Article 7"),
[a10, a9, a8, self.a5, self.a6, self.a4, self.a2, self.a3, self.a1],
)
def test_none(self):
# none() returns a QuerySet that behaves like any other QuerySet object
self.assertSequenceEqual(Article.objects.none(), [])
self.assertSequenceEqual(
Article.objects.none().filter(headline__startswith="Article"), []
)
self.assertSequenceEqual(
Article.objects.filter(headline__startswith="Article").none(), []
)
self.assertEqual(Article.objects.none().count(), 0)
self.assertEqual(
Article.objects.none().update(headline="This should not take effect"), 0
)
self.assertSequenceEqual(list(Article.objects.none().iterator()), [])
def test_in(self):
self.assertSequenceEqual(
Article.objects.exclude(id__in=[]),
[self.a5, self.a6, self.a4, self.a2, self.a3, self.a7, self.a1],
)
def test_in_empty_list(self):
self.assertSequenceEqual(Article.objects.filter(id__in=[]), [])
def test_in_different_database(self):
with self.assertRaisesMessage(
ValueError,
"Subqueries aren't allowed across different databases. Force the "
"inner query to be evaluated using `list(inner_query)`.",
):
list(Article.objects.filter(id__in=Article.objects.using("other").all()))
def test_in_keeps_value_ordering(self):
query = (
Article.objects.filter(slug__in=["a%d" % i for i in range(1, 8)])
.values("pk")
.query
)
self.assertIn(" IN (a1, a2, a3, a4, a5, a6, a7) ", str(query))
def test_in_ignore_none(self):
with self.assertNumQueries(1) as ctx:
self.assertSequenceEqual(
Article.objects.filter(id__in=[None, self.a1.id]),
[self.a1],
)
sql = ctx.captured_queries[0]["sql"]
self.assertIn("IN (%s)" % self.a1.pk, sql)
def test_in_ignore_solo_none(self):
with self.assertNumQueries(0):
self.assertSequenceEqual(Article.objects.filter(id__in=[None]), [])
def test_in_ignore_none_with_unhashable_items(self):
class UnhashableInt(int):
__hash__ = None
with self.assertNumQueries(1) as ctx:
self.assertSequenceEqual(
Article.objects.filter(id__in=[None, UnhashableInt(self.a1.id)]),
[self.a1],
)
sql = ctx.captured_queries[0]["sql"]
self.assertIn("IN (%s)" % self.a1.pk, sql)
def test_error_messages(self):
# Programming errors are pointed out with nice error messages
with self.assertRaisesMessage(
FieldError,
"Cannot resolve keyword 'pub_date_year' into field. Choices are: "
"author, author_id, headline, id, pub_date, slug, tag",
):
Article.objects.filter(pub_date_year="2005").count()
def test_unsupported_lookups(self):
with self.assertRaisesMessage(
FieldError,
"Unsupported lookup 'starts' for CharField or join on the field "
"not permitted, perhaps you meant startswith or istartswith?",
):
Article.objects.filter(headline__starts="Article")
with self.assertRaisesMessage(
FieldError,
"Unsupported lookup 'is_null' for DateTimeField or join on the field "
"not permitted, perhaps you meant isnull?",
):
Article.objects.filter(pub_date__is_null=True)
with self.assertRaisesMessage(
FieldError,
"Unsupported lookup 'gobbledygook' for DateTimeField or join on the field "
"not permitted.",
):
Article.objects.filter(pub_date__gobbledygook="blahblah")
def test_unsupported_lookups_custom_lookups(self):
slug_field = Article._meta.get_field("slug")
msg = (
"Unsupported lookup 'lengtp' for SlugField or join on the field not "
"permitted, perhaps you meant length?"
)
with self.assertRaisesMessage(FieldError, msg):
with register_lookup(slug_field, Length):
Article.objects.filter(slug__lengtp=20)
def test_relation_nested_lookup_error(self):
# An invalid nested lookup on a related field raises a useful error.
msg = (
"Unsupported lookup 'editor' for ForeignKey or join on the field not "
"permitted."
)
with self.assertRaisesMessage(FieldError, msg):
Article.objects.filter(author__editor__name="James")
msg = (
"Unsupported lookup 'foo' for ForeignKey or join on the field not "
"permitted."
)
with self.assertRaisesMessage(FieldError, msg):
Tag.objects.filter(articles__foo="bar")
def test_unsupported_lookup_reverse_foreign_key(self):
msg = (
"Unsupported lookup 'title' for ManyToOneRel or join on the field not "
"permitted."
)
with self.assertRaisesMessage(FieldError, msg):
Author.objects.filter(article__title="Article 1")
def test_unsupported_lookup_reverse_foreign_key_custom_lookups(self):
msg = (
"Unsupported lookup 'abspl' for ManyToOneRel or join on the field not "
"permitted, perhaps you meant abspk?"
)
fk_field = Article._meta.get_field("author")
with self.assertRaisesMessage(FieldError, msg):
with register_lookup(fk_field, Abs, lookup_name="abspk"):
Author.objects.filter(article__abspl=2)
def test_filter_by_reverse_related_field_transform(self):
fk_field = Article._meta.get_field("author")
with register_lookup(fk_field, Abs):
self.assertSequenceEqual(
Author.objects.filter(article__abs=self.a1.pk), [self.au1]
)
def test_regex(self):
# Create some articles with a bit more interesting headlines for
# testing field lookups.
Article.objects.all().delete()
now = datetime.now()
Article.objects.bulk_create(
[
Article(pub_date=now, headline="f"),
Article(pub_date=now, headline="fo"),
Article(pub_date=now, headline="foo"),
Article(pub_date=now, headline="fooo"),
Article(pub_date=now, headline="hey-Foo"),
Article(pub_date=now, headline="bar"),
Article(pub_date=now, headline="AbBa"),
Article(pub_date=now, headline="baz"),
Article(pub_date=now, headline="baxZ"),
]
)
# zero-or-more
self.assertQuerySetEqual(
Article.objects.filter(headline__regex=r"fo*"),
Article.objects.filter(headline__in=["f", "fo", "foo", "fooo"]),
)
self.assertQuerySetEqual(
Article.objects.filter(headline__iregex=r"fo*"),
Article.objects.filter(headline__in=["f", "fo", "foo", "fooo", "hey-Foo"]),
)
# one-or-more
self.assertQuerySetEqual(
Article.objects.filter(headline__regex=r"fo+"),
Article.objects.filter(headline__in=["fo", "foo", "fooo"]),
)
# wildcard
self.assertQuerySetEqual(
Article.objects.filter(headline__regex=r"fooo?"),
Article.objects.filter(headline__in=["foo", "fooo"]),
)
# leading anchor
self.assertQuerySetEqual(
Article.objects.filter(headline__regex=r"^b"),
Article.objects.filter(headline__in=["bar", "baxZ", "baz"]),
)
self.assertQuerySetEqual(
Article.objects.filter(headline__iregex=r"^a"),
Article.objects.filter(headline="AbBa"),
)
# trailing anchor
self.assertQuerySetEqual(
Article.objects.filter(headline__regex=r"z$"),
Article.objects.filter(headline="baz"),
)
self.assertQuerySetEqual(
Article.objects.filter(headline__iregex=r"z$"),
Article.objects.filter(headline__in=["baxZ", "baz"]),
)
# character sets
self.assertQuerySetEqual(
Article.objects.filter(headline__regex=r"ba[rz]"),
Article.objects.filter(headline__in=["bar", "baz"]),
)
self.assertQuerySetEqual(
Article.objects.filter(headline__regex=r"ba.[RxZ]"),
Article.objects.filter(headline="baxZ"),
)
self.assertQuerySetEqual(
Article.objects.filter(headline__iregex=r"ba[RxZ]"),
Article.objects.filter(headline__in=["bar", "baxZ", "baz"]),
)
# and more articles:
Article.objects.bulk_create(
[
Article(pub_date=now, headline="foobar"),
Article(pub_date=now, headline="foobaz"),
Article(pub_date=now, headline="ooF"),
Article(pub_date=now, headline="foobarbaz"),
Article(pub_date=now, headline="zoocarfaz"),
Article(pub_date=now, headline="barfoobaz"),
Article(pub_date=now, headline="bazbaRFOO"),
]
)
# alternation
self.assertQuerySetEqual(
Article.objects.filter(headline__regex=r"oo(f|b)"),
Article.objects.filter(
headline__in=[
"barfoobaz",
"foobar",
"foobarbaz",
"foobaz",
]
),
)
self.assertQuerySetEqual(
Article.objects.filter(headline__iregex=r"oo(f|b)"),
Article.objects.filter(
headline__in=[
"barfoobaz",
"foobar",
"foobarbaz",
"foobaz",
"ooF",
]
),
)
self.assertQuerySetEqual(
Article.objects.filter(headline__regex=r"^foo(f|b)"),
Article.objects.filter(headline__in=["foobar", "foobarbaz", "foobaz"]),
)
# greedy matching
self.assertQuerySetEqual(
Article.objects.filter(headline__regex=r"b.*az"),
Article.objects.filter(
headline__in=[
"barfoobaz",
"baz",
"bazbaRFOO",
"foobarbaz",
"foobaz",
]
),
)
self.assertQuerySetEqual(
Article.objects.filter(headline__iregex=r"b.*ar"),
Article.objects.filter(
headline__in=[
"bar",
"barfoobaz",
"bazbaRFOO",
"foobar",
"foobarbaz",
]
),
)
@skipUnlessDBFeature("supports_regex_backreferencing")
def test_regex_backreferencing(self):
# grouping and backreferences
now = datetime.now()
Article.objects.bulk_create(
[
Article(pub_date=now, headline="foobar"),
Article(pub_date=now, headline="foobaz"),
Article(pub_date=now, headline="ooF"),
Article(pub_date=now, headline="foobarbaz"),
Article(pub_date=now, headline="zoocarfaz"),
Article(pub_date=now, headline="barfoobaz"),
Article(pub_date=now, headline="bazbaRFOO"),
]
)
self.assertQuerySetEqual(
Article.objects.filter(headline__regex=r"b(.).*b\1").values_list(
"headline", flat=True
),
["barfoobaz", "bazbaRFOO", "foobarbaz"],
)
def test_regex_null(self):
"""
A regex lookup does not fail on null/None values
"""
Season.objects.create(year=2012, gt=None)
self.assertQuerySetEqual(Season.objects.filter(gt__regex=r"^$"), [])
def test_textfield_exact_null(self):
with self.assertNumQueries(1) as ctx:
self.assertSequenceEqual(Author.objects.filter(bio=None), [self.au2])
# Columns with IS NULL condition are not wrapped (except PostgreSQL).
bio_column = connection.ops.quote_name(Author._meta.get_field("bio").column)
self.assertIn(f"{bio_column} IS NULL", ctx.captured_queries[0]["sql"])
def test_regex_non_string(self):
"""
A regex lookup does not fail on non-string fields
"""
s = Season.objects.create(year=2013, gt=444)
self.assertQuerySetEqual(Season.objects.filter(gt__regex=r"^444$"), [s])
def test_regex_non_ascii(self):
"""
A regex lookup does not trip on non-ASCII characters.
"""
Player.objects.create(name="\u2660")
Player.objects.get(name__regex="\u2660")
def test_nonfield_lookups(self):
"""
A lookup query containing non-fields raises the proper exception.
"""
msg = (
"Unsupported lookup 'blahblah' for CharField or join on the field not "
"permitted."
)
with self.assertRaisesMessage(FieldError, msg):
Article.objects.filter(headline__blahblah=99)
with self.assertRaisesMessage(FieldError, msg):
Article.objects.filter(headline__blahblah__exact=99)
msg = (
"Cannot resolve keyword 'blahblah' into field. Choices are: "
"author, author_id, headline, id, pub_date, slug, tag"
)
with self.assertRaisesMessage(FieldError, msg):
Article.objects.filter(blahblah=99)
def test_lookup_collision(self):
"""
Genuine field names don't collide with built-in lookup types
('year', 'gt', 'range', 'in' etc.) (#11670).
"""
# 'gt' is used as a code number for the year, e.g. 111=>2009.
season_2009 = Season.objects.create(year=2009, gt=111)
season_2009.games.create(home="Houston Astros", away="St. Louis Cardinals")
season_2010 = Season.objects.create(year=2010, gt=222)
season_2010.games.create(home="Houston Astros", away="Chicago Cubs")
season_2010.games.create(home="Houston Astros", away="Milwaukee Brewers")
season_2010.games.create(home="Houston Astros", away="St. Louis Cardinals")
season_2011 = Season.objects.create(year=2011, gt=333)
season_2011.games.create(home="Houston Astros", away="St. Louis Cardinals")
season_2011.games.create(home="Houston Astros", away="Milwaukee Brewers")
hunter_pence = Player.objects.create(name="Hunter Pence")
hunter_pence.games.set(Game.objects.filter(season__year__in=[2009, 2010]))
pudge = Player.objects.create(name="Ivan Rodriguez")
pudge.games.set(Game.objects.filter(season__year=2009))
pedro_feliz = Player.objects.create(name="Pedro Feliz")
pedro_feliz.games.set(Game.objects.filter(season__year__in=[2011]))
johnson = Player.objects.create(name="Johnson")
johnson.games.set(Game.objects.filter(season__year__in=[2011]))
# Games in 2010
self.assertEqual(Game.objects.filter(season__year=2010).count(), 3)
self.assertEqual(Game.objects.filter(season__year__exact=2010).count(), 3)
self.assertEqual(Game.objects.filter(season__gt=222).count(), 3)
self.assertEqual(Game.objects.filter(season__gt__exact=222).count(), 3)
# Games in 2011
self.assertEqual(Game.objects.filter(season__year=2011).count(), 2)
self.assertEqual(Game.objects.filter(season__year__exact=2011).count(), 2)
self.assertEqual(Game.objects.filter(season__gt=333).count(), 2)
self.assertEqual(Game.objects.filter(season__gt__exact=333).count(), 2)
self.assertEqual(Game.objects.filter(season__year__gt=2010).count(), 2)
self.assertEqual(Game.objects.filter(season__gt__gt=222).count(), 2)
# Games played in 2010 and 2011
self.assertEqual(Game.objects.filter(season__year__in=[2010, 2011]).count(), 5)
self.assertEqual(Game.objects.filter(season__year__gt=2009).count(), 5)
self.assertEqual(Game.objects.filter(season__gt__in=[222, 333]).count(), 5)
self.assertEqual(Game.objects.filter(season__gt__gt=111).count(), 5)
# Players who played in 2009
self.assertEqual(
Player.objects.filter(games__season__year=2009).distinct().count(), 2
)
self.assertEqual(
Player.objects.filter(games__season__year__exact=2009).distinct().count(), 2
)
self.assertEqual(
Player.objects.filter(games__season__gt=111).distinct().count(), 2
)
self.assertEqual(
Player.objects.filter(games__season__gt__exact=111).distinct().count(), 2
)
# Players who played in 2010
self.assertEqual(
Player.objects.filter(games__season__year=2010).distinct().count(), 1
)
self.assertEqual(
Player.objects.filter(games__season__year__exact=2010).distinct().count(), 1
)
self.assertEqual(
Player.objects.filter(games__season__gt=222).distinct().count(), 1
)
self.assertEqual(
Player.objects.filter(games__season__gt__exact=222).distinct().count(), 1
)
# Players who played in 2011
self.assertEqual(
Player.objects.filter(games__season__year=2011).distinct().count(), 2
)
self.assertEqual(
Player.objects.filter(games__season__year__exact=2011).distinct().count(), 2
)
self.assertEqual(
Player.objects.filter(games__season__gt=333).distinct().count(), 2
)
self.assertEqual(
Player.objects.filter(games__season__year__gt=2010).distinct().count(), 2
)
self.assertEqual(
Player.objects.filter(games__season__gt__gt=222).distinct().count(), 2
)
def test_chain_date_time_lookups(self):
self.assertCountEqual(
Article.objects.filter(pub_date__month__gt=7),
[self.a5, self.a6],
)
self.assertCountEqual(
Article.objects.filter(pub_date__day__gte=27),
[self.a2, self.a3, self.a4, self.a7],
)
self.assertCountEqual(
Article.objects.filter(pub_date__hour__lt=8),
[self.a1, self.a2, self.a3, self.a4, self.a7],
)
self.assertCountEqual(
Article.objects.filter(pub_date__minute__lte=0),
[self.a1, self.a2, self.a3, self.a4, self.a5, self.a6, self.a7],
)
def test_exact_none_transform(self):
"""Transforms are used for __exact=None."""
Season.objects.create(year=1, nulled_text_field="not null")
self.assertFalse(Season.objects.filter(nulled_text_field__isnull=True))
self.assertTrue(Season.objects.filter(nulled_text_field__nulled__isnull=True))
self.assertTrue(Season.objects.filter(nulled_text_field__nulled__exact=None))
self.assertTrue(Season.objects.filter(nulled_text_field__nulled=None))
def test_exact_sliced_queryset_limit_one(self):
self.assertCountEqual(
Article.objects.filter(author=Author.objects.all()[:1]),
[self.a1, self.a2, self.a3, self.a4],
)
def test_exact_sliced_queryset_limit_one_offset(self):
self.assertCountEqual(
Article.objects.filter(author=Author.objects.all()[1:2]),
[self.a5, self.a6, self.a7],
)
def test_exact_sliced_queryset_not_limited_to_one(self):
msg = (
"The QuerySet value for an exact lookup must be limited to one "
"result using slicing."
)
with self.assertRaisesMessage(ValueError, msg):
list(Article.objects.filter(author=Author.objects.all()[:2]))
with self.assertRaisesMessage(ValueError, msg):
list(Article.objects.filter(author=Author.objects.all()[1:]))
@skipUnless(connection.vendor == "mysql", "MySQL-specific workaround.")
def test_exact_booleanfield(self):
# MySQL ignores indexes with boolean fields unless they're compared
# directly to a boolean value.
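# Hedged illustration: on MySQL the filter is compiled roughly as
#   WHERE `short` = True
# instead of a bare boolean column reference, which keeps an index on the
# column usable; the assertion below checks for that explicit comparison.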
product = Product.objects.create(name="Paper", qty_target=5000)
Stock.objects.create(product=product, short=False, qty_available=5100)
stock_1 = Stock.objects.create(product=product, short=True, qty_available=180)
qs = Stock.objects.filter(short=True)
self.assertSequenceEqual(qs, [stock_1])
self.assertIn(
"%s = True" % connection.ops.quote_name("short"),
str(qs.query),
)
@skipUnless(connection.vendor == "mysql", "MySQL-specific workaround.")
def test_exact_booleanfield_annotation(self):
# MySQL ignores indexes with boolean fields unless they're compared
# directly to a boolean value.
qs = Author.objects.annotate(
case=Case(
When(alias="a1", then=True),
default=False,
output_field=BooleanField(),
)
).filter(case=True)
self.assertSequenceEqual(qs, [self.au1])
self.assertIn(" = True", str(qs.query))
qs = Author.objects.annotate(
wrapped=ExpressionWrapper(Q(alias="a1"), output_field=BooleanField()),
).filter(wrapped=True)
self.assertSequenceEqual(qs, [self.au1])
self.assertIn(" = True", str(qs.query))
# EXISTS(...) shouldn't be compared to a boolean value.
qs = Author.objects.annotate(
exists=Exists(Author.objects.filter(alias="a1", pk=OuterRef("pk"))),
).filter(exists=True)
self.assertSequenceEqual(qs, [self.au1])
self.assertNotIn(" = True", str(qs.query))
def test_custom_field_none_rhs(self):
"""
__exact=value is transformed to __isnull=True if Field.get_prep_value()
converts value to None.
"""
season = Season.objects.create(year=2012, nulled_text_field=None)
self.assertTrue(
Season.objects.filter(pk=season.pk, nulled_text_field__isnull=True)
)
self.assertTrue(Season.objects.filter(pk=season.pk, nulled_text_field=""))
def test_pattern_lookups_with_substr(self):
a = Author.objects.create(name="John Smith", alias="Johx")
b = Author.objects.create(name="Rhonda Simpson", alias="sonx")
tests = (
("startswith", [a]),
("istartswith", [a]),
("contains", [a, b]),
("icontains", [a, b]),
("endswith", [b]),
("iendswith", [b]),
)
for lookup, result in tests:
with self.subTest(lookup=lookup):
authors = Author.objects.filter(
**{"name__%s" % lookup: Substr("alias", 1, 3)}
)
self.assertCountEqual(authors, result)
def test_custom_lookup_none_rhs(self):
"""Lookup.can_use_none_as_rhs=True allows None as a lookup value."""
season = Season.objects.create(year=2012, nulled_text_field=None)
query = Season.objects.get_queryset().query
field = query.model._meta.get_field("nulled_text_field")
self.assertIsInstance(
query.build_lookup(["isnull_none_rhs"], field, None), IsNullWithNoneAsRHS
)
self.assertTrue(
Season.objects.filter(pk=season.pk, nulled_text_field__isnull_none_rhs=True)
)
def test_exact_exists(self):
qs = Article.objects.filter(pk=OuterRef("pk"))
seasons = Season.objects.annotate(pk_exists=Exists(qs)).filter(
pk_exists=Exists(qs),
)
self.assertCountEqual(seasons, Season.objects.all())
def test_nested_outerref_lhs(self):
tag = Tag.objects.create(name=self.au1.alias)
tag.articles.add(self.a1)
qs = Tag.objects.annotate(
has_author_alias_match=Exists(
Article.objects.annotate(
author_exists=Exists(
Author.objects.filter(alias=OuterRef(OuterRef("name")))
),
).filter(author_exists=True)
),
)
self.assertEqual(qs.get(has_author_alias_match=True), tag)
def test_exact_query_rhs_with_selected_columns(self):
newest_author = Author.objects.create(name="Author 2")
authors_max_ids = (
Author.objects.filter(
name="Author 2",
)
.values(
"name",
)
.annotate(
max_id=Max("id"),
)
.values("max_id")
)
authors = Author.objects.filter(id=authors_max_ids[:1])
self.assertEqual(authors.get(), newest_author)
def test_isnull_non_boolean_value(self):
msg = "The QuerySet value for an isnull lookup must be True or False."
tests = [
Author.objects.filter(alias__isnull=1),
Article.objects.filter(author__isnull=1),
Season.objects.filter(games__isnull=1),
Freebie.objects.filter(stock__isnull=1),
]
for qs in tests:
with self.subTest(qs=qs):
with self.assertRaisesMessage(ValueError, msg):
qs.exists()
def test_lookup_rhs(self):
product = Product.objects.create(name="GME", qty_target=5000)
stock_1 = Stock.objects.create(product=product, short=True, qty_available=180)
stock_2 = Stock.objects.create(product=product, short=False, qty_available=5100)
Stock.objects.create(product=product, short=False, qty_available=4000)
self.assertCountEqual(
Stock.objects.filter(short=Q(qty_available__lt=F("product__qty_target"))),
[stock_1, stock_2],
)
self.assertCountEqual(
Stock.objects.filter(
short=ExpressionWrapper(
Q(qty_available__lt=F("product__qty_target")),
output_field=BooleanField(),
)
),
[stock_1, stock_2],
)
class LookupQueryingTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.s1 = Season.objects.create(year=1942, gt=1942)
cls.s2 = Season.objects.create(year=1842, gt=1942, nulled_text_field="text")
cls.s3 = Season.objects.create(year=2042, gt=1942)
Game.objects.create(season=cls.s1, home="NY", away="Boston")
Game.objects.create(season=cls.s1, home="NY", away="Tampa")
Game.objects.create(season=cls.s3, home="Boston", away="Tampa")
def test_annotate(self):
qs = Season.objects.annotate(equal=Exact(F("year"), 1942))
self.assertCountEqual(
qs.values_list("year", "equal"),
((1942, True), (1842, False), (2042, False)),
)
def test_alias(self):
qs = Season.objects.alias(greater=GreaterThan(F("year"), 1910))
self.assertCountEqual(qs.filter(greater=True), [self.s1, self.s3])
def test_annotate_value_greater_than_value(self):
qs = Season.objects.annotate(greater=GreaterThan(Value(40), Value(30)))
self.assertCountEqual(
qs.values_list("year", "greater"),
((1942, True), (1842, True), (2042, True)),
)
def test_annotate_field_greater_than_field(self):
qs = Season.objects.annotate(greater=GreaterThan(F("year"), F("gt")))
self.assertCountEqual(
qs.values_list("year", "greater"),
((1942, False), (1842, False), (2042, True)),
)
def test_annotate_field_greater_than_value(self):
qs = Season.objects.annotate(greater=GreaterThan(F("year"), Value(1930)))
self.assertCountEqual(
qs.values_list("year", "greater"),
((1942, True), (1842, False), (2042, True)),
)
def test_annotate_field_greater_than_literal(self):
qs = Season.objects.annotate(greater=GreaterThan(F("year"), 1930))
self.assertCountEqual(
qs.values_list("year", "greater"),
((1942, True), (1842, False), (2042, True)),
)
def test_annotate_literal_greater_than_field(self):
qs = Season.objects.annotate(greater=GreaterThan(1930, F("year")))
self.assertCountEqual(
qs.values_list("year", "greater"),
((1942, False), (1842, True), (2042, False)),
)
def test_annotate_less_than_float(self):
qs = Season.objects.annotate(lesser=LessThan(F("year"), 1942.1))
self.assertCountEqual(
qs.values_list("year", "lesser"),
((1942, True), (1842, True), (2042, False)),
)
def test_annotate_greater_than_or_equal(self):
qs = Season.objects.annotate(greater=GreaterThanOrEqual(F("year"), 1942))
self.assertCountEqual(
qs.values_list("year", "greater"),
((1942, True), (1842, False), (2042, True)),
)
def test_annotate_greater_than_or_equal_float(self):
qs = Season.objects.annotate(greater=GreaterThanOrEqual(F("year"), 1942.1))
self.assertCountEqual(
qs.values_list("year", "greater"),
((1942, False), (1842, False), (2042, True)),
)
def test_combined_lookups(self):
expression = Exact(F("year"), 1942) | GreaterThan(F("year"), 1942)
qs = Season.objects.annotate(gte=expression)
self.assertCountEqual(
qs.values_list("year", "gte"),
((1942, True), (1842, False), (2042, True)),
)
def test_lookup_in_filter(self):
qs = Season.objects.filter(GreaterThan(F("year"), 1910))
self.assertCountEqual(qs, [self.s1, self.s3])
def test_isnull_lookup_in_filter(self):
self.assertSequenceEqual(
Season.objects.filter(IsNull(F("nulled_text_field"), False)),
[self.s2],
)
self.assertCountEqual(
Season.objects.filter(IsNull(F("nulled_text_field"), True)),
[self.s1, self.s3],
)
def test_filter_lookup_lhs(self):
qs = Season.objects.annotate(before_20=LessThan(F("year"), 2000)).filter(
before_20=LessThan(F("year"), 1900),
)
self.assertCountEqual(qs, [self.s2, self.s3])
def test_filter_wrapped_lookup_lhs(self):
qs = (
Season.objects.annotate(
before_20=ExpressionWrapper(
Q(year__lt=2000),
output_field=BooleanField(),
)
)
.filter(before_20=LessThan(F("year"), 1900))
.values_list("year", flat=True)
)
self.assertCountEqual(qs, [1842, 2042])
def test_filter_exists_lhs(self):
qs = Season.objects.annotate(
before_20=Exists(
Season.objects.filter(pk=OuterRef("pk"), year__lt=2000),
)
).filter(before_20=LessThan(F("year"), 1900))
self.assertCountEqual(qs, [self.s2, self.s3])
def test_filter_subquery_lhs(self):
qs = Season.objects.annotate(
before_20=Subquery(
Season.objects.filter(pk=OuterRef("pk")).values(
lesser=LessThan(F("year"), 2000),
),
)
).filter(before_20=LessThan(F("year"), 1900))
self.assertCountEqual(qs, [self.s2, self.s3])
def test_combined_lookups_in_filter(self):
expression = Exact(F("year"), 1942) | GreaterThan(F("year"), 1942)
qs = Season.objects.filter(expression)
self.assertCountEqual(qs, [self.s1, self.s3])
def test_combined_annotated_lookups_in_filter(self):
expression = Exact(F("year"), 1942) | GreaterThan(F("year"), 1942)
qs = Season.objects.annotate(gte=expression).filter(gte=True)
self.assertCountEqual(qs, [self.s1, self.s3])
def test_combined_annotated_lookups_in_filter_false(self):
expression = Exact(F("year"), 1942) | GreaterThan(F("year"), 1942)
qs = Season.objects.annotate(gte=expression).filter(gte=False)
self.assertSequenceEqual(qs, [self.s2])
def test_lookup_in_order_by(self):
qs = Season.objects.order_by(LessThan(F("year"), 1910), F("year"))
self.assertSequenceEqual(qs, [self.s1, self.s3, self.s2])
@skipUnlessDBFeature("supports_boolean_expr_in_select_clause")
def test_aggregate_combined_lookup(self):
expression = Cast(GreaterThan(F("year"), 1900), models.IntegerField())
qs = Season.objects.aggregate(modern=models.Sum(expression))
self.assertEqual(qs["modern"], 2)
def test_conditional_expression(self):
qs = Season.objects.annotate(
century=Case(
When(
GreaterThan(F("year"), 1900) & LessThanOrEqual(F("year"), 2000),
then=Value("20th"),
),
default=Value("other"),
)
).values("year", "century")
self.assertCountEqual(
qs,
[
{"year": 1942, "century": "20th"},
{"year": 1842, "century": "other"},
{"year": 2042, "century": "other"},
],
)
def test_multivalued_join_reuse(self):
self.assertEqual(
Season.objects.get(Exact(F("games__home"), "NY"), games__away="Boston"),
self.s1,
)
self.assertEqual(
Season.objects.get(Exact(F("games__home"), "NY") & Q(games__away="Boston")),
self.s1,
)
self.assertEqual(
Season.objects.get(
Exact(F("games__home"), "NY") & Exact(F("games__away"), "Boston")
),
self.s1,
)
|
943008c7eba26a618eb2e81bc4d5d44e96ce9d65c03db231e740bdf0c8a63d16 | """
The lookup API
This demonstrates features of the database API.
"""
from django.db import models
from django.db.models.lookups import IsNull
class Alarm(models.Model):
desc = models.CharField(max_length=100)
time = models.TimeField()
def __str__(self):
return "%s (%s)" % (self.time, self.desc)
class Author(models.Model):
name = models.CharField(max_length=100)
alias = models.CharField(max_length=50, null=True, blank=True)
bio = models.TextField(null=True)
class Meta:
ordering = ("name",)
class Article(models.Model):
headline = models.CharField(max_length=100)
pub_date = models.DateTimeField()
author = models.ForeignKey(Author, models.SET_NULL, blank=True, null=True)
slug = models.SlugField(unique=True, blank=True, null=True)
class Meta:
ordering = ("-pub_date", "headline")
def __str__(self):
return self.headline
class Tag(models.Model):
articles = models.ManyToManyField(Article)
name = models.CharField(max_length=100)
class Meta:
ordering = ("name",)
class NulledTextField(models.TextField):
def get_prep_value(self, value):
return None if value == "" else value
@NulledTextField.register_lookup
class NulledTransform(models.Transform):
lookup_name = "nulled"
template = "NULL"
@NulledTextField.register_lookup
class IsNullWithNoneAsRHS(IsNull):
lookup_name = "isnull_none_rhs"
can_use_none_as_rhs = True
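# Hedged note: a None right-hand side is normally rejected during lookup
# building (or, for exact lookups, rewritten to isnull); setting
# can_use_none_as_rhs lets this custom IsNull subclass receive None
# directly (exercised by LookupTests.test_custom_lookup_none_rhs).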
class Season(models.Model):
year = models.PositiveSmallIntegerField()
gt = models.IntegerField(null=True, blank=True)
nulled_text_field = NulledTextField(null=True)
class Meta:
constraints = [
models.UniqueConstraint(fields=["year"], name="season_year_unique"),
]
def __str__(self):
return str(self.year)
class Game(models.Model):
season = models.ForeignKey(Season, models.CASCADE, related_name="games")
home = models.CharField(max_length=100)
away = models.CharField(max_length=100)
class Player(models.Model):
name = models.CharField(max_length=100)
games = models.ManyToManyField(Game, related_name="players")
class Product(models.Model):
name = models.CharField(max_length=80)
qty_target = models.DecimalField(max_digits=6, decimal_places=2)
class Stock(models.Model):
product = models.ForeignKey(Product, models.CASCADE)
short = models.BooleanField(default=False)
qty_available = models.DecimalField(max_digits=6, decimal_places=2)
class Freebie(models.Model):
gift_product = models.ForeignKey(Product, models.CASCADE)
stock_id = models.IntegerField(blank=True, null=True)
stock = models.ForeignObject(
Stock,
from_fields=["stock_id", "gift_product"],
to_fields=["id", "product"],
on_delete=models.CASCADE,
)
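# The ForeignObject above declares a composite join: a Freebie row matches the
# Stock row whose (id, product) pair equals this row's (stock_id,
# gift_product). A hypothetical query such as
#   Freebie.objects.filter(stock__qty_available__gt=0)
# would therefore join on both column pairs rather than on a single foreign key.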
|
8419a70b6b7d771d40b8877534ba02c82197293f5cc51233564b2fd952682b0e | import subprocess
from pathlib import Path
from unittest import mock, skipUnless
from django.core.management import CommandError, call_command
from django.db import connection
from django.db.backends.sqlite3.client import DatabaseClient
from django.test import SimpleTestCase
class SqliteDbshellCommandTestCase(SimpleTestCase):
def settings_to_cmd_args_env(self, settings_dict, parameters=None):
if parameters is None:
parameters = []
return DatabaseClient.settings_to_cmd_args_env(settings_dict, parameters)
def test_path_name(self):
self.assertEqual(
self.settings_to_cmd_args_env({"NAME": Path("test.db.sqlite3")}),
(["sqlite3", Path("test.db.sqlite3")], None),
)
def test_parameters(self):
self.assertEqual(
self.settings_to_cmd_args_env({"NAME": "test.db.sqlite3"}, ["-help"]),
(["sqlite3", "test.db.sqlite3", "-help"], None),
)
@skipUnless(connection.vendor == "sqlite", "SQLite test")
def test_non_zero_exit_status_when_path_to_db_is_path(self):
sqlite_with_path = {
"ENGINE": "django.db.backends.sqlite3",
"NAME": Path("test.db.sqlite3"),
}
cmd_args = self.settings_to_cmd_args_env(sqlite_with_path)[0]
msg = '"sqlite3 test.db.sqlite3" returned non-zero exit status 1.'
with mock.patch(
"django.db.backends.sqlite3.client.DatabaseClient.runshell",
side_effect=subprocess.CalledProcessError(returncode=1, cmd=cmd_args),
), self.assertRaisesMessage(CommandError, msg):
call_command("dbshell")
|
5e039728a2158837f13658d90d9cf4a4cde8a19d9f03a6ee0e71abcd6abfdc33 | from django.core.files.uploadedfile import SimpleUploadedFile
from django.forms import ClearableFileInput, FileField, Form, MultiWidget
from .base import WidgetTest
class FakeFieldFile:
"""
Quacks like a FieldFile (has a .url and string representation), but
doesn't require us to care about storages etc.
"""
url = "something"
def __str__(self):
return self.url
class ClearableFileInputTest(WidgetTest):
def setUp(self):
self.widget = ClearableFileInput()
def test_clear_input_renders(self):
"""
        A ClearableFileInput with is_required=False, rendered with an initial
        value that is a file, renders a clear checkbox.
"""
self.check_html(
self.widget,
"myfile",
FakeFieldFile(),
html=(
"""
Currently: <a href="something">something</a>
<input type="checkbox" name="myfile-clear" id="myfile-clear_id">
<label for="myfile-clear_id">Clear</label><br>
Change: <input type="file" name="myfile">
"""
),
)
def test_html_escaped(self):
"""
A ClearableFileInput should escape name, filename, and URL
when rendering HTML (#15182).
"""
class StrangeFieldFile:
url = "something?chapter=1§=2©=3&lang=en"
def __str__(self):
return """something<div onclick="alert('oops')">.jpg"""
self.check_html(
ClearableFileInput(),
"my<div>file",
StrangeFieldFile(),
html=(
"""
Currently:
<a href="something?chapter=1&sect=2&copy=3&lang=en">
something<div onclick="alert('oops')">.jpg</a>
<input type="checkbox" name="my<div>file-clear"
id="my<div>file-clear_id">
<label for="my<div>file-clear_id">Clear</label><br>
Change: <input type="file" name="my<div>file">
"""
),
)
def test_clear_input_renders_only_if_not_required(self):
"""
A ClearableFileInput with is_required=True does not render a clear
checkbox.
"""
widget = ClearableFileInput()
widget.is_required = True
self.check_html(
widget,
"myfile",
FakeFieldFile(),
html=(
"""
Currently: <a href="something">something</a> <br>
Change: <input type="file" name="myfile">
"""
),
)
def test_clear_input_renders_only_if_initial(self):
"""
A ClearableFileInput instantiated with no initial value does not render
a clear checkbox.
"""
self.check_html(
self.widget, "myfile", None, html='<input type="file" name="myfile">'
)
def test_render_disabled(self):
self.check_html(
self.widget,
"myfile",
FakeFieldFile(),
attrs={"disabled": True},
html=(
'Currently: <a href="something">something</a>'
'<input type="checkbox" name="myfile-clear" '
'id="myfile-clear_id" disabled>'
'<label for="myfile-clear_id">Clear</label><br>'
'Change: <input type="file" name="myfile" disabled>'
),
)
def test_render_no_disabled(self):
class TestForm(Form):
clearable_file = FileField(
widget=self.widget, initial=FakeFieldFile(), required=False
)
form = TestForm()
with self.assertNoLogs("django.template", "DEBUG"):
form.render()
def test_render_as_subwidget(self):
"""A ClearableFileInput as a subwidget of MultiWidget."""
widget = MultiWidget(widgets=(self.widget,))
self.check_html(
widget,
"myfile",
[FakeFieldFile()],
html=(
"""
Currently: <a href="something">something</a>
<input type="checkbox" name="myfile_0-clear" id="myfile_0-clear_id">
<label for="myfile_0-clear_id">Clear</label><br>
Change: <input type="file" name="myfile_0">
"""
),
)
def test_clear_input_checked_returns_false(self):
"""
        ClearableFileInput.value_from_datadict returns False if the clear
        checkbox is checked and the field is not required.
"""
value = self.widget.value_from_datadict(
data={"myfile-clear": True},
files={},
name="myfile",
)
self.assertIs(value, False)
self.assertIs(self.widget.checked, True)
def test_clear_input_checked_returns_false_only_if_not_required(self):
"""
ClearableFileInput.value_from_datadict never returns False if the field
is required.
"""
widget = ClearableFileInput()
widget.is_required = True
field = SimpleUploadedFile("something.txt", b"content")
value = widget.value_from_datadict(
data={"myfile-clear": True},
files={"myfile": field},
name="myfile",
)
self.assertEqual(value, field)
self.assertIs(widget.checked, True)
def test_html_does_not_mask_exceptions(self):
"""
A ClearableFileInput should not mask exceptions produced while
checking that it has a value.
"""
class FailingURLFieldFile:
@property
def url(self):
raise ValueError("Canary")
def __str__(self):
return "value"
with self.assertRaisesMessage(ValueError, "Canary"):
self.widget.render("myfile", FailingURLFieldFile())
def test_url_as_property(self):
class URLFieldFile:
@property
def url(self):
return "https://www.python.org/"
def __str__(self):
return "value"
html = self.widget.render("myfile", URLFieldFile())
self.assertInHTML('<a href="https://www.python.org/">value</a>', html)
    def test_return_false_if_url_does_not_exist(self):
class NoURLFieldFile:
def __str__(self):
return "value"
html = self.widget.render("myfile", NoURLFieldFile())
self.assertHTMLEqual(html, '<input name="myfile" type="file">')
def test_use_required_attribute(self):
# False when initial data exists. The file input is left blank by the
# user to keep the existing, initial value.
self.assertIs(self.widget.use_required_attribute(None), True)
self.assertIs(self.widget.use_required_attribute("resume.txt"), False)
def test_value_omitted_from_data(self):
widget = ClearableFileInput()
self.assertIs(widget.value_omitted_from_data({}, {}, "field"), True)
self.assertIs(
widget.value_omitted_from_data({}, {"field": "x"}, "field"), False
)
self.assertIs(
widget.value_omitted_from_data({"field-clear": "y"}, {}, "field"), False
)
def test_fieldset(self):
class TestForm(Form):
template_name = "forms_tests/use_fieldset.html"
field = FileField(widget=self.widget)
with_file = FileField(widget=self.widget, initial=FakeFieldFile())
clearable_file = FileField(
widget=self.widget, initial=FakeFieldFile(), required=False
)
form = TestForm()
self.assertIs(self.widget.use_fieldset, False)
self.assertHTMLEqual(
'<div><label for="id_field">Field:</label>'
'<input id="id_field" name="field" type="file" required></div>'
'<div><label for="id_with_file">With file:</label>Currently: '
'<a href="something">something</a><br>Change:<input type="file" '
'name="with_file" id="id_with_file"></div>'
'<div><label for="id_clearable_file">Clearable file:</label>'
'Currently: <a href="something">something</a><input '
'type="checkbox" name="clearable_file-clear" id="clearable_file-clear_id">'
'<label for="clearable_file-clear_id">Clear</label><br>Change:'
'<input type="file" name="clearable_file" id="id_clearable_file"></div>',
form.render(),
)
def test_multiple_error(self):
msg = "ClearableFileInput doesn't support uploading multiple files."
with self.assertRaisesMessage(ValueError, msg):
ClearableFileInput(attrs={"multiple": True})
|
84e70075afb2921c6e69289f10b144a08eb00f6f879fb8d6fc7b97a3e88f6c22 | from django.core.files.uploadedfile import SimpleUploadedFile
from django.forms import FileField, FileInput, Form
from django.utils.datastructures import MultiValueDict
from .base import WidgetTest
class FileInputTest(WidgetTest):
widget = FileInput()
def test_render(self):
"""
FileInput widgets never render the value attribute. The old value
isn't useful if a form is updated or an error occurred.
"""
self.check_html(
self.widget,
"email",
"[email protected]",
html='<input type="file" name="email">',
)
self.check_html(
self.widget, "email", "", html='<input type="file" name="email">'
)
self.check_html(
self.widget, "email", None, html='<input type="file" name="email">'
)
def test_value_omitted_from_data(self):
self.assertIs(self.widget.value_omitted_from_data({}, {}, "field"), True)
self.assertIs(
self.widget.value_omitted_from_data({}, {"field": "value"}, "field"), False
)
def test_use_required_attribute(self):
# False when initial data exists. The file input is left blank by the
# user to keep the existing, initial value.
self.assertIs(self.widget.use_required_attribute(None), True)
self.assertIs(self.widget.use_required_attribute("resume.txt"), False)
def test_fieldset(self):
class TestForm(Form):
template_name = "forms_tests/use_fieldset.html"
field = FileField(widget=self.widget)
form = TestForm()
self.assertIs(self.widget.use_fieldset, False)
self.assertHTMLEqual(
'<div><label for="id_field">Field:</label><input id="id_field" '
'name="field" required type="file"></div>',
form.render(),
)
def test_multiple_error(self):
msg = "FileInput doesn't support uploading multiple files."
with self.assertRaisesMessage(ValueError, msg):
FileInput(attrs={"multiple": True})
def test_value_from_datadict_multiple(self):
class MultipleFileInput(FileInput):
allow_multiple_selected = True
file_1 = SimpleUploadedFile("something1.txt", b"content 1")
file_2 = SimpleUploadedFile("something2.txt", b"content 2")
# Uploading multiple files is allowed.
widget = MultipleFileInput(attrs={"multiple": True})
value = widget.value_from_datadict(
data={"name": "Test name"},
files=MultiValueDict({"myfile": [file_1, file_2]}),
name="myfile",
)
self.assertEqual(value, [file_1, file_2])
# Uploading multiple files is not allowed.
widget = FileInput()
value = widget.value_from_datadict(
data={"name": "Test name"},
files=MultiValueDict({"myfile": [file_1, file_2]}),
name="myfile",
)
self.assertEqual(value, file_2)
def test_multiple_default(self):
class MultipleFileInput(FileInput):
allow_multiple_selected = True
tests = [
(None, True),
({"class": "myclass"}, True),
({"multiple": False}, False),
]
for attrs, expected in tests:
with self.subTest(attrs=attrs):
widget = MultipleFileInput(attrs=attrs)
self.assertIs(widget.attrs["multiple"], expected)
|
2dabe9275f6f7bd94bfb3e4513f88bbf171d167abcf90a9d69e82d102b8e429f | import pickle
import unittest
from django.core.exceptions import ValidationError
from django.core.files.uploadedfile import SimpleUploadedFile
from django.core.validators import validate_image_file_extension
from django.forms import FileField, FileInput
from django.test import SimpleTestCase
try:
from PIL import Image # NOQA
except ImportError:
HAS_PILLOW = False
else:
HAS_PILLOW = True
class FileFieldTest(SimpleTestCase):
def test_filefield_1(self):
f = FileField()
with self.assertRaisesMessage(ValidationError, "'This field is required.'"):
f.clean("")
with self.assertRaisesMessage(ValidationError, "'This field is required.'"):
f.clean("", "")
self.assertEqual("files/test1.pdf", f.clean("", "files/test1.pdf"))
with self.assertRaisesMessage(ValidationError, "'This field is required.'"):
f.clean(None)
with self.assertRaisesMessage(ValidationError, "'This field is required.'"):
f.clean(None, "")
self.assertEqual("files/test2.pdf", f.clean(None, "files/test2.pdf"))
no_file_msg = "'No file was submitted. Check the encoding type on the form.'"
file = SimpleUploadedFile(None, b"")
file._name = ""
with self.assertRaisesMessage(ValidationError, no_file_msg):
f.clean(file)
with self.assertRaisesMessage(ValidationError, no_file_msg):
f.clean(file, "")
self.assertEqual("files/test3.pdf", f.clean(None, "files/test3.pdf"))
with self.assertRaisesMessage(ValidationError, no_file_msg):
f.clean("some content that is not a file")
with self.assertRaisesMessage(
ValidationError, "'The submitted file is empty.'"
):
f.clean(SimpleUploadedFile("name", None))
with self.assertRaisesMessage(
ValidationError, "'The submitted file is empty.'"
):
f.clean(SimpleUploadedFile("name", b""))
self.assertEqual(
SimpleUploadedFile,
type(f.clean(SimpleUploadedFile("name", b"Some File Content"))),
)
self.assertIsInstance(
f.clean(
SimpleUploadedFile(
"我隻氣墊船裝滿晒鱔.txt", "मेरी मँडराने वाली नाव सर्पमीनों से भरी ह".encode()
)
),
SimpleUploadedFile,
)
self.assertIsInstance(
f.clean(
SimpleUploadedFile("name", b"Some File Content"), "files/test4.pdf"
),
SimpleUploadedFile,
)
def test_filefield_2(self):
f = FileField(max_length=5)
with self.assertRaisesMessage(
ValidationError,
"'Ensure this filename has at most 5 characters (it has 18).'",
):
f.clean(SimpleUploadedFile("test_maxlength.txt", b"hello world"))
self.assertEqual("files/test1.pdf", f.clean("", "files/test1.pdf"))
self.assertEqual("files/test2.pdf", f.clean(None, "files/test2.pdf"))
self.assertIsInstance(
f.clean(SimpleUploadedFile("name", b"Some File Content")),
SimpleUploadedFile,
)
def test_filefield_3(self):
f = FileField(allow_empty_file=True)
self.assertIsInstance(
f.clean(SimpleUploadedFile("name", b"")), SimpleUploadedFile
)
def test_filefield_changed(self):
"""
The value of data will more than likely come from request.FILES. The
value of initial data will likely be a filename stored in the database.
        Since its value is of no use to a FileField, it is ignored.
"""
f = FileField()
# No file was uploaded and no initial data.
self.assertFalse(f.has_changed("", None))
# A file was uploaded and no initial data.
self.assertTrue(
f.has_changed("", {"filename": "resume.txt", "content": "My resume"})
)
# A file was not uploaded, but there is initial data
self.assertFalse(f.has_changed("resume.txt", None))
# A file was uploaded and there is initial data (file identity is not dealt
# with here)
self.assertTrue(
f.has_changed(
"resume.txt", {"filename": "resume.txt", "content": "My resume"}
)
)
def test_disabled_has_changed(self):
f = FileField(disabled=True)
self.assertIs(f.has_changed("x", "y"), False)
def test_file_picklable(self):
self.assertIsInstance(pickle.loads(pickle.dumps(FileField())), FileField)
class MultipleFileInput(FileInput):
allow_multiple_selected = True
class MultipleFileField(FileField):
def __init__(self, *args, **kwargs):
kwargs.setdefault("widget", MultipleFileInput())
super().__init__(*args, **kwargs)
def clean(self, data, initial=None):
single_file_clean = super().clean
if isinstance(data, (list, tuple)):
result = [single_file_clean(d, initial) for d in data]
else:
result = single_file_clean(data, initial)
return result
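# A minimal usage sketch (hypothetical, not part of this test module): the
# field/widget pair above implements the multiple-upload pattern, e.g.
#   class UploadForm(Form):
#       attachments = MultipleFileField(required=False)
# so that clean() returns a list of uploaded files when the browser submits
# several files under the same name.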
class MultipleFileFieldTest(SimpleTestCase):
def test_file_multiple(self):
f = MultipleFileField()
files = [
SimpleUploadedFile("name1", b"Content 1"),
SimpleUploadedFile("name2", b"Content 2"),
]
self.assertEqual(f.clean(files), files)
def test_file_multiple_empty(self):
f = MultipleFileField()
files = [
SimpleUploadedFile("empty", b""),
SimpleUploadedFile("nonempty", b"Some Content"),
]
msg = "'The submitted file is empty.'"
with self.assertRaisesMessage(ValidationError, msg):
f.clean(files)
with self.assertRaisesMessage(ValidationError, msg):
f.clean(files[::-1])
@unittest.skipUnless(HAS_PILLOW, "Pillow not installed")
def test_file_multiple_validation(self):
f = MultipleFileField(validators=[validate_image_file_extension])
good_files = [
SimpleUploadedFile("image1.jpg", b"fake JPEG"),
SimpleUploadedFile("image2.png", b"faux image"),
SimpleUploadedFile("image3.bmp", b"fraudulent bitmap"),
]
self.assertEqual(f.clean(good_files), good_files)
evil_files = [
SimpleUploadedFile("image1.sh", b"#!/bin/bash -c 'echo pwned!'\n"),
SimpleUploadedFile("image2.png", b"faux image"),
SimpleUploadedFile("image3.jpg", b"fake JPEG"),
]
evil_rotations = (
evil_files[i:] + evil_files[:i] # Rotate by i.
for i in range(len(evil_files))
)
msg = "File extension “sh” is not allowed. Allowed extensions are: "
for rotated_evil_files in evil_rotations:
with self.assertRaisesMessage(ValidationError, msg):
f.clean(rotated_evil_files)
|
1b87a500fb7a29eecac91dfdb7774977fb15324698906a92dd94e04933504798 | import os
import unittest
from django.forms.renderers import (
BaseRenderer,
DjangoDivFormRenderer,
DjangoTemplates,
Jinja2,
Jinja2DivFormRenderer,
TemplatesSetting,
)
from django.test import SimpleTestCase, ignore_warnings
from django.utils.deprecation import RemovedInDjango60Warning
try:
import jinja2
except ImportError:
jinja2 = None
class SharedTests:
expected_widget_dir = "templates"
def test_installed_apps_template_found(self):
"""Can find a custom template in INSTALLED_APPS."""
renderer = self.renderer()
        # Found because forms_tests is an installed app.
tpl = renderer.get_template("forms_tests/custom_widget.html")
expected_path = os.path.abspath(
os.path.join(
os.path.dirname(__file__),
"..",
self.expected_widget_dir + "/forms_tests/custom_widget.html",
)
)
self.assertEqual(tpl.origin.name, expected_path)
class BaseTemplateRendererTests(SimpleTestCase):
def test_get_renderer(self):
with self.assertRaisesMessage(
NotImplementedError, "subclasses must implement get_template()"
):
BaseRenderer().get_template("")
class DjangoTemplatesTests(SharedTests, SimpleTestCase):
renderer = DjangoTemplates
@unittest.skipIf(jinja2 is None, "jinja2 required")
class Jinja2Tests(SharedTests, SimpleTestCase):
renderer = Jinja2
expected_widget_dir = "jinja2"
class TemplatesSettingTests(SharedTests, SimpleTestCase):
renderer = TemplatesSetting
class DeprecationTests(SimpleTestCase):
def test_django_div_renderer_warning(self):
msg = (
"The DjangoDivFormRenderer transitional form renderer is deprecated. Use "
"DjangoTemplates instead."
)
with self.assertRaisesMessage(RemovedInDjango60Warning, msg):
DjangoDivFormRenderer()
def test_jinja2_div_renderer_warning(self):
msg = (
"The Jinja2DivFormRenderer transitional form renderer is deprecated. Use "
"Jinja2 instead."
)
with self.assertRaisesMessage(RemovedInDjango60Warning, msg):
Jinja2DivFormRenderer()
@ignore_warnings(category=RemovedInDjango60Warning)
def test_deprecation_renderers_can_be_instantiated(self):
tests = [DjangoDivFormRenderer, Jinja2DivFormRenderer]
for cls in tests:
with self.subTest(renderer_class=cls):
renderer = cls()
self.assertIsInstance(renderer, cls)
|
2c06cb77e9734a17108558c35fcca61bbbe7d8159df35910b5142d7d8cd11253 | from django.template.defaultfilters import join
from django.test import SimpleTestCase
from django.utils.safestring import mark_safe
from ..utils import setup
class JoinTests(SimpleTestCase):
@setup({"join01": '{{ a|join:", " }}'})
def test_join01(self):
output = self.engine.render_to_string("join01", {"a": ["alpha", "beta & me"]})
self.assertEqual(output, "alpha, beta & me")
@setup({"join02": '{% autoescape off %}{{ a|join:", " }}{% endautoescape %}'})
def test_join02(self):
output = self.engine.render_to_string("join02", {"a": ["alpha", "beta & me"]})
self.assertEqual(output, "alpha, beta & me")
@setup({"join03": '{{ a|join:" & " }}'})
def test_join03(self):
output = self.engine.render_to_string("join03", {"a": ["alpha", "beta & me"]})
self.assertEqual(output, "alpha & beta & me")
@setup({"join04": '{% autoescape off %}{{ a|join:" & " }}{% endautoescape %}'})
def test_join04(self):
output = self.engine.render_to_string("join04", {"a": ["alpha", "beta & me"]})
self.assertEqual(output, "alpha & beta & me")
# Joining with unsafe joiners doesn't result in unsafe strings.
@setup({"join05": "{{ a|join:var }}"})
def test_join05(self):
output = self.engine.render_to_string(
"join05", {"a": ["alpha", "beta & me"], "var": " & "}
)
self.assertEqual(output, "alpha & beta & me")
@setup({"join06": "{{ a|join:var }}"})
def test_join06(self):
output = self.engine.render_to_string(
"join06", {"a": ["alpha", "beta & me"], "var": mark_safe(" & ")}
)
self.assertEqual(output, "alpha & beta & me")
@setup({"join07": "{{ a|join:var|lower }}"})
def test_join07(self):
output = self.engine.render_to_string(
"join07", {"a": ["Alpha", "Beta & me"], "var": " & "}
)
self.assertEqual(output, "alpha & beta & me")
@setup({"join08": "{{ a|join:var|lower }}"})
def test_join08(self):
output = self.engine.render_to_string(
"join08", {"a": ["Alpha", "Beta & me"], "var": mark_safe(" & ")}
)
self.assertEqual(output, "alpha & beta & me")
@setup(
{
"join_autoescape_off": (
"{% autoescape off %}"
"{{ var_list|join:var_joiner }}"
"{% endautoescape %}"
),
}
)
def test_join_autoescape_off(self):
var_list = ["<p>Hello World!</p>", "beta & me", "<script>Hi!</script>"]
context = {"var_list": var_list, "var_joiner": "<br/>"}
output = self.engine.render_to_string("join_autoescape_off", context)
expected_result = "<p>Hello World!</p><br/>beta & me<br/><script>Hi!</script>"
self.assertEqual(output, expected_result)
class FunctionTests(SimpleTestCase):
def test_list(self):
self.assertEqual(join([0, 1, 2], "glue"), "0glue1glue2")
def test_autoescape(self):
self.assertEqual(
join(["<a>", "<img>", "</a>"], "<br>"),
"<a><br><img><br></a>",
)
def test_autoescape_off(self):
self.assertEqual(
join(["<a>", "<img>", "</a>"], "<br>", autoescape=False),
"<a><br><img><br></a>",
)
def test_noniterable_arg(self):
obj = object()
self.assertEqual(join(obj, "<br>"), obj)
def test_noniterable_arg_autoescape_off(self):
obj = object()
self.assertEqual(join(obj, "<br>", autoescape=False), obj)
|
77903e290b76392b6ba043fa5a0255ac27a20cc0102fae4f18367f1bdb522e13 | from django.db import connection
from django.db.models import CharField, Value
from django.db.models.functions import Length, Reverse, Trim
from django.test import TestCase
from django.test.utils import register_lookup
from ..models import Author
class ReverseTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.john = Author.objects.create(name="John Smith", alias="smithj")
cls.elena = Author.objects.create(name="Élena Jordan", alias="elena")
cls.python = Author.objects.create(name="パイソン")
def test_null(self):
author = Author.objects.annotate(backward=Reverse("alias")).get(
pk=self.python.pk
)
self.assertEqual(
author.backward,
"" if connection.features.interprets_empty_strings_as_nulls else None,
)
def test_basic(self):
authors = Author.objects.annotate(
backward=Reverse("name"),
constant=Reverse(Value("static string")),
)
self.assertQuerySetEqual(
authors,
[
("John Smith", "htimS nhoJ", "gnirts citats"),
("Élena Jordan", "nadroJ anelÉ", "gnirts citats"),
("パイソン", "ンソイパ", "gnirts citats"),
],
lambda a: (a.name, a.backward, a.constant),
ordered=False,
)
def test_transform(self):
with register_lookup(CharField, Reverse):
authors = Author.objects.all()
self.assertCountEqual(
authors.filter(name__reverse=self.john.name[::-1]), [self.john]
)
self.assertCountEqual(
authors.exclude(name__reverse=self.john.name[::-1]),
[self.elena, self.python],
)
def test_expressions(self):
author = Author.objects.annotate(backward=Reverse(Trim("name"))).get(
pk=self.john.pk
)
self.assertEqual(author.backward, self.john.name[::-1])
with register_lookup(CharField, Reverse), register_lookup(CharField, Length):
authors = Author.objects.all()
self.assertCountEqual(
authors.filter(name__reverse__length__gt=7), [self.john, self.elena]
)
self.assertCountEqual(
authors.exclude(name__reverse__length__gt=7), [self.python]
)
|
6ac8805334c8841b7af8105924147930e1e5aaf4731550b3ed24c3821e53410a | """
Sphinx plugins for Django documentation.
"""
import json
import os
import re
from docutils import nodes
from docutils.parsers.rst import Directive
from docutils.statemachine import ViewList
from sphinx import addnodes
from sphinx.builders.html import StandaloneHTMLBuilder
from sphinx.directives.code import CodeBlock
from sphinx.domains.std import Cmdoption
from sphinx.errors import ExtensionError
from sphinx.util import logging
from sphinx.util.console import bold
from sphinx.writers.html import HTMLTranslator
logger = logging.getLogger(__name__)
# RE for option descriptions without a '--' prefix
simple_option_desc_re = re.compile(r"([-_a-zA-Z0-9]+)(\s*.*?)(?=,\s+(?:/|-|--)|$)")
def setup(app):
app.add_crossref_type(
directivename="setting",
rolename="setting",
indextemplate="pair: %s; setting",
)
app.add_crossref_type(
directivename="templatetag",
rolename="ttag",
indextemplate="pair: %s; template tag",
)
app.add_crossref_type(
directivename="templatefilter",
rolename="tfilter",
indextemplate="pair: %s; template filter",
)
app.add_crossref_type(
directivename="fieldlookup",
rolename="lookup",
indextemplate="pair: %s; field lookup type",
)
app.add_object_type(
directivename="django-admin",
rolename="djadmin",
indextemplate="pair: %s; django-admin command",
parse_node=parse_django_admin_node,
)
app.add_directive("django-admin-option", Cmdoption)
app.add_config_value("django_next_version", "0.0", True)
app.add_directive("versionadded", VersionDirective)
app.add_directive("versionchanged", VersionDirective)
app.add_builder(DjangoStandaloneHTMLBuilder)
app.set_translator("djangohtml", DjangoHTMLTranslator)
app.set_translator("json", DjangoHTMLTranslator)
app.add_node(
ConsoleNode,
html=(visit_console_html, None),
latex=(visit_console_dummy, depart_console_dummy),
man=(visit_console_dummy, depart_console_dummy),
text=(visit_console_dummy, depart_console_dummy),
texinfo=(visit_console_dummy, depart_console_dummy),
)
app.add_directive("console", ConsoleDirective)
app.connect("html-page-context", html_page_context_hook)
app.add_role("default-role-error", default_role_error)
return {"parallel_read_safe": True}
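# Hypothetical wiring (not part of this module): a project's Sphinx conf.py
# enables everything registered above by listing the extension, e.g.
#   extensions = ["djangodocs"]
# after which roles such as :setting:`...`, :ttag:`...`, :tfilter:`...`, and
# :lookup:`...` become available in the reST sources.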
class VersionDirective(Directive):
has_content = True
required_arguments = 1
optional_arguments = 1
final_argument_whitespace = True
option_spec = {}
def run(self):
if len(self.arguments) > 1:
msg = """Only one argument accepted for directive '{directive_name}::'.
Comments should be provided as content,
not as an extra argument.""".format(
directive_name=self.name
)
raise self.error(msg)
env = self.state.document.settings.env
ret = []
node = addnodes.versionmodified()
ret.append(node)
if self.arguments[0] == env.config.django_next_version:
node["version"] = "Development version"
else:
node["version"] = self.arguments[0]
node["type"] = self.name
if self.content:
self.state.nested_parse(self.content, self.content_offset, node)
try:
env.get_domain("changeset").note_changeset(node)
except ExtensionError:
# Sphinx < 1.8: Domain 'changeset' is not registered
env.note_versionchange(node["type"], node["version"], node, self.lineno)
return ret
class DjangoHTMLTranslator(HTMLTranslator):
"""
Django-specific reST to HTML tweaks.
"""
# Don't use border=1, which docutils does by default.
def visit_table(self, node):
self.context.append(self.compact_p)
self.compact_p = True
# Needed by Sphinx.
self._table_row_indices.append(0)
self.body.append(self.starttag(node, "table", CLASS="docutils"))
def depart_table(self, node):
self.compact_p = self.context.pop()
self._table_row_indices.pop()
self.body.append("</table>\n")
def visit_desc_parameterlist(self, node):
self.body.append("(") # by default sphinx puts <big> around the "("
self.first_param = 1
self.optional_param_level = 0
self.param_separator = node.child_text_separator
self.required_params_left = sum(
isinstance(c, addnodes.desc_parameter) for c in node.children
)
def depart_desc_parameterlist(self, node):
self.body.append(")")
#
# Turn the "new in version" stuff (versionadded/versionchanged) into a
# better callout -- the Sphinx default is just a little span,
    # which is a bit less obvious than I'd like.
#
# FIXME: these messages are all hardcoded in English. We need to change
# that to accommodate other language docs, but I can't work out how to make
# that work.
#
version_text = {
"versionchanged": "Changed in Django %s",
"versionadded": "New in Django %s",
}
def visit_versionmodified(self, node):
self.body.append(self.starttag(node, "div", CLASS=node["type"]))
version_text = self.version_text.get(node["type"])
if version_text:
title = "%s%s" % (version_text % node["version"], ":" if len(node) else ".")
self.body.append('<span class="title">%s</span> ' % title)
def depart_versionmodified(self, node):
self.body.append("</div>\n")
# Give each section a unique ID -- nice for custom CSS hooks
def visit_section(self, node):
old_ids = node.get("ids", [])
node["ids"] = ["s-" + i for i in old_ids]
node["ids"].extend(old_ids)
super().visit_section(node)
node["ids"] = old_ids
def parse_django_admin_node(env, sig, signode):
command = sig.split(" ")[0]
env.ref_context["std:program"] = command
title = "django-admin %s" % sig
signode += addnodes.desc_name(title, title)
return command
class DjangoStandaloneHTMLBuilder(StandaloneHTMLBuilder):
"""
Subclass to add some extra things we need.
"""
name = "djangohtml"
def finish(self):
super().finish()
logger.info(bold("writing templatebuiltins.js..."))
xrefs = self.env.domaindata["std"]["objects"]
templatebuiltins = {
"ttags": [
n
for ((t, n), (k, a)) in xrefs.items()
if t == "templatetag" and k == "ref/templates/builtins"
],
"tfilters": [
n
for ((t, n), (k, a)) in xrefs.items()
if t == "templatefilter" and k == "ref/templates/builtins"
],
}
outfilename = os.path.join(self.outdir, "templatebuiltins.js")
with open(outfilename, "w") as fp:
fp.write("var django_template_builtins = ")
json.dump(templatebuiltins, fp)
fp.write(";\n")
class ConsoleNode(nodes.literal_block):
"""
Custom node to override the visit/depart event handlers at registration
time. Wrap a literal_block object and defer to it.
"""
tagname = "ConsoleNode"
def __init__(self, litblk_obj):
self.wrapped = litblk_obj
    def __getattr__(self, attr):
        if attr == "wrapped":
            # __getattr__() is only called when normal attribute lookup
            # fails, so "wrapped" is genuinely missing here; raise
            # AttributeError explicitly instead of the obscure
            # self.__dict__.wrapped, which only worked because attribute
            # access on a plain dict happens to raise AttributeError too.
            raise AttributeError(attr)
        return getattr(self.wrapped, attr)
def visit_console_dummy(self, node):
"""Defer to the corresponding parent's handler."""
self.visit_literal_block(node)
def depart_console_dummy(self, node):
"""Defer to the corresponding parent's handler."""
self.depart_literal_block(node)
def visit_console_html(self, node):
"""Generate HTML for the console directive."""
if self.builder.name in ("djangohtml", "json") and node["win_console_text"]:
        # Put a mark on the document object signaling that the directive
        # has been used on it.
self.document._console_directive_used_flag = True
uid = node["uid"]
self.body.append(
"""\
<div class="console-block" id="console-block-%(id)s">
<input class="c-tab-unix" id="c-tab-%(id)s-unix" type="radio" name="console-%(id)s" \
checked>
<label for="c-tab-%(id)s-unix" title="Linux/macOS">/</label>
<input class="c-tab-win" id="c-tab-%(id)s-win" type="radio" name="console-%(id)s">
<label for="c-tab-%(id)s-win" title="Windows"></label>
<section class="c-content-unix" id="c-content-%(id)s-unix">\n"""
% {"id": uid}
)
try:
self.visit_literal_block(node)
except nodes.SkipNode:
pass
self.body.append("</section>\n")
self.body.append(
'<section class="c-content-win" id="c-content-%(id)s-win">\n' % {"id": uid}
)
win_text = node["win_console_text"]
highlight_args = {"force": True}
linenos = node.get("linenos", False)
def warner(msg):
self.builder.warn(msg, (self.builder.current_docname, node.line))
highlighted = self.highlighter.highlight_block(
win_text, "doscon", warn=warner, linenos=linenos, **highlight_args
)
self.body.append(highlighted)
self.body.append("</section>\n")
self.body.append("</div>\n")
raise nodes.SkipNode
else:
self.visit_literal_block(node)
class ConsoleDirective(CodeBlock):
"""
A reStructuredText directive which renders a two-tab code block in which
the second tab shows a Windows command line equivalent of the usual
Unix-oriented examples.
"""
required_arguments = 0
# The 'doscon' Pygments formatter needs a prompt like this. '>' alone
# won't do it because then it simply paints the whole command line as a
# gray comment with no highlighting at all.
WIN_PROMPT = r"...\> "
def run(self):
def args_to_win(cmdline):
changed = False
out = []
for token in cmdline.split():
if token[:2] == "./":
token = token[2:]
changed = True
elif token[:2] == "~/":
token = "%HOMEPATH%\\" + token[2:]
changed = True
elif token == "make":
token = "make.bat"
changed = True
if "://" not in token and "git" not in cmdline:
out.append(token.replace("/", "\\"))
changed = True
else:
out.append(token)
if changed:
return " ".join(out)
return cmdline
def cmdline_to_win(line):
if line.startswith("# "):
return "REM " + args_to_win(line[2:])
if line.startswith("$ # "):
return "REM " + args_to_win(line[4:])
if line.startswith("$ ./manage.py"):
return "manage.py " + args_to_win(line[13:])
if line.startswith("$ manage.py"):
return "manage.py " + args_to_win(line[11:])
if line.startswith("$ ./runtests.py"):
return "runtests.py " + args_to_win(line[15:])
if line.startswith("$ ./"):
return args_to_win(line[4:])
if line.startswith("$ python3"):
return "py " + args_to_win(line[9:])
if line.startswith("$ python"):
return "py " + args_to_win(line[8:])
if line.startswith("$ "):
return args_to_win(line[2:])
return None
def code_block_to_win(content):
bchanged = False
lines = []
for line in content:
modline = cmdline_to_win(line)
if modline is None:
lines.append(line)
else:
lines.append(self.WIN_PROMPT + modline)
bchanged = True
if bchanged:
return ViewList(lines)
return None
env = self.state.document.settings.env
self.arguments = ["console"]
lit_blk_obj = super().run()[0]
        # Only do work when the djangohtml HTML Sphinx builder is being used;
        # invoke the default behavior for the rest.
if env.app.builder.name not in ("djangohtml", "json"):
return [lit_blk_obj]
lit_blk_obj["uid"] = str(env.new_serialno("console"))
# Only add the tabbed UI if there is actually a Windows-specific
# version of the CLI example.
win_content = code_block_to_win(self.content)
if win_content is None:
lit_blk_obj["win_console_text"] = None
else:
self.content = win_content
lit_blk_obj["win_console_text"] = super().run()[0].rawsource
# Replace the literal_node object returned by Sphinx's CodeBlock with
# the ConsoleNode wrapper.
return [ConsoleNode(lit_blk_obj)]
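# A sketch of the transformation above (illustrative input/output): a reST
# block such as
#   .. console::
#
#       $ python -m pip install Django
# keeps the Unix tab verbatim, while cmdline_to_win() rewrites the command for
# the Windows tab as "...\> py -m pip install Django".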
def html_page_context_hook(app, pagename, templatename, context, doctree):
# Put a bool on the context used to render the template. It's used to
# control inclusion of console-tabs.css and activation of the JavaScript.
    # This way it's included only in HTML files rendered from reST files where
    # the ConsoleDirective is used.
context["include_console_assets"] = getattr(
doctree, "_console_directive_used_flag", False
)
def default_role_error(
name, rawtext, text, lineno, inliner, options=None, content=None
):
msg = (
"Default role used (`single backticks`): %s. Did you mean to use two "
"backticks for ``code``, or miss an underscore for a `link`_ ?" % rawtext
)
logger.warning(msg, location=(inliner.document.current_source, lineno))
return [nodes.Text(text)], []
|
3564d1a53600018dbde393dd139819d057b69832c007d896383e98566ff0f4f9 | """Default variable filters."""
import random as random_module
import re
import types
import warnings
from decimal import ROUND_HALF_UP, Context, Decimal, InvalidOperation, getcontext
from functools import wraps
from inspect import unwrap
from operator import itemgetter
from pprint import pformat
from urllib.parse import quote
from django.utils import formats
from django.utils.dateformat import format, time_format
from django.utils.deprecation import RemovedInDjango51Warning
from django.utils.encoding import iri_to_uri
from django.utils.html import avoid_wrapping, conditional_escape, escape, escapejs
from django.utils.html import json_script as _json_script
from django.utils.html import linebreaks, strip_tags
from django.utils.html import urlize as _urlize
from django.utils.safestring import SafeData, mark_safe
from django.utils.text import Truncator, normalize_newlines, phone2numeric
from django.utils.text import slugify as _slugify
from django.utils.text import wrap
from django.utils.timesince import timesince, timeuntil
from django.utils.translation import gettext, ngettext
from .base import VARIABLE_ATTRIBUTE_SEPARATOR
from .library import Library
register = Library()
#######################
# STRING DECORATOR #
#######################
def stringfilter(func):
"""
Decorator for filters which should only receive strings. The object
passed as the first positional argument will be converted to a string.
"""
@wraps(func)
def _dec(first, *args, **kwargs):
first = str(first)
result = func(first, *args, **kwargs)
if isinstance(first, SafeData) and getattr(unwrap(func), "is_safe", False):
result = mark_safe(result)
return result
return _dec
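# A minimal sketch of the decorator in use (the filter name is hypothetical):
#   @register.filter(is_safe=True)
#   @stringfilter
#   def shout(value):
#       return value.upper()
# The incoming value is coerced with str() before the filter body runs, and
# the result is re-marked safe when a safe string goes into an is_safe filter.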
###################
# STRINGS #
###################
@register.filter(is_safe=True)
@stringfilter
def addslashes(value):
"""
Add slashes before quotes. Useful for escaping strings in CSV, for
example. Less useful for escaping JavaScript; use the ``escapejs``
filter instead.
"""
return value.replace("\\", "\\\\").replace('"', '\\"').replace("'", "\\'")
@register.filter(is_safe=True)
@stringfilter
def capfirst(value):
"""Capitalize the first character of the value."""
return value and value[0].upper() + value[1:]
@register.filter("escapejs")
@stringfilter
def escapejs_filter(value):
"""Hex encode characters for use in JavaScript strings."""
return escapejs(value)
@register.filter(is_safe=True)
def json_script(value, element_id=None):
"""
Output value JSON-encoded, wrapped in a <script type="application/json">
tag (with an optional id).
"""
return _json_script(value, element_id)
@register.filter(is_safe=True)
def floatformat(text, arg=-1):
"""
Display a float to a specified number of decimal places.
If called without an argument, display the floating point number with one
decimal place -- but only if there's a decimal place to be displayed:
* num1 = 34.23234
* num2 = 34.00000
* num3 = 34.26000
* {{ num1|floatformat }} displays "34.2"
* {{ num2|floatformat }} displays "34"
* {{ num3|floatformat }} displays "34.3"
If arg is positive, always display exactly arg number of decimal places:
* {{ num1|floatformat:3 }} displays "34.232"
* {{ num2|floatformat:3 }} displays "34.000"
* {{ num3|floatformat:3 }} displays "34.260"
If arg is negative, display arg number of decimal places -- but only if
there are places to be displayed:
* {{ num1|floatformat:"-3" }} displays "34.232"
* {{ num2|floatformat:"-3" }} displays "34"
* {{ num3|floatformat:"-3" }} displays "34.260"
If arg has the 'g' suffix, force the result to be grouped by the
THOUSAND_SEPARATOR for the active locale. When the active locale is
en (English):
* {{ 6666.6666|floatformat:"2g" }} displays "6,666.67"
* {{ 10000|floatformat:"g" }} displays "10,000"
If arg has the 'u' suffix, force the result to be unlocalized. When the
active locale is pl (Polish):
* {{ 66666.6666|floatformat:"2" }} displays "66666,67"
* {{ 66666.6666|floatformat:"2u" }} displays "66666.67"
If the input float is infinity or NaN, display the string representation
of that value.
"""
force_grouping = False
use_l10n = True
if isinstance(arg, str):
last_char = arg[-1]
if arg[-2:] in {"gu", "ug"}:
force_grouping = True
use_l10n = False
arg = arg[:-2] or -1
elif last_char == "g":
force_grouping = True
arg = arg[:-1] or -1
elif last_char == "u":
use_l10n = False
arg = arg[:-1] or -1
try:
input_val = str(text)
d = Decimal(input_val)
except InvalidOperation:
try:
d = Decimal(str(float(text)))
except (ValueError, InvalidOperation, TypeError):
return ""
try:
p = int(arg)
except ValueError:
return input_val
try:
m = int(d) - d
except (ValueError, OverflowError, InvalidOperation):
return input_val
if not m and p <= 0:
return mark_safe(
formats.number_format(
"%d" % (int(d)),
0,
use_l10n=use_l10n,
force_grouping=force_grouping,
)
)
exp = Decimal(1).scaleb(-abs(p))
# Set the precision high enough to avoid an exception (#15789).
tupl = d.as_tuple()
units = len(tupl[1])
units += -tupl[2] if m else tupl[2]
prec = abs(p) + units + 1
prec = max(getcontext().prec, prec)
# Avoid conversion to scientific notation by accessing `sign`, `digits`,
# and `exponent` from Decimal.as_tuple() directly.
rounded_d = d.quantize(exp, ROUND_HALF_UP, Context(prec=prec))
sign, digits, exponent = rounded_d.as_tuple()
digits = [str(digit) for digit in reversed(digits)]
while len(digits) <= abs(exponent):
digits.append("0")
digits.insert(-exponent, ".")
if sign and rounded_d:
digits.append("-")
number = "".join(reversed(digits))
return mark_safe(
formats.number_format(
number,
abs(p),
use_l10n=use_l10n,
force_grouping=force_grouping,
)
)
@register.filter(is_safe=True)
@stringfilter
def iriencode(value):
"""Escape an IRI value for use in a URL."""
return iri_to_uri(value)
@register.filter(is_safe=True, needs_autoescape=True)
@stringfilter
def linenumbers(value, autoescape=True):
"""Display text with line numbers."""
lines = value.split("\n")
    # Find the maximum width of the line count, for use with the zero-padded
    # string format below.
width = str(len(str(len(lines))))
if not autoescape or isinstance(value, SafeData):
for i, line in enumerate(lines):
lines[i] = ("%0" + width + "d. %s") % (i + 1, line)
else:
for i, line in enumerate(lines):
lines[i] = ("%0" + width + "d. %s") % (i + 1, escape(line))
return mark_safe("\n".join(lines))
@register.filter(is_safe=True)
@stringfilter
def lower(value):
"""Convert a string into all lowercase."""
return value.lower()
@register.filter(is_safe=False)
@stringfilter
def make_list(value):
"""
Return the value turned into a list.
For an integer, it's a list of digits.
For a string, it's a list of characters.
"""
return list(value)
@register.filter(is_safe=True)
@stringfilter
def slugify(value):
"""
Convert to ASCII. Convert spaces to hyphens. Remove characters that aren't
alphanumerics, underscores, or hyphens. Convert to lowercase. Also strip
leading and trailing whitespace.
"""
return _slugify(value)
@register.filter(is_safe=True)
def stringformat(value, arg):
"""
Format the variable according to the arg, a string formatting specifier.
This specifier uses Python string formatting syntax, with the exception
that the leading "%" is dropped.
See https://docs.python.org/library/stdtypes.html#printf-style-string-formatting
for documentation of Python string formatting.
"""
if isinstance(value, tuple):
value = str(value)
try:
return ("%" + str(arg)) % value
except (ValueError, TypeError):
return ""
@register.filter(is_safe=True)
@stringfilter
def title(value):
"""Convert a string into titlecase."""
t = re.sub("([a-z])'([A-Z])", lambda m: m[0].lower(), value.title())
return re.sub(r"\d([A-Z])", lambda m: m[0].lower(), t)
@register.filter(is_safe=True)
@stringfilter
def truncatechars(value, arg):
"""Truncate a string after `arg` number of characters."""
try:
length = int(arg)
except ValueError: # Invalid literal for int().
return value # Fail silently.
return Truncator(value).chars(length)
@register.filter(is_safe=True)
@stringfilter
def truncatechars_html(value, arg):
"""
Truncate HTML after `arg` number of chars.
Preserve newlines in the HTML.
"""
try:
length = int(arg)
except ValueError: # invalid literal for int()
return value # Fail silently.
return Truncator(value).chars(length, html=True)
@register.filter(is_safe=True)
@stringfilter
def truncatewords(value, arg):
"""
Truncate a string after `arg` number of words.
Remove newlines within the string.
"""
try:
length = int(arg)
except ValueError: # Invalid literal for int().
return value # Fail silently.
return Truncator(value).words(length, truncate=" …")
@register.filter(is_safe=True)
@stringfilter
def truncatewords_html(value, arg):
"""
Truncate HTML after `arg` number of words.
Preserve newlines in the HTML.
"""
try:
length = int(arg)
except ValueError: # invalid literal for int()
return value # Fail silently.
return Truncator(value).words(length, html=True, truncate=" …")
@register.filter(is_safe=False)
@stringfilter
def upper(value):
"""Convert a string into all uppercase."""
return value.upper()
@register.filter(is_safe=False)
@stringfilter
def urlencode(value, safe=None):
"""
Escape a value for use in a URL.
The ``safe`` parameter determines the characters which should not be
escaped by Python's quote() function. If not provided, use the default safe
characters (but an empty string can be provided when *all* characters
should be escaped).
"""
kwargs = {}
if safe is not None:
kwargs["safe"] = safe
return quote(value, **kwargs)
@register.filter(is_safe=True, needs_autoescape=True)
@stringfilter
def urlize(value, autoescape=True):
"""Convert URLs in plain text into clickable links."""
return mark_safe(_urlize(value, nofollow=True, autoescape=autoescape))
@register.filter(is_safe=True, needs_autoescape=True)
@stringfilter
def urlizetrunc(value, limit, autoescape=True):
"""
Convert URLs into clickable links, truncating URLs to the given character
limit, and adding 'rel=nofollow' attribute to discourage spamming.
Argument: Length to truncate URLs to.
"""
return mark_safe(
_urlize(value, trim_url_limit=int(limit), nofollow=True, autoescape=autoescape)
)
@register.filter(is_safe=False)
@stringfilter
def wordcount(value):
"""Return the number of words."""
return len(value.split())
@register.filter(is_safe=True)
@stringfilter
def wordwrap(value, arg):
"""Wrap words at `arg` line length."""
return wrap(value, int(arg))
@register.filter(is_safe=True)
@stringfilter
def ljust(value, arg):
"""Left-align the value in a field of a given width."""
return value.ljust(int(arg))
@register.filter(is_safe=True)
@stringfilter
def rjust(value, arg):
"""Right-align the value in a field of a given width."""
return value.rjust(int(arg))
@register.filter(is_safe=True)
@stringfilter
def center(value, arg):
"""Center the value in a field of a given width."""
return value.center(int(arg))
@register.filter
@stringfilter
def cut(value, arg):
"""Remove all values of arg from the given string."""
safe = isinstance(value, SafeData)
value = value.replace(arg, "")
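    # Removing ";" from a safe string could truncate escaped entities (e.g.
    # "&amp;" would become "&amp"), so the result is not re-marked safe then.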
if safe and arg != ";":
return mark_safe(value)
return value
###################
# HTML STRINGS #
###################
@register.filter("escape", is_safe=True)
@stringfilter
def escape_filter(value):
"""Mark the value as a string that should be auto-escaped."""
return conditional_escape(value)
@register.filter(is_safe=True)
def escapeseq(value):
"""
An "escape" filter for sequences. Mark each element in the sequence,
individually, as a string that should be auto-escaped. Return a list with
the results.
"""
return [conditional_escape(obj) for obj in value]
@register.filter(is_safe=True)
@stringfilter
def force_escape(value):
"""
Escape a string's HTML. Return a new string containing the escaped
characters (as opposed to "escape", which marks the content for later
possible escaping).
"""
return escape(value)
@register.filter("linebreaks", is_safe=True, needs_autoescape=True)
@stringfilter
def linebreaks_filter(value, autoescape=True):
"""
Replace line breaks in plain text with appropriate HTML; a single
newline becomes an HTML line break (``<br>``) and a new line
followed by a blank line becomes a paragraph break (``</p>``).
"""
autoescape = autoescape and not isinstance(value, SafeData)
return mark_safe(linebreaks(value, autoescape))
@register.filter(is_safe=True, needs_autoescape=True)
@stringfilter
def linebreaksbr(value, autoescape=True):
"""
Convert all newlines in a piece of plain text to HTML line breaks
(``<br>``).
"""
autoescape = autoescape and not isinstance(value, SafeData)
value = normalize_newlines(value)
if autoescape:
value = escape(value)
return mark_safe(value.replace("\n", "<br>"))
@register.filter(is_safe=True)
@stringfilter
def safe(value):
"""Mark the value as a string that should not be auto-escaped."""
return mark_safe(value)
@register.filter(is_safe=True)
def safeseq(value):
"""
A "safe" filter for sequences. Mark each element in the sequence,
individually, as safe, after converting them to strings. Return a list
with the results.
"""
return [mark_safe(obj) for obj in value]
@register.filter(is_safe=True)
@stringfilter
def striptags(value):
"""Strip all [X]HTML tags."""
return strip_tags(value)
###################
# LISTS #
###################
def _property_resolver(arg):
"""
When arg is convertible to float, behave like operator.itemgetter(arg)
Otherwise, chain __getitem__() and getattr().
>>> _property_resolver(1)('abc')
'b'
>>> _property_resolver('1')('abc')
Traceback (most recent call last):
...
TypeError: string indices must be integers
>>> class Foo:
... a = 42
... b = 3.14
... c = 'Hey!'
>>> _property_resolver('b')(Foo())
3.14
"""
try:
float(arg)
except ValueError:
if VARIABLE_ATTRIBUTE_SEPARATOR + "_" in arg or arg[0] == "_":
raise AttributeError("Access to private variables is forbidden.")
parts = arg.split(VARIABLE_ATTRIBUTE_SEPARATOR)
def resolve(value):
for part in parts:
try:
value = value[part]
except (AttributeError, IndexError, KeyError, TypeError, ValueError):
value = getattr(value, part)
return value
return resolve
else:
return itemgetter(arg)
@register.filter(is_safe=False)
def dictsort(value, arg):
"""
Given a list of dicts, return that list sorted by the property given in
the argument.
"""
try:
return sorted(value, key=_property_resolver(arg))
except (AttributeError, TypeError):
return ""
@register.filter(is_safe=False)
def dictsortreversed(value, arg):
"""
Given a list of dicts, return that list sorted in reverse order by the
property given in the argument.
"""
try:
return sorted(value, key=_property_resolver(arg), reverse=True)
except (AttributeError, TypeError):
return ""
@register.filter(is_safe=False)
def first(value):
"""Return the first item in a list."""
try:
return value[0]
except IndexError:
return ""
@register.filter(is_safe=True, needs_autoescape=True)
def join(value, arg, autoescape=True):
"""Join a list with a string, like Python's ``str.join(list)``."""
try:
if autoescape:
data = conditional_escape(arg).join([conditional_escape(v) for v in value])
else:
data = arg.join(value)
except TypeError: # Fail silently if arg isn't iterable.
return value
return mark_safe(data)
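# Illustrative behavior (matching the autoescape branch above):
#   join(["<a>", "<img>"], "<br>") returns "&lt;a&gt;&lt;br&gt;&lt;img&gt;"
#   join(["<a>", "<img>"], "<br>", autoescape=False) returns "<a><br><img>"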
@register.filter(is_safe=True)
def last(value):
"""Return the last item in a list."""
try:
return value[-1]
except IndexError:
return ""
@register.filter(is_safe=False)
def length(value):
"""Return the length of the value - useful for lists."""
try:
return len(value)
except (ValueError, TypeError):
return 0
@register.filter(is_safe=False)
def length_is(value, arg):
"""Return a boolean of whether the value's length is the argument."""
warnings.warn(
"The length_is template filter is deprecated in favor of the length template "
"filter and the == operator within an {% if %} tag.",
RemovedInDjango51Warning,
)
try:
return len(value) == int(arg)
except (ValueError, TypeError):
return ""
@register.filter(is_safe=True)
def random(value):
"""Return a random item from the list."""
try:
return random_module.choice(value)
except IndexError:
return ""
@register.filter("slice", is_safe=True)
def slice_filter(value, arg):
"""
Return a slice of the list using the same syntax as Python's list slicing.
"""
try:
bits = []
for x in str(arg).split(":"):
if not x:
bits.append(None)
else:
bits.append(int(x))
return value[slice(*bits)]
except (ValueError, TypeError):
return value # Fail silently.
@register.filter(is_safe=True, needs_autoescape=True)
def unordered_list(value, autoescape=True):
"""
Recursively take a self-nested list and return an HTML unordered list --
WITHOUT opening and closing <ul> tags.
Assume the list is in the proper format. For example, if ``var`` contains:
``['States', ['Kansas', ['Lawrence', 'Topeka'], 'Illinois']]``, then
``{{ var|unordered_list }}`` returns::
<li>States
<ul>
<li>Kansas
<ul>
<li>Lawrence</li>
<li>Topeka</li>
</ul>
</li>
<li>Illinois</li>
</ul>
</li>
"""
if autoescape:
escaper = conditional_escape
else:
def escaper(x):
return x
def walk_items(item_list):
item_iterator = iter(item_list)
try:
item = next(item_iterator)
while True:
try:
next_item = next(item_iterator)
except StopIteration:
yield item, None
break
if isinstance(next_item, (list, tuple, types.GeneratorType)):
try:
iter(next_item)
except TypeError:
pass
else:
yield item, next_item
item = next(item_iterator)
continue
yield item, None
item = next_item
except StopIteration:
pass
def list_formatter(item_list, tabs=1):
indent = "\t" * tabs
output = []
for item, children in walk_items(item_list):
sublist = ""
if children:
sublist = "\n%s<ul>\n%s\n%s</ul>\n%s" % (
indent,
list_formatter(children, tabs + 1),
indent,
indent,
)
output.append("%s<li>%s%s</li>" % (indent, escaper(item), sublist))
return "\n".join(output)
return mark_safe(list_formatter(value))
###################
# INTEGERS #
###################
@register.filter(is_safe=False)
def add(value, arg):
"""Add the arg to the value."""
try:
return int(value) + int(arg)
except (ValueError, TypeError):
try:
return value + arg
except Exception:
return ""
@register.filter(is_safe=False)
def get_digit(value, arg):
"""
Given a whole number, return the requested digit of it, where 1 is the
right-most digit, 2 is the second-right-most digit, etc. Return the
original value for invalid input (if input or argument is not an integer,
or if argument is less than 1). Otherwise, output is always an integer.
"""
try:
arg = int(arg)
value = int(value)
except ValueError:
return value # Fail silently for an invalid argument
if arg < 1:
return value
try:
return int(str(value)[-arg])
except IndexError:
return 0
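# Illustrative: get_digit(123456789, 2) returns 8, get_digit(123, 5) returns 0,
# and get_digit("xyz", 1) returns "xyz" unchanged (invalid input).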
###################
# DATES #
###################
@register.filter(expects_localtime=True, is_safe=False)
def date(value, arg=None):
"""Format a date according to the given format."""
if value in (None, ""):
return ""
try:
return formats.date_format(value, arg)
except AttributeError:
try:
return format(value, arg)
except AttributeError:
return ""
@register.filter(expects_localtime=True, is_safe=False)
def time(value, arg=None):
"""Format a time according to the given format."""
if value in (None, ""):
return ""
try:
return formats.time_format(value, arg)
except (AttributeError, TypeError):
try:
return time_format(value, arg)
except (AttributeError, TypeError):
return ""
@register.filter("timesince", is_safe=False)
def timesince_filter(value, arg=None):
"""Format a date as the time since that date (i.e. "4 days, 6 hours")."""
if not value:
return ""
try:
if arg:
return timesince(value, arg)
return timesince(value)
except (ValueError, TypeError):
return ""
@register.filter("timeuntil", is_safe=False)
def timeuntil_filter(value, arg=None):
"""Format a date as the time until that date (i.e. "4 days, 6 hours")."""
if not value:
return ""
try:
return timeuntil(value, arg)
except (ValueError, TypeError):
return ""
###################
# LOGIC #
###################
@register.filter(is_safe=False)
def default(value, arg):
"""If value is unavailable, use given default."""
return value or arg
@register.filter(is_safe=False)
def default_if_none(value, arg):
"""If value is None, use given default."""
if value is None:
return arg
return value
@register.filter(is_safe=False)
def divisibleby(value, arg):
"""Return True if the value is divisible by the argument."""
return int(value) % int(arg) == 0
@register.filter(is_safe=False)
def yesno(value, arg=None):
"""
Given a string mapping values for true, false, and (optionally) None,
return one of those strings according to the value:
========== ====================== ==================================
Value Argument Outputs
========== ====================== ==================================
``True`` ``"yeah,no,maybe"`` ``yeah``
``False`` ``"yeah,no,maybe"`` ``no``
``None`` ``"yeah,no,maybe"`` ``maybe``
``None`` ``"yeah,no"`` ``"no"`` (converts None to False
                                        if no mapping for None is given).
========== ====================== ==================================
"""
if arg is None:
# Translators: Please do not add spaces around commas.
arg = gettext("yes,no,maybe")
bits = arg.split(",")
if len(bits) < 2:
return value # Invalid arg.
try:
yes, no, maybe = bits
except ValueError:
# Unpack list of wrong size (no "maybe" value provided).
yes, no, maybe = bits[0], bits[1], bits[1]
if value is None:
return maybe
if value:
return yes
return no
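# A minimal sketch (not part of Django) of yesno()'s mapping behavior. An
# explicit arg avoids the translated default, so no i18n setup is needed.
def _yesno_examples():
    assert yesno(True, "yeah,no,maybe") == "yeah"
    assert yesno(False, "yeah,no,maybe") == "no"
    assert yesno(None, "yeah,no,maybe") == "maybe"
    assert yesno(None, "yeah,no") == "no"  # None falls back to the false value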
###################
# MISC #
###################
@register.filter(is_safe=True)
def filesizeformat(bytes_):
"""
Format the value like a 'human-readable' file size (e.g. 13 KB, 4.1 MB,
102 bytes, etc.).
"""
try:
bytes_ = int(bytes_)
except (TypeError, ValueError, UnicodeDecodeError):
value = ngettext("%(size)d byte", "%(size)d bytes", 0) % {"size": 0}
return avoid_wrapping(value)
def filesize_number_format(value):
return formats.number_format(round(value, 1), 1)
KB = 1 << 10
MB = 1 << 20
GB = 1 << 30
TB = 1 << 40
PB = 1 << 50
negative = bytes_ < 0
if negative:
bytes_ = -bytes_ # Allow formatting of negative numbers.
if bytes_ < KB:
value = ngettext("%(size)d byte", "%(size)d bytes", bytes_) % {"size": bytes_}
elif bytes_ < MB:
value = gettext("%s KB") % filesize_number_format(bytes_ / KB)
elif bytes_ < GB:
value = gettext("%s MB") % filesize_number_format(bytes_ / MB)
elif bytes_ < TB:
value = gettext("%s GB") % filesize_number_format(bytes_ / GB)
elif bytes_ < PB:
value = gettext("%s TB") % filesize_number_format(bytes_ / TB)
else:
value = gettext("%s PB") % filesize_number_format(bytes_ / PB)
if negative:
value = "-%s" % value
return avoid_wrapping(value)
@register.filter(is_safe=False)
def pluralize(value, arg="s"):
"""
Return a plural suffix if the value is not 1, '1', or an object of
length 1. By default, use 's' as the suffix:
* If value is 0, vote{{ value|pluralize }} displays "votes".
* If value is 1, vote{{ value|pluralize }} displays "vote".
* If value is 2, vote{{ value|pluralize }} displays "votes".
If an argument is provided, use that string instead:
* If value is 0, class{{ value|pluralize:"es" }} displays "classes".
* If value is 1, class{{ value|pluralize:"es" }} displays "class".
* If value is 2, class{{ value|pluralize:"es" }} displays "classes".
If the provided argument contains a comma, use the text before the comma
for the singular case and the text after the comma for the plural case:
* If value is 0, cand{{ value|pluralize:"y,ies" }} displays "candies".
* If value is 1, cand{{ value|pluralize:"y,ies" }} displays "candy".
* If value is 2, cand{{ value|pluralize:"y,ies" }} displays "candies".
"""
if "," not in arg:
arg = "," + arg
bits = arg.split(",")
if len(bits) > 2:
return ""
singular_suffix, plural_suffix = bits[:2]
try:
return singular_suffix if float(value) == 1 else plural_suffix
except ValueError: # Invalid string that's not a number.
pass
except TypeError: # Value isn't a string or a number; maybe it's a list?
try:
return singular_suffix if len(value) == 1 else plural_suffix
except TypeError: # len() of unsized object.
pass
return ""
@register.filter("phone2numeric", is_safe=True)
def phone2numeric_filter(value):
"""Take a phone number and converts it in to its numerical equivalent."""
return phone2numeric(value)
@register.filter(is_safe=True)
def pprint(value):
"""A wrapper around pprint.pprint -- for debugging, really."""
try:
return pformat(value)
except Exception as e:
return "Error in formatting: %s: %s" % (e.__class__.__name__, e)
|
29c8f543e68f4c12c6316fe491e52579189888464283bba83ff51ec64972c900 | from functools import wraps
from asgiref.sync import iscoroutinefunction
def no_append_slash(view_func):
"""
Mark a view function as excluded from CommonMiddleware's APPEND_SLASH
redirection.
"""
# view_func.should_append_slash = False would also work, but decorators are
# nicer if they don't have side effects, so return a new function.
if iscoroutinefunction(view_func):
async def _view_wrapper(request, *args, **kwargs):
return await view_func(request, *args, **kwargs)
else:
def _view_wrapper(request, *args, **kwargs):
return view_func(request, *args, **kwargs)
_view_wrapper.should_append_slash = False
return wraps(view_func)(_view_wrapper)
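# A minimal sketch (hypothetical view; not part of Django) showing the marker
# attribute that CommonMiddleware inspects.
def _no_append_slash_demo():
    @no_append_slash
    def ping(request):
        return "pong"
    assert ping.should_append_slash is False
    return ping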
|
8cddc4d91232ec863998ada6cc84e04467b7c0dfc7bcba6f42f1e446233444f2 | """
Decorators for views based on HTTP headers.
"""
import datetime
from functools import wraps
from django.http import HttpResponseNotAllowed
from django.middleware.http import ConditionalGetMiddleware
from django.utils import timezone
from django.utils.cache import get_conditional_response
from django.utils.decorators import decorator_from_middleware
from django.utils.http import http_date, quote_etag
from django.utils.log import log_response
conditional_page = decorator_from_middleware(ConditionalGetMiddleware)
def require_http_methods(request_method_list):
"""
Decorator to make a view only accept particular request methods. Usage::
@require_http_methods(["GET", "POST"])
def my_view(request):
# I can assume now that only GET or POST requests make it this far
# ...
Note that request methods should be in uppercase.
"""
def decorator(func):
@wraps(func)
def inner(request, *args, **kwargs):
if request.method not in request_method_list:
response = HttpResponseNotAllowed(request_method_list)
log_response(
"Method Not Allowed (%s): %s",
request.method,
request.path,
response=response,
request=request,
)
return response
return func(request, *args, **kwargs)
return inner
return decorator
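# Hedged usage sketch (hypothetical view, not part of this module): requests
# using any other method receive a 405 response with an Allow header.
def _require_http_methods_demo():
    from django.http import HttpResponse

    @require_http_methods(["GET", "POST"])
    def my_view(request):
        return HttpResponse("OK")

    return my_view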
require_GET = require_http_methods(["GET"])
require_GET.__doc__ = "Decorator to require that a view only accepts the GET method."
require_POST = require_http_methods(["POST"])
require_POST.__doc__ = "Decorator to require that a view only accepts the POST method."
require_safe = require_http_methods(["GET", "HEAD"])
require_safe.__doc__ = (
"Decorator to require that a view only accepts safe methods: GET and HEAD."
)
def condition(etag_func=None, last_modified_func=None):
"""
Decorator to support conditional retrieval (or change) for a view
function.
The parameters are callables to compute the ETag and last modified time for
the requested resource, respectively. The callables are passed the same
parameters as the view itself. The ETag function should return a string (or
None if the resource doesn't exist), while the last_modified function
should return a datetime object (or None if the resource doesn't exist).
The ETag function should return a complete ETag, including quotes (e.g.
'"etag"'), since that's the only way to distinguish between weak and strong
ETags. If an unquoted ETag is returned (e.g. 'etag'), it will be converted
to a strong ETag by adding quotes.
This decorator will either pass control to the wrapped view function or
return an HTTP 304 response (unmodified) or 412 response (precondition
failed), depending upon the request method. In either case, the decorator
will add the generated ETag and Last-Modified headers to the response if
the headers aren't already set and if the request's method is safe.
"""
def decorator(func):
@wraps(func)
def inner(request, *args, **kwargs):
# Compute values (if any) for the requested resource.
res_last_modified = None
if last_modified_func:
if dt := last_modified_func(request, *args, **kwargs):
if not timezone.is_aware(dt):
dt = timezone.make_aware(dt, datetime.timezone.utc)
res_last_modified = int(dt.timestamp())
# The value from etag_func() could be quoted or unquoted.
res_etag = etag_func(request, *args, **kwargs) if etag_func else None
res_etag = quote_etag(res_etag) if res_etag is not None else None
response = get_conditional_response(
request,
etag=res_etag,
last_modified=res_last_modified,
)
if response is None:
response = func(request, *args, **kwargs)
# Set relevant headers on the response if they don't already exist
# and if the request method is safe.
if request.method in ("GET", "HEAD"):
if res_last_modified and not response.has_header("Last-Modified"):
response.headers["Last-Modified"] = http_date(res_last_modified)
if res_etag:
response.headers.setdefault("ETag", res_etag)
return response
return inner
return decorator
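# A minimal sketch (hypothetical ETag function and view; not part of Django)
# of wiring condition() to a cheap ETag computation.
def _condition_demo():
    from django.http import HttpResponse

    def article_etag(request, pk):
        # Quotes are added by quote_etag() if missing.
        return f'"article-{pk}"'

    @condition(etag_func=article_etag)
    def article_detail(request, pk):
        return HttpResponse(f"article {pk}")

    return article_detail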
# Shortcut decorators for common cases based on ETag or Last-Modified only
def etag(etag_func):
return condition(etag_func=etag_func)
def last_modified(last_modified_func):
return condition(last_modified_func=last_modified_func)
|
1ef880dc3c9d24faa913c828c2fd6502721d2cc4d2469457b8b167e77afe4356 | from django.apps.registry import Apps
from django.db import DatabaseError, models
from django.utils.functional import classproperty
from django.utils.timezone import now
from .exceptions import MigrationSchemaMissing
class MigrationRecorder:
"""
Deal with storing migration records in the database.
Because this table is itself used for dealing with model creation, it's
the one thing we can't manage normally via migrations. We manually handle
table creation/schema updating (using the schema backend) and then have a
floating model to do queries with.
If a migration is unapplied its row is removed from the table. Having
a row in the table always means a migration is applied.
"""
_migration_class = None
@classproperty
def Migration(cls):
"""
Lazy load to avoid AppRegistryNotReady if installed apps import
MigrationRecorder.
"""
if cls._migration_class is None:
class Migration(models.Model):
app = models.CharField(max_length=255)
name = models.CharField(max_length=255)
applied = models.DateTimeField(default=now)
class Meta:
apps = Apps()
app_label = "migrations"
db_table = "django_migrations"
def __str__(self):
return "Migration %s for %s" % (self.name, self.app)
cls._migration_class = Migration
return cls._migration_class
def __init__(self, connection):
self.connection = connection
self._has_table = False
@property
def migration_qs(self):
return self.Migration.objects.using(self.connection.alias)
def has_table(self):
"""Return True if the django_migrations table exists."""
# If the migrations table has already been confirmed to exist, don't
# recheck its existence.
if self._has_table:
return True
# It hasn't been confirmed to exist, recheck.
with self.connection.cursor() as cursor:
tables = self.connection.introspection.table_names(cursor)
self._has_table = self.Migration._meta.db_table in tables
return self._has_table
def ensure_schema(self):
"""Ensure the table exists and has the correct schema."""
# If the table's there, that's fine - we've never changed its schema
# in the codebase.
if self.has_table():
return
# Make the table
try:
with self.connection.schema_editor() as editor:
editor.create_model(self.Migration)
except DatabaseError as exc:
raise MigrationSchemaMissing(
"Unable to create the django_migrations table (%s)" % exc
)
def applied_migrations(self):
"""
Return a dict mapping (app_name, migration_name) to Migration instances
for all applied migrations.
"""
if self.has_table():
return {
(migration.app, migration.name): migration
for migration in self.migration_qs
}
else:
# If the django_migrations table doesn't exist, then no migrations
# are applied.
return {}
def record_applied(self, app, name):
"""Record that a migration was applied."""
self.ensure_schema()
self.migration_qs.create(app=app, name=name)
def record_unapplied(self, app, name):
"""Record that a migration was unapplied."""
self.ensure_schema()
self.migration_qs.filter(app=app, name=name).delete()
def flush(self):
"""Delete all migration records. Useful for testing migrations."""
self.migration_qs.all().delete()
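# Hedged usage sketch (not executed here; requires a real database
# connection, e.g. django.db.connection):
def _recorder_demo(connection):
    recorder = MigrationRecorder(connection)
    recorder.ensure_schema()
    recorder.record_applied("myapp", "0001_initial")
    return ("myapp", "0001_initial") in recorder.applied_migrations()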
|
28a633ecacf32e8a8449c081783abb9b6466a5d84d576b52be2d8faa6c710880 | """
Classes to represent the definitions of aggregate functions.
"""
from django.core.exceptions import FieldError, FullResultSet
from django.db.models.expressions import Case, Func, Star, Value, When
from django.db.models.fields import IntegerField
from django.db.models.functions.comparison import Coalesce
from django.db.models.functions.mixins import (
FixDurationInputMixin,
NumericOutputFieldMixin,
)
__all__ = [
"Aggregate",
"Avg",
"Count",
"Max",
"Min",
"StdDev",
"Sum",
"Variance",
]
class Aggregate(Func):
template = "%(function)s(%(distinct)s%(expressions)s)"
contains_aggregate = True
name = None
filter_template = "%s FILTER (WHERE %%(filter)s)"
window_compatible = True
allow_distinct = False
empty_result_set_value = None
def __init__(
self, *expressions, distinct=False, filter=None, default=None, **extra
):
if distinct and not self.allow_distinct:
raise TypeError("%s does not allow distinct." % self.__class__.__name__)
if default is not None and self.empty_result_set_value is not None:
raise TypeError(f"{self.__class__.__name__} does not allow default.")
self.distinct = distinct
self.filter = filter
self.default = default
super().__init__(*expressions, **extra)
def get_source_fields(self):
# Don't return the filter expression since it's not a source field.
return [e._output_field_or_none for e in super().get_source_expressions()]
def get_source_expressions(self):
source_expressions = super().get_source_expressions()
if self.filter:
return source_expressions + [self.filter]
return source_expressions
def set_source_expressions(self, exprs):
self.filter = self.filter and exprs.pop()
return super().set_source_expressions(exprs)
def resolve_expression(
self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False
):
# Aggregates are not allowed in UPDATE queries, so ignore for_save
c = super().resolve_expression(query, allow_joins, reuse, summarize)
c.filter = c.filter and c.filter.resolve_expression(
query, allow_joins, reuse, summarize
)
if summarize:
# Summarized aggregates cannot refer to summarized aggregates.
for ref in c.get_refs():
if query.annotations[ref].is_summary:
raise FieldError(
f"Cannot compute {c.name}('{ref}'): '{ref}' is an aggregate"
)
elif not self.is_summary:
# Call Aggregate.get_source_expressions() to avoid
# returning self.filter and including that in this loop.
expressions = super(Aggregate, c).get_source_expressions()
for index, expr in enumerate(expressions):
if expr.contains_aggregate:
before_resolved = self.get_source_expressions()[index]
name = (
before_resolved.name
if hasattr(before_resolved, "name")
else repr(before_resolved)
)
raise FieldError(
"Cannot compute %s('%s'): '%s' is an aggregate"
% (c.name, name, name)
)
if (default := c.default) is None:
return c
if hasattr(default, "resolve_expression"):
default = default.resolve_expression(query, allow_joins, reuse, summarize)
if default._output_field_or_none is None:
default.output_field = c._output_field_or_none
else:
default = Value(default, c._output_field_or_none)
c.default = None # Reset the default argument before wrapping.
coalesce = Coalesce(c, default, output_field=c._output_field_or_none)
coalesce.is_summary = c.is_summary
return coalesce
@property
def default_alias(self):
expressions = self.get_source_expressions()
if len(expressions) == 1 and hasattr(expressions[0], "name"):
return "%s__%s" % (expressions[0].name, self.name.lower())
raise TypeError("Complex expressions require an alias")
def get_group_by_cols(self):
return []
def as_sql(self, compiler, connection, **extra_context):
extra_context["distinct"] = "DISTINCT " if self.distinct else ""
if self.filter:
if connection.features.supports_aggregate_filter_clause:
try:
filter_sql, filter_params = self.filter.as_sql(compiler, connection)
except FullResultSet:
pass
else:
template = self.filter_template % extra_context.get(
"template", self.template
)
sql, params = super().as_sql(
compiler,
connection,
template=template,
filter=filter_sql,
**extra_context,
)
return sql, (*params, *filter_params)
else:
copy = self.copy()
copy.filter = None
source_expressions = copy.get_source_expressions()
condition = When(self.filter, then=source_expressions[0])
copy.set_source_expressions([Case(condition)] + source_expressions[1:])
return super(Aggregate, copy).as_sql(
compiler, connection, **extra_context
)
return super().as_sql(compiler, connection, **extra_context)
def _get_repr_options(self):
options = super()._get_repr_options()
if self.distinct:
options["distinct"] = self.distinct
if self.filter:
options["filter"] = self.filter
return options
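# Illustrative sketch (hypothetical field names): default=0 is implemented by
# wrapping the resolved aggregate in Coalesce() in resolve_expression() above,
# and filter= compiles to FILTER (WHERE ...) when the backend supports it.
def _aggregate_demo():
    from django.db.models import Q

    return Sum("amount", default=0, filter=Q(status="paid"))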
class Avg(FixDurationInputMixin, NumericOutputFieldMixin, Aggregate):
function = "AVG"
name = "Avg"
allow_distinct = True
class Count(Aggregate):
function = "COUNT"
name = "Count"
output_field = IntegerField()
allow_distinct = True
empty_result_set_value = 0
def __init__(self, expression, filter=None, **extra):
if expression == "*":
expression = Star()
if isinstance(expression, Star) and filter is not None:
raise ValueError("Star cannot be used with filter. Please specify a field.")
super().__init__(expression, filter=filter, **extra)
class Max(Aggregate):
function = "MAX"
name = "Max"
class Min(Aggregate):
function = "MIN"
name = "Min"
class StdDev(NumericOutputFieldMixin, Aggregate):
name = "StdDev"
def __init__(self, expression, sample=False, **extra):
self.function = "STDDEV_SAMP" if sample else "STDDEV_POP"
super().__init__(expression, **extra)
def _get_repr_options(self):
return {**super()._get_repr_options(), "sample": self.function == "STDDEV_SAMP"}
class Sum(FixDurationInputMixin, Aggregate):
function = "SUM"
name = "Sum"
allow_distinct = True
class Variance(NumericOutputFieldMixin, Aggregate):
name = "Variance"
def __init__(self, expression, sample=False, **extra):
self.function = "VAR_SAMP" if sample else "VAR_POP"
super().__init__(expression, **extra)
def _get_repr_options(self):
return {**super()._get_repr_options(), "sample": self.function == "VAR_SAMP"}
|
bb50785feb0fd4229378bceae68ee7058006e60670db439ffb8d3a5feb5bf774 | import copy
import datetime
import functools
import inspect
from collections import defaultdict
from decimal import Decimal
from types import NoneType
from uuid import UUID
from django.core.exceptions import EmptyResultSet, FieldError, FullResultSet
from django.db import DatabaseError, NotSupportedError, connection
from django.db.models import fields
from django.db.models.constants import LOOKUP_SEP
from django.db.models.query_utils import Q
from django.utils.deconstruct import deconstructible
from django.utils.functional import cached_property
from django.utils.hashable import make_hashable
class SQLiteNumericMixin:
"""
Some expressions with output_field=DecimalField() must be cast to
numeric to be properly filtered.
"""
def as_sqlite(self, compiler, connection, **extra_context):
sql, params = self.as_sql(compiler, connection, **extra_context)
try:
if self.output_field.get_internal_type() == "DecimalField":
sql = "CAST(%s AS NUMERIC)" % sql
except FieldError:
pass
return sql, params
class Combinable:
"""
Provide the ability to combine one or two objects with
some connector. For example F('foo') + F('bar').
"""
# Arithmetic connectors
ADD = "+"
SUB = "-"
MUL = "*"
DIV = "/"
POW = "^"
# The following is a quoted % operator - it is quoted because it can be
# used in strings that also have parameter substitution.
MOD = "%%"
# Bitwise operators - note that these are generated by .bitand()
# and .bitor(), the '&' and '|' are reserved for boolean operator
# usage.
BITAND = "&"
BITOR = "|"
BITLEFTSHIFT = "<<"
BITRIGHTSHIFT = ">>"
BITXOR = "#"
def _combine(self, other, connector, reversed):
if not hasattr(other, "resolve_expression"):
# everything must be resolvable to an expression
other = Value(other)
if reversed:
return CombinedExpression(other, connector, self)
return CombinedExpression(self, connector, other)
#############
# OPERATORS #
#############
def __neg__(self):
return self._combine(-1, self.MUL, False)
def __add__(self, other):
return self._combine(other, self.ADD, False)
def __sub__(self, other):
return self._combine(other, self.SUB, False)
def __mul__(self, other):
return self._combine(other, self.MUL, False)
def __truediv__(self, other):
return self._combine(other, self.DIV, False)
def __mod__(self, other):
return self._combine(other, self.MOD, False)
def __pow__(self, other):
return self._combine(other, self.POW, False)
def __and__(self, other):
if getattr(self, "conditional", False) and getattr(other, "conditional", False):
return Q(self) & Q(other)
raise NotImplementedError(
"Use .bitand(), .bitor(), and .bitxor() for bitwise logical operations."
)
def bitand(self, other):
return self._combine(other, self.BITAND, False)
def bitleftshift(self, other):
return self._combine(other, self.BITLEFTSHIFT, False)
def bitrightshift(self, other):
return self._combine(other, self.BITRIGHTSHIFT, False)
def __xor__(self, other):
if getattr(self, "conditional", False) and getattr(other, "conditional", False):
return Q(self) ^ Q(other)
raise NotImplementedError(
"Use .bitand(), .bitor(), and .bitxor() for bitwise logical operations."
)
def bitxor(self, other):
return self._combine(other, self.BITXOR, False)
def __or__(self, other):
if getattr(self, "conditional", False) and getattr(other, "conditional", False):
return Q(self) | Q(other)
raise NotImplementedError(
"Use .bitand(), .bitor(), and .bitxor() for bitwise logical operations."
)
def bitor(self, other):
return self._combine(other, self.BITOR, False)
def __radd__(self, other):
return self._combine(other, self.ADD, True)
def __rsub__(self, other):
return self._combine(other, self.SUB, True)
def __rmul__(self, other):
return self._combine(other, self.MUL, True)
def __rtruediv__(self, other):
return self._combine(other, self.DIV, True)
def __rmod__(self, other):
return self._combine(other, self.MOD, True)
def __rpow__(self, other):
return self._combine(other, self.POW, True)
def __rand__(self, other):
raise NotImplementedError(
"Use .bitand(), .bitor(), and .bitxor() for bitwise logical operations."
)
def __ror__(self, other):
raise NotImplementedError(
"Use .bitand(), .bitor(), and .bitxor() for bitwise logical operations."
)
def __rxor__(self, other):
raise NotImplementedError(
"Use .bitand(), .bitor(), and .bitxor() for bitwise logical operations."
)
def __invert__(self):
return NegatedExpression(self)
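# A minimal sketch (hypothetical field names) of the operator overloading
# above: Python arithmetic on Combinable objects builds CombinedExpression
# trees lazily; they are resolved against a query later.
def _combinable_demo():
    return (F("price") - F("discount")) * Value(2)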
class BaseExpression:
"""Base class for all query expressions."""
empty_result_set_value = NotImplemented
# aggregate specific fields
is_summary = False
_output_field_resolved_to_none = False
# Can the expression be used in a WHERE clause?
filterable = True
# Can the expression be used as a source expression in Window?
window_compatible = False
# Can the expression be used as a database default value?
allowed_default = False
def __init__(self, output_field=None):
if output_field is not None:
self.output_field = output_field
def __getstate__(self):
state = self.__dict__.copy()
state.pop("convert_value", None)
return state
def get_db_converters(self, connection):
return (
[]
if self.convert_value is self._convert_value_noop
else [self.convert_value]
) + self.output_field.get_db_converters(connection)
def get_source_expressions(self):
return []
def set_source_expressions(self, exprs):
assert not exprs
def _parse_expressions(self, *expressions):
return [
arg
if hasattr(arg, "resolve_expression")
else (F(arg) if isinstance(arg, str) else Value(arg))
for arg in expressions
]
def as_sql(self, compiler, connection):
"""
Responsible for returning a (sql, [params]) tuple to be included
in the current query.
Different backends can provide their own implementation, by
providing an `as_{vendor}` method and patching the Expression:
```
def override_as_sql(self, compiler, connection):
# custom logic
return super().as_sql(compiler, connection)
setattr(Expression, 'as_' + connection.vendor, override_as_sql)
```
Arguments:
* compiler: the query compiler responsible for generating the query.
Must have a compile method, returning a (sql, [params]) tuple.
Calling compiler(value) will return a quoted `value`.
* connection: the database connection used for the current query.
Return: (sql, params)
Where `sql` is a string containing ordered SQL placeholders to be
replaced with the elements of the list `params`.
"""
raise NotImplementedError("Subclasses must implement as_sql()")
@cached_property
def contains_aggregate(self):
return any(
expr and expr.contains_aggregate for expr in self.get_source_expressions()
)
@cached_property
def contains_over_clause(self):
return any(
expr and expr.contains_over_clause for expr in self.get_source_expressions()
)
@cached_property
def contains_column_references(self):
return any(
expr and expr.contains_column_references
for expr in self.get_source_expressions()
)
def resolve_expression(
self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False
):
"""
Provide the chance to do any preprocessing or validation before being
added to the query.
Arguments:
* query: the backend query implementation
* allow_joins: boolean allowing or denying use of joins
in this query
* reuse: a set of reusable joins for multijoins
* summarize: a terminal aggregate clause
* for_save: whether this expression is about to be used in a save or update
Return: an Expression to be added to the query.
"""
c = self.copy()
c.is_summary = summarize
c.set_source_expressions(
[
expr.resolve_expression(query, allow_joins, reuse, summarize)
if expr
else None
for expr in c.get_source_expressions()
]
)
return c
@property
def conditional(self):
return isinstance(self.output_field, fields.BooleanField)
@property
def field(self):
return self.output_field
@cached_property
def output_field(self):
"""Return the output type of this expressions."""
output_field = self._resolve_output_field()
if output_field is None:
self._output_field_resolved_to_none = True
raise FieldError("Cannot resolve expression type, unknown output_field")
return output_field
@cached_property
def _output_field_or_none(self):
"""
Return the output field of this expression, or None if
_resolve_output_field() didn't return an output type.
"""
try:
return self.output_field
except FieldError:
if not self._output_field_resolved_to_none:
raise
def _resolve_output_field(self):
"""
Attempt to infer the output type of the expression.
As a guess, if the output fields of all source fields match then simply
infer the same type here.
If a source's output field resolves to None, exclude it from this check.
If all sources are None, then an error is raised higher up the stack in
the output_field property.
"""
# This guess is mostly a bad idea, but there is quite a lot of code
# (especially 3rd party Func subclasses) that depends on it; we'd need a
# deprecation path to fix it.
sources_iter = (
source for source in self.get_source_fields() if source is not None
)
for output_field in sources_iter:
for source in sources_iter:
if not isinstance(output_field, source.__class__):
raise FieldError(
"Expression contains mixed types: %s, %s. You must "
"set output_field."
% (
output_field.__class__.__name__,
source.__class__.__name__,
)
)
return output_field
@staticmethod
def _convert_value_noop(value, expression, connection):
return value
@cached_property
def convert_value(self):
"""
Expressions provide their own converters because users have the option
of manually specifying the output_field which may be a different type
from the one the database returns.
"""
field = self.output_field
internal_type = field.get_internal_type()
if internal_type == "FloatField":
return (
lambda value, expression, connection: None
if value is None
else float(value)
)
elif internal_type.endswith("IntegerField"):
return (
lambda value, expression, connection: None
if value is None
else int(value)
)
elif internal_type == "DecimalField":
return (
lambda value, expression, connection: None
if value is None
else Decimal(value)
)
return self._convert_value_noop
def get_lookup(self, lookup):
return self.output_field.get_lookup(lookup)
def get_transform(self, name):
return self.output_field.get_transform(name)
def relabeled_clone(self, change_map):
clone = self.copy()
clone.set_source_expressions(
[
e.relabeled_clone(change_map) if e is not None else None
for e in self.get_source_expressions()
]
)
return clone
def replace_expressions(self, replacements):
if replacement := replacements.get(self):
return replacement
clone = self.copy()
source_expressions = clone.get_source_expressions()
clone.set_source_expressions(
[
expr.replace_expressions(replacements) if expr else None
for expr in source_expressions
]
)
return clone
def get_refs(self):
refs = set()
for expr in self.get_source_expressions():
refs |= expr.get_refs()
return refs
def copy(self):
return copy.copy(self)
def prefix_references(self, prefix):
clone = self.copy()
clone.set_source_expressions(
[
F(f"{prefix}{expr.name}")
if isinstance(expr, F)
else expr.prefix_references(prefix)
for expr in self.get_source_expressions()
]
)
return clone
def get_group_by_cols(self):
if not self.contains_aggregate:
return [self]
cols = []
for source in self.get_source_expressions():
cols.extend(source.get_group_by_cols())
return cols
def get_source_fields(self):
"""Return the underlying field types used by this aggregate."""
return [e._output_field_or_none for e in self.get_source_expressions()]
def asc(self, **kwargs):
return OrderBy(self, **kwargs)
def desc(self, **kwargs):
return OrderBy(self, descending=True, **kwargs)
def reverse_ordering(self):
return self
def flatten(self):
"""
Recursively yield this expression and all subexpressions, in
depth-first order.
"""
yield self
for expr in self.get_source_expressions():
if expr:
if hasattr(expr, "flatten"):
yield from expr.flatten()
else:
yield expr
def select_format(self, compiler, sql, params):
"""
Custom format for select clauses. For example, EXISTS expressions need
to be wrapped in CASE WHEN on Oracle.
"""
if hasattr(self.output_field, "select_format"):
return self.output_field.select_format(compiler, sql, params)
return sql, params
@deconstructible
class Expression(BaseExpression, Combinable):
"""An expression that can be combined with other expressions."""
@cached_property
def identity(self):
constructor_signature = inspect.signature(self.__init__)
args, kwargs = self._constructor_args
signature = constructor_signature.bind_partial(*args, **kwargs)
signature.apply_defaults()
arguments = signature.arguments.items()
identity = [self.__class__]
for arg, value in arguments:
if isinstance(value, fields.Field):
if value.name and value.model:
value = (value.model._meta.label, value.name)
else:
value = type(value)
else:
value = make_hashable(value)
identity.append((arg, value))
return tuple(identity)
def __eq__(self, other):
if not isinstance(other, Expression):
return NotImplemented
return other.identity == self.identity
def __hash__(self):
return hash(self.identity)
# Type inference for CombinedExpression.output_field.
# Missing items will result in FieldError, by design.
#
# The current approach for NULL is based on lowest-common-denominator
# behavior, i.e. if one of the supported databases raises an error (rather
# than returning NULL) for `val <op> NULL`, then Django raises FieldError.
_connector_combinations = [
# Numeric operations - operands of same type.
{
connector: [
(fields.IntegerField, fields.IntegerField, fields.IntegerField),
(fields.FloatField, fields.FloatField, fields.FloatField),
(fields.DecimalField, fields.DecimalField, fields.DecimalField),
]
for connector in (
Combinable.ADD,
Combinable.SUB,
Combinable.MUL,
# Behavior for DIV with integer arguments follows Postgres/SQLite,
# not MySQL/Oracle.
Combinable.DIV,
Combinable.MOD,
Combinable.POW,
)
},
# Numeric operations - operands of different type.
{
connector: [
(fields.IntegerField, fields.DecimalField, fields.DecimalField),
(fields.DecimalField, fields.IntegerField, fields.DecimalField),
(fields.IntegerField, fields.FloatField, fields.FloatField),
(fields.FloatField, fields.IntegerField, fields.FloatField),
]
for connector in (
Combinable.ADD,
Combinable.SUB,
Combinable.MUL,
Combinable.DIV,
Combinable.MOD,
)
},
# Bitwise operators.
{
connector: [
(fields.IntegerField, fields.IntegerField, fields.IntegerField),
]
for connector in (
Combinable.BITAND,
Combinable.BITOR,
Combinable.BITLEFTSHIFT,
Combinable.BITRIGHTSHIFT,
Combinable.BITXOR,
)
},
# Numeric with NULL.
{
connector: [
(field_type, NoneType, field_type),
(NoneType, field_type, field_type),
]
for connector in (
Combinable.ADD,
Combinable.SUB,
Combinable.MUL,
Combinable.DIV,
Combinable.MOD,
Combinable.POW,
)
for field_type in (fields.IntegerField, fields.DecimalField, fields.FloatField)
},
# Date/DateTimeField/DurationField/TimeField.
{
Combinable.ADD: [
# Date/DateTimeField.
(fields.DateField, fields.DurationField, fields.DateTimeField),
(fields.DateTimeField, fields.DurationField, fields.DateTimeField),
(fields.DurationField, fields.DateField, fields.DateTimeField),
(fields.DurationField, fields.DateTimeField, fields.DateTimeField),
# DurationField.
(fields.DurationField, fields.DurationField, fields.DurationField),
# TimeField.
(fields.TimeField, fields.DurationField, fields.TimeField),
(fields.DurationField, fields.TimeField, fields.TimeField),
],
},
{
Combinable.SUB: [
# Date/DateTimeField.
(fields.DateField, fields.DurationField, fields.DateTimeField),
(fields.DateTimeField, fields.DurationField, fields.DateTimeField),
(fields.DateField, fields.DateField, fields.DurationField),
(fields.DateField, fields.DateTimeField, fields.DurationField),
(fields.DateTimeField, fields.DateField, fields.DurationField),
(fields.DateTimeField, fields.DateTimeField, fields.DurationField),
# DurationField.
(fields.DurationField, fields.DurationField, fields.DurationField),
# TimeField.
(fields.TimeField, fields.DurationField, fields.TimeField),
(fields.TimeField, fields.TimeField, fields.DurationField),
],
},
]
_connector_combinators = defaultdict(list)
def register_combinable_fields(lhs, connector, rhs, result):
"""
Register combinable types:
lhs <connector> rhs -> result
e.g.
register_combinable_fields(
IntegerField, Combinable.ADD, FloatField, FloatField
)
"""
_connector_combinators[connector].append((lhs, rhs, result))
for d in _connector_combinations:
for connector, field_types in d.items():
for lhs, rhs, result in field_types:
register_combinable_fields(lhs, connector, rhs, result)
@functools.lru_cache(maxsize=128)
def _resolve_combined_type(connector, lhs_type, rhs_type):
combinators = _connector_combinators.get(connector, ())
for combinator_lhs_type, combinator_rhs_type, combined_type in combinators:
if issubclass(lhs_type, combinator_lhs_type) and issubclass(
rhs_type, combinator_rhs_type
):
return combined_type
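# Illustrative sketch: mixed numeric operands resolve through the registered
# combinators above, e.g. IntegerField + FloatField -> FloatField.
def _combined_type_demo():
    expr = Value(1) + Value(2.5)
    return type(expr.output_field).__name__  # "FloatField"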
class CombinedExpression(SQLiteNumericMixin, Expression):
def __init__(self, lhs, connector, rhs, output_field=None):
super().__init__(output_field=output_field)
self.connector = connector
self.lhs = lhs
self.rhs = rhs
def __repr__(self):
return "<{}: {}>".format(self.__class__.__name__, self)
def __str__(self):
return "{} {} {}".format(self.lhs, self.connector, self.rhs)
def get_source_expressions(self):
return [self.lhs, self.rhs]
def set_source_expressions(self, exprs):
self.lhs, self.rhs = exprs
def _resolve_output_field(self):
# We avoid using super() here for reasons given in
# Expression._resolve_output_field()
combined_type = _resolve_combined_type(
self.connector,
type(self.lhs._output_field_or_none),
type(self.rhs._output_field_or_none),
)
if combined_type is None:
raise FieldError(
f"Cannot infer type of {self.connector!r} expression involving these "
f"types: {self.lhs.output_field.__class__.__name__}, "
f"{self.rhs.output_field.__class__.__name__}. You must set "
f"output_field."
)
return combined_type()
def as_sql(self, compiler, connection):
expressions = []
expression_params = []
sql, params = compiler.compile(self.lhs)
expressions.append(sql)
expression_params.extend(params)
sql, params = compiler.compile(self.rhs)
expressions.append(sql)
expression_params.extend(params)
# order of precedence
expression_wrapper = "(%s)"
sql = connection.ops.combine_expression(self.connector, expressions)
return expression_wrapper % sql, expression_params
def resolve_expression(
self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False
):
lhs = self.lhs.resolve_expression(
query, allow_joins, reuse, summarize, for_save
)
rhs = self.rhs.resolve_expression(
query, allow_joins, reuse, summarize, for_save
)
if not isinstance(self, (DurationExpression, TemporalSubtraction)):
try:
lhs_type = lhs.output_field.get_internal_type()
except (AttributeError, FieldError):
lhs_type = None
try:
rhs_type = rhs.output_field.get_internal_type()
except (AttributeError, FieldError):
rhs_type = None
if "DurationField" in {lhs_type, rhs_type} and lhs_type != rhs_type:
return DurationExpression(
self.lhs, self.connector, self.rhs
).resolve_expression(
query,
allow_joins,
reuse,
summarize,
for_save,
)
datetime_fields = {"DateField", "DateTimeField", "TimeField"}
if (
self.connector == self.SUB
and lhs_type in datetime_fields
and lhs_type == rhs_type
):
return TemporalSubtraction(self.lhs, self.rhs).resolve_expression(
query,
allow_joins,
reuse,
summarize,
for_save,
)
c = self.copy()
c.is_summary = summarize
c.lhs = lhs
c.rhs = rhs
return c
@cached_property
def allowed_default(self):
return self.lhs.allowed_default and self.rhs.allowed_default
class DurationExpression(CombinedExpression):
def compile(self, side, compiler, connection):
try:
output = side.output_field
except FieldError:
pass
else:
if output.get_internal_type() == "DurationField":
sql, params = compiler.compile(side)
return connection.ops.format_for_duration_arithmetic(sql), params
return compiler.compile(side)
def as_sql(self, compiler, connection):
if connection.features.has_native_duration_field:
return super().as_sql(compiler, connection)
connection.ops.check_expression_support(self)
expressions = []
expression_params = []
sql, params = self.compile(self.lhs, compiler, connection)
expressions.append(sql)
expression_params.extend(params)
sql, params = self.compile(self.rhs, compiler, connection)
expressions.append(sql)
expression_params.extend(params)
# order of precedence
expression_wrapper = "(%s)"
sql = connection.ops.combine_duration_expression(self.connector, expressions)
return expression_wrapper % sql, expression_params
def as_sqlite(self, compiler, connection, **extra_context):
sql, params = self.as_sql(compiler, connection, **extra_context)
if self.connector in {Combinable.MUL, Combinable.DIV}:
try:
lhs_type = self.lhs.output_field.get_internal_type()
rhs_type = self.rhs.output_field.get_internal_type()
except (AttributeError, FieldError):
pass
else:
allowed_fields = {
"DecimalField",
"DurationField",
"FloatField",
"IntegerField",
}
if lhs_type not in allowed_fields or rhs_type not in allowed_fields:
raise DatabaseError(
f"Invalid arguments for operator {self.connector}."
)
return sql, params
class TemporalSubtraction(CombinedExpression):
output_field = fields.DurationField()
def __init__(self, lhs, rhs):
super().__init__(lhs, self.SUB, rhs)
def as_sql(self, compiler, connection):
connection.ops.check_expression_support(self)
lhs = compiler.compile(self.lhs)
rhs = compiler.compile(self.rhs)
return connection.ops.subtract_temporals(
self.lhs.output_field.get_internal_type(), lhs, rhs
)
@deconstructible(path="django.db.models.F")
class F(Combinable):
"""An object capable of resolving references to existing query objects."""
allowed_default = False
def __init__(self, name):
"""
Arguments:
* name: the name of the field this expression references
"""
self.name = name
def __repr__(self):
return "{}({})".format(self.__class__.__name__, self.name)
def resolve_expression(
self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False
):
return query.resolve_ref(self.name, allow_joins, reuse, summarize)
def replace_expressions(self, replacements):
return replacements.get(self, self)
def asc(self, **kwargs):
return OrderBy(self, **kwargs)
def desc(self, **kwargs):
return OrderBy(self, descending=True, **kwargs)
def __eq__(self, other):
return self.__class__ == other.__class__ and self.name == other.name
def __hash__(self):
return hash(self.name)
def copy(self):
return copy.copy(self)
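# Hedged sketch (hypothetical model/field names): F() defers a field reference
# until resolve_expression() maps it onto the query, so arithmetic happens in
# SQL, e.g. Product.objects.update(price=F("price") * 2).
def _f_demo():
    return F("price").desc(nulls_last=True)  # OrderBy for use in order_by()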
class ResolvedOuterRef(F):
"""
An object that contains a reference to an outer query.
In this case, the reference to the outer query has been resolved because
the inner query has been used as a subquery.
"""
contains_aggregate = False
contains_over_clause = False
def as_sql(self, *args, **kwargs):
raise ValueError(
"This queryset contains a reference to an outer query and may "
"only be used in a subquery."
)
def resolve_expression(self, *args, **kwargs):
col = super().resolve_expression(*args, **kwargs)
if col.contains_over_clause:
raise NotSupportedError(
f"Referencing outer query window expression is not supported: "
f"{self.name}."
)
# FIXME: Rename possibly_multivalued to multivalued and fix detection
# for non-multivalued JOINs (e.g. foreign key fields). This should take
# into account only many-to-many and one-to-many relationships.
col.possibly_multivalued = LOOKUP_SEP in self.name
return col
def relabeled_clone(self, relabels):
return self
def get_group_by_cols(self):
return []
class OuterRef(F):
contains_aggregate = False
def resolve_expression(self, *args, **kwargs):
if isinstance(self.name, self.__class__):
return self.name
return ResolvedOuterRef(self.name)
def relabeled_clone(self, relabels):
return self
@deconstructible(path="django.db.models.Func")
class Func(SQLiteNumericMixin, Expression):
"""An SQL function call."""
function = None
template = "%(function)s(%(expressions)s)"
arg_joiner = ", "
arity = None # The number of arguments the function accepts.
def __init__(self, *expressions, output_field=None, **extra):
if self.arity is not None and len(expressions) != self.arity:
raise TypeError(
"'%s' takes exactly %s %s (%s given)"
% (
self.__class__.__name__,
self.arity,
"argument" if self.arity == 1 else "arguments",
len(expressions),
)
)
super().__init__(output_field=output_field)
self.source_expressions = self._parse_expressions(*expressions)
self.extra = extra
def __repr__(self):
args = self.arg_joiner.join(str(arg) for arg in self.source_expressions)
extra = {**self.extra, **self._get_repr_options()}
if extra:
extra = ", ".join(
str(key) + "=" + str(val) for key, val in sorted(extra.items())
)
return "{}({}, {})".format(self.__class__.__name__, args, extra)
return "{}({})".format(self.__class__.__name__, args)
def _get_repr_options(self):
"""Return a dict of extra __init__() options to include in the repr."""
return {}
def get_source_expressions(self):
return self.source_expressions
def set_source_expressions(self, exprs):
self.source_expressions = exprs
def resolve_expression(
self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False
):
c = self.copy()
c.is_summary = summarize
for pos, arg in enumerate(c.source_expressions):
c.source_expressions[pos] = arg.resolve_expression(
query, allow_joins, reuse, summarize, for_save
)
return c
def as_sql(
self,
compiler,
connection,
function=None,
template=None,
arg_joiner=None,
**extra_context,
):
connection.ops.check_expression_support(self)
sql_parts = []
params = []
for arg in self.source_expressions:
try:
arg_sql, arg_params = compiler.compile(arg)
except EmptyResultSet:
empty_result_set_value = getattr(
arg, "empty_result_set_value", NotImplemented
)
if empty_result_set_value is NotImplemented:
raise
arg_sql, arg_params = compiler.compile(Value(empty_result_set_value))
except FullResultSet:
arg_sql, arg_params = compiler.compile(Value(True))
sql_parts.append(arg_sql)
params.extend(arg_params)
data = {**self.extra, **extra_context}
# Use the first supplied value in this order: the parameter to this
# method, a value supplied in __init__()'s **extra (the value in
# `data`), or the value defined on the class.
if function is not None:
data["function"] = function
else:
data.setdefault("function", self.function)
template = template or data.get("template", self.template)
arg_joiner = arg_joiner or data.get("arg_joiner", self.arg_joiner)
data["expressions"] = data["field"] = arg_joiner.join(sql_parts)
return template % data, params
def copy(self):
copy = super().copy()
copy.source_expressions = self.source_expressions[:]
copy.extra = self.extra.copy()
return copy
@cached_property
def allowed_default(self):
return all(expression.allowed_default for expression in self.source_expressions)
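# A minimal sketch (the class below is illustrative, not part of this module;
# Django ships a real Upper in django.db.models.functions): subclassing Func
# sets the SQL function name and arity.
class _UpperDemo(Func):
    function = "UPPER"
    arity = 1
# Usage sketch: SomeModel.objects.annotate(loud=_UpperDemo(F("name"))).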
@deconstructible(path="django.db.models.Value")
class Value(SQLiteNumericMixin, Expression):
"""Represent a wrapped value as a node within an expression."""
# Provide a default value for `for_save` in order to allow unresolved
# instances to be compiled until a decision is taken in #25425.
for_save = False
allowed_default = True
def __init__(self, value, output_field=None):
"""
Arguments:
* value: the value this expression represents. The value will be
added into the sql parameter list and properly quoted.
* output_field: an instance of the model field type that this
expression will return, such as IntegerField() or CharField().
"""
super().__init__(output_field=output_field)
self.value = value
def __repr__(self):
return f"{self.__class__.__name__}({self.value!r})"
def as_sql(self, compiler, connection):
connection.ops.check_expression_support(self)
val = self.value
output_field = self._output_field_or_none
if output_field is not None:
if self.for_save:
val = output_field.get_db_prep_save(val, connection=connection)
else:
val = output_field.get_db_prep_value(val, connection=connection)
if hasattr(output_field, "get_placeholder"):
return output_field.get_placeholder(val, compiler, connection), [val]
if val is None:
# cx_Oracle does not always convert None to the appropriate
# NULL type (like in case expressions using numbers), so we
# use a literal SQL NULL
return "NULL", []
return "%s", [val]
def resolve_expression(
self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False
):
c = super().resolve_expression(query, allow_joins, reuse, summarize, for_save)
c.for_save = for_save
return c
def get_group_by_cols(self):
return []
def _resolve_output_field(self):
if isinstance(self.value, str):
return fields.CharField()
if isinstance(self.value, bool):
return fields.BooleanField()
if isinstance(self.value, int):
return fields.IntegerField()
if isinstance(self.value, float):
return fields.FloatField()
if isinstance(self.value, datetime.datetime):
return fields.DateTimeField()
if isinstance(self.value, datetime.date):
return fields.DateField()
if isinstance(self.value, datetime.time):
return fields.TimeField()
if isinstance(self.value, datetime.timedelta):
return fields.DurationField()
if isinstance(self.value, Decimal):
return fields.DecimalField()
if isinstance(self.value, bytes):
return fields.BinaryField()
if isinstance(self.value, UUID):
return fields.UUIDField()
@property
def empty_result_set_value(self):
return self.value
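# Illustrative sketch of _resolve_output_field()'s type mapping above; note
# that bool is checked before int because bool is an int subclass.
def _value_output_field_demo():
    assert isinstance(Value("x").output_field, fields.CharField)
    assert isinstance(Value(True).output_field, fields.BooleanField)
    assert isinstance(Value(1).output_field, fields.IntegerField)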
class RawSQL(Expression):
allowed_default = True
def __init__(self, sql, params, output_field=None):
if output_field is None:
output_field = fields.Field()
self.sql, self.params = sql, params
super().__init__(output_field=output_field)
def __repr__(self):
return "{}({}, {})".format(self.__class__.__name__, self.sql, self.params)
def as_sql(self, compiler, connection):
return "(%s)" % self.sql, self.params
def get_group_by_cols(self):
return [self]
def resolve_expression(
self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False
):
# Resolve parent fields used in raw SQL.
if query.model:
for parent in query.model._meta.get_parent_list():
for parent_field in parent._meta.local_fields:
_, column_name = parent_field.get_attname_column()
if column_name.lower() in self.sql.lower():
query.resolve_ref(
parent_field.name, allow_joins, reuse, summarize
)
break
return super().resolve_expression(
query, allow_joins, reuse, summarize, for_save
)
class Star(Expression):
def __repr__(self):
return "'*'"
def as_sql(self, compiler, connection):
return "*", []
class DatabaseDefault(Expression):
"""Placeholder expression for the database default in an insert query."""
def as_sql(self, compiler, connection):
return "DEFAULT", []
class Col(Expression):
contains_column_references = True
possibly_multivalued = False
def __init__(self, alias, target, output_field=None):
if output_field is None:
output_field = target
super().__init__(output_field=output_field)
self.alias, self.target = alias, target
def __repr__(self):
alias, target = self.alias, self.target
identifiers = (alias, str(target)) if alias else (str(target),)
return "{}({})".format(self.__class__.__name__, ", ".join(identifiers))
def as_sql(self, compiler, connection):
alias, column = self.alias, self.target.column
identifiers = (alias, column) if alias else (column,)
sql = ".".join(map(compiler.quote_name_unless_alias, identifiers))
return sql, []
def relabeled_clone(self, relabels):
if self.alias is None:
return self
return self.__class__(
relabels.get(self.alias, self.alias), self.target, self.output_field
)
def get_group_by_cols(self):
return [self]
def get_db_converters(self, connection):
if self.target == self.output_field:
return self.output_field.get_db_converters(connection)
return self.output_field.get_db_converters(
connection
) + self.target.get_db_converters(connection)
class Ref(Expression):
"""
Reference to a column alias of the query. For example, Ref('sum_cost') in
the qs.annotate(sum_cost=Sum('cost')) query.
"""
def __init__(self, refs, source):
super().__init__()
self.refs, self.source = refs, source
def __repr__(self):
return "{}({}, {})".format(self.__class__.__name__, self.refs, self.source)
def get_source_expressions(self):
return [self.source]
def set_source_expressions(self, exprs):
(self.source,) = exprs
def resolve_expression(
self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False
):
# The sub-expression `source` has already been resolved, as this is
# just a reference to the name of `source`.
return self
def get_refs(self):
return {self.refs}
def relabeled_clone(self, relabels):
return self
def as_sql(self, compiler, connection):
return connection.ops.quote_name(self.refs), []
def get_group_by_cols(self):
return [self]
class ExpressionList(Func):
"""
An expression containing multiple expressions. Can be used to provide a
list of expressions as an argument to another expression, like a partition
clause.
"""
template = "%(expressions)s"
def __init__(self, *expressions, **extra):
if not expressions:
raise ValueError(
"%s requires at least one expression." % self.__class__.__name__
)
super().__init__(*expressions, **extra)
def __str__(self):
return self.arg_joiner.join(str(arg) for arg in self.source_expressions)
def as_sqlite(self, compiler, connection, **extra_context):
# Casting to numeric is unnecessary.
return self.as_sql(compiler, connection, **extra_context)
class OrderByList(Func):
allowed_default = False
template = "ORDER BY %(expressions)s"
def __init__(self, *expressions, **extra):
expressions = (
(
OrderBy(F(expr[1:]), descending=True)
if isinstance(expr, str) and expr[0] == "-"
else expr
)
for expr in expressions
)
super().__init__(*expressions, **extra)
def as_sql(self, *args, **kwargs):
if not self.source_expressions:
return "", ()
return super().as_sql(*args, **kwargs)
def get_group_by_cols(self):
group_by_cols = []
for order_by in self.get_source_expressions():
group_by_cols.extend(order_by.get_group_by_cols())
return group_by_cols
@deconstructible(path="django.db.models.ExpressionWrapper")
class ExpressionWrapper(SQLiteNumericMixin, Expression):
"""
An expression that can wrap another expression so that it can provide
extra context to the inner expression, such as the output_field.
"""
def __init__(self, expression, output_field):
super().__init__(output_field=output_field)
self.expression = expression
def set_source_expressions(self, exprs):
self.expression = exprs[0]
def get_source_expressions(self):
return [self.expression]
def get_group_by_cols(self):
if isinstance(self.expression, Expression):
expression = self.expression.copy()
expression.output_field = self.output_field
return expression.get_group_by_cols()
# For non-expressions e.g. an SQL WHERE clause, the entire
# `expression` must be included in the GROUP BY clause.
return super().get_group_by_cols()
def as_sql(self, compiler, connection):
return compiler.compile(self.expression)
def __repr__(self):
return "{}({})".format(self.__class__.__name__, self.expression)
@property
def allowed_default(self):
return self.expression.allowed_default
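# Hedged sketch (hypothetical field names): ExpressionWrapper supplies an
# output_field when Django can't infer one, e.g. for mixed-type arithmetic.
def _wrapper_demo():
    return ExpressionWrapper(
        F("price") * F("rate"), output_field=fields.DecimalField()
    )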
class NegatedExpression(ExpressionWrapper):
"""The logical negation of a conditional expression."""
def __init__(self, expression):
super().__init__(expression, output_field=fields.BooleanField())
def __invert__(self):
return self.expression.copy()
def as_sql(self, compiler, connection):
try:
sql, params = super().as_sql(compiler, connection)
except EmptyResultSet:
features = compiler.connection.features
if not features.supports_boolean_expr_in_select_clause:
return "1=1", ()
return compiler.compile(Value(True))
ops = compiler.connection.ops
# Some database backends (e.g. Oracle) don't allow EXISTS() and filters
# to be compared to another expression unless they're wrapped in a CASE
# WHEN.
if not ops.conditional_expression_supported_in_where_clause(self.expression):
return f"CASE WHEN {sql} = 0 THEN 1 ELSE 0 END", params
return f"NOT {sql}", params
def resolve_expression(
self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False
):
resolved = super().resolve_expression(
query, allow_joins, reuse, summarize, for_save
)
if not getattr(resolved.expression, "conditional", False):
raise TypeError("Cannot negate non-conditional expressions.")
return resolved
def select_format(self, compiler, sql, params):
# Wrap boolean expressions with a CASE WHEN expression if a database
# backend (e.g. Oracle) doesn't support boolean expression in SELECT or
# GROUP BY list.
expression_supported_in_where_clause = (
compiler.connection.ops.conditional_expression_supported_in_where_clause
)
if (
not compiler.connection.features.supports_boolean_expr_in_select_clause
# Avoid double wrapping.
and expression_supported_in_where_clause(self.expression)
):
sql = "CASE WHEN {} THEN 1 ELSE 0 END".format(sql)
return sql, params
@deconstructible(path="django.db.models.When")
class When(Expression):
template = "WHEN %(condition)s THEN %(result)s"
# This isn't a complete conditional expression; it must be used in Case().
conditional = False
def __init__(self, condition=None, then=None, **lookups):
if lookups:
if condition is None:
condition, lookups = Q(**lookups), None
elif getattr(condition, "conditional", False):
condition, lookups = Q(condition, **lookups), None
if condition is None or not getattr(condition, "conditional", False) or lookups:
raise TypeError(
"When() supports a Q object, a boolean expression, or lookups "
"as a condition."
)
if isinstance(condition, Q) and not condition:
raise ValueError("An empty Q() can't be used as a When() condition.")
super().__init__(output_field=None)
self.condition = condition
self.result = self._parse_expressions(then)[0]
def __str__(self):
return "WHEN %r THEN %r" % (self.condition, self.result)
def __repr__(self):
return "<%s: %s>" % (self.__class__.__name__, self)
def get_source_expressions(self):
return [self.condition, self.result]
def set_source_expressions(self, exprs):
self.condition, self.result = exprs
def get_source_fields(self):
# We're only interested in the fields of the result expressions.
return [self.result._output_field_or_none]
def resolve_expression(
self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False
):
c = self.copy()
c.is_summary = summarize
if hasattr(c.condition, "resolve_expression"):
c.condition = c.condition.resolve_expression(
query, allow_joins, reuse, summarize, False
)
c.result = c.result.resolve_expression(
query, allow_joins, reuse, summarize, for_save
)
return c
def as_sql(self, compiler, connection, template=None, **extra_context):
connection.ops.check_expression_support(self)
template_params = extra_context
sql_params = []
condition_sql, condition_params = compiler.compile(self.condition)
template_params["condition"] = condition_sql
result_sql, result_params = compiler.compile(self.result)
template_params["result"] = result_sql
template = template or self.template
return template % template_params, (
*sql_params,
*condition_params,
*result_params,
)
def get_group_by_cols(self):
# This is not a complete expression and cannot be used in GROUP BY.
cols = []
for source in self.get_source_expressions():
cols.extend(source.get_group_by_cols())
return cols
@cached_property
def allowed_default(self):
return self.condition.allowed_default and self.result.allowed_default
@deconstructible(path="django.db.models.Case")
class Case(SQLiteNumericMixin, Expression):
"""
An SQL searched CASE expression:
CASE
WHEN n > 0
THEN 'positive'
WHEN n < 0
THEN 'negative'
ELSE 'zero'
END
"""
template = "CASE %(cases)s ELSE %(default)s END"
case_joiner = " "
def __init__(self, *cases, default=None, output_field=None, **extra):
if not all(isinstance(case, When) for case in cases):
raise TypeError("Positional arguments must all be When objects.")
super().__init__(output_field)
self.cases = list(cases)
self.default = self._parse_expressions(default)[0]
self.extra = extra
def __str__(self):
return "CASE %s, ELSE %r" % (
", ".join(str(c) for c in self.cases),
self.default,
)
def __repr__(self):
return "<%s: %s>" % (self.__class__.__name__, self)
def get_source_expressions(self):
return self.cases + [self.default]
def set_source_expressions(self, exprs):
*self.cases, self.default = exprs
def resolve_expression(
self, query=None, allow_joins=True, reuse=None, summarize=False, for_save=False
):
c = self.copy()
c.is_summary = summarize
for pos, case in enumerate(c.cases):
c.cases[pos] = case.resolve_expression(
query, allow_joins, reuse, summarize, for_save
)
c.default = c.default.resolve_expression(
query, allow_joins, reuse, summarize, for_save
)
return c
def copy(self):
c = super().copy()
c.cases = c.cases[:]
return c
def as_sql(
self, compiler, connection, template=None, case_joiner=None, **extra_context
):
connection.ops.check_expression_support(self)
if not self.cases:
return compiler.compile(self.default)
template_params = {**self.extra, **extra_context}
case_parts = []
sql_params = []
default_sql, default_params = compiler.compile(self.default)
for case in self.cases:
try:
case_sql, case_params = compiler.compile(case)
except EmptyResultSet:
continue
except FullResultSet:
default_sql, default_params = compiler.compile(case.result)
break
case_parts.append(case_sql)
sql_params.extend(case_params)
if not case_parts:
return default_sql, default_params
case_joiner = case_joiner or self.case_joiner
template_params["cases"] = case_joiner.join(case_parts)
template_params["default"] = default_sql
sql_params.extend(default_params)
template = template or template_params.get("template", self.template)
sql = template % template_params
if self._output_field_or_none is not None:
sql = connection.ops.unification_cast_sql(self.output_field) % sql
return sql, sql_params
def get_group_by_cols(self):
if not self.cases:
return self.default.get_group_by_cols()
return super().get_group_by_cols()
@cached_property
def allowed_default(self):
return self.default.allowed_default and all(
case_.allowed_default for case_ in self.cases
)
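# A minimal annotate() sketch combining Case() and When(); the "Account" model
# and its "balance" field are hypothetical:
#
#     from django.db.models import Case, CharField, Value, When
#
#     Account.objects.annotate(
#         sign=Case(
#             When(balance__gt=0, then=Value("positive")),
#             When(balance__lt=0, then=Value("negative")),
#             default=Value("zero"),
#             output_field=CharField(),
#         )
#     )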
class Subquery(BaseExpression, Combinable):
"""
An explicit subquery. It may contain OuterRef() references to the outer
query, which are resolved when it is applied to that query.
"""
template = "(%(subquery)s)"
contains_aggregate = False
empty_result_set_value = None
subquery = True
def __init__(self, queryset, output_field=None, **extra):
# Allow the usage of both QuerySet and sql.Query objects.
self.query = getattr(queryset, "query", queryset).clone()
self.query.subquery = True
self.extra = extra
super().__init__(output_field)
def get_source_expressions(self):
return [self.query]
def set_source_expressions(self, exprs):
self.query = exprs[0]
def _resolve_output_field(self):
return self.query.output_field
def copy(self):
clone = super().copy()
clone.query = clone.query.clone()
return clone
@property
def external_aliases(self):
return self.query.external_aliases
def get_external_cols(self):
return self.query.get_external_cols()
def as_sql(self, compiler, connection, template=None, **extra_context):
connection.ops.check_expression_support(self)
template_params = {**self.extra, **extra_context}
subquery_sql, sql_params = self.query.as_sql(compiler, connection)
template_params["subquery"] = subquery_sql[1:-1]
template = template or template_params.get("template", self.template)
sql = template % template_params
return sql, sql_params
def get_group_by_cols(self):
return self.query.get_group_by_cols(wrapper=self)
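# A minimal correlated-subquery sketch; "Comment" and "Post" are hypothetical
# models used only for illustration. OuterRef("pk") is resolved against the
# outer queryset when the Subquery is annotated onto it:
#
#     from django.db.models import OuterRef, Subquery
#
#     newest = Comment.objects.filter(post=OuterRef("pk")).order_by("-created")
#     Post.objects.annotate(newest_email=Subquery(newest.values("email")[:1]))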
class Exists(Subquery):
template = "EXISTS(%(subquery)s)"
output_field = fields.BooleanField()
empty_result_set_value = False
def __init__(self, queryset, **kwargs):
super().__init__(queryset, **kwargs)
self.query = self.query.exists()
def select_format(self, compiler, sql, params):
# Wrap EXISTS() with a CASE WHEN expression if a database backend
# (e.g. Oracle) doesn't support boolean expression in SELECT or GROUP
# BY list.
if not compiler.connection.features.supports_boolean_expr_in_select_clause:
sql = "CASE WHEN {} THEN 1 ELSE 0 END".format(sql)
return sql, params
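# A minimal Exists() sketch (hypothetical models). Unlike a plain Subquery,
# the queryset is rewritten via .exists() in __init__ above, so the database
# only checks for row existence:
#
#     from django.db.models import Exists, OuterRef
#
#     recent = Comment.objects.filter(post=OuterRef("pk"), created__year=2024)
#     Post.objects.annotate(has_recent_comment=Exists(recent))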
@deconstructible(path="django.db.models.OrderBy")
class OrderBy(Expression):
template = "%(expression)s %(ordering)s"
conditional = False
def __init__(self, expression, descending=False, nulls_first=None, nulls_last=None):
if nulls_first and nulls_last:
raise ValueError("nulls_first and nulls_last are mutually exclusive")
if nulls_first is False or nulls_last is False:
raise ValueError("nulls_first and nulls_last values must be True or None.")
self.nulls_first = nulls_first
self.nulls_last = nulls_last
self.descending = descending
if not hasattr(expression, "resolve_expression"):
raise ValueError("expression must be an expression type")
self.expression = expression
def __repr__(self):
return "{}({}, descending={})".format(
self.__class__.__name__, self.expression, self.descending
)
def set_source_expressions(self, exprs):
self.expression = exprs[0]
def get_source_expressions(self):
return [self.expression]
def as_sql(self, compiler, connection, template=None, **extra_context):
template = template or self.template
if connection.features.supports_order_by_nulls_modifier:
if self.nulls_last:
template = "%s NULLS LAST" % template
elif self.nulls_first:
template = "%s NULLS FIRST" % template
else:
if self.nulls_last and not (
self.descending and connection.features.order_by_nulls_first
):
template = "%%(expression)s IS NULL, %s" % template
elif self.nulls_first and not (
not self.descending and connection.features.order_by_nulls_first
):
template = "%%(expression)s IS NOT NULL, %s" % template
connection.ops.check_expression_support(self)
expression_sql, params = compiler.compile(self.expression)
placeholders = {
"expression": expression_sql,
"ordering": "DESC" if self.descending else "ASC",
**extra_context,
}
params *= template.count("%(expression)s")
return (template % placeholders).rstrip(), params
def as_oracle(self, compiler, connection):
# Oracle doesn't allow ORDER BY EXISTS() or filters unless it's wrapped
# in a CASE WHEN.
if connection.ops.conditional_expression_supported_in_where_clause(
self.expression
):
copy = self.copy()
copy.expression = Case(
When(self.expression, then=True),
default=False,
)
return copy.as_sql(compiler, connection)
return self.as_sql(compiler, connection)
def get_group_by_cols(self):
cols = []
for source in self.get_source_expressions():
cols.extend(source.get_group_by_cols())
return cols
def reverse_ordering(self):
self.descending = not self.descending
if self.nulls_first:
self.nulls_last = True
self.nulls_first = None
elif self.nulls_last:
self.nulls_first = True
self.nulls_last = None
return self
def asc(self):
self.descending = False
def desc(self):
self.descending = True
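# A minimal ordering sketch; "price" is a hypothetical nullable field. On
# backends without a NULLS FIRST/LAST modifier, as_sql() above emulates
# nulls_last with an extra "IS NULL" ordering term:
#
#     from django.db.models import F
#
#     Product.objects.order_by(F("price").desc(nulls_last=True))
#     # roughly: ORDER BY "price" IS NULL, "price" DESC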
class Window(SQLiteNumericMixin, Expression):
template = "%(expression)s OVER (%(window)s)"
# Although the main expression may be either an aggregate or an
# expression containing an aggregate function, the GROUP BY that it
# would otherwise introduce in the query is not desired.
contains_aggregate = False
contains_over_clause = True
def __init__(
self,
expression,
partition_by=None,
order_by=None,
frame=None,
output_field=None,
):
self.partition_by = partition_by
self.order_by = order_by
self.frame = frame
if not getattr(expression, "window_compatible", False):
raise ValueError(
"Expression '%s' isn't compatible with OVER clauses."
% expression.__class__.__name__
)
if self.partition_by is not None:
if not isinstance(self.partition_by, (tuple, list)):
self.partition_by = (self.partition_by,)
self.partition_by = ExpressionList(*self.partition_by)
if self.order_by is not None:
if isinstance(self.order_by, (list, tuple)):
self.order_by = OrderByList(*self.order_by)
elif isinstance(self.order_by, (BaseExpression, str)):
self.order_by = OrderByList(self.order_by)
else:
raise ValueError(
"Window.order_by must be either a string reference to a "
"field, an expression, or a list or tuple of them."
)
super().__init__(output_field=output_field)
self.source_expression = self._parse_expressions(expression)[0]
def _resolve_output_field(self):
return self.source_expression.output_field
def get_source_expressions(self):
return [self.source_expression, self.partition_by, self.order_by, self.frame]
def set_source_expressions(self, exprs):
self.source_expression, self.partition_by, self.order_by, self.frame = exprs
def as_sql(self, compiler, connection, template=None):
connection.ops.check_expression_support(self)
if not connection.features.supports_over_clause:
raise NotSupportedError("This backend does not support window expressions.")
expr_sql, params = compiler.compile(self.source_expression)
window_sql, window_params = [], ()
if self.partition_by is not None:
sql_expr, sql_params = self.partition_by.as_sql(
compiler=compiler,
connection=connection,
template="PARTITION BY %(expressions)s",
)
window_sql.append(sql_expr)
window_params += tuple(sql_params)
if self.order_by is not None:
order_sql, order_params = compiler.compile(self.order_by)
window_sql.append(order_sql)
window_params += tuple(order_params)
if self.frame:
frame_sql, frame_params = compiler.compile(self.frame)
window_sql.append(frame_sql)
window_params += tuple(frame_params)
template = template or self.template
return (
template % {"expression": expr_sql, "window": " ".join(window_sql).strip()},
(*params, *window_params),
)
def as_sqlite(self, compiler, connection):
if isinstance(self.output_field, fields.DecimalField):
# Casting to numeric must be outside of the window expression.
copy = self.copy()
source_expressions = copy.get_source_expressions()
source_expressions[0].output_field = fields.FloatField()
copy.set_source_expressions(source_expressions)
return super(Window, copy).as_sqlite(compiler, connection)
return self.as_sql(compiler, connection)
def __str__(self):
return "{} OVER ({}{}{})".format(
str(self.source_expression),
"PARTITION BY " + str(self.partition_by) if self.partition_by else "",
str(self.order_by or ""),
str(self.frame or ""),
)
def __repr__(self):
return "<%s: %s>" % (self.__class__.__name__, self)
def get_group_by_cols(self):
group_by_cols = []
if self.partition_by:
group_by_cols.extend(self.partition_by.get_group_by_cols())
if self.order_by is not None:
group_by_cols.extend(self.order_by.get_group_by_cols())
return group_by_cols
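# A minimal window sketch; the "Employee" model and its fields are
# hypothetical. partition_by and order_by accept field references,
# expressions, or lists of them, per __init__ above:
#
#     from django.db.models import Avg, F, Window
#
#     Employee.objects.annotate(
#         dept_avg=Window(
#             expression=Avg("salary"),
#             partition_by=F("department"),
#             order_by="hire_date",
#         )
#     )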
class WindowFrame(Expression):
"""
Model the frame clause in window expressions. There are two types of frame
clause, implemented by the subclasses below, but all processing and
validation (by no means intended to be complete) is done here. Thus,
providing an end for a frame is optional (the default is UNBOUNDED
FOLLOWING, which is the last row in the frame).
"""
template = "%(frame_type)s BETWEEN %(start)s AND %(end)s"
def __init__(self, start=None, end=None):
self.start = Value(start)
self.end = Value(end)
def set_source_expressions(self, exprs):
self.start, self.end = exprs
def get_source_expressions(self):
return [self.start, self.end]
def as_sql(self, compiler, connection):
connection.ops.check_expression_support(self)
start, end = self.window_frame_start_end(
connection, self.start.value, self.end.value
)
return (
self.template
% {
"frame_type": self.frame_type,
"start": start,
"end": end,
},
[],
)
def __repr__(self):
return "<%s: %s>" % (self.__class__.__name__, self)
def get_group_by_cols(self):
return []
def __str__(self):
if self.start.value is not None and self.start.value < 0:
start = "%d %s" % (abs(self.start.value), connection.ops.PRECEDING)
elif self.start.value is not None and self.start.value == 0:
start = connection.ops.CURRENT_ROW
else:
start = connection.ops.UNBOUNDED_PRECEDING
if self.end.value is not None and self.end.value > 0:
end = "%d %s" % (self.end.value, connection.ops.FOLLOWING)
elif self.end.value is not None and self.end.value == 0:
end = connection.ops.CURRENT_ROW
else:
end = connection.ops.UNBOUNDED_FOLLOWING
return self.template % {
"frame_type": self.frame_type,
"start": start,
"end": end,
}
def window_frame_start_end(self, connection, start, end):
raise NotImplementedError("Subclasses must implement window_frame_start_end().")
class RowRange(WindowFrame):
frame_type = "ROWS"
def window_frame_start_end(self, connection, start, end):
return connection.ops.window_frame_rows_start_end(start, end)
class ValueRange(WindowFrame):
frame_type = "RANGE"
def window_frame_start_end(self, connection, start, end):
return connection.ops.window_frame_range_start_end(start, end)
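# A minimal frame sketch (hypothetical fields). Per WindowFrame above, a
# negative start means N PRECEDING, 0 means CURRENT ROW, and None means
# UNBOUNDED:
#
#     from django.db.models import RowRange, Sum, Window
#
#     Window(
#         expression=Sum("amount"),
#         order_by="day",
#         frame=RowRange(start=-2, end=0),  # 2 PRECEDING .. CURRENT ROW
#     )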
|
760fd2400d7b3fb1445836a8d657181e023519664a1d1e38a453bc99fec8bb75 | import collections.abc
import copy
import datetime
import decimal
import operator
import uuid
import warnings
from base64 import b64decode, b64encode
from functools import partialmethod, total_ordering
from django import forms
from django.apps import apps
from django.conf import settings
from django.core import checks, exceptions, validators
from django.db import connection, connections, router
from django.db.models.constants import LOOKUP_SEP
from django.db.models.enums import ChoicesMeta
from django.db.models.query_utils import DeferredAttribute, RegisterLookupMixin
from django.utils import timezone
from django.utils.datastructures import DictWrapper
from django.utils.dateparse import (
parse_date,
parse_datetime,
parse_duration,
parse_time,
)
from django.utils.duration import duration_microseconds, duration_string
from django.utils.functional import Promise, cached_property
from django.utils.ipv6 import clean_ipv6_address
from django.utils.itercompat import is_iterable
from django.utils.text import capfirst
from django.utils.translation import gettext_lazy as _
__all__ = [
"AutoField",
"BLANK_CHOICE_DASH",
"BigAutoField",
"BigIntegerField",
"BinaryField",
"BooleanField",
"CharField",
"CommaSeparatedIntegerField",
"DateField",
"DateTimeField",
"DecimalField",
"DurationField",
"EmailField",
"Empty",
"Field",
"FilePathField",
"FloatField",
"GenericIPAddressField",
"IPAddressField",
"IntegerField",
"NOT_PROVIDED",
"NullBooleanField",
"PositiveBigIntegerField",
"PositiveIntegerField",
"PositiveSmallIntegerField",
"SlugField",
"SmallAutoField",
"SmallIntegerField",
"TextField",
"TimeField",
"URLField",
"UUIDField",
]
class Empty:
pass
class NOT_PROVIDED:
pass
# The values to use for "blank" in SelectFields. Will be prepended to most
# "choices" lists.
BLANK_CHOICE_DASH = [("", "---------")]
def _load_field(app_label, model_name, field_name):
return apps.get_model(app_label, model_name)._meta.get_field(field_name)
# A guide to Field parameters:
#
# * name: The name of the field specified in the model.
# * attname: The attribute to use on the model object. This is the same as
# "name", except in the case of ForeignKeys, where "_id" is
# appended.
# * db_column: The db_column specified in the model (or None).
# * column: The database column for this field. This is the same as
# "attname", except if db_column is specified.
#
# Code that introspects values, or does other dynamic things, should use
# attname. For example, this gets the primary key value of object "obj":
#
# getattr(obj, opts.pk.attname)
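#
# For example, for a hypothetical ForeignKey declared as "author":
#
# field.name -> "author"
# field.attname -> "author_id"
# field.column -> "author_id" (or the db_column override, if one was given)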
def _empty(of_cls):
new = Empty()
new.__class__ = of_cls
return new
def return_None():
return None
@total_ordering
class Field(RegisterLookupMixin):
"""Base class for all field types"""
# Designates whether empty strings fundamentally are allowed at the
# database level.
empty_strings_allowed = True
empty_values = list(validators.EMPTY_VALUES)
# These track each time a Field instance is created. Used to retain order.
# The auto_creation_counter is used for fields that Django implicitly
# creates, creation_counter is used for all user-specified fields.
creation_counter = 0
auto_creation_counter = -1
default_validators = [] # Default set of validators
default_error_messages = {
"invalid_choice": _("Value %(value)r is not a valid choice."),
"null": _("This field cannot be null."),
"blank": _("This field cannot be blank."),
"unique": _("%(model_name)s with this %(field_label)s already exists."),
"unique_for_date": _(
# Translators: The 'lookup_type' is one of 'date', 'year' or
# 'month'. Eg: "Title must be unique for pub_date year"
"%(field_label)s must be unique for "
"%(date_field_label)s %(lookup_type)s."
),
}
system_check_deprecated_details = None
system_check_removed_details = None
# Attributes that don't affect a column definition.
# These attributes are ignored when altering the field.
non_db_attrs = (
"blank",
"choices",
"db_column",
"editable",
"error_messages",
"help_text",
"limit_choices_to",
# Database-level options are not supported, see #21961.
"on_delete",
"related_name",
"related_query_name",
"validators",
"verbose_name",
)
# Field flags
hidden = False
many_to_many = None
many_to_one = None
one_to_many = None
one_to_one = None
related_model = None
descriptor_class = DeferredAttribute
# Generic field type description, usually overridden by subclasses
def _description(self):
return _("Field of type: %(field_type)s") % {
"field_type": self.__class__.__name__
}
description = property(_description)
def __init__(
self,
verbose_name=None,
name=None,
primary_key=False,
max_length=None,
unique=False,
blank=False,
null=False,
db_index=False,
rel=None,
default=NOT_PROVIDED,
editable=True,
serialize=True,
unique_for_date=None,
unique_for_month=None,
unique_for_year=None,
choices=None,
help_text="",
db_column=None,
db_tablespace=None,
auto_created=False,
validators=(),
error_messages=None,
db_comment=None,
db_default=NOT_PROVIDED,
):
self.name = name
self.verbose_name = verbose_name # May be set by set_attributes_from_name
self._verbose_name = verbose_name # Store original for deconstruction
self.primary_key = primary_key
self.max_length, self._unique = max_length, unique
self.blank, self.null = blank, null
self.remote_field = rel
self.is_relation = self.remote_field is not None
self.default = default
if db_default is not NOT_PROVIDED and not hasattr(
db_default, "resolve_expression"
):
from django.db.models.expressions import Value
db_default = Value(db_default)
self.db_default = db_default
self.editable = editable
self.serialize = serialize
self.unique_for_date = unique_for_date
self.unique_for_month = unique_for_month
self.unique_for_year = unique_for_year
if isinstance(choices, ChoicesMeta):
choices = choices.choices
if isinstance(choices, collections.abc.Iterator):
choices = list(choices)
self.choices = choices
self.help_text = help_text
self.db_index = db_index
self.db_column = db_column
self.db_comment = db_comment
self._db_tablespace = db_tablespace
self.auto_created = auto_created
# Adjust the appropriate creation counter, and save our local copy.
if auto_created:
self.creation_counter = Field.auto_creation_counter
Field.auto_creation_counter -= 1
else:
self.creation_counter = Field.creation_counter
Field.creation_counter += 1
self._validators = list(validators) # Store for deconstruction later
self._error_messages = error_messages # Store for deconstruction later
def __str__(self):
"""
Return "app_label.model_label.field_name" for fields attached to
models.
"""
if not hasattr(self, "model"):
return super().__str__()
model = self.model
return "%s.%s" % (model._meta.label, self.name)
def __repr__(self):
"""Display the module, class, and name of the field."""
path = "%s.%s" % (self.__class__.__module__, self.__class__.__qualname__)
name = getattr(self, "name", None)
if name is not None:
return "<%s: %s>" % (path, name)
return "<%s>" % path
def check(self, **kwargs):
return [
*self._check_field_name(),
*self._check_choices(),
*self._check_db_default(**kwargs),
*self._check_db_index(),
*self._check_db_comment(**kwargs),
*self._check_null_allowed_for_primary_keys(),
*self._check_backend_specific_checks(**kwargs),
*self._check_validators(),
*self._check_deprecation_details(),
]
def _check_field_name(self):
"""
Check if field name is valid, i.e. 1) does not end with an
underscore, 2) does not contain "__" and 3) is not "pk".
"""
if self.name.endswith("_"):
return [
checks.Error(
"Field names must not end with an underscore.",
obj=self,
id="fields.E001",
)
]
elif LOOKUP_SEP in self.name:
return [
checks.Error(
'Field names must not contain "%s".' % LOOKUP_SEP,
obj=self,
id="fields.E002",
)
]
elif self.name == "pk":
return [
checks.Error(
"'pk' is a reserved word that cannot be used as a field name.",
obj=self,
id="fields.E003",
)
]
else:
return []
@classmethod
def _choices_is_value(cls, value):
return isinstance(value, (str, Promise)) or not is_iterable(value)
def _check_choices(self):
if not self.choices:
return []
if not is_iterable(self.choices) or isinstance(self.choices, str):
return [
checks.Error(
"'choices' must be an iterable (e.g., a list or tuple).",
obj=self,
id="fields.E004",
)
]
choice_max_length = 0
# Expect [group_name, [value, display]]
for choices_group in self.choices:
try:
group_name, group_choices = choices_group
except (TypeError, ValueError):
# Containing non-pairs
break
try:
if not all(
self._choices_is_value(value) and self._choices_is_value(human_name)
for value, human_name in group_choices
):
break
if self.max_length is not None and group_choices:
choice_max_length = max(
[
choice_max_length,
*(
len(value)
for value, _ in group_choices
if isinstance(value, str)
),
]
)
except (TypeError, ValueError):
# No groups, choices in the form [value, display]
value, human_name = group_name, group_choices
if not self._choices_is_value(value) or not self._choices_is_value(
human_name
):
break
if self.max_length is not None and isinstance(value, str):
choice_max_length = max(choice_max_length, len(value))
# Special case: choices=['ab']
if isinstance(choices_group, str):
break
else:
if self.max_length is not None and choice_max_length > self.max_length:
return [
checks.Error(
"'max_length' is too small to fit the longest value "
"in 'choices' (%d characters)." % choice_max_length,
obj=self,
id="fields.E009",
),
]
return []
return [
checks.Error(
"'choices' must be an iterable containing "
"(actual value, human readable name) tuples.",
obj=self,
id="fields.E005",
)
]
def _check_db_default(self, databases=None, **kwargs):
from django.db.models.expressions import Value
if (
self.db_default is NOT_PROVIDED
or isinstance(self.db_default, Value)
or databases is None
):
return []
errors = []
for db in databases:
if not router.allow_migrate_model(db, self.model):
continue
connection = connections[db]
if not getattr(self.db_default, "allowed_default", False) and (
connection.features.supports_expression_defaults
):
msg = f"{self.db_default} cannot be used in db_default."
errors.append(checks.Error(msg, obj=self, id="fields.E012"))
if not (
connection.features.supports_expression_defaults
or "supports_expression_defaults"
in self.model._meta.required_db_features
):
msg = (
f"{connection.display_name} does not support default database "
"values with expressions (db_default)."
)
errors.append(checks.Error(msg, obj=self, id="fields.E011"))
return errors
def _check_db_index(self):
if self.db_index not in (None, True, False):
return [
checks.Error(
"'db_index' must be None, True or False.",
obj=self,
id="fields.E006",
)
]
else:
return []
def _check_db_comment(self, databases=None, **kwargs):
if not self.db_comment or not databases:
return []
errors = []
for db in databases:
if not router.allow_migrate_model(db, self.model):
continue
connection = connections[db]
if not (
connection.features.supports_comments
or "supports_comments" in self.model._meta.required_db_features
):
errors.append(
checks.Warning(
f"{connection.display_name} does not support comments on "
f"columns (db_comment).",
obj=self,
id="fields.W163",
)
)
return errors
def _check_null_allowed_for_primary_keys(self):
if (
self.primary_key
and self.null
and not connection.features.interprets_empty_strings_as_nulls
):
# We cannot reliably check this for backends like Oracle which
# consider NULL and '' to be equal (and thus set up
# character-based fields a little differently).
return [
checks.Error(
"Primary keys must not have null=True.",
hint=(
"Set null=False on the field, or "
"remove primary_key=True argument."
),
obj=self,
id="fields.E007",
)
]
else:
return []
def _check_backend_specific_checks(self, databases=None, **kwargs):
if databases is None:
return []
errors = []
for alias in databases:
if router.allow_migrate_model(alias, self.model):
errors.extend(connections[alias].validation.check_field(self, **kwargs))
return errors
def _check_validators(self):
errors = []
for i, validator in enumerate(self.validators):
if not callable(validator):
errors.append(
checks.Error(
"All 'validators' must be callable.",
hint=(
"validators[{i}] ({repr}) isn't a function or "
"instance of a validator class.".format(
i=i,
repr=repr(validator),
)
),
obj=self,
id="fields.E008",
)
)
return errors
def _check_deprecation_details(self):
if self.system_check_removed_details is not None:
return [
checks.Error(
self.system_check_removed_details.get(
"msg",
"%s has been removed except for support in historical "
"migrations." % self.__class__.__name__,
),
hint=self.system_check_removed_details.get("hint"),
obj=self,
id=self.system_check_removed_details.get("id", "fields.EXXX"),
)
]
elif self.system_check_deprecated_details is not None:
return [
checks.Warning(
self.system_check_deprecated_details.get(
"msg", "%s has been deprecated." % self.__class__.__name__
),
hint=self.system_check_deprecated_details.get("hint"),
obj=self,
id=self.system_check_deprecated_details.get("id", "fields.WXXX"),
)
]
return []
def get_col(self, alias, output_field=None):
if alias == self.model._meta.db_table and (
output_field is None or output_field == self
):
return self.cached_col
from django.db.models.expressions import Col
return Col(alias, self, output_field)
@cached_property
def cached_col(self):
from django.db.models.expressions import Col
return Col(self.model._meta.db_table, self)
def select_format(self, compiler, sql, params):
"""
Custom format for select clauses. For example, GIS columns need to be
selected as AsText(table.col) on MySQL as the table.col data can't be
used by Django.
"""
return sql, params
def deconstruct(self):
"""
Return enough information to recreate the field as a 4-tuple:
* The name of the field on the model, if contribute_to_class() has
been run.
* The import path of the field, including the class, e.g.
django.db.models.IntegerField. This should be the most portable
version, so less specific may be better.
* A list of positional arguments.
* A dict of keyword arguments.
Note that the positional or keyword arguments must contain values of
the following types (including inner values of collection types):
* None, bool, str, int, float, complex, set, frozenset, list, tuple,
dict
* UUID
* datetime.datetime (naive), datetime.date
* top-level classes, top-level functions - will be referenced by their
full import path
* Storage instances - these have their own deconstruct() method
This is because the values here must be serialized into a text format
(possibly new Python code, possibly JSON) and these are the only types
with encoding handlers defined.
There's no need to return the exact way the field was instantiated this
time, just ensure that the resulting field is the same - prefer keyword
arguments over positional ones, and omit parameters with their default
values.
"""
# Short-form way of fetching all the default parameters
keywords = {}
possibles = {
"verbose_name": None,
"primary_key": False,
"max_length": None,
"unique": False,
"blank": False,
"null": False,
"db_index": False,
"default": NOT_PROVIDED,
"db_default": NOT_PROVIDED,
"editable": True,
"serialize": True,
"unique_for_date": None,
"unique_for_month": None,
"unique_for_year": None,
"choices": None,
"help_text": "",
"db_column": None,
"db_comment": None,
"db_tablespace": None,
"auto_created": False,
"validators": [],
"error_messages": None,
}
attr_overrides = {
"unique": "_unique",
"error_messages": "_error_messages",
"validators": "_validators",
"verbose_name": "_verbose_name",
"db_tablespace": "_db_tablespace",
}
equals_comparison = {"choices", "validators"}
for name, default in possibles.items():
value = getattr(self, attr_overrides.get(name, name))
# Unroll anything iterable for choices into a concrete list
if name == "choices" and isinstance(value, collections.abc.Iterable):
value = list(value)
# Do correct kind of comparison
if name in equals_comparison:
if value != default:
keywords[name] = value
else:
if value is not default:
keywords[name] = value
# Work out path - we shorten it for known Django core fields
path = "%s.%s" % (self.__class__.__module__, self.__class__.__qualname__)
if path.startswith("django.db.models.fields.related"):
path = path.replace("django.db.models.fields.related", "django.db.models")
elif path.startswith("django.db.models.fields.files"):
path = path.replace("django.db.models.fields.files", "django.db.models")
elif path.startswith("django.db.models.fields.json"):
path = path.replace("django.db.models.fields.json", "django.db.models")
elif path.startswith("django.db.models.fields.proxy"):
path = path.replace("django.db.models.fields.proxy", "django.db.models")
elif path.startswith("django.db.models.fields"):
path = path.replace("django.db.models.fields", "django.db.models")
# Return basic info - other fields should override this.
return (self.name, path, [], keywords)
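# A hedged sketch of deconstruct() output for a simple, unattached field
# (the name is None until contribute_to_class() has run):
#
#     >>> models.CharField(max_length=100).deconstruct()
#     (None, 'django.db.models.CharField', [], {'max_length': 100})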
def clone(self):
"""
Use deconstruct() to create a new copy of this Field. Class
attachments and attribute names aren't preserved.
"""
name, path, args, kwargs = self.deconstruct()
return self.__class__(*args, **kwargs)
def __eq__(self, other):
# Needed for @total_ordering
if isinstance(other, Field):
return self.creation_counter == other.creation_counter and getattr(
self, "model", None
) == getattr(other, "model", None)
return NotImplemented
def __lt__(self, other):
# This is needed because bisect does not take a comparison function.
# Order by creation_counter first for backward compatibility.
if isinstance(other, Field):
if (
self.creation_counter != other.creation_counter
or not hasattr(self, "model")
and not hasattr(other, "model")
):
return self.creation_counter < other.creation_counter
elif hasattr(self, "model") != hasattr(other, "model"):
return not hasattr(self, "model") # Order no-model fields first
else:
# creation_counter's are equal, compare only models.
return (self.model._meta.app_label, self.model._meta.model_name) < (
other.model._meta.app_label,
other.model._meta.model_name,
)
return NotImplemented
def __hash__(self):
return hash(self.creation_counter)
def __deepcopy__(self, memodict):
# We don't have to deepcopy very much here, since most things are not
# intended to be altered after initial creation.
obj = copy.copy(self)
if self.remote_field:
obj.remote_field = copy.copy(self.remote_field)
if hasattr(self.remote_field, "field") and self.remote_field.field is self:
obj.remote_field.field = obj
memodict[id(self)] = obj
return obj
def __copy__(self):
# We need to avoid hitting __reduce__, so define this
# slightly weird copy construct.
obj = Empty()
obj.__class__ = self.__class__
obj.__dict__ = self.__dict__.copy()
return obj
def __reduce__(self):
"""
Pickling should return the model._meta.fields instance of the field,
not a new copy of that field. So, use the app registry to load the
model and then the field back.
"""
if not hasattr(self, "model"):
# Fields are sometimes used without attaching them to models (for
# example in aggregation). In this case give back a plain field
# instance. The code below will create a new empty instance of
# class self.__class__, then update its dict with self.__dict__
# values - so, this is very close to normal pickle.
state = self.__dict__.copy()
# The _get_default cached_property can't be pickled due to lambda
# usage.
state.pop("_get_default", None)
return _empty, (self.__class__,), state
return _load_field, (
self.model._meta.app_label,
self.model._meta.object_name,
self.name,
)
def get_pk_value_on_save(self, instance):
"""
Hook to generate new PK values on save. This method is called when
saving instances with no primary key value set. If this method returns
something other than None, the returned value is used when saving the
new instance.
"""
if self.default:
return self.get_default()
return None
def to_python(self, value):
"""
Convert the input value into the expected Python data type, raising
django.core.exceptions.ValidationError if the data can't be converted.
Return the converted value. Subclasses should override this.
"""
return value
@cached_property
def error_messages(self):
messages = {}
for c in reversed(self.__class__.__mro__):
messages.update(getattr(c, "default_error_messages", {}))
messages.update(self._error_messages or {})
return messages
@cached_property
def validators(self):
"""
Some validators can't be created at field initialization time.
This method provides a way to delay their creation until required.
"""
return [*self.default_validators, *self._validators]
def run_validators(self, value):
if value in self.empty_values:
return
errors = []
for v in self.validators:
try:
v(value)
except exceptions.ValidationError as e:
if hasattr(e, "code") and e.code in self.error_messages:
e.message = self.error_messages[e.code]
errors.extend(e.error_list)
if errors:
raise exceptions.ValidationError(errors)
def validate(self, value, model_instance):
"""
Validate value and raise ValidationError if necessary. Subclasses
should override this to provide validation logic.
"""
if not self.editable:
# Skip validation for non-editable fields.
return
if self.choices is not None and value not in self.empty_values:
for option_key, option_value in self.choices:
if isinstance(option_value, (list, tuple)):
# This is an optgroup, so look inside the group for
# options.
for optgroup_key, optgroup_value in option_value:
if value == optgroup_key:
return
elif value == option_key:
return
raise exceptions.ValidationError(
self.error_messages["invalid_choice"],
code="invalid_choice",
params={"value": value},
)
if value is None and not self.null:
raise exceptions.ValidationError(self.error_messages["null"], code="null")
if not self.blank and value in self.empty_values:
raise exceptions.ValidationError(self.error_messages["blank"], code="blank")
def clean(self, value, model_instance):
"""
Convert the value's type and run validation. Validation errors
from to_python() and validate() are propagated. Return the correct
value if no error is raised.
"""
value = self.to_python(value)
self.validate(value, model_instance)
self.run_validators(value)
return value
def db_type_parameters(self, connection):
return DictWrapper(self.__dict__, connection.ops.quote_name, "qn_")
def db_check(self, connection):
"""
Return the database column check constraint for this field, for the
provided connection. Works the same way as db_type() for the case that
get_internal_type() does not map to a preexisting model field.
"""
data = self.db_type_parameters(connection)
try:
return (
connection.data_type_check_constraints[self.get_internal_type()] % data
)
except KeyError:
return None
def db_type(self, connection):
"""
Return the database column data type for this field, for the provided
connection.
"""
# The default implementation of this method looks at the
# backend-specific data_types dictionary, looking up the field by its
# "internal type".
#
# A Field class can implement the get_internal_type() method to specify
# which *preexisting* Django Field class it's most similar to -- i.e.,
# a custom field might be represented by a TEXT column type, which is
# the same as the TextField Django field type, which means the custom
# field's get_internal_type() returns 'TextField'.
#
# But the limitation of the get_internal_type() / data_types approach
# is that it cannot handle database column types that aren't already
# mapped to one of the built-in Django field types. In this case, you
# can implement db_type() instead of get_internal_type() to specify
# exactly which wacky database column type you want to use.
data = self.db_type_parameters(connection)
try:
column_type = connection.data_types[self.get_internal_type()]
except KeyError:
return None
else:
# column_type is either a single-parameter function or a string.
if callable(column_type):
return column_type(data)
return column_type % data
def rel_db_type(self, connection):
"""
Return the data type that a related field pointing to this field should
use. For example, this method is called by ForeignKey and OneToOneField
to determine its data type.
"""
return self.db_type(connection)
def cast_db_type(self, connection):
"""Return the data type to use in the Cast() function."""
db_type = connection.ops.cast_data_types.get(self.get_internal_type())
if db_type:
return db_type % self.db_type_parameters(connection)
return self.db_type(connection)
def db_parameters(self, connection):
"""
Extension of db_type(), providing a range of different return values
(type, checks). This will look at db_type(), allowing custom model
fields to override it.
"""
type_string = self.db_type(connection)
check_string = self.db_check(connection)
return {
"type": type_string,
"check": check_string,
}
def db_type_suffix(self, connection):
return connection.data_types_suffix.get(self.get_internal_type())
def get_db_converters(self, connection):
if hasattr(self, "from_db_value"):
return [self.from_db_value]
return []
@property
def unique(self):
return self._unique or self.primary_key
@property
def db_tablespace(self):
return self._db_tablespace or settings.DEFAULT_INDEX_TABLESPACE
@property
def db_returning(self):
"""Private API intended only to be used by Django itself."""
return (
self.db_default is not NOT_PROVIDED
and connection.features.can_return_columns_from_insert
)
def set_attributes_from_name(self, name):
self.name = self.name or name
self.attname, self.column = self.get_attname_column()
self.concrete = self.column is not None
if self.verbose_name is None and self.name:
self.verbose_name = self.name.replace("_", " ")
def contribute_to_class(self, cls, name, private_only=False):
"""
Register the field with the model class it belongs to.
If private_only is True, create a separate instance of this field
for every subclass of cls, even if cls is not an abstract model.
"""
self.set_attributes_from_name(name)
self.model = cls
cls._meta.add_field(self, private=private_only)
if self.column:
setattr(cls, self.attname, self.descriptor_class(self))
if self.choices is not None:
# Don't override a get_FOO_display() method defined explicitly on
# this class, but don't check methods derived from inheritance, to
# allow overriding inherited choices. For more complex inheritance
# structures users should override contribute_to_class().
if "get_%s_display" % self.name not in cls.__dict__:
setattr(
cls,
"get_%s_display" % self.name,
partialmethod(cls._get_FIELD_display, field=self),
)
def get_filter_kwargs_for_object(self, obj):
"""
Return a dict that, when passed as kwargs to self.model.filter(), would
yield all instances having the same value for this field as obj has.
"""
return {self.name: getattr(obj, self.attname)}
def get_attname(self):
return self.name
def get_attname_column(self):
attname = self.get_attname()
column = self.db_column or attname
return attname, column
def get_internal_type(self):
return self.__class__.__name__
def pre_save(self, model_instance, add):
"""Return field's value just before saving."""
value = getattr(model_instance, self.attname)
if not connection.features.supports_default_keyword_in_insert:
from django.db.models.expressions import DatabaseDefault
if isinstance(value, DatabaseDefault):
return self.db_default
return value
def get_prep_value(self, value):
"""Perform preliminary non-db specific value checks and conversions."""
if isinstance(value, Promise):
value = value._proxy____cast()
return value
def get_db_prep_value(self, value, connection, prepared=False):
"""
Return field's value prepared for interacting with the database backend.
Used by the default implementations of get_db_prep_save().
"""
if not prepared:
value = self.get_prep_value(value)
return value
def get_db_prep_save(self, value, connection):
"""Return field's value prepared for saving into a database."""
if hasattr(value, "as_sql"):
return value
return self.get_db_prep_value(value, connection=connection, prepared=False)
def has_default(self):
"""Return a boolean of whether this field has a default value."""
return self.default is not NOT_PROVIDED
def get_default(self):
"""Return the default value for this field."""
return self._get_default()
@cached_property
def _get_default(self):
if self.has_default():
if callable(self.default):
return self.default
return lambda: self.default
if self.db_default is not NOT_PROVIDED:
from django.db.models.expressions import DatabaseDefault
return DatabaseDefault
if (
not self.empty_strings_allowed
or self.null
and not connection.features.interprets_empty_strings_as_nulls
):
return return_None
return str # return empty string
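# Sketch of the resolution order implemented above: an explicit default
# (callable or literal) wins; otherwise db_default yields a DatabaseDefault
# marker; otherwise None for non-string/nullable fields, or "" where empty
# strings are allowed. For example:
#
#     models.CharField(max_length=10).get_default()          # -> ""
#     models.IntegerField(null=True).get_default()           # -> None
#     models.IntegerField(default=lambda: 42).get_default()  # -> 42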
def get_choices(
self,
include_blank=True,
blank_choice=BLANK_CHOICE_DASH,
limit_choices_to=None,
ordering=(),
):
"""
Return choices with a default blank choice included, for use
as <select> choices for this field.
"""
if self.choices is not None:
choices = list(self.choices)
if include_blank:
blank_defined = any(
choice in ("", None) for choice, _ in self.flatchoices
)
if not blank_defined:
choices = blank_choice + choices
return choices
rel_model = self.remote_field.model
limit_choices_to = limit_choices_to or self.get_limit_choices_to()
choice_func = operator.attrgetter(
self.remote_field.get_related_field().attname
if hasattr(self.remote_field, "get_related_field")
else "pk"
)
qs = rel_model._default_manager.complex_filter(limit_choices_to)
if ordering:
qs = qs.order_by(*ordering)
return (blank_choice if include_blank else []) + [
(choice_func(x), str(x)) for x in qs
]
def value_to_string(self, obj):
"""
Return a string value of this field from the passed obj.
This is used by the serialization framework.
"""
return str(self.value_from_object(obj))
def _get_flatchoices(self):
"""Flattened version of choices tuple."""
if self.choices is None:
return []
flat = []
for choice, value in self.choices:
if isinstance(value, (list, tuple)):
flat.extend(value)
else:
flat.append((choice, value))
return flat
flatchoices = property(_get_flatchoices)
def save_form_data(self, instance, data):
setattr(instance, self.name, data)
def formfield(self, form_class=None, choices_form_class=None, **kwargs):
"""Return a django.forms.Field instance for this field."""
defaults = {
"required": not self.blank,
"label": capfirst(self.verbose_name),
"help_text": self.help_text,
}
if self.has_default():
if callable(self.default):
defaults["initial"] = self.default
defaults["show_hidden_initial"] = True
else:
defaults["initial"] = self.get_default()
if self.choices is not None:
# Fields with choices get special treatment.
include_blank = self.blank or not (
self.has_default() or "initial" in kwargs
)
defaults["choices"] = self.get_choices(include_blank=include_blank)
defaults["coerce"] = self.to_python
if self.null:
defaults["empty_value"] = None
if choices_form_class is not None:
form_class = choices_form_class
else:
form_class = forms.TypedChoiceField
# Many of the subclass-specific formfield arguments (min_value,
# max_value) don't apply for choice fields, so be sure to only pass
# the values that TypedChoiceField will understand.
for k in list(kwargs):
if k not in (
"coerce",
"empty_value",
"choices",
"required",
"widget",
"label",
"initial",
"help_text",
"error_messages",
"show_hidden_initial",
"disabled",
):
del kwargs[k]
defaults.update(kwargs)
if form_class is None:
form_class = forms.CharField
return form_class(**defaults)
def value_from_object(self, obj):
"""Return the value of this field in the given model instance."""
return getattr(obj, self.attname)
class BooleanField(Field):
empty_strings_allowed = False
default_error_messages = {
"invalid": _("“%(value)s” value must be either True or False."),
"invalid_nullable": _("“%(value)s” value must be either True, False, or None."),
}
description = _("Boolean (Either True or False)")
def get_internal_type(self):
return "BooleanField"
def to_python(self, value):
if self.null and value in self.empty_values:
return None
if value in (True, False):
# 1/0 are equal to True/False. bool() converts former to latter.
return bool(value)
if value in ("t", "True", "1"):
return True
if value in ("f", "False", "0"):
return False
raise exceptions.ValidationError(
self.error_messages["invalid_nullable" if self.null else "invalid"],
code="invalid",
params={"value": value},
)
def get_prep_value(self, value):
value = super().get_prep_value(value)
if value is None:
return None
return self.to_python(value)
def formfield(self, **kwargs):
if self.choices is not None:
include_blank = not (self.has_default() or "initial" in kwargs)
defaults = {"choices": self.get_choices(include_blank=include_blank)}
else:
form_class = forms.NullBooleanField if self.null else forms.BooleanField
# In HTML checkboxes, 'required' means "must be checked" which is
# different from the choices case ("must select some value").
# required=False allows unchecked checkboxes.
defaults = {"form_class": form_class, "required": False}
return super().formfield(**{**defaults, **kwargs})
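# A quick to_python() sketch for BooleanField, per the string forms accepted
# above:
#
#     BooleanField().to_python("1")           # -> True
#     BooleanField().to_python("False")       # -> False
#     BooleanField(null=True).to_python("")   # -> None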
class CharField(Field):
def __init__(self, *args, db_collation=None, **kwargs):
super().__init__(*args, **kwargs)
self.db_collation = db_collation
if self.max_length is not None:
self.validators.append(validators.MaxLengthValidator(self.max_length))
@property
def description(self):
if self.max_length is not None:
return _("String (up to %(max_length)s)")
else:
return _("String (unlimited)")
def check(self, **kwargs):
databases = kwargs.get("databases") or []
return [
*super().check(**kwargs),
*self._check_db_collation(databases),
*self._check_max_length_attribute(**kwargs),
]
def _check_max_length_attribute(self, **kwargs):
if self.max_length is None:
if (
connection.features.supports_unlimited_charfield
or "supports_unlimited_charfield"
in self.model._meta.required_db_features
):
return []
return [
checks.Error(
"CharFields must define a 'max_length' attribute.",
obj=self,
id="fields.E120",
)
]
elif (
not isinstance(self.max_length, int)
or isinstance(self.max_length, bool)
or self.max_length <= 0
):
return [
checks.Error(
"'max_length' must be a positive integer.",
obj=self,
id="fields.E121",
)
]
else:
return []
def _check_db_collation(self, databases):
errors = []
for db in databases:
if not router.allow_migrate_model(db, self.model):
continue
connection = connections[db]
if not (
self.db_collation is None
or "supports_collation_on_charfield"
in self.model._meta.required_db_features
or connection.features.supports_collation_on_charfield
):
errors.append(
checks.Error(
"%s does not support a database collation on "
"CharFields." % connection.display_name,
obj=self,
id="fields.E190",
),
)
return errors
def cast_db_type(self, connection):
if self.max_length is None:
return connection.ops.cast_char_field_without_max_length
return super().cast_db_type(connection)
def db_parameters(self, connection):
db_params = super().db_parameters(connection)
db_params["collation"] = self.db_collation
return db_params
def get_internal_type(self):
return "CharField"
def to_python(self, value):
if isinstance(value, str) or value is None:
return value
return str(value)
def get_prep_value(self, value):
value = super().get_prep_value(value)
return self.to_python(value)
def formfield(self, **kwargs):
# Passing max_length to forms.CharField means that the value's length
# will be validated twice. This is considered acceptable since we want
# the value in the form field (to pass into the widget, for example).
defaults = {"max_length": self.max_length}
# TODO: Handle multiple backends with different feature flags.
if self.null and not connection.features.interprets_empty_strings_as_nulls:
defaults["empty_value"] = None
defaults.update(kwargs)
return super().formfield(**defaults)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.db_collation:
kwargs["db_collation"] = self.db_collation
return name, path, args, kwargs
class CommaSeparatedIntegerField(CharField):
default_validators = [validators.validate_comma_separated_integer_list]
description = _("Comma-separated integers")
system_check_removed_details = {
"msg": (
"CommaSeparatedIntegerField is removed except for support in "
"historical migrations."
),
"hint": (
"Use CharField(validators=[validate_comma_separated_integer_list]) "
"instead."
),
"id": "fields.E901",
}
def _to_naive(value):
if timezone.is_aware(value):
value = timezone.make_naive(value, datetime.timezone.utc)
return value
def _get_naive_now():
return _to_naive(timezone.now())
class DateTimeCheckMixin:
def check(self, **kwargs):
return [
*super().check(**kwargs),
*self._check_mutually_exclusive_options(),
*self._check_fix_default_value(),
]
def _check_mutually_exclusive_options(self):
# auto_now, auto_now_add, and default are mutually exclusive
# options. The use of more than one of these options together
# will trigger an Error
mutually_exclusive_options = [
self.auto_now_add,
self.auto_now,
self.has_default(),
]
enabled_options = [
option not in (None, False) for option in mutually_exclusive_options
].count(True)
if enabled_options > 1:
return [
checks.Error(
"The options auto_now, auto_now_add, and default "
"are mutually exclusive. Only one of these options "
"may be present.",
obj=self,
id="fields.E160",
)
]
else:
return []
def _check_fix_default_value(self):
return []
# Concrete subclasses use this in their implementations of
# _check_fix_default_value().
def _check_if_value_fixed(self, value, now=None):
"""
Check if the given value appears to have been provided as a "fixed"
time value, and include a warning in the returned list if it does. The
value argument must be a date object or aware/naive datetime object. If
now is provided, it must be a naive datetime object.
"""
if now is None:
now = _get_naive_now()
offset = datetime.timedelta(seconds=10)
lower = now - offset
upper = now + offset
if isinstance(value, datetime.datetime):
value = _to_naive(value)
else:
assert isinstance(value, datetime.date)
lower = lower.date()
upper = upper.date()
if lower <= value <= upper:
return [
checks.Warning(
"Fixed default value provided.",
hint=(
"It seems you set a fixed date / time / datetime "
"value as default for this field. This may not be "
"what you want. If you want to have the current date "
"as default, use `django.utils.timezone.now`"
),
obj=self,
id="fields.W161",
)
]
return []
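# The classic pitfall this check guards against (hypothetical field):
#
#     created = models.DateTimeField(default=timezone.now())  # fixed at import
#     created = models.DateTimeField(default=timezone.now)    # per-save value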
class DateField(DateTimeCheckMixin, Field):
empty_strings_allowed = False
default_error_messages = {
"invalid": _(
"“%(value)s” value has an invalid date format. It must be "
"in YYYY-MM-DD format."
),
"invalid_date": _(
"“%(value)s” value has the correct format (YYYY-MM-DD) "
"but it is an invalid date."
),
}
description = _("Date (without time)")
def __init__(
self, verbose_name=None, name=None, auto_now=False, auto_now_add=False, **kwargs
):
self.auto_now, self.auto_now_add = auto_now, auto_now_add
if auto_now or auto_now_add:
kwargs["editable"] = False
kwargs["blank"] = True
super().__init__(verbose_name, name, **kwargs)
def _check_fix_default_value(self):
"""
Warn that using an actual date or datetime value is probably wrong;
it's only evaluated on server startup.
"""
if not self.has_default():
return []
value = self.default
if isinstance(value, datetime.datetime):
value = _to_naive(value).date()
elif isinstance(value, datetime.date):
pass
else:
# No explicit date / datetime value -- no checks necessary
return []
# At this point, value is a date object.
return self._check_if_value_fixed(value)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.auto_now:
kwargs["auto_now"] = True
if self.auto_now_add:
kwargs["auto_now_add"] = True
if self.auto_now or self.auto_now_add:
del kwargs["editable"]
del kwargs["blank"]
return name, path, args, kwargs
def get_internal_type(self):
return "DateField"
def to_python(self, value):
if value is None:
return value
if isinstance(value, datetime.datetime):
if settings.USE_TZ and timezone.is_aware(value):
# Convert aware datetimes to the default time zone
# before casting them to dates (#17742).
default_timezone = timezone.get_default_timezone()
value = timezone.make_naive(value, default_timezone)
return value.date()
if isinstance(value, datetime.date):
return value
try:
parsed = parse_date(value)
if parsed is not None:
return parsed
except ValueError:
raise exceptions.ValidationError(
self.error_messages["invalid_date"],
code="invalid_date",
params={"value": value},
)
raise exceptions.ValidationError(
self.error_messages["invalid"],
code="invalid",
params={"value": value},
)
def pre_save(self, model_instance, add):
if self.auto_now or (self.auto_now_add and add):
value = datetime.date.today()
setattr(model_instance, self.attname, value)
return value
else:
return super().pre_save(model_instance, add)
def contribute_to_class(self, cls, name, **kwargs):
super().contribute_to_class(cls, name, **kwargs)
if not self.null:
setattr(
cls,
"get_next_by_%s" % self.name,
partialmethod(
cls._get_next_or_previous_by_FIELD, field=self, is_next=True
),
)
setattr(
cls,
"get_previous_by_%s" % self.name,
partialmethod(
cls._get_next_or_previous_by_FIELD, field=self, is_next=False
),
)
def get_prep_value(self, value):
value = super().get_prep_value(value)
return self.to_python(value)
def get_db_prep_value(self, value, connection, prepared=False):
# Casts dates into the format expected by the backend
if not prepared:
value = self.get_prep_value(value)
return connection.ops.adapt_datefield_value(value)
def value_to_string(self, obj):
val = self.value_from_object(obj)
return "" if val is None else val.isoformat()
def formfield(self, **kwargs):
return super().formfield(
**{
"form_class": forms.DateField,
**kwargs,
}
)
class DateTimeField(DateField):
empty_strings_allowed = False
default_error_messages = {
"invalid": _(
"“%(value)s” value has an invalid format. It must be in "
"YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ] format."
),
"invalid_date": _(
"“%(value)s” value has the correct format "
"(YYYY-MM-DD) but it is an invalid date."
),
"invalid_datetime": _(
"“%(value)s” value has the correct format "
"(YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ]) "
"but it is an invalid date/time."
),
}
description = _("Date (with time)")
# __init__ is inherited from DateField
def _check_fix_default_value(self):
"""
Warn that using an actual date or datetime value is probably wrong;
it's only evaluated on server startup.
"""
if not self.has_default():
return []
value = self.default
if isinstance(value, (datetime.datetime, datetime.date)):
return self._check_if_value_fixed(value)
# No explicit date / datetime value -- no checks necessary.
return []
def get_internal_type(self):
return "DateTimeField"
def to_python(self, value):
if value is None:
return value
if isinstance(value, datetime.datetime):
return value
if isinstance(value, datetime.date):
value = datetime.datetime(value.year, value.month, value.day)
if settings.USE_TZ:
# For backwards compatibility, interpret naive datetimes in
# local time. This won't work during DST change, but we can't
# do much about it, so we let the exceptions percolate up the
# call stack.
warnings.warn(
"DateTimeField %s.%s received a naive datetime "
"(%s) while time zone support is active."
% (self.model.__name__, self.name, value),
RuntimeWarning,
)
default_timezone = timezone.get_default_timezone()
value = timezone.make_aware(value, default_timezone)
return value
try:
parsed = parse_datetime(value)
if parsed is not None:
return parsed
except ValueError:
raise exceptions.ValidationError(
self.error_messages["invalid_datetime"],
code="invalid_datetime",
params={"value": value},
)
try:
parsed = parse_date(value)
if parsed is not None:
return datetime.datetime(parsed.year, parsed.month, parsed.day)
except ValueError:
raise exceptions.ValidationError(
self.error_messages["invalid_date"],
code="invalid_date",
params={"value": value},
)
raise exceptions.ValidationError(
self.error_messages["invalid"],
code="invalid",
params={"value": value},
)
def pre_save(self, model_instance, add):
if self.auto_now or (self.auto_now_add and add):
value = timezone.now()
setattr(model_instance, self.attname, value)
return value
else:
return super().pre_save(model_instance, add)
# contribute_to_class is inherited from DateField, it registers
# get_next_by_FOO and get_prev_by_FOO
def get_prep_value(self, value):
value = super().get_prep_value(value)
value = self.to_python(value)
if value is not None and settings.USE_TZ and timezone.is_naive(value):
# For backwards compatibility, interpret naive datetimes in local
# time. This won't work during DST change, but we can't do much
# about it, so we let the exceptions percolate up the call stack.
try:
name = "%s.%s" % (self.model.__name__, self.name)
except AttributeError:
name = "(unbound)"
warnings.warn(
"DateTimeField %s received a naive datetime (%s)"
" while time zone support is active." % (name, value),
RuntimeWarning,
)
default_timezone = timezone.get_default_timezone()
value = timezone.make_aware(value, default_timezone)
return value
def get_db_prep_value(self, value, connection, prepared=False):
# Casts datetimes into the format expected by the backend
if not prepared:
value = self.get_prep_value(value)
return connection.ops.adapt_datetimefield_value(value)
def value_to_string(self, obj):
val = self.value_from_object(obj)
return "" if val is None else val.isoformat()
def formfield(self, **kwargs):
return super().formfield(
**{
"form_class": forms.DateTimeField,
**kwargs,
}
)
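# Editor's illustrative sketch (not part of Django's source): the naive ->
# aware conversion that timezone.make_aware() performs above is roughly
# equivalent to attaching a zone with the stdlib, as below. The zone name is
# illustrative; Django resolves it from settings.TIME_ZONE, and this demo
# needs zoneinfo time zone data to be available.
def _demo_make_aware_sketch():
    import datetime
    from zoneinfo import ZoneInfo
    naive = datetime.datetime(2024, 1, 15, 9, 30)
    aware = naive.replace(tzinfo=ZoneInfo("Europe/Paris"))
    assert aware.utcoffset() is not None  # aware datetimes carry an offset
    return aware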
class DecimalField(Field):
empty_strings_allowed = False
default_error_messages = {
"invalid": _("“%(value)s” value must be a decimal number."),
}
description = _("Decimal number")
def __init__(
self,
verbose_name=None,
name=None,
max_digits=None,
decimal_places=None,
**kwargs,
):
self.max_digits, self.decimal_places = max_digits, decimal_places
super().__init__(verbose_name, name, **kwargs)
def check(self, **kwargs):
errors = super().check(**kwargs)
digits_errors = [
*self._check_decimal_places(),
*self._check_max_digits(),
]
if not digits_errors:
errors.extend(self._check_decimal_places_and_max_digits(**kwargs))
else:
errors.extend(digits_errors)
return errors
def _check_decimal_places(self):
try:
decimal_places = int(self.decimal_places)
if decimal_places < 0:
raise ValueError()
except TypeError:
return [
checks.Error(
"DecimalFields must define a 'decimal_places' attribute.",
obj=self,
id="fields.E130",
)
]
except ValueError:
return [
checks.Error(
"'decimal_places' must be a non-negative integer.",
obj=self,
id="fields.E131",
)
]
else:
return []
def _check_max_digits(self):
try:
max_digits = int(self.max_digits)
if max_digits <= 0:
raise ValueError()
except TypeError:
return [
checks.Error(
"DecimalFields must define a 'max_digits' attribute.",
obj=self,
id="fields.E132",
)
]
except ValueError:
return [
checks.Error(
"'max_digits' must be a positive integer.",
obj=self,
id="fields.E133",
)
]
else:
return []
def _check_decimal_places_and_max_digits(self, **kwargs):
if int(self.decimal_places) > int(self.max_digits):
return [
checks.Error(
"'max_digits' must be greater or equal to 'decimal_places'.",
obj=self,
id="fields.E134",
)
]
return []
@cached_property
def validators(self):
return super().validators + [
validators.DecimalValidator(self.max_digits, self.decimal_places)
]
@cached_property
def context(self):
return decimal.Context(prec=self.max_digits)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.max_digits is not None:
kwargs["max_digits"] = self.max_digits
if self.decimal_places is not None:
kwargs["decimal_places"] = self.decimal_places
return name, path, args, kwargs
def get_internal_type(self):
return "DecimalField"
def to_python(self, value):
if value is None:
return value
try:
if isinstance(value, float):
decimal_value = self.context.create_decimal_from_float(value)
else:
decimal_value = decimal.Decimal(value)
except (decimal.InvalidOperation, TypeError, ValueError):
raise exceptions.ValidationError(
self.error_messages["invalid"],
code="invalid",
params={"value": value},
)
if not decimal_value.is_finite():
raise exceptions.ValidationError(
self.error_messages["invalid"],
code="invalid",
params={"value": value},
)
return decimal_value
def get_db_prep_save(self, value, connection):
if hasattr(value, "as_sql"):
return value
return connection.ops.adapt_decimalfield_value(
self.to_python(value), self.max_digits, self.decimal_places
)
def get_prep_value(self, value):
value = super().get_prep_value(value)
return self.to_python(value)
def formfield(self, **kwargs):
return super().formfield(
**{
"max_digits": self.max_digits,
"decimal_places": self.decimal_places,
"form_class": forms.DecimalField,
**kwargs,
}
)
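# Editor's illustrative sketch (not part of Django's source): to_python()
# above routes floats through a decimal.Context capped at max_digits, so
# binary float noise is rounded away instead of being stored verbatim.
def _demo_decimal_context():
    import decimal
    ctx = decimal.Context(prec=5)  # analogous to max_digits=5
    exact = decimal.Decimal(0.1)  # keeps the full binary expansion of 0.1
    rounded = ctx.create_decimal_from_float(0.1)  # rounds to the precision
    assert exact != decimal.Decimal("0.1") and rounded == decimal.Decimal("0.1")
    return rounded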
class DurationField(Field):
"""
Store timedelta objects.
Use interval on PostgreSQL, INTERVAL DAY TO SECOND on Oracle, and bigint
of microseconds on other databases.
"""
empty_strings_allowed = False
default_error_messages = {
"invalid": _(
"“%(value)s” value has an invalid format. It must be in "
"[DD] [[HH:]MM:]ss[.uuuuuu] format."
)
}
description = _("Duration")
def get_internal_type(self):
return "DurationField"
def to_python(self, value):
if value is None:
return value
if isinstance(value, datetime.timedelta):
return value
try:
parsed = parse_duration(value)
except ValueError:
pass
else:
if parsed is not None:
return parsed
raise exceptions.ValidationError(
self.error_messages["invalid"],
code="invalid",
params={"value": value},
)
def get_db_prep_value(self, value, connection, prepared=False):
if connection.features.has_native_duration_field:
return value
if value is None:
return None
return duration_microseconds(value)
def get_db_converters(self, connection):
converters = []
if not connection.features.has_native_duration_field:
converters.append(connection.ops.convert_durationfield_value)
return converters + super().get_db_converters(connection)
def value_to_string(self, obj):
val = self.value_from_object(obj)
return "" if val is None else duration_string(val)
def formfield(self, **kwargs):
return super().formfield(
**{
"form_class": forms.DurationField,
**kwargs,
}
)
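# Editor's illustrative sketch (not part of Django's source): on backends
# without a native interval type, get_db_prep_value() above stores the
# timedelta as a bigint of microseconds; duration_microseconds() reduces to
# this computation.
def _demo_duration_microseconds():
    import datetime
    td = datetime.timedelta(days=1, seconds=2, microseconds=3)
    micros = td // datetime.timedelta(microseconds=1)
    assert micros == (24 * 3600 + 2) * 10**6 + 3
    return micros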
class EmailField(CharField):
default_validators = [validators.validate_email]
description = _("Email address")
def __init__(self, *args, **kwargs):
# max_length=254 to be compliant with RFCs 3696 and 5321
kwargs.setdefault("max_length", 254)
super().__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
        # We do not exclude max_length if it matches the default, as we want
        # to change the default in the future.
return name, path, args, kwargs
def formfield(self, **kwargs):
# As with CharField, this will cause email validation to be performed
# twice.
return super().formfield(
**{
"form_class": forms.EmailField,
**kwargs,
}
)
class FilePathField(Field):
description = _("File path")
def __init__(
self,
verbose_name=None,
name=None,
path="",
match=None,
recursive=False,
allow_files=True,
allow_folders=False,
**kwargs,
):
self.path, self.match, self.recursive = path, match, recursive
self.allow_files, self.allow_folders = allow_files, allow_folders
kwargs.setdefault("max_length", 100)
super().__init__(verbose_name, name, **kwargs)
def check(self, **kwargs):
return [
*super().check(**kwargs),
*self._check_allowing_files_or_folders(**kwargs),
]
def _check_allowing_files_or_folders(self, **kwargs):
if not self.allow_files and not self.allow_folders:
return [
checks.Error(
"FilePathFields must have either 'allow_files' or 'allow_folders' "
"set to True.",
obj=self,
id="fields.E140",
)
]
return []
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.path != "":
kwargs["path"] = self.path
if self.match is not None:
kwargs["match"] = self.match
if self.recursive is not False:
kwargs["recursive"] = self.recursive
if self.allow_files is not True:
kwargs["allow_files"] = self.allow_files
if self.allow_folders is not False:
kwargs["allow_folders"] = self.allow_folders
if kwargs.get("max_length") == 100:
del kwargs["max_length"]
return name, path, args, kwargs
def get_prep_value(self, value):
value = super().get_prep_value(value)
if value is None:
return None
return str(value)
def formfield(self, **kwargs):
return super().formfield(
**{
"path": self.path() if callable(self.path) else self.path,
"match": self.match,
"recursive": self.recursive,
"form_class": forms.FilePathField,
"allow_files": self.allow_files,
"allow_folders": self.allow_folders,
**kwargs,
}
)
def get_internal_type(self):
return "FilePathField"
class FloatField(Field):
empty_strings_allowed = False
default_error_messages = {
"invalid": _("“%(value)s” value must be a float."),
}
description = _("Floating point number")
def get_prep_value(self, value):
value = super().get_prep_value(value)
if value is None:
return None
try:
return float(value)
except (TypeError, ValueError) as e:
raise e.__class__(
"Field '%s' expected a number but got %r." % (self.name, value),
) from e
def get_internal_type(self):
return "FloatField"
def to_python(self, value):
if value is None:
return value
try:
return float(value)
except (TypeError, ValueError):
raise exceptions.ValidationError(
self.error_messages["invalid"],
code="invalid",
params={"value": value},
)
def formfield(self, **kwargs):
return super().formfield(
**{
"form_class": forms.FloatField,
**kwargs,
}
)
class IntegerField(Field):
empty_strings_allowed = False
default_error_messages = {
"invalid": _("“%(value)s” value must be an integer."),
}
description = _("Integer")
def check(self, **kwargs):
return [
*super().check(**kwargs),
*self._check_max_length_warning(),
]
def _check_max_length_warning(self):
if self.max_length is not None:
return [
checks.Warning(
"'max_length' is ignored when used with %s."
% self.__class__.__name__,
hint="Remove 'max_length' from field",
obj=self,
id="fields.W122",
)
]
return []
@cached_property
def validators(self):
# These validators can't be added at field initialization time since
# they're based on values retrieved from `connection`.
validators_ = super().validators
internal_type = self.get_internal_type()
min_value, max_value = connection.ops.integer_field_range(internal_type)
if min_value is not None and not any(
(
isinstance(validator, validators.MinValueValidator)
and (
validator.limit_value()
if callable(validator.limit_value)
else validator.limit_value
)
>= min_value
)
for validator in validators_
):
validators_.append(validators.MinValueValidator(min_value))
if max_value is not None and not any(
(
isinstance(validator, validators.MaxValueValidator)
and (
validator.limit_value()
if callable(validator.limit_value)
else validator.limit_value
)
<= max_value
)
for validator in validators_
):
validators_.append(validators.MaxValueValidator(max_value))
return validators_
def get_prep_value(self, value):
value = super().get_prep_value(value)
if value is None:
return None
try:
return int(value)
except (TypeError, ValueError) as e:
raise e.__class__(
"Field '%s' expected a number but got %r." % (self.name, value),
) from e
def get_db_prep_value(self, value, connection, prepared=False):
value = super().get_db_prep_value(value, connection, prepared)
return connection.ops.adapt_integerfield_value(value, self.get_internal_type())
def get_internal_type(self):
return "IntegerField"
def to_python(self, value):
if value is None:
return value
try:
return int(value)
except (TypeError, ValueError):
raise exceptions.ValidationError(
self.error_messages["invalid"],
code="invalid",
params={"value": value},
)
def formfield(self, **kwargs):
return super().formfield(
**{
"form_class": forms.IntegerField,
**kwargs,
}
)
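# Editor's illustrative sketch (not part of Django's source): the validators
# property above only appends a backend-derived bound when no user-supplied
# validator already enforces an equal or tighter one. The same check in
# isolation, with a stand-in validator class:
def _demo_integer_bound_dedup():
    class MinValueValidator:
        def __init__(self, limit_value):
            self.limit_value = limit_value
    user_validators = [MinValueValidator(0)]  # tighter than the backend's
    backend_min = -2147483648  # e.g. a 32-bit integer column
    needs_backend_min = not any(
        isinstance(v, MinValueValidator) and v.limit_value >= backend_min
        for v in user_validators
    )
    assert needs_backend_min is False  # the user's bound already covers it
    return needs_backend_min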
class BigIntegerField(IntegerField):
description = _("Big (8 byte) integer")
MAX_BIGINT = 9223372036854775807
def get_internal_type(self):
return "BigIntegerField"
def formfield(self, **kwargs):
return super().formfield(
**{
"min_value": -BigIntegerField.MAX_BIGINT - 1,
"max_value": BigIntegerField.MAX_BIGINT,
**kwargs,
}
)
class SmallIntegerField(IntegerField):
description = _("Small integer")
def get_internal_type(self):
return "SmallIntegerField"
class IPAddressField(Field):
empty_strings_allowed = False
description = _("IPv4 address")
system_check_removed_details = {
"msg": (
"IPAddressField has been removed except for support in "
"historical migrations."
),
"hint": "Use GenericIPAddressField instead.",
"id": "fields.E900",
}
def __init__(self, *args, **kwargs):
kwargs["max_length"] = 15
super().__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
del kwargs["max_length"]
return name, path, args, kwargs
def get_prep_value(self, value):
value = super().get_prep_value(value)
if value is None:
return None
return str(value)
def get_internal_type(self):
return "IPAddressField"
class GenericIPAddressField(Field):
empty_strings_allowed = False
description = _("IP address")
default_error_messages = {}
def __init__(
self,
verbose_name=None,
name=None,
protocol="both",
unpack_ipv4=False,
*args,
**kwargs,
):
self.unpack_ipv4 = unpack_ipv4
self.protocol = protocol
(
self.default_validators,
invalid_error_message,
) = validators.ip_address_validators(protocol, unpack_ipv4)
self.default_error_messages["invalid"] = invalid_error_message
kwargs["max_length"] = 39
super().__init__(verbose_name, name, *args, **kwargs)
def check(self, **kwargs):
return [
*super().check(**kwargs),
*self._check_blank_and_null_values(**kwargs),
]
def _check_blank_and_null_values(self, **kwargs):
if not getattr(self, "null", False) and getattr(self, "blank", False):
return [
checks.Error(
"GenericIPAddressFields cannot have blank=True if null=False, "
"as blank values are stored as nulls.",
obj=self,
id="fields.E150",
)
]
return []
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.unpack_ipv4 is not False:
kwargs["unpack_ipv4"] = self.unpack_ipv4
if self.protocol != "both":
kwargs["protocol"] = self.protocol
if kwargs.get("max_length") == 39:
del kwargs["max_length"]
return name, path, args, kwargs
def get_internal_type(self):
return "GenericIPAddressField"
def to_python(self, value):
if value is None:
return None
if not isinstance(value, str):
value = str(value)
value = value.strip()
if ":" in value:
return clean_ipv6_address(
value, self.unpack_ipv4, self.error_messages["invalid"]
)
return value
def get_db_prep_value(self, value, connection, prepared=False):
if not prepared:
value = self.get_prep_value(value)
return connection.ops.adapt_ipaddressfield_value(value)
def get_prep_value(self, value):
value = super().get_prep_value(value)
if value is None:
return None
if value and ":" in value:
try:
return clean_ipv6_address(value, self.unpack_ipv4)
except exceptions.ValidationError:
pass
return str(value)
def formfield(self, **kwargs):
return super().formfield(
**{
"protocol": self.protocol,
"form_class": forms.GenericIPAddressField,
**kwargs,
}
)
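# Editor's illustrative sketch (not part of Django's source): the
# normalization clean_ipv6_address() performs above is close to the stdlib's
# compressed form, and unpack_ipv4 maps an IPv4-mapped IPv6 address back to
# dotted-quad notation.
def _demo_ipv6_normalization():
    import ipaddress
    full = "2001:0db8:0000:0000:0000:0000:0000:0001"
    assert ipaddress.ip_address(full).compressed == "2001:db8::1"
    mapped = ipaddress.ip_address("::ffff:192.0.2.1")
    assert str(mapped.ipv4_mapped) == "192.0.2.1"  # the unpack_ipv4 idea
    return mapped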
class NullBooleanField(BooleanField):
default_error_messages = {
"invalid": _("“%(value)s” value must be either None, True or False."),
"invalid_nullable": _("“%(value)s” value must be either None, True or False."),
}
description = _("Boolean (Either True, False or None)")
system_check_removed_details = {
"msg": (
"NullBooleanField is removed except for support in historical "
"migrations."
),
"hint": "Use BooleanField(null=True, blank=True) instead.",
"id": "fields.E903",
}
def __init__(self, *args, **kwargs):
kwargs["null"] = True
kwargs["blank"] = True
super().__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
del kwargs["null"]
del kwargs["blank"]
return name, path, args, kwargs
class PositiveIntegerRelDbTypeMixin:
def __init_subclass__(cls, **kwargs):
super().__init_subclass__(**kwargs)
if not hasattr(cls, "integer_field_class"):
cls.integer_field_class = next(
(
parent
for parent in cls.__mro__[1:]
if issubclass(parent, IntegerField)
),
None,
)
def rel_db_type(self, connection):
"""
Return the data type that a related field pointing to this field should
use. In most cases, a foreign key pointing to a positive integer
primary key will have an integer column data type but some databases
(e.g. MySQL) have an unsigned integer type. In that case
(related_fields_match_type=True), the primary key should return its
db_type.
"""
if connection.features.related_fields_match_type:
return self.db_type(connection)
else:
return self.integer_field_class().db_type(connection=connection)
class PositiveBigIntegerField(PositiveIntegerRelDbTypeMixin, BigIntegerField):
description = _("Positive big integer")
def get_internal_type(self):
return "PositiveBigIntegerField"
def formfield(self, **kwargs):
return super().formfield(
**{
"min_value": 0,
**kwargs,
}
)
class PositiveIntegerField(PositiveIntegerRelDbTypeMixin, IntegerField):
description = _("Positive integer")
def get_internal_type(self):
return "PositiveIntegerField"
def formfield(self, **kwargs):
return super().formfield(
**{
"min_value": 0,
**kwargs,
}
)
class PositiveSmallIntegerField(PositiveIntegerRelDbTypeMixin, SmallIntegerField):
description = _("Positive small integer")
def get_internal_type(self):
return "PositiveSmallIntegerField"
def formfield(self, **kwargs):
return super().formfield(
**{
"min_value": 0,
**kwargs,
}
)
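# Editor's illustrative sketch (not part of Django's source): the
# __init_subclass__ hook on PositiveIntegerRelDbTypeMixin scans the MRO for
# the first IntegerField ancestor. The same pattern, reduced to stand-in
# classes:
def _demo_mro_scan():
    class IntegerBase:
        pass
    class Mixin:
        def __init_subclass__(cls, **kwargs):
            super().__init_subclass__(**kwargs)
            cls.integer_base = next(
                (p for p in cls.__mro__[1:] if issubclass(p, IntegerBase)),
                None,
            )
    class BigIntegerBase(IntegerBase):
        pass
    class PositiveBig(Mixin, BigIntegerBase):
        pass
    assert PositiveBig.integer_base is BigIntegerBase
    return PositiveBig.integer_base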
class SlugField(CharField):
default_validators = [validators.validate_slug]
description = _("Slug (up to %(max_length)s)")
def __init__(
self, *args, max_length=50, db_index=True, allow_unicode=False, **kwargs
):
self.allow_unicode = allow_unicode
if self.allow_unicode:
self.default_validators = [validators.validate_unicode_slug]
super().__init__(*args, max_length=max_length, db_index=db_index, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if kwargs.get("max_length") == 50:
del kwargs["max_length"]
if self.db_index is False:
kwargs["db_index"] = False
else:
del kwargs["db_index"]
if self.allow_unicode is not False:
kwargs["allow_unicode"] = self.allow_unicode
return name, path, args, kwargs
def get_internal_type(self):
return "SlugField"
def formfield(self, **kwargs):
return super().formfield(
**{
"form_class": forms.SlugField,
"allow_unicode": self.allow_unicode,
**kwargs,
}
)
class TextField(Field):
description = _("Text")
def __init__(self, *args, db_collation=None, **kwargs):
super().__init__(*args, **kwargs)
self.db_collation = db_collation
def check(self, **kwargs):
databases = kwargs.get("databases") or []
return [
*super().check(**kwargs),
*self._check_db_collation(databases),
]
def _check_db_collation(self, databases):
errors = []
for db in databases:
if not router.allow_migrate_model(db, self.model):
continue
connection = connections[db]
if not (
self.db_collation is None
or "supports_collation_on_textfield"
in self.model._meta.required_db_features
or connection.features.supports_collation_on_textfield
):
errors.append(
checks.Error(
"%s does not support a database collation on "
"TextFields." % connection.display_name,
obj=self,
id="fields.E190",
),
)
return errors
def db_parameters(self, connection):
db_params = super().db_parameters(connection)
db_params["collation"] = self.db_collation
return db_params
def get_internal_type(self):
return "TextField"
def to_python(self, value):
if isinstance(value, str) or value is None:
return value
return str(value)
def get_prep_value(self, value):
value = super().get_prep_value(value)
return self.to_python(value)
def formfield(self, **kwargs):
        # Passing max_length to forms.CharField means that the value's length
        # will be validated twice. This is considered acceptable since we want
        # the value in the form field (to pass into the widget, for example).
return super().formfield(
**{
"max_length": self.max_length,
**({} if self.choices is not None else {"widget": forms.Textarea}),
**kwargs,
}
)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.db_collation:
kwargs["db_collation"] = self.db_collation
return name, path, args, kwargs
class TimeField(DateTimeCheckMixin, Field):
empty_strings_allowed = False
default_error_messages = {
"invalid": _(
"“%(value)s” value has an invalid format. It must be in "
"HH:MM[:ss[.uuuuuu]] format."
),
"invalid_time": _(
"“%(value)s” value has the correct format "
"(HH:MM[:ss[.uuuuuu]]) but it is an invalid time."
),
}
description = _("Time")
def __init__(
self, verbose_name=None, name=None, auto_now=False, auto_now_add=False, **kwargs
):
self.auto_now, self.auto_now_add = auto_now, auto_now_add
if auto_now or auto_now_add:
kwargs["editable"] = False
kwargs["blank"] = True
super().__init__(verbose_name, name, **kwargs)
def _check_fix_default_value(self):
"""
Warn that using an actual date or datetime value is probably wrong;
it's only evaluated on server startup.
"""
if not self.has_default():
return []
value = self.default
if isinstance(value, datetime.datetime):
now = None
elif isinstance(value, datetime.time):
now = _get_naive_now()
# This will not use the right date in the race condition where now
# is just before the date change and value is just past 0:00.
value = datetime.datetime.combine(now.date(), value)
else:
# No explicit time / datetime value -- no checks necessary
return []
# At this point, value is a datetime object.
return self._check_if_value_fixed(value, now=now)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.auto_now is not False:
kwargs["auto_now"] = self.auto_now
if self.auto_now_add is not False:
kwargs["auto_now_add"] = self.auto_now_add
if self.auto_now or self.auto_now_add:
del kwargs["blank"]
del kwargs["editable"]
return name, path, args, kwargs
def get_internal_type(self):
return "TimeField"
def to_python(self, value):
if value is None:
return None
if isinstance(value, datetime.time):
return value
if isinstance(value, datetime.datetime):
# Not usually a good idea to pass in a datetime here (it loses
# information), but this can be a side-effect of interacting with a
# database backend (e.g. Oracle), so we'll be accommodating.
return value.time()
try:
parsed = parse_time(value)
if parsed is not None:
return parsed
except ValueError:
raise exceptions.ValidationError(
self.error_messages["invalid_time"],
code="invalid_time",
params={"value": value},
)
raise exceptions.ValidationError(
self.error_messages["invalid"],
code="invalid",
params={"value": value},
)
def pre_save(self, model_instance, add):
if self.auto_now or (self.auto_now_add and add):
value = datetime.datetime.now().time()
setattr(model_instance, self.attname, value)
return value
else:
return super().pre_save(model_instance, add)
def get_prep_value(self, value):
value = super().get_prep_value(value)
return self.to_python(value)
def get_db_prep_value(self, value, connection, prepared=False):
# Casts times into the format expected by the backend
if not prepared:
value = self.get_prep_value(value)
return connection.ops.adapt_timefield_value(value)
def value_to_string(self, obj):
val = self.value_from_object(obj)
return "" if val is None else val.isoformat()
def formfield(self, **kwargs):
return super().formfield(
**{
"form_class": forms.TimeField,
**kwargs,
}
)
class URLField(CharField):
default_validators = [validators.URLValidator()]
description = _("URL")
def __init__(self, verbose_name=None, name=None, **kwargs):
kwargs.setdefault("max_length", 200)
super().__init__(verbose_name, name, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if kwargs.get("max_length") == 200:
del kwargs["max_length"]
return name, path, args, kwargs
def formfield(self, **kwargs):
# As with CharField, this will cause URL validation to be performed
# twice.
return super().formfield(
**{
"form_class": forms.URLField,
**kwargs,
}
)
class BinaryField(Field):
description = _("Raw binary data")
empty_values = [None, b""]
def __init__(self, *args, **kwargs):
kwargs.setdefault("editable", False)
super().__init__(*args, **kwargs)
if self.max_length is not None:
self.validators.append(validators.MaxLengthValidator(self.max_length))
def check(self, **kwargs):
return [*super().check(**kwargs), *self._check_str_default_value()]
def _check_str_default_value(self):
if self.has_default() and isinstance(self.default, str):
return [
checks.Error(
"BinaryField's default cannot be a string. Use bytes "
"content instead.",
obj=self,
id="fields.E170",
)
]
return []
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if self.editable:
kwargs["editable"] = True
else:
del kwargs["editable"]
return name, path, args, kwargs
def get_internal_type(self):
return "BinaryField"
def get_placeholder(self, value, compiler, connection):
return connection.ops.binary_placeholder_sql(value)
def get_default(self):
if self.has_default() and not callable(self.default):
return self.default
default = super().get_default()
if default == "":
return b""
return default
def get_db_prep_value(self, value, connection, prepared=False):
value = super().get_db_prep_value(value, connection, prepared)
if value is not None:
return connection.Database.Binary(value)
return value
def value_to_string(self, obj):
"""Binary data is serialized as base64"""
return b64encode(self.value_from_object(obj)).decode("ascii")
def to_python(self, value):
# If it's a string, it should be base64-encoded data
if isinstance(value, str):
return memoryview(b64decode(value.encode("ascii")))
return value
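# Editor's illustrative sketch (not part of Django's source): the base64
# round trip used by value_to_string() and to_python() above.
def _demo_binaryfield_roundtrip():
    from base64 import b64decode, b64encode
    payload = b"\x00\xffbinary"
    serialized = b64encode(payload).decode("ascii")  # value_to_string()
    restored = memoryview(b64decode(serialized.encode("ascii")))  # to_python()
    assert restored.tobytes() == payload
    return serialized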
class UUIDField(Field):
default_error_messages = {
"invalid": _("“%(value)s” is not a valid UUID."),
}
description = _("Universally unique identifier")
empty_strings_allowed = False
def __init__(self, verbose_name=None, **kwargs):
kwargs["max_length"] = 32
super().__init__(verbose_name, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
del kwargs["max_length"]
return name, path, args, kwargs
def get_internal_type(self):
return "UUIDField"
def get_prep_value(self, value):
value = super().get_prep_value(value)
return self.to_python(value)
def get_db_prep_value(self, value, connection, prepared=False):
if value is None:
return None
if not isinstance(value, uuid.UUID):
value = self.to_python(value)
if connection.features.has_native_uuid_field:
return value
return value.hex
def to_python(self, value):
if value is not None and not isinstance(value, uuid.UUID):
input_form = "int" if isinstance(value, int) else "hex"
try:
return uuid.UUID(**{input_form: value})
except (AttributeError, ValueError):
raise exceptions.ValidationError(
self.error_messages["invalid"],
code="invalid",
params={"value": value},
)
return value
def formfield(self, **kwargs):
return super().formfield(
**{
"form_class": forms.UUIDField,
**kwargs,
}
)
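# Editor's illustrative sketch (not part of Django's source): to_python()
# above accepts both the "int" and "hex" input forms, and get_db_prep_value()
# stores the dash-free hex form on backends without a native uuid type.
def _demo_uuidfield_forms():
    import uuid
    u = uuid.UUID(hex="12345678-1234-5678-1234-567812345678")
    assert uuid.UUID(int=u.int) == u  # the "int" input form
    assert len(u.hex) == 32 and "-" not in u.hex  # what gets stored
    return u.hex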
class AutoFieldMixin:
db_returning = True
def __init__(self, *args, **kwargs):
kwargs["blank"] = True
super().__init__(*args, **kwargs)
def check(self, **kwargs):
return [
*super().check(**kwargs),
*self._check_primary_key(),
]
def _check_primary_key(self):
if not self.primary_key:
return [
checks.Error(
"AutoFields must set primary_key=True.",
obj=self,
id="fields.E100",
),
]
else:
return []
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
del kwargs["blank"]
kwargs["primary_key"] = True
return name, path, args, kwargs
def validate(self, value, model_instance):
pass
def get_db_prep_value(self, value, connection, prepared=False):
if not prepared:
value = self.get_prep_value(value)
value = connection.ops.validate_autopk_value(value)
return value
def contribute_to_class(self, cls, name, **kwargs):
if cls._meta.auto_field:
raise ValueError(
"Model %s can't have more than one auto-generated field."
% cls._meta.label
)
super().contribute_to_class(cls, name, **kwargs)
cls._meta.auto_field = self
def formfield(self, **kwargs):
return None
class AutoFieldMeta(type):
"""
Metaclass to maintain backward inheritance compatibility for AutoField.
It is intended that AutoFieldMixin become public API when it is possible to
create a non-integer automatically-generated field using column defaults
stored in the database.
In many areas Django also relies on using isinstance() to check for an
automatically-generated field as a subclass of AutoField. A new flag needs
to be implemented on Field to be used instead.
When these issues have been addressed, this metaclass could be used to
deprecate inheritance from AutoField and use of isinstance() with AutoField
for detecting automatically-generated fields.
"""
@property
def _subclasses(self):
return (BigAutoField, SmallAutoField)
def __instancecheck__(self, instance):
return isinstance(instance, self._subclasses) or super().__instancecheck__(
instance
)
def __subclasscheck__(self, subclass):
return issubclass(subclass, self._subclasses) or super().__subclasscheck__(
subclass
)
class AutoField(AutoFieldMixin, IntegerField, metaclass=AutoFieldMeta):
def get_internal_type(self):
return "AutoField"
def rel_db_type(self, connection):
return IntegerField().db_type(connection=connection)
class BigAutoField(AutoFieldMixin, BigIntegerField):
def get_internal_type(self):
return "BigAutoField"
def rel_db_type(self, connection):
return BigIntegerField().db_type(connection=connection)
class SmallAutoField(AutoFieldMixin, SmallIntegerField):
def get_internal_type(self):
return "SmallAutoField"
def rel_db_type(self, connection):
return SmallIntegerField().db_type(connection=connection)
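# Editor's illustrative sketch (not part of Django's source): AutoFieldMeta
# makes isinstance()/issubclass() checks against AutoField also accept
# BigAutoField and SmallAutoField even though they don't inherit from it.
# The same metaclass trick with stand-in classes:
def _demo_virtual_subclassing():
    class Meta(type):
        @property
        def _subclasses(cls):
            return (Big,)
        def __instancecheck__(cls, instance):
            return isinstance(
                instance, cls._subclasses
            ) or super().__instancecheck__(instance)
        def __subclasscheck__(cls, subclass):
            return issubclass(
                subclass, cls._subclasses
            ) or super().__subclasscheck__(subclass)
    class Auto(metaclass=Meta):
        pass
    class Big:  # unrelated to Auto by inheritance
        pass
    assert issubclass(Big, Auto) and isinstance(Big(), Auto)
    return Big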
"""
Create SQL statements for QuerySets.
The code in here encapsulates all of the SQL construction so that QuerySets
themselves do not have to (and could be backed by things other than SQL
databases). The abstraction barrier only works one way: this module has to know
all about the internals of models in order to get the information it needs.
"""
import copy
import difflib
import functools
import sys
from collections import Counter, namedtuple
from collections.abc import Iterator, Mapping
from itertools import chain, count, product
from string import ascii_uppercase
from django.core.exceptions import FieldDoesNotExist, FieldError
from django.db import DEFAULT_DB_ALIAS, NotSupportedError, connections
from django.db.models.aggregates import Count
from django.db.models.constants import LOOKUP_SEP
from django.db.models.expressions import (
BaseExpression,
Col,
Exists,
F,
OuterRef,
Ref,
ResolvedOuterRef,
Value,
)
from django.db.models.fields import Field
from django.db.models.fields.related_lookups import MultiColSource
from django.db.models.lookups import Lookup
from django.db.models.query_utils import (
Q,
check_rel_lookup_compatibility,
refs_expression,
)
from django.db.models.sql.constants import INNER, LOUTER, ORDER_DIR, SINGLE
from django.db.models.sql.datastructures import BaseTable, Empty, Join, MultiJoin
from django.db.models.sql.where import AND, OR, ExtraWhere, NothingNode, WhereNode
from django.utils.functional import cached_property
from django.utils.regex_helper import _lazy_re_compile
from django.utils.tree import Node
__all__ = ["Query", "RawQuery"]
# Quotation marks ('"`[]), whitespace characters, semicolons, or inline
# SQL comments are forbidden in column aliases.
FORBIDDEN_ALIAS_PATTERN = _lazy_re_compile(r"['`\"\]\[;\s]|--|/\*|\*/")
# Inspired by
# https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-IDENTIFIERS
EXPLAIN_OPTIONS_PATTERN = _lazy_re_compile(r"[\w\-]+")
def get_field_names_from_opts(opts):
if opts is None:
return set()
return set(
chain.from_iterable(
(f.name, f.attname) if f.concrete else (f.name,) for f in opts.get_fields()
)
)
def get_children_from_q(q):
for child in q.children:
if isinstance(child, Node):
yield from get_children_from_q(child)
else:
yield child
def rename_prefix_from_q(prefix, replacement, q):
return Q.create(
[
rename_prefix_from_q(prefix, replacement, c)
if isinstance(c, Node)
else (c[0].replace(prefix, replacement, 1), c[1])
for c in q.children
],
q.connector,
q.negated,
)
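# Editor's illustrative sketch (not part of Django's source): the generator
# above flattens a Q tree into its leaf (lookup, value) tuples. Assumes
# Django is importable; building Q objects needs no settings or database.
def _demo_get_children_from_q():
    from django.db.models import Q
    q = Q(name="a") & (Q(age__gt=1) | Q(age__lt=9))
    leaves = list(get_children_from_q(q))
    assert ("name", "a") in leaves and len(leaves) == 3
    return leaves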
JoinInfo = namedtuple(
"JoinInfo",
("final_field", "targets", "opts", "joins", "path", "transform_function"),
)
class RawQuery:
"""A single raw SQL query."""
def __init__(self, sql, using, params=()):
self.params = params
self.sql = sql
self.using = using
self.cursor = None
# Mirror some properties of a normal query so that
# the compiler can be used to process results.
self.low_mark, self.high_mark = 0, None # Used for offset/limit
self.extra_select = {}
self.annotation_select = {}
def chain(self, using):
return self.clone(using)
def clone(self, using):
return RawQuery(self.sql, using, params=self.params)
def get_columns(self):
if self.cursor is None:
self._execute_query()
converter = connections[self.using].introspection.identifier_converter
return [converter(column_meta[0]) for column_meta in self.cursor.description]
def __iter__(self):
# Always execute a new query for a new iterator.
# This could be optimized with a cache at the expense of RAM.
self._execute_query()
if not connections[self.using].features.can_use_chunked_reads:
# If the database can't use chunked reads we need to make sure we
# evaluate the entire query up front.
result = list(self.cursor)
else:
result = self.cursor
return iter(result)
def __repr__(self):
return "<%s: %s>" % (self.__class__.__name__, self)
@property
def params_type(self):
if self.params is None:
return None
return dict if isinstance(self.params, Mapping) else tuple
def __str__(self):
if self.params_type is None:
return self.sql
return self.sql % self.params_type(self.params)
def _execute_query(self):
connection = connections[self.using]
# Adapt parameters to the database, as much as possible considering
# that the target type isn't known. See #17755.
params_type = self.params_type
adapter = connection.ops.adapt_unknown_value
if params_type is tuple:
params = tuple(adapter(val) for val in self.params)
elif params_type is dict:
params = {key: adapter(val) for key, val in self.params.items()}
elif params_type is None:
params = None
else:
raise RuntimeError("Unexpected params type: %s" % params_type)
self.cursor = connection.cursor()
self.cursor.execute(self.sql, params)
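# Editor's illustrative sketch (not part of Django's source): __str__ above
# interpolates parameters with plain %-formatting, using a tuple or a
# mapping depending on params_type. Table and column names are made up.
def _demo_rawquery_str():
    sql_tuple = "SELECT * FROM book WHERE id = %s" % tuple([42])
    sql_dict = "SELECT * FROM book WHERE id = %(pk)s" % {"pk": 42}
    assert sql_tuple == sql_dict == "SELECT * FROM book WHERE id = 42"
    return sql_tuple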
ExplainInfo = namedtuple("ExplainInfo", ("format", "options"))
class Query(BaseExpression):
"""A single SQL query."""
alias_prefix = "T"
empty_result_set_value = None
subq_aliases = frozenset([alias_prefix])
compiler = "SQLCompiler"
base_table_class = BaseTable
join_class = Join
default_cols = True
default_ordering = True
standard_ordering = True
filter_is_sticky = False
subquery = False
# SQL-related attributes.
# Select and related select clauses are expressions to use in the SELECT
# clause of the query. The select is used for cases where we want to set up
# the select clause to contain other than default fields (values(),
# subqueries...). Note that annotations go to annotations dictionary.
select = ()
# The group_by attribute can have one of the following forms:
# - None: no group by at all in the query
# - A tuple of expressions: group by (at least) those expressions.
# String refs are also allowed for now.
# - True: group by all select fields of the model
# See compiler.get_group_by() for details.
group_by = None
order_by = ()
low_mark = 0 # Used for offset/limit.
high_mark = None # Used for offset/limit.
distinct = False
distinct_fields = ()
select_for_update = False
select_for_update_nowait = False
select_for_update_skip_locked = False
select_for_update_of = ()
select_for_no_key_update = False
select_related = False
has_select_fields = False
    # Arbitrary limit for select_related to prevent infinite recursion.
max_depth = 5
# Holds the selects defined by a call to values() or values_list()
# excluding annotation_select and extra_select.
values_select = ()
# SQL annotation-related attributes.
annotation_select_mask = None
_annotation_select_cache = None
# Set combination attributes.
combinator = None
combinator_all = False
combined_queries = ()
# These are for extensions. The contents are more or less appended verbatim
# to the appropriate clause.
extra_select_mask = None
_extra_select_cache = None
extra_tables = ()
extra_order_by = ()
# A tuple that is a set of model field names and either True, if these are
# the fields to defer, or False if these are the only fields to load.
deferred_loading = (frozenset(), True)
explain_info = None
def __init__(self, model, alias_cols=True):
self.model = model
self.alias_refcount = {}
# alias_map is the most important data structure regarding joins.
# It's used for recording which joins exist in the query and what
# types they are. The key is the alias of the joined table (possibly
# the table name) and the value is a Join-like object (see
# sql.datastructures.Join for more information).
self.alias_map = {}
# Whether to provide alias to columns during reference resolving.
self.alias_cols = alias_cols
# Sometimes the query contains references to aliases in outer queries (as
# a result of split_exclude). Correct alias quoting needs to know these
# aliases too.
# Map external tables to whether they are aliased.
self.external_aliases = {}
self.table_map = {} # Maps table names to list of aliases.
self.used_aliases = set()
self.where = WhereNode()
# Maps alias -> Annotation Expression.
self.annotations = {}
# These are for extensions. The contents are more or less appended
# verbatim to the appropriate clause.
self.extra = {} # Maps col_alias -> (col_sql, params).
self._filtered_relations = {}
@property
def output_field(self):
if len(self.select) == 1:
select = self.select[0]
return getattr(select, "target", None) or select.field
elif len(self.annotation_select) == 1:
return next(iter(self.annotation_select.values())).output_field
@cached_property
def base_table(self):
for alias in self.alias_map:
return alias
def __str__(self):
"""
Return the query as a string of SQL with the parameter values
substituted in (use sql_with_params() to see the unsubstituted string).
Parameter values won't necessarily be quoted correctly, since that is
done by the database interface at execution time.
"""
sql, params = self.sql_with_params()
return sql % params
def sql_with_params(self):
"""
Return the query as an SQL string and the parameters that will be
substituted into the query.
"""
return self.get_compiler(DEFAULT_DB_ALIAS).as_sql()
def __deepcopy__(self, memo):
"""Limit the amount of work when a Query is deepcopied."""
result = self.clone()
memo[id(self)] = result
return result
def get_compiler(self, using=None, connection=None, elide_empty=True):
if using is None and connection is None:
raise ValueError("Need either using or connection")
if using:
connection = connections[using]
return connection.ops.compiler(self.compiler)(
self, connection, using, elide_empty
)
def get_meta(self):
"""
Return the Options instance (the model._meta) from which to start
processing. Normally, this is self.model._meta, but it can be changed
by subclasses.
"""
if self.model:
return self.model._meta
def clone(self):
"""
Return a copy of the current Query. A lightweight alternative to
deepcopy().
"""
obj = Empty()
obj.__class__ = self.__class__
# Copy references to everything.
obj.__dict__ = self.__dict__.copy()
# Clone attributes that can't use shallow copy.
obj.alias_refcount = self.alias_refcount.copy()
obj.alias_map = self.alias_map.copy()
obj.external_aliases = self.external_aliases.copy()
obj.table_map = self.table_map.copy()
obj.where = self.where.clone()
obj.annotations = self.annotations.copy()
if self.annotation_select_mask is not None:
obj.annotation_select_mask = self.annotation_select_mask.copy()
if self.combined_queries:
obj.combined_queries = tuple(
[query.clone() for query in self.combined_queries]
)
# _annotation_select_cache cannot be copied, as doing so breaks the
# (necessary) state in which both annotations and
# _annotation_select_cache point to the same underlying objects.
# It will get re-populated in the cloned queryset the next time it's
# used.
obj._annotation_select_cache = None
obj.extra = self.extra.copy()
if self.extra_select_mask is not None:
obj.extra_select_mask = self.extra_select_mask.copy()
if self._extra_select_cache is not None:
obj._extra_select_cache = self._extra_select_cache.copy()
if self.select_related is not False:
# Use deepcopy because select_related stores fields in nested
# dicts.
obj.select_related = copy.deepcopy(obj.select_related)
if "subq_aliases" in self.__dict__:
obj.subq_aliases = self.subq_aliases.copy()
obj.used_aliases = self.used_aliases.copy()
obj._filtered_relations = self._filtered_relations.copy()
# Clear the cached_property, if it exists.
obj.__dict__.pop("base_table", None)
return obj
def chain(self, klass=None):
"""
Return a copy of the current Query that's ready for another operation.
The klass argument changes the type of the Query, e.g. UpdateQuery.
"""
obj = self.clone()
if klass and obj.__class__ != klass:
obj.__class__ = klass
if not obj.filter_is_sticky:
obj.used_aliases = set()
obj.filter_is_sticky = False
if hasattr(obj, "_setup_query"):
obj._setup_query()
return obj
def relabeled_clone(self, change_map):
clone = self.clone()
clone.change_aliases(change_map)
return clone
def _get_col(self, target, field, alias):
if not self.alias_cols:
alias = None
return target.get_col(alias, field)
def get_aggregation(self, using, aggregate_exprs):
"""
Return the dictionary with the values of the existing aggregations.
"""
if not aggregate_exprs:
return {}
# Store annotation mask prior to temporarily adding aggregations for
# resolving purpose to facilitate their subsequent removal.
refs_subquery = False
replacements = {}
annotation_select_mask = self.annotation_select_mask
for alias, aggregate_expr in aggregate_exprs.items():
self.check_alias(alias)
aggregate = aggregate_expr.resolve_expression(
self, allow_joins=True, reuse=None, summarize=True
)
if not aggregate.contains_aggregate:
raise TypeError("%s is not an aggregate expression" % alias)
# Temporarily add aggregate to annotations to allow remaining
# members of `aggregates` to resolve against each others.
self.append_annotation_mask([alias])
refs_subquery |= any(
getattr(self.annotations[ref], "subquery", False)
for ref in aggregate.get_refs()
)
aggregate = aggregate.replace_expressions(replacements)
self.annotations[alias] = aggregate
replacements[Ref(alias, aggregate)] = aggregate
# Stash resolved aggregates now that they have been allowed to resolve
# against each other.
aggregates = {alias: self.annotations.pop(alias) for alias in aggregate_exprs}
self.set_annotation_mask(annotation_select_mask)
# Existing usage of aggregation can be determined by the presence of
# selected aggregates but also by filters against aliased aggregates.
_, having, qualify = self.where.split_having_qualify()
has_existing_aggregation = (
any(
getattr(annotation, "contains_aggregate", True)
for annotation in self.annotations.values()
)
or having
)
# Decide if we need to use a subquery.
#
# Existing aggregations would cause incorrect results as
# get_aggregation() must produce just one result and thus must not use
# GROUP BY.
#
# If the query has limit or distinct, or uses set operations, then
# those operations must be done in a subquery so that the query
# aggregates on the limit and/or distinct results instead of applying
# the distinct and limit after the aggregation.
if (
isinstance(self.group_by, tuple)
or self.is_sliced
or has_existing_aggregation
or refs_subquery
or qualify
or self.distinct
or self.combinator
):
from django.db.models.sql.subqueries import AggregateQuery
inner_query = self.clone()
inner_query.subquery = True
outer_query = AggregateQuery(self.model, inner_query)
inner_query.select_for_update = False
inner_query.select_related = False
inner_query.set_annotation_mask(self.annotation_select)
# Queries with distinct_fields need ordering and when a limit is
# applied we must take the slice from the ordered query. Otherwise
# no need for ordering.
inner_query.clear_ordering(force=False)
if not inner_query.distinct:
# If the inner query uses default select and it has some
# aggregate annotations, then we must make sure the inner
# query is grouped by the main model's primary key. However,
# clearing the select clause can alter results if distinct is
# used.
if inner_query.default_cols and has_existing_aggregation:
inner_query.group_by = (
self.model._meta.pk.get_col(inner_query.get_initial_alias()),
)
inner_query.default_cols = False
if not qualify:
# Mask existing annotations that are not referenced by
# aggregates to be pushed to the outer query unless
# filtering against window functions is involved as it
                    # requires complex realiasing.
annotation_mask = set()
if isinstance(self.group_by, tuple):
for expr in self.group_by:
annotation_mask |= expr.get_refs()
for aggregate in aggregates.values():
annotation_mask |= aggregate.get_refs()
inner_query.set_annotation_mask(annotation_mask)
# Add aggregates to the outer AggregateQuery. This requires making
# sure all columns referenced by the aggregates are selected in the
# inner query. It is achieved by retrieving all column references
# by the aggregates, explicitly selecting them in the inner query,
# and making sure the aggregates are repointed to them.
col_refs = {}
for alias, aggregate in aggregates.items():
replacements = {}
for col in self._gen_cols([aggregate], resolve_refs=False):
if not (col_ref := col_refs.get(col)):
index = len(col_refs) + 1
col_alias = f"__col{index}"
col_ref = Ref(col_alias, col)
col_refs[col] = col_ref
inner_query.annotations[col_alias] = col
inner_query.append_annotation_mask([col_alias])
replacements[col] = col_ref
outer_query.annotations[alias] = aggregate.replace_expressions(
replacements
)
if (
inner_query.select == ()
and not inner_query.default_cols
and not inner_query.annotation_select_mask
):
# In case of Model.objects[0:3].count(), there would be no
# field selected in the inner query, yet we must use a subquery.
# So, make sure at least one field is selected.
inner_query.select = (
self.model._meta.pk.get_col(inner_query.get_initial_alias()),
)
else:
outer_query = self
self.select = ()
self.default_cols = False
self.extra = {}
if self.annotations:
# Inline reference to existing annotations and mask them as
# they are unnecessary given only the summarized aggregations
# are requested.
replacements = {
Ref(alias, annotation): annotation
for alias, annotation in self.annotations.items()
}
self.annotations = {
alias: aggregate.replace_expressions(replacements)
for alias, aggregate in aggregates.items()
}
else:
self.annotations = aggregates
self.set_annotation_mask(aggregates)
empty_set_result = [
expression.empty_result_set_value
for expression in outer_query.annotation_select.values()
]
elide_empty = not any(result is NotImplemented for result in empty_set_result)
outer_query.clear_ordering(force=True)
outer_query.clear_limits()
outer_query.select_for_update = False
outer_query.select_related = False
compiler = outer_query.get_compiler(using, elide_empty=elide_empty)
result = compiler.execute_sql(SINGLE)
if result is None:
result = empty_set_result
else:
converters = compiler.get_converters(outer_query.annotation_select.values())
result = next(compiler.apply_converters((result,), converters))
return dict(zip(outer_query.annotation_select, result))
def get_count(self, using):
"""
Perform a COUNT() query using the current filter constraints.
"""
obj = self.clone()
return obj.get_aggregation(using, {"__count": Count("*")})["__count"]
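    # Editor's note (illustrative, not part of Django's source): this is the
    # machinery behind QuerySet.count(). In a configured project with a
    # hypothetical model Book, the following are equivalent:
    #
    #     >>> Book.objects.count()
    #     >>> Book.objects.all().query.get_count(using="default")
    #
    # Both resolve to get_aggregation(using, {"__count": Count("*")}).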
def has_filters(self):
return self.where
def exists(self, limit=True):
q = self.clone()
if not (q.distinct and q.is_sliced):
if q.group_by is True:
q.add_fields(
(f.attname for f in self.model._meta.concrete_fields), False
)
# Disable GROUP BY aliases to avoid orphaning references to the
# SELECT clause which is about to be cleared.
q.set_group_by(allow_aliases=False)
q.clear_select_clause()
if q.combined_queries and q.combinator == "union":
q.combined_queries = tuple(
combined_query.exists(limit=False)
for combined_query in q.combined_queries
)
q.clear_ordering(force=True)
if limit:
q.set_limits(high=1)
q.add_annotation(Value(1), "a")
return q
def has_results(self, using):
        q = self.exists()
compiler = q.get_compiler(using=using)
return compiler.has_results()
def explain(self, using, format=None, **options):
q = self.clone()
for option_name in options:
if (
not EXPLAIN_OPTIONS_PATTERN.fullmatch(option_name)
or "--" in option_name
):
raise ValueError(f"Invalid option name: {option_name!r}.")
q.explain_info = ExplainInfo(format, options)
compiler = q.get_compiler(using=using)
return "\n".join(compiler.explain_query())
def combine(self, rhs, connector):
"""
Merge the 'rhs' query into the current one (with any 'rhs' effects
being applied *after* (that is, "to the right of") anything in the
current query. 'rhs' is not modified during a call to this function.
The 'connector' parameter describes how to connect filters from the
'rhs' query.
"""
if self.model != rhs.model:
raise TypeError("Cannot combine queries on two different base models.")
if self.is_sliced:
raise TypeError("Cannot combine queries once a slice has been taken.")
if self.distinct != rhs.distinct:
raise TypeError("Cannot combine a unique query with a non-unique query.")
if self.distinct_fields != rhs.distinct_fields:
raise TypeError("Cannot combine queries with different distinct fields.")
        # If lhs and rhs share the same alias prefix, it is possible to have
# conflicting alias changes like T4 -> T5, T5 -> T6, which might end up
# as T4 -> T6 while combining two querysets. To prevent this, change an
# alias prefix of the rhs and update current aliases accordingly,
# except if the alias is the base table since it must be present in the
# query on both sides.
initial_alias = self.get_initial_alias()
rhs.bump_prefix(self, exclude={initial_alias})
# Work out how to relabel the rhs aliases, if necessary.
change_map = {}
conjunction = connector == AND
# Determine which existing joins can be reused. When combining the
# query with AND we must recreate all joins for m2m filters. When
# combining with OR we can reuse joins. The reason is that in AND
# case a single row can't fulfill a condition like:
# revrel__col=1 & revrel__col=2
# But, there might be two different related rows matching this
# condition. In OR case a single True is enough, so single row is
# enough, too.
#
# Note that we will be creating duplicate joins for non-m2m joins in
# the AND case. The results will be correct but this creates too many
# joins. This is something that could be fixed later on.
reuse = set() if conjunction else set(self.alias_map)
joinpromoter = JoinPromoter(connector, 2, False)
joinpromoter.add_votes(
j for j in self.alias_map if self.alias_map[j].join_type == INNER
)
rhs_votes = set()
# Now, add the joins from rhs query into the new query (skipping base
# table).
rhs_tables = list(rhs.alias_map)[1:]
for alias in rhs_tables:
join = rhs.alias_map[alias]
# If the left side of the join was already relabeled, use the
# updated alias.
join = join.relabeled_clone(change_map)
new_alias = self.join(join, reuse=reuse)
if join.join_type == INNER:
rhs_votes.add(new_alias)
# We can't reuse the same join again in the query. If we have two
# distinct joins for the same connection in rhs query, then the
# combined query must have two joins, too.
reuse.discard(new_alias)
if alias != new_alias:
change_map[alias] = new_alias
if not rhs.alias_refcount[alias]:
# The alias was unused in the rhs query. Unref it so that it
# will be unused in the new query, too. We have to add and
# unref the alias so that join promotion has information of
# the join type for the unused alias.
self.unref_alias(new_alias)
joinpromoter.add_votes(rhs_votes)
joinpromoter.update_join_types(self)
        # Combine subquery aliases to ensure alias relabelling properly
        # handles subqueries when combining where and select clauses.
self.subq_aliases |= rhs.subq_aliases
# Now relabel a copy of the rhs where-clause and add it to the current
# one.
w = rhs.where.clone()
w.relabel_aliases(change_map)
self.where.add(w, connector)
# Selection columns and extra extensions are those provided by 'rhs'.
if rhs.select:
self.set_select([col.relabeled_clone(change_map) for col in rhs.select])
else:
self.select = ()
if connector == OR:
# It would be nice to be able to handle this, but the queries don't
# really make sense (or return consistent value sets). Not worth
# the extra complexity when you can write a real query instead.
if self.extra and rhs.extra:
raise ValueError(
"When merging querysets using 'or', you cannot have "
"extra(select=...) on both sides."
)
self.extra.update(rhs.extra)
extra_select_mask = set()
if self.extra_select_mask is not None:
extra_select_mask.update(self.extra_select_mask)
if rhs.extra_select_mask is not None:
extra_select_mask.update(rhs.extra_select_mask)
if extra_select_mask:
self.set_extra_mask(extra_select_mask)
self.extra_tables += rhs.extra_tables
# Ordering uses the 'rhs' ordering, unless it has none, in which case
# the current ordering is used.
self.order_by = rhs.order_by or self.order_by
self.extra_order_by = rhs.extra_order_by or self.extra_order_by
def _get_defer_select_mask(self, opts, mask, select_mask=None):
if select_mask is None:
select_mask = {}
select_mask[opts.pk] = {}
# All concrete fields that are not part of the defer mask must be
# loaded. If a relational field is encountered it gets added to the
        # mask so it can be considered by `select_related`, and the cycle
        # continues by recursively calling this function.
for field in opts.concrete_fields:
field_mask = mask.pop(field.name, None)
field_att_mask = mask.pop(field.attname, None)
if field_mask is None and field_att_mask is None:
select_mask.setdefault(field, {})
elif field_mask:
if not field.is_relation:
raise FieldError(next(iter(field_mask)))
field_select_mask = select_mask.setdefault(field, {})
related_model = field.remote_field.model._meta.concrete_model
self._get_defer_select_mask(
related_model._meta, field_mask, field_select_mask
)
# Remaining defer entries must be references to reverse relationships.
# The following code is expected to raise FieldError if it encounters
# a malformed defer entry.
for field_name, field_mask in mask.items():
if filtered_relation := self._filtered_relations.get(field_name):
relation = opts.get_field(filtered_relation.relation_name)
field_select_mask = select_mask.setdefault((field_name, relation), {})
field = relation.field
else:
reverse_rel = opts.get_field(field_name)
# While virtual fields such as many-to-many and generic foreign
# keys cannot be effectively deferred we've historically
# allowed them to be passed to QuerySet.defer(). Ignore such
# field references until a layer of validation at mask
# alteration time will be implemented eventually.
if not hasattr(reverse_rel, "field"):
continue
field = reverse_rel.field
field_select_mask = select_mask.setdefault(field, {})
related_model = field.model._meta.concrete_model
self._get_defer_select_mask(
related_model._meta, field_mask, field_select_mask
)
return select_mask
def _get_only_select_mask(self, opts, mask, select_mask=None):
if select_mask is None:
select_mask = {}
select_mask[opts.pk] = {}
# Only include fields mentioned in the mask.
for field_name, field_mask in mask.items():
field = opts.get_field(field_name)
field_select_mask = select_mask.setdefault(field, {})
if field_mask:
if not field.is_relation:
raise FieldError(next(iter(field_mask)))
related_model = field.remote_field.model._meta.concrete_model
self._get_only_select_mask(
related_model._meta, field_mask, field_select_mask
)
return select_mask
def get_select_mask(self):
"""
Convert the self.deferred_loading data structure to an alternate data
        structure, describing the fields that *will* be loaded. This is used to
compute the columns to select from the database and also by the
QuerySet class to work out which fields are being initialized on each
model. Models that have all their fields included aren't mentioned in
the result, only those that have field restrictions in place.
"""
field_names, defer = self.deferred_loading
if not field_names:
return {}
mask = {}
for field_name in field_names:
part_mask = mask
for part in field_name.split(LOOKUP_SEP):
part_mask = part_mask.setdefault(part, {})
opts = self.get_meta()
if defer:
return self._get_defer_select_mask(opts, mask)
return self._get_only_select_mask(opts, mask)
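    # Editor's note (illustrative, not part of Django's source): the nested
    # mask built by get_select_mask() splits names on LOOKUP_SEP ("__"):
    #
    #     >>> mask = {}
    #     >>> for field_name in ["author__name", "title"]:
    #     ...     part_mask = mask
    #     ...     for part in field_name.split("__"):
    #     ...         part_mask = part_mask.setdefault(part, {})
    #     >>> mask == {"author": {"name": {}}, "title": {}}
    #     True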
def table_alias(self, table_name, create=False, filtered_relation=None):
"""
Return a table alias for the given table_name and whether this is a
new alias or not.
If 'create' is true, a new alias is always created. Otherwise, the
most recently created alias for the table (if one exists) is reused.
"""
alias_list = self.table_map.get(table_name)
if not create and alias_list:
alias = alias_list[0]
self.alias_refcount[alias] += 1
return alias, False
# Create a new alias for this table.
if alias_list:
alias = "%s%d" % (self.alias_prefix, len(self.alias_map) + 1)
alias_list.append(alias)
else:
# The first occurrence of a table uses the table name directly.
alias = (
filtered_relation.alias if filtered_relation is not None else table_name
)
self.table_map[table_name] = [alias]
self.alias_refcount[alias] = 1
return alias, True
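    # Editor's note (illustrative, not part of Django's source): with the
    # default alias_prefix "T", the first use of a table keeps the table
    # name itself and later joins get numbered aliases:
    #
    #     >>> alias_map = {"book": ...}           # first FROM entry
    #     >>> "%s%d" % ("T", len(alias_map) + 1)  # alias for the next join
    #     'T2'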
def ref_alias(self, alias):
"""Increases the reference count for this alias."""
self.alias_refcount[alias] += 1
def unref_alias(self, alias, amount=1):
"""Decreases the reference count for this alias."""
self.alias_refcount[alias] -= amount
def promote_joins(self, aliases):
"""
        Recursively promote the join type of the given aliases and their
        children to an outer join. A join is only promoted if it is nullable
        or its parent join is an outer join.
The children promotion is done to avoid join chains that contain a LOUTER
b INNER c. So, if we have currently a INNER b INNER c and a->b is promoted,
then we must also promote b->c automatically, or otherwise the promotion
of a->b doesn't actually change anything in the query results.
"""
aliases = list(aliases)
while aliases:
alias = aliases.pop(0)
if self.alias_map[alias].join_type is None:
# This is the base table (first FROM entry) - this table
# isn't really joined at all in the query, so we should not
# alter its join type.
continue
# Only the first alias (skipped above) should have None join_type
assert self.alias_map[alias].join_type is not None
parent_alias = self.alias_map[alias].parent_alias
parent_louter = (
parent_alias and self.alias_map[parent_alias].join_type == LOUTER
)
already_louter = self.alias_map[alias].join_type == LOUTER
if (self.alias_map[alias].nullable or parent_louter) and not already_louter:
self.alias_map[alias] = self.alias_map[alias].promote()
# Join type of 'alias' changed, so re-examine all aliases that
# refer to this one.
aliases.extend(
join
for join in self.alias_map
if self.alias_map[join].parent_alias == alias
and join not in aliases
)
def demote_joins(self, aliases):
"""
Change join type from LOUTER to INNER for all joins in aliases.
Similarly to promote_joins(), this method must ensure no join chains
containing first an outer, then an inner join are generated. If we
are demoting b->c join in chain a LOUTER b LOUTER c then we must
demote a->b automatically, or otherwise the demotion of b->c doesn't
        actually change anything in the query results.
"""
aliases = list(aliases)
while aliases:
alias = aliases.pop(0)
if self.alias_map[alias].join_type == LOUTER:
self.alias_map[alias] = self.alias_map[alias].demote()
parent_alias = self.alias_map[alias].parent_alias
if self.alias_map[parent_alias].join_type == INNER:
aliases.append(parent_alias)
def reset_refcounts(self, to_counts):
"""
Reset reference counts for aliases so that they match the value passed
in `to_counts`.
"""
for alias, cur_refcount in self.alias_refcount.copy().items():
unref_amount = cur_refcount - to_counts.get(alias, 0)
self.unref_alias(alias, unref_amount)
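    # For example (illustrative aliases), with alias_refcount ==
    # {"T1": 3, "T2": 1}, reset_refcounts({"T1": 1}) unrefs "T1" twice and
    # "T2" once, leaving {"T1": 1, "T2": 0}.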
def change_aliases(self, change_map):
"""
Change the aliases in change_map (which maps old-alias -> new-alias),
relabelling any references to them in select columns and the where
clause.
"""
# If keys and values of change_map were to intersect, an alias might be
# updated twice (e.g. T4 -> T5, T5 -> T6, so also T4 -> T6) depending
# on their order in change_map.
assert set(change_map).isdisjoint(change_map.values())
# 1. Update references in "select" (normal columns plus aliases),
# "group by" and "where".
self.where.relabel_aliases(change_map)
if isinstance(self.group_by, tuple):
self.group_by = tuple(
[col.relabeled_clone(change_map) for col in self.group_by]
)
self.select = tuple([col.relabeled_clone(change_map) for col in self.select])
self.annotations = self.annotations and {
key: col.relabeled_clone(change_map)
for key, col in self.annotations.items()
}
# 2. Rename the alias in the internal table/alias datastructures.
for old_alias, new_alias in change_map.items():
if old_alias not in self.alias_map:
continue
alias_data = self.alias_map[old_alias].relabeled_clone(change_map)
self.alias_map[new_alias] = alias_data
self.alias_refcount[new_alias] = self.alias_refcount[old_alias]
del self.alias_refcount[old_alias]
del self.alias_map[old_alias]
table_aliases = self.table_map[alias_data.table_name]
for pos, alias in enumerate(table_aliases):
if alias == old_alias:
table_aliases[pos] = new_alias
break
self.external_aliases = {
# Table is aliased or it's being changed and thus is aliased.
change_map.get(alias, alias): (aliased or alias in change_map)
for alias, aliased in self.external_aliases.items()
}
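    # Illustrative: change_aliases({"T4": "U4", "T5": "U5"}) relabels column
    # references in the select, where, group by, and annotation structures,
    # then renames the alias_map/alias_refcount entries. A "shifting" map
    # such as {"T4": "T5", "T5": "T6"} is rejected by the assert above,
    # since an alias could otherwise be renamed twice.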
def bump_prefix(self, other_query, exclude=None):
"""
Change the alias prefix to the next letter in the alphabet in a way
that the other query's aliases and this query's aliases will not
conflict. Even tables that previously had no alias will get an alias
after this call. To prevent changing aliases use the exclude parameter.
"""
def prefix_gen():
"""
Generate a sequence of characters in alphabetical order:
-> 'A', 'B', 'C', ...
When the alphabet is finished, the sequence will continue with the
Cartesian product:
-> 'AA', 'AB', 'AC', ...
"""
alphabet = ascii_uppercase
prefix = chr(ord(self.alias_prefix) + 1)
yield prefix
for n in count(1):
seq = alphabet[alphabet.index(prefix) :] if prefix else alphabet
for s in product(seq, repeat=n):
yield "".join(s)
prefix = None
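        # Illustrative: starting from alias_prefix "T", prefix_gen() yields
        # "U" first, then the single letters "U".."Z", and once the alphabet
        # is exhausted, the two-letter products "AA", "AB", ... and so on.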
if self.alias_prefix != other_query.alias_prefix:
# No clashes between self and outer query should be possible.
return
# Explicitly avoid infinite loop. The constant divider is based on how
# much depth recursive subquery references add to the stack. This value
# might need to be adjusted when adding or removing function calls from
# the code path in charge of performing these operations.
local_recursion_limit = sys.getrecursionlimit() // 16
for pos, prefix in enumerate(prefix_gen()):
if prefix not in self.subq_aliases:
self.alias_prefix = prefix
break
if pos > local_recursion_limit:
raise RecursionError(
"Maximum recursion depth exceeded: too many subqueries."
)
self.subq_aliases = self.subq_aliases.union([self.alias_prefix])
other_query.subq_aliases = other_query.subq_aliases.union(self.subq_aliases)
if exclude is None:
exclude = {}
self.change_aliases(
{
alias: "%s%d" % (self.alias_prefix, pos)
for pos, alias in enumerate(self.alias_map)
if alias not in exclude
}
)
def get_initial_alias(self):
"""
Return the first alias for this query, after increasing its reference
count.
"""
if self.alias_map:
alias = self.base_table
self.ref_alias(alias)
elif self.model:
alias = self.join(self.base_table_class(self.get_meta().db_table, None))
else:
alias = None
return alias
def count_active_tables(self):
"""
Return the number of tables in this query with a non-zero reference
count. After execution, the reference counts are zeroed, so tables
        added by the compiler will not be seen by this method.
"""
return len([1 for count in self.alias_refcount.values() if count])
def join(self, join, reuse=None):
"""
Return an alias for the 'join', either reusing an existing alias for
that join or creating a new one. 'join' is either a base_table_class or
join_class.
The 'reuse' parameter can be either None which means all joins are
reusable, or it can be a set containing the aliases that can be reused.
A join is always created as LOUTER if the lhs alias is LOUTER to make
sure chains like t1 LOUTER t2 INNER t3 aren't generated. All new
joins are created as LOUTER if the join is nullable.
"""
reuse_aliases = [
a
for a, j in self.alias_map.items()
if (reuse is None or a in reuse) and j == join
]
if reuse_aliases:
if join.table_alias in reuse_aliases:
reuse_alias = join.table_alias
else:
# Reuse the most recent alias of the joined table
# (a many-to-many relation may be joined multiple times).
reuse_alias = reuse_aliases[-1]
self.ref_alias(reuse_alias)
return reuse_alias
# No reuse is possible, so we need a new alias.
alias, _ = self.table_alias(
join.table_name, create=True, filtered_relation=join.filtered_relation
)
if join.join_type:
if self.alias_map[join.parent_alias].join_type == LOUTER or join.nullable:
join_type = LOUTER
else:
join_type = INNER
join.join_type = join_type
join.table_alias = alias
self.alias_map[alias] = join
if filtered_relation := join.filtered_relation:
resolve_reuse = reuse
if resolve_reuse is not None:
resolve_reuse = set(reuse) | {alias}
joins_len = len(self.alias_map)
join.filtered_relation = filtered_relation.resolve_expression(
self, reuse=resolve_reuse
)
            # Some joins may have been created during expression resolving;
            # they must come before the one we just added.
if joins_len < len(self.alias_map):
self.alias_map[alias] = self.alias_map.pop(alias)
return alias
def join_parent_model(self, opts, model, alias, seen):
"""
Make sure the given 'model' is joined in the query. If 'model' isn't
a parent of 'opts' or if it is None this method is a no-op.
The 'alias' is the root alias for starting the join, 'seen' is a dict
of model -> alias of existing joins. It must also contain a mapping
of None -> some alias. This will be returned in the no-op case.
"""
if model in seen:
return seen[model]
chain = opts.get_base_chain(model)
if not chain:
return alias
curr_opts = opts
for int_model in chain:
if int_model in seen:
curr_opts = int_model._meta
alias = seen[int_model]
continue
            # Proxy models have elements in the base chain with no parents;
            # assign the new options object and skip to the next base in
            # that case.
if not curr_opts.parents[int_model]:
curr_opts = int_model._meta
continue
link_field = curr_opts.get_ancestor_link(int_model)
join_info = self.setup_joins([link_field.name], curr_opts, alias)
curr_opts = int_model._meta
alias = seen[int_model] = join_info.joins[-1]
return alias or seen[None]
def check_alias(self, alias):
if FORBIDDEN_ALIAS_PATTERN.search(alias):
raise ValueError(
"Column aliases cannot contain whitespace characters, quotation marks, "
"semicolons, or SQL comments."
)
def add_annotation(self, annotation, alias, select=True):
"""Add a single annotation expression to the Query."""
self.check_alias(alias)
annotation = annotation.resolve_expression(self, allow_joins=True, reuse=None)
if select:
self.append_annotation_mask([alias])
else:
annotation_mask = (
value
for value in dict.fromkeys(self.annotation_select)
if value != alias
)
self.set_annotation_mask(annotation_mask)
self.annotations[alias] = annotation
def resolve_expression(self, query, *args, **kwargs):
clone = self.clone()
# Subqueries need to use a different set of aliases than the outer query.
clone.bump_prefix(query)
clone.subquery = True
clone.where.resolve_expression(query, *args, **kwargs)
# Resolve combined queries.
if clone.combinator:
clone.combined_queries = tuple(
[
combined_query.resolve_expression(query, *args, **kwargs)
for combined_query in clone.combined_queries
]
)
for key, value in clone.annotations.items():
resolved = value.resolve_expression(query, *args, **kwargs)
if hasattr(resolved, "external_aliases"):
resolved.external_aliases.update(clone.external_aliases)
clone.annotations[key] = resolved
# Outer query's aliases are considered external.
for alias, table in query.alias_map.items():
clone.external_aliases[alias] = (
isinstance(table, Join)
and table.join_field.related_model._meta.db_table != alias
) or (
isinstance(table, BaseTable) and table.table_name != table.table_alias
)
return clone
def get_external_cols(self):
exprs = chain(self.annotations.values(), self.where.children)
return [
col
for col in self._gen_cols(exprs, include_external=True)
if col.alias in self.external_aliases
]
def get_group_by_cols(self, wrapper=None):
        # If the wrapper is referenced by an alias for an explicit GROUP BY
        # through values(), a reference to that expression -- and not self --
        # must be returned to ensure that external column references are not
        # grouped against as well.
external_cols = self.get_external_cols()
if any(col.possibly_multivalued for col in external_cols):
return [wrapper or self]
return external_cols
def as_sql(self, compiler, connection):
# Some backends (e.g. Oracle) raise an error when a subquery contains
# unnecessary ORDER BY clause.
if (
self.subquery
and not connection.features.ignores_unnecessary_order_by_in_subqueries
):
self.clear_ordering(force=False)
for query in self.combined_queries:
query.clear_ordering(force=False)
sql, params = self.get_compiler(connection=connection).as_sql()
if self.subquery:
sql = "(%s)" % sql
return sql, params
def resolve_lookup_value(self, value, can_reuse, allow_joins):
if hasattr(value, "resolve_expression"):
value = value.resolve_expression(
self,
reuse=can_reuse,
allow_joins=allow_joins,
)
elif isinstance(value, (list, tuple)):
# The items of the iterable may be expressions and therefore need
# to be resolved independently.
values = (
self.resolve_lookup_value(sub_value, can_reuse, allow_joins)
for sub_value in value
)
type_ = type(value)
if hasattr(type_, "_make"): # namedtuple
return type_(*values)
return type_(values)
return value
def solve_lookup_type(self, lookup, summarize=False):
"""
Solve the lookup type from the lookup (e.g.: 'foobar__id__icontains').
"""
lookup_splitted = lookup.split(LOOKUP_SEP)
if self.annotations:
annotation, expression_lookups = refs_expression(
lookup_splitted, self.annotations
)
if annotation:
expression = self.annotations[annotation]
if summarize:
expression = Ref(annotation, expression)
return expression_lookups, (), expression
_, field, _, lookup_parts = self.names_to_path(lookup_splitted, self.get_meta())
field_parts = lookup_splitted[0 : len(lookup_splitted) - len(lookup_parts)]
if len(lookup_parts) > 1 and not field_parts:
raise FieldError(
'Invalid lookup "%s" for model %s".'
% (lookup, self.get_meta().model.__name__)
)
return lookup_parts, field_parts, False
def check_query_object_type(self, value, opts, field):
"""
Check whether the object passed while querying is of the correct type.
If not, raise a ValueError specifying the wrong object.
"""
if hasattr(value, "_meta"):
if not check_rel_lookup_compatibility(value._meta.model, opts, field):
raise ValueError(
'Cannot query "%s": Must be "%s" instance.'
% (value, opts.object_name)
)
def check_related_objects(self, field, value, opts):
"""Check the type of object passed to query relations."""
if field.is_relation:
# Check that the field and the queryset use the same model in a
# query like .filter(author=Author.objects.all()). For example, the
# opts would be Author's (from the author field) and value.model
# would be Author.objects.all() queryset's .model (Author also).
# The field is the related field on the lhs side.
if (
isinstance(value, Query)
and not value.has_select_fields
and not check_rel_lookup_compatibility(value.model, opts, field)
):
raise ValueError(
'Cannot use QuerySet for "%s": Use a QuerySet for "%s".'
% (value.model._meta.object_name, opts.object_name)
)
elif hasattr(value, "_meta"):
self.check_query_object_type(value, opts, field)
elif hasattr(value, "__iter__"):
for v in value:
self.check_query_object_type(v, opts, field)
def check_filterable(self, expression):
"""Raise an error if expression cannot be used in a WHERE clause."""
if hasattr(expression, "resolve_expression") and not getattr(
expression, "filterable", True
):
raise NotSupportedError(
expression.__class__.__name__ + " is disallowed in the filter "
"clause."
)
if hasattr(expression, "get_source_expressions"):
for expr in expression.get_source_expressions():
self.check_filterable(expr)
def build_lookup(self, lookups, lhs, rhs):
"""
Try to extract transforms and lookup from given lhs.
The lhs value is something that works like SQLExpression.
The rhs value is what the lookup is going to compare against.
        'lookups' is a list of names to extract using get_lookup()
and get_transform().
"""
# __exact is the default lookup if one isn't given.
*transforms, lookup_name = lookups or ["exact"]
for name in transforms:
lhs = self.try_transform(lhs, name)
# First try get_lookup() so that the lookup takes precedence if the lhs
# supports both transform and lookup for the name.
lookup_class = lhs.get_lookup(lookup_name)
if not lookup_class:
# A lookup wasn't found. Try to interpret the name as a transform
# and do an Exact lookup against it.
lhs = self.try_transform(lhs, lookup_name)
lookup_name = "exact"
lookup_class = lhs.get_lookup(lookup_name)
if not lookup_class:
return
lookup = lookup_class(lhs, rhs)
# Interpret '__exact=None' as the sql 'is NULL'; otherwise, reject all
# uses of None as a query value unless the lookup supports it.
if lookup.rhs is None and not lookup.can_use_none_as_rhs:
if lookup_name not in ("exact", "iexact"):
raise ValueError("Cannot use None as a query value")
return lhs.get_lookup("isnull")(lhs, True)
# For Oracle '' is equivalent to null. The check must be done at this
# stage because join promotion can't be done in the compiler. Using
# DEFAULT_DB_ALIAS isn't nice but it's the best that can be done here.
# A similar thing is done in is_nullable(), too.
if (
lookup_name == "exact"
and lookup.rhs == ""
and connections[DEFAULT_DB_ALIAS].features.interprets_empty_strings_as_nulls
):
return lhs.get_lookup("isnull")(lhs, True)
return lookup
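    # Illustrative (assuming a CharField column "col"; the "lower" line
    # additionally assumes the Lower transform has been registered on the
    # field class):
    #   build_lookup(["exact"], col, "bob")      -> an Exact lookup
    #   build_lookup(["lower", "exact"], col, v) -> Exact over a Lower transform
    #   build_lookup(["exact"], col, None)       -> an IsNull(col, True) lookup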
def try_transform(self, lhs, name):
"""
Helper method for build_lookup(). Try to fetch and initialize
a transform for name parameter from lhs.
"""
transform_class = lhs.get_transform(name)
if transform_class:
return transform_class(lhs)
else:
output_field = lhs.output_field.__class__
suggested_lookups = difflib.get_close_matches(
name, lhs.output_field.get_lookups()
)
if suggested_lookups:
suggestion = ", perhaps you meant %s?" % " or ".join(suggested_lookups)
else:
suggestion = "."
raise FieldError(
"Unsupported lookup '%s' for %s or join on the field not "
"permitted%s" % (name, output_field.__name__, suggestion)
)
def build_filter(
self,
filter_expr,
branch_negated=False,
current_negated=False,
can_reuse=None,
allow_joins=True,
split_subq=True,
check_filterable=True,
summarize=False,
update_join_types=True,
):
"""
Build a WhereNode for a single filter clause but don't add it
        to this Query. Query.add_q() will then add this filter to the where
        node.
The 'branch_negated' tells us if the current branch contains any
negations. This will be used to determine if subqueries are needed.
The 'current_negated' is used to determine if the current filter is
negated or not and this will be used to determine if IS NULL filtering
is needed.
The difference between current_negated and branch_negated is that
branch_negated is set on first negation, but current_negated is
flipped for each negation.
        Note that add_filter will not do any negating itself; that is done
        higher up in the code, by add_q().
The 'can_reuse' is a set of reusable joins for multijoins.
The method will create a filter clause that can be added to the current
query. However, if the filter isn't added to the query then the caller
is responsible for unreffing the joins used.
"""
if isinstance(filter_expr, dict):
raise FieldError("Cannot parse keyword query as dict")
if isinstance(filter_expr, Q):
return self._add_q(
filter_expr,
branch_negated=branch_negated,
current_negated=current_negated,
used_aliases=can_reuse,
allow_joins=allow_joins,
split_subq=split_subq,
check_filterable=check_filterable,
summarize=summarize,
update_join_types=update_join_types,
)
if hasattr(filter_expr, "resolve_expression"):
if not getattr(filter_expr, "conditional", False):
raise TypeError("Cannot filter against a non-conditional expression.")
condition = filter_expr.resolve_expression(
self, allow_joins=allow_joins, reuse=can_reuse, summarize=summarize
)
if not isinstance(condition, Lookup):
condition = self.build_lookup(["exact"], condition, True)
return WhereNode([condition], connector=AND), []
arg, value = filter_expr
if not arg:
raise FieldError("Cannot parse keyword query %r" % arg)
lookups, parts, reffed_expression = self.solve_lookup_type(arg, summarize)
if check_filterable:
self.check_filterable(reffed_expression)
if not allow_joins and len(parts) > 1:
raise FieldError("Joined field references are not permitted in this query")
pre_joins = self.alias_refcount.copy()
value = self.resolve_lookup_value(value, can_reuse, allow_joins)
used_joins = {
k for k, v in self.alias_refcount.items() if v > pre_joins.get(k, 0)
}
if check_filterable:
self.check_filterable(value)
if reffed_expression:
condition = self.build_lookup(lookups, reffed_expression, value)
return WhereNode([condition], connector=AND), []
opts = self.get_meta()
alias = self.get_initial_alias()
allow_many = not branch_negated or not split_subq
try:
join_info = self.setup_joins(
parts,
opts,
alias,
can_reuse=can_reuse,
allow_many=allow_many,
)
# Prevent iterator from being consumed by check_related_objects()
if isinstance(value, Iterator):
value = list(value)
self.check_related_objects(join_info.final_field, value, join_info.opts)
# split_exclude() needs to know which joins were generated for the
# lookup parts
self._lookup_joins = join_info.joins
except MultiJoin as e:
return self.split_exclude(filter_expr, can_reuse, e.names_with_path)
# Update used_joins before trimming since they are reused to determine
        # which joins could later be demoted to INNER.
used_joins.update(join_info.joins)
targets, alias, join_list = self.trim_joins(
join_info.targets, join_info.joins, join_info.path
)
if can_reuse is not None:
can_reuse.update(join_list)
if join_info.final_field.is_relation:
if len(targets) == 1:
col = self._get_col(targets[0], join_info.final_field, alias)
else:
col = MultiColSource(
alias, targets, join_info.targets, join_info.final_field
)
else:
col = self._get_col(targets[0], join_info.final_field, alias)
condition = self.build_lookup(lookups, col, value)
lookup_type = condition.lookup_name
clause = WhereNode([condition], connector=AND)
require_outer = (
lookup_type == "isnull" and condition.rhs is True and not current_negated
)
if (
current_negated
and (lookup_type != "isnull" or condition.rhs is False)
and condition.rhs is not None
):
require_outer = True
if lookup_type != "isnull":
# The condition added here will be SQL like this:
# NOT (col IS NOT NULL), where the first NOT is added in
# upper layers of code. The reason for addition is that if col
# is null, then col != someval will result in SQL "unknown"
# which isn't the same as in Python. The Python None handling
# is wanted, and it can be gotten by
# (col IS NULL OR col != someval)
# <=>
# NOT (col IS NOT NULL AND col = someval).
if (
self.is_nullable(targets[0])
or self.alias_map[join_list[-1]].join_type == LOUTER
):
lookup_class = targets[0].get_lookup("isnull")
col = self._get_col(targets[0], join_info.targets[0], alias)
clause.add(lookup_class(col, False), AND)
# If someval is a nullable column, someval IS NOT NULL is
# added.
if isinstance(value, Col) and self.is_nullable(value.target):
lookup_class = value.target.get_lookup("isnull")
clause.add(lookup_class(value, False), AND)
return clause, used_joins if not require_outer else ()
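    # Illustrative: build_filter(("name__icontains", "bob")) resolves the
    # "name" field, builds an icontains lookup against it, and returns a
    # (WhereNode, used_joins) pair; add_q() is the caller responsible for
    # actually attaching the clause to self.where.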
def add_filter(self, filter_lhs, filter_rhs):
self.add_q(Q((filter_lhs, filter_rhs)))
def add_q(self, q_object):
"""
A preprocessor for the internal _add_q(). Responsible for doing final
join promotion.
"""
# For join promotion this case is doing an AND for the added q_object
# and existing conditions. So, any existing inner join forces the join
# type to remain inner. Existing outer joins can however be demoted.
# (Consider case where rel_a is LOUTER and rel_a__col=1 is added - if
# rel_a doesn't produce any rows, then the whole condition must fail.
        # So, demotion is OK.)
existing_inner = {
a for a in self.alias_map if self.alias_map[a].join_type == INNER
}
clause, _ = self._add_q(q_object, self.used_aliases)
if clause:
self.where.add(clause, AND)
self.demote_joins(existing_inner)
def build_where(self, filter_expr):
return self.build_filter(filter_expr, allow_joins=False)[0]
def clear_where(self):
self.where = WhereNode()
def _add_q(
self,
q_object,
used_aliases,
branch_negated=False,
current_negated=False,
allow_joins=True,
split_subq=True,
check_filterable=True,
summarize=False,
update_join_types=True,
):
"""Add a Q-object to the current filter."""
connector = q_object.connector
current_negated ^= q_object.negated
branch_negated = branch_negated or q_object.negated
target_clause = WhereNode(connector=connector, negated=q_object.negated)
joinpromoter = JoinPromoter(
q_object.connector, len(q_object.children), current_negated
)
for child in q_object.children:
child_clause, needed_inner = self.build_filter(
child,
can_reuse=used_aliases,
branch_negated=branch_negated,
current_negated=current_negated,
allow_joins=allow_joins,
split_subq=split_subq,
check_filterable=check_filterable,
summarize=summarize,
update_join_types=update_join_types,
)
joinpromoter.add_votes(needed_inner)
if child_clause:
target_clause.add(child_clause, connector)
if update_join_types:
needed_inner = joinpromoter.update_join_types(self)
else:
needed_inner = []
return target_clause, needed_inner
def add_filtered_relation(self, filtered_relation, alias):
filtered_relation.alias = alias
lookups = dict(get_children_from_q(filtered_relation.condition))
relation_lookup_parts, relation_field_parts, _ = self.solve_lookup_type(
filtered_relation.relation_name
)
if relation_lookup_parts:
raise ValueError(
"FilteredRelation's relation_name cannot contain lookups "
"(got %r)." % filtered_relation.relation_name
)
for lookup in chain(lookups):
lookup_parts, lookup_field_parts, _ = self.solve_lookup_type(lookup)
shift = 2 if not lookup_parts else 1
lookup_field_path = lookup_field_parts[:-shift]
for idx, lookup_field_part in enumerate(lookup_field_path):
if len(relation_field_parts) > idx:
if relation_field_parts[idx] != lookup_field_part:
raise ValueError(
"FilteredRelation's condition doesn't support "
"relations outside the %r (got %r)."
% (filtered_relation.relation_name, lookup)
)
else:
raise ValueError(
"FilteredRelation's condition doesn't support nested "
"relations deeper than the relation_name (got %r for "
"%r)." % (lookup, filtered_relation.relation_name)
)
filtered_relation.condition = rename_prefix_from_q(
filtered_relation.relation_name,
alias,
filtered_relation.condition,
)
self._filtered_relations[filtered_relation.alias] = filtered_relation
def names_to_path(self, names, opts, allow_many=True, fail_on_missing=False):
"""
        Walk the list of names and turn them into PathInfo tuples. A single
name in 'names' can generate multiple PathInfos (m2m, for example).
'names' is the path of names to travel, 'opts' is the model Options we
start the name resolving from, 'allow_many' is as for setup_joins().
If fail_on_missing is set to True, then a name that can't be resolved
will generate a FieldError.
Return a list of PathInfo tuples. In addition return the final field
(the last used join field) and target (which is a field guaranteed to
contain the same value as the final field). Finally, return those names
that weren't found (which are likely transforms and the final lookup).
"""
path, names_with_path = [], []
for pos, name in enumerate(names):
cur_names_with_path = (name, [])
if name == "pk":
name = opts.pk.name
field = None
filtered_relation = None
try:
if opts is None:
raise FieldDoesNotExist
field = opts.get_field(name)
except FieldDoesNotExist:
if name in self.annotation_select:
field = self.annotation_select[name].output_field
elif name in self._filtered_relations and pos == 0:
filtered_relation = self._filtered_relations[name]
if LOOKUP_SEP in filtered_relation.relation_name:
parts = filtered_relation.relation_name.split(LOOKUP_SEP)
filtered_relation_path, field, _, _ = self.names_to_path(
parts,
opts,
allow_many,
fail_on_missing,
)
path.extend(filtered_relation_path[:-1])
else:
field = opts.get_field(filtered_relation.relation_name)
if field is not None:
# Fields that contain one-to-many relations with a generic
# model (like a GenericForeignKey) cannot generate reverse
# relations and therefore cannot be used for reverse querying.
if field.is_relation and not field.related_model:
raise FieldError(
"Field %r does not generate an automatic reverse "
"relation and therefore cannot be used for reverse "
"querying. If it is a GenericForeignKey, consider "
"adding a GenericRelation." % name
)
try:
model = field.model._meta.concrete_model
except AttributeError:
# QuerySet.annotate() may introduce fields that aren't
# attached to a model.
model = None
else:
# We didn't find the current field, so move position back
# one step.
pos -= 1
if pos == -1 or fail_on_missing:
available = sorted(
[
*get_field_names_from_opts(opts),
*self.annotation_select,
*self._filtered_relations,
]
)
raise FieldError(
"Cannot resolve keyword '%s' into field. "
"Choices are: %s" % (name, ", ".join(available))
)
break
# Check if we need any joins for concrete inheritance cases (the
# field lives in parent, but we are currently in one of its
# children)
if opts is not None and model is not opts.model:
path_to_parent = opts.get_path_to_parent(model)
if path_to_parent:
path.extend(path_to_parent)
cur_names_with_path[1].extend(path_to_parent)
opts = path_to_parent[-1].to_opts
if hasattr(field, "path_infos"):
if filtered_relation:
pathinfos = field.get_path_info(filtered_relation)
else:
pathinfos = field.path_infos
if not allow_many:
for inner_pos, p in enumerate(pathinfos):
if p.m2m:
cur_names_with_path[1].extend(pathinfos[0 : inner_pos + 1])
names_with_path.append(cur_names_with_path)
raise MultiJoin(pos + 1, names_with_path)
last = pathinfos[-1]
path.extend(pathinfos)
final_field = last.join_field
opts = last.to_opts
targets = last.target_fields
cur_names_with_path[1].extend(pathinfos)
names_with_path.append(cur_names_with_path)
else:
# Local non-relational field.
final_field = field
targets = (field,)
if fail_on_missing and pos + 1 != len(names):
raise FieldError(
"Cannot resolve keyword %r into field. Join on '%s'"
" not permitted." % (names[pos + 1], name)
)
break
return path, final_field, targets, names[pos + 1 :]
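    # Illustrative: for names ["author", "name", "icontains"] on a
    # hypothetical Book model, names_to_path() returns the PathInfos for the
    # author join, the "name" field as both final field and target, and
    # ["icontains"] as the trailing names that weren't resolved into fields.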
def setup_joins(
self,
names,
opts,
alias,
can_reuse=None,
allow_many=True,
):
"""
Compute the necessary table joins for the passage through the fields
given in 'names'. 'opts' is the Options class for the current model
(which gives the table we are starting from), 'alias' is the alias for
the table to start the joining from.
The 'can_reuse' defines the reverse foreign key joins we can reuse. It
can be None in which case all joins are reusable or a set of aliases
that can be reused. Note that non-reverse foreign keys are always
reusable when using setup_joins().
If 'allow_many' is False, then any reverse foreign key seen will
generate a MultiJoin exception.
Return the final field involved in the joins, the target field (used
for any 'where' constraint), the final 'opts' value, the joins, the
field path traveled to generate the joins, and a transform function
that takes a field and alias and is equivalent to `field.get_col(alias)`
in the simple case but wraps field transforms if they were included in
names.
        The target field is the field containing the concrete value. The final
        field can be something different, for example a foreign key pointing
        to that value. The final field is needed in some value conversions
        (e.g. converting 'obj' in fk__id=obj to a pk value using the foreign
        key field).
"""
joins = [alias]
# The transform can't be applied yet, as joins must be trimmed later.
# To avoid making every caller of this method look up transforms
# directly, compute transforms here and create a partial that converts
# fields to the appropriate wrapped version.
def final_transformer(field, alias):
if not self.alias_cols:
alias = None
return field.get_col(alias)
# Try resolving all the names as fields first. If there's an error,
# treat trailing names as lookups until a field can be resolved.
last_field_exception = None
for pivot in range(len(names), 0, -1):
try:
path, final_field, targets, rest = self.names_to_path(
names[:pivot],
opts,
allow_many,
fail_on_missing=True,
)
except FieldError as exc:
if pivot == 1:
# The first item cannot be a lookup, so it's safe
# to raise the field error here.
raise
else:
last_field_exception = exc
else:
# The transforms are the remaining items that couldn't be
# resolved into fields.
transforms = names[pivot:]
break
for name in transforms:
def transform(field, alias, *, name, previous):
try:
wrapped = previous(field, alias)
return self.try_transform(wrapped, name)
except FieldError:
# FieldError is raised if the transform doesn't exist.
if isinstance(final_field, Field) and last_field_exception:
raise last_field_exception
else:
raise
final_transformer = functools.partial(
transform, name=name, previous=final_transformer
)
final_transformer.has_transforms = True
# Then, add the path to the query's joins. Note that we can't trim
# joins at this stage - we will need the information about join type
# of the trimmed joins.
for join in path:
if join.filtered_relation:
filtered_relation = join.filtered_relation.clone()
table_alias = filtered_relation.alias
else:
filtered_relation = None
table_alias = None
opts = join.to_opts
if join.direct:
nullable = self.is_nullable(join.join_field)
else:
nullable = True
connection = self.join_class(
opts.db_table,
alias,
table_alias,
INNER,
join.join_field,
nullable,
filtered_relation=filtered_relation,
)
reuse = can_reuse if join.m2m else None
alias = self.join(connection, reuse=reuse)
joins.append(alias)
return JoinInfo(final_field, targets, opts, joins, path, final_transformer)
def trim_joins(self, targets, joins, path):
"""
        The 'targets' parameter is the tuple of final fields being joined to,
        'joins' is the full list of join aliases. The 'path' contains the
        PathInfos used to create the joins.
Return the final target field and table alias and the new active
joins.
Always trim any direct join if the target column is already in the
previous table. Can't trim reverse joins as it's unknown if there's
anything on the other side of the join.
"""
joins = joins[:]
for pos, info in enumerate(reversed(path)):
if len(joins) == 1 or not info.direct:
break
if info.filtered_relation:
break
join_targets = {t.column for t in info.join_field.foreign_related_fields}
cur_targets = {t.column for t in targets}
if not cur_targets.issubset(join_targets):
break
targets_dict = {
r[1].column: r[0]
for r in info.join_field.related_fields
if r[1].column in cur_targets
}
targets = tuple(targets_dict[t.column] for t in targets)
self.unref_alias(joins.pop())
return targets, joins[-1], joins
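    # Illustrative: filtering Book on author__id joins the author table, but
    # the target column already lives on the base table as the foreign key
    # column, so trim_joins() unrefs the join and returns the FK column on
    # the previous alias instead.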
@classmethod
def _gen_cols(cls, exprs, include_external=False, resolve_refs=True):
for expr in exprs:
if isinstance(expr, Col):
yield expr
elif include_external and callable(
getattr(expr, "get_external_cols", None)
):
yield from expr.get_external_cols()
elif hasattr(expr, "get_source_expressions"):
if not resolve_refs and isinstance(expr, Ref):
continue
yield from cls._gen_cols(
expr.get_source_expressions(),
include_external=include_external,
resolve_refs=resolve_refs,
)
@classmethod
def _gen_col_aliases(cls, exprs):
yield from (expr.alias for expr in cls._gen_cols(exprs))
def resolve_ref(self, name, allow_joins=True, reuse=None, summarize=False):
annotation = self.annotations.get(name)
if annotation is not None:
if not allow_joins:
for alias in self._gen_col_aliases([annotation]):
if isinstance(self.alias_map[alias], Join):
raise FieldError(
"Joined field references are not permitted in this query"
)
if summarize:
# Summarize currently means we are doing an aggregate() query
# which is executed as a wrapped subquery if any of the
# aggregate() elements reference an existing annotation. In
# that case we need to return a Ref to the subquery's annotation.
if name not in self.annotation_select:
raise FieldError(
"Cannot aggregate over the '%s' alias. Use annotate() "
"to promote it." % name
)
return Ref(name, self.annotation_select[name])
else:
return annotation
else:
field_list = name.split(LOOKUP_SEP)
annotation = self.annotations.get(field_list[0])
if annotation is not None:
for transform in field_list[1:]:
annotation = self.try_transform(annotation, transform)
return annotation
join_info = self.setup_joins(
field_list, self.get_meta(), self.get_initial_alias(), can_reuse=reuse
)
targets, final_alias, join_list = self.trim_joins(
join_info.targets, join_info.joins, join_info.path
)
if not allow_joins and len(join_list) > 1:
raise FieldError(
"Joined field references are not permitted in this query"
)
if len(targets) > 1:
raise FieldError(
"Referencing multicolumn fields with F() objects isn't supported"
)
# Verify that the last lookup in name is a field or a transform:
# transform_function() raises FieldError if not.
transform = join_info.transform_function(targets[0], final_alias)
if reuse is not None:
reuse.update(join_list)
return transform
def split_exclude(self, filter_expr, can_reuse, names_with_path):
"""
When doing an exclude against any kind of N-to-many relation, we need
to use a subquery. This method constructs the nested query, given the
original exclude filter (filter_expr) and the portion up to the first
N-to-many relation field.
        For example, if the original filter is ~Q(child__name='foo'), filter_expr
is ('child__name', 'foo') and can_reuse is a set of joins usable for
filters in the original query.
We will turn this into equivalent of:
WHERE NOT EXISTS(
SELECT 1
FROM child
WHERE name = 'foo' AND child.parent_id = parent.id
LIMIT 1
)
"""
# Generate the inner query.
query = self.__class__(self.model)
query._filtered_relations = self._filtered_relations
filter_lhs, filter_rhs = filter_expr
if isinstance(filter_rhs, OuterRef):
filter_rhs = OuterRef(filter_rhs)
elif isinstance(filter_rhs, F):
filter_rhs = OuterRef(filter_rhs.name)
query.add_filter(filter_lhs, filter_rhs)
query.clear_ordering(force=True)
        # Keep the subquery as simple as possible by trimming leading joins
        # from it.
trimmed_prefix, contains_louter = query.trim_start(names_with_path)
col = query.select[0]
select_field = col.target
alias = col.alias
if alias in can_reuse:
pk = select_field.model._meta.pk
# Need to add a restriction so that outer query's filters are in effect for
# the subquery, too.
query.bump_prefix(self)
lookup_class = select_field.get_lookup("exact")
# Note that the query.select[0].alias is different from alias
# due to bump_prefix above.
lookup = lookup_class(pk.get_col(query.select[0].alias), pk.get_col(alias))
query.where.add(lookup, AND)
query.external_aliases[alias] = True
lookup_class = select_field.get_lookup("exact")
lookup = lookup_class(col, ResolvedOuterRef(trimmed_prefix))
query.where.add(lookup, AND)
condition, needed_inner = self.build_filter(Exists(query))
if contains_louter:
or_null_condition, _ = self.build_filter(
("%s__isnull" % trimmed_prefix, True),
current_negated=True,
branch_negated=True,
can_reuse=can_reuse,
)
condition.add(or_null_condition, OR)
# Note that the end result will be:
# (outercol NOT IN innerq AND outercol IS NOT NULL) OR outercol IS NULL.
# This might look crazy but due to how IN works, this seems to be
# correct. If the IS NOT NULL check is removed then outercol NOT
# IN will return UNKNOWN. If the IS NULL check is removed, then if
# outercol IS NULL we will not match the row.
return condition, needed_inner
def set_empty(self):
self.where.add(NothingNode(), AND)
for query in self.combined_queries:
query.set_empty()
def is_empty(self):
return any(isinstance(c, NothingNode) for c in self.where.children)
def set_limits(self, low=None, high=None):
"""
Adjust the limits on the rows retrieved. Use low/high to set these,
as it makes it more Pythonic to read and write. When the SQL query is
created, convert them to the appropriate offset and limit values.
Apply any limits passed in here to the existing constraints. Add low
to the current low value and clamp both to any existing high value.
"""
if high is not None:
if self.high_mark is not None:
self.high_mark = min(self.high_mark, self.low_mark + high)
else:
self.high_mark = self.low_mark + high
if low is not None:
if self.high_mark is not None:
self.low_mark = min(self.high_mark, self.low_mark + low)
else:
self.low_mark = self.low_mark + low
if self.low_mark == self.high_mark:
self.set_empty()
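    # Illustrative: slicing qs[5:10] sets (low_mark, high_mark) = (5, 10);
    # a subsequent [1:3] slice then yields high_mark = min(10, 5 + 3) = 8
    # and low_mark = min(8, 5 + 1) = 6.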
def clear_limits(self):
"""Clear any existing limits."""
self.low_mark, self.high_mark = 0, None
@property
def is_sliced(self):
return self.low_mark != 0 or self.high_mark is not None
def has_limit_one(self):
return self.high_mark is not None and (self.high_mark - self.low_mark) == 1
def can_filter(self):
"""
Return True if adding filters to this instance is still possible.
Typically, this means no limits or offsets have been put on the results.
"""
return not self.is_sliced
def clear_select_clause(self):
"""Remove all fields from SELECT clause."""
self.select = ()
self.default_cols = False
self.select_related = False
self.set_extra_mask(())
self.set_annotation_mask(())
def clear_select_fields(self):
"""
Clear the list of fields to select (but not extra_select columns).
Some queryset types completely replace any existing list of select
columns.
"""
self.select = ()
self.values_select = ()
def add_select_col(self, col, name):
self.select += (col,)
self.values_select += (name,)
def set_select(self, cols):
self.default_cols = False
self.select = tuple(cols)
def add_distinct_fields(self, *field_names):
"""
Add and resolve the given fields to the query's "distinct on" clause.
"""
self.distinct_fields = field_names
self.distinct = True
def add_fields(self, field_names, allow_m2m=True):
"""
Add the given (model) fields to the select set. Add the field names in
the order specified.
"""
alias = self.get_initial_alias()
opts = self.get_meta()
try:
cols = []
for name in field_names:
                # Join promotion note - we must not remove any rows here, so
                # if there are no existing joins, use an outer join.
join_info = self.setup_joins(
name.split(LOOKUP_SEP), opts, alias, allow_many=allow_m2m
)
targets, final_alias, joins = self.trim_joins(
join_info.targets,
join_info.joins,
join_info.path,
)
for target in targets:
cols.append(join_info.transform_function(target, final_alias))
if cols:
self.set_select(cols)
except MultiJoin:
raise FieldError("Invalid field name: '%s'" % name)
except FieldError:
if LOOKUP_SEP in name:
# For lookups spanning over relationships, show the error
# from the model on which the lookup failed.
raise
else:
names = sorted(
[
*get_field_names_from_opts(opts),
*self.extra,
*self.annotation_select,
*self._filtered_relations,
]
)
raise FieldError(
"Cannot resolve keyword %r into field. "
"Choices are: %s" % (name, ", ".join(names))
)
def add_ordering(self, *ordering):
"""
Add items from the 'ordering' sequence to the query's "order by"
clause. These items are either field names (not column names) --
possibly with a direction prefix ('-' or '?') -- or OrderBy
expressions.
If 'ordering' is empty, clear all ordering from the query.
"""
errors = []
for item in ordering:
if isinstance(item, str):
if item == "?":
continue
item = item.removeprefix("-")
if item in self.annotations:
continue
if self.extra and item in self.extra:
continue
                # names_to_path() validates the lookup. A descriptive
                # FieldError will be raised if it's not.
self.names_to_path(item.split(LOOKUP_SEP), self.model._meta)
elif not hasattr(item, "resolve_expression"):
errors.append(item)
if getattr(item, "contains_aggregate", False):
raise FieldError(
"Using an aggregate in order_by() without also including "
"it in annotate() is not allowed: %s" % item
)
if errors:
raise FieldError("Invalid order_by arguments: %s" % errors)
if ordering:
self.order_by += ordering
else:
self.default_ordering = False
def clear_ordering(self, force=False, clear_default=True):
"""
Remove any ordering settings if the current query allows it without
side effects, set 'force' to True to clear the ordering regardless.
If 'clear_default' is True, there will be no ordering in the resulting
query (not even the model's default).
"""
if not force and (
self.is_sliced or self.distinct_fields or self.select_for_update
):
return
self.order_by = ()
self.extra_order_by = ()
if clear_default:
self.default_ordering = False
def set_group_by(self, allow_aliases=True):
"""
Expand the GROUP BY clause required by the query.
This will usually be the set of all non-aggregate fields in the
return data. If the database backend supports grouping by the
primary key, and the query would be equivalent, the optimization
will be made automatically.
"""
if allow_aliases and self.values_select:
# If grouping by aliases is allowed assign selected value aliases
# by moving them to annotations.
group_by_annotations = {}
values_select = {}
for alias, expr in zip(self.values_select, self.select):
if isinstance(expr, Col):
values_select[alias] = expr
else:
group_by_annotations[alias] = expr
self.annotations = {**group_by_annotations, **self.annotations}
self.append_annotation_mask(group_by_annotations)
self.select = tuple(values_select.values())
self.values_select = tuple(values_select)
group_by = list(self.select)
for alias, annotation in self.annotation_select.items():
if not (group_by_cols := annotation.get_group_by_cols()):
continue
if allow_aliases and not annotation.contains_aggregate:
group_by.append(Ref(alias, annotation))
else:
group_by.extend(group_by_cols)
self.group_by = tuple(group_by)
def add_select_related(self, fields):
"""
Set up the select_related data structure so that we only select
certain related models (as opposed to all models, when
self.select_related=True).
"""
if isinstance(self.select_related, bool):
field_dict = {}
else:
field_dict = self.select_related
for field in fields:
d = field_dict
for part in field.split(LOOKUP_SEP):
d = d.setdefault(part, {})
self.select_related = field_dict
def add_extra(self, select, select_params, where, params, tables, order_by):
"""
Add data to the various extra_* attributes for user-created additions
to the query.
"""
if select:
# We need to pair any placeholder markers in the 'select'
# dictionary with their parameters in 'select_params' so that
# subsequent updates to the select dictionary also adjust the
# parameters appropriately.
select_pairs = {}
if select_params:
param_iter = iter(select_params)
else:
param_iter = iter([])
for name, entry in select.items():
self.check_alias(name)
entry = str(entry)
entry_params = []
pos = entry.find("%s")
while pos != -1:
if pos == 0 or entry[pos - 1] != "%":
entry_params.append(next(param_iter))
pos = entry.find("%s", pos + 2)
select_pairs[name] = (entry, entry_params)
self.extra.update(select_pairs)
if where or params:
self.where.add(ExtraWhere(where, params), AND)
if tables:
self.extra_tables += tuple(tables)
if order_by:
self.extra_order_by = order_by
def clear_deferred_loading(self):
"""Remove any fields from the deferred loading set."""
self.deferred_loading = (frozenset(), True)
def add_deferred_loading(self, field_names):
"""
Add the given list of model field names to the set of fields to
exclude from loading from the database when automatic column selection
is done. Add the new field names to any existing field names that
are deferred (or removed from any existing field names that are marked
as the only ones for immediate loading).
"""
# Fields on related models are stored in the literal double-underscore
        # format, so that we can use a set data structure. We do the foo__bar
        # splitting and handling when computing the SQL column names, as part
        # of the compiler's column selection.
existing, defer = self.deferred_loading
if defer:
# Add to existing deferred names.
self.deferred_loading = existing.union(field_names), True
else:
# Remove names from the set of any existing "immediate load" names.
if new_existing := existing.difference(field_names):
self.deferred_loading = new_existing, False
else:
self.clear_deferred_loading()
if new_only := set(field_names).difference(existing):
self.deferred_loading = new_only, True
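    # Illustrative: after .only("name", "age") the state is
    # ({"name", "age"}, False); a subsequent .defer("name") removes "name"
    # from the immediate-load set, leaving ({"age"}, False), while
    # .defer("age") on a fresh query simply stores ({"age"}, True).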
def add_immediate_loading(self, field_names):
"""
Add the given list of model field names to the set of fields to
retrieve when the SQL is executed ("immediate loading" fields). The
field names replace any existing immediate loading field names. If
there are field names already specified for deferred loading, remove
those names from the new field_names before storing the new names
for immediate loading. (That is, immediate loading overrides any
existing immediate values, but respects existing deferrals.)
"""
existing, defer = self.deferred_loading
field_names = set(field_names)
if "pk" in field_names:
field_names.remove("pk")
field_names.add(self.get_meta().pk.name)
if defer:
# Remove any existing deferred names from the current set before
# setting the new names.
self.deferred_loading = field_names.difference(existing), False
else:
# Replace any existing "immediate load" field names.
self.deferred_loading = frozenset(field_names), False
def set_annotation_mask(self, names):
"""Set the mask of annotations that will be returned by the SELECT."""
if names is None:
self.annotation_select_mask = None
else:
self.annotation_select_mask = list(dict.fromkeys(names))
self._annotation_select_cache = None
def append_annotation_mask(self, names):
if self.annotation_select_mask is not None:
self.set_annotation_mask((*self.annotation_select_mask, *names))
def set_extra_mask(self, names):
"""
Set the mask of extra select items that will be returned by SELECT.
Don't remove them from the Query since they might be used later.
"""
if names is None:
self.extra_select_mask = None
else:
self.extra_select_mask = set(names)
self._extra_select_cache = None
def set_values(self, fields):
self.select_related = False
self.clear_deferred_loading()
self.clear_select_fields()
self.has_select_fields = True
if fields:
field_names = []
extra_names = []
annotation_names = []
if not self.extra and not self.annotations:
# Shortcut - if there are no extra or annotations, then
# the values() clause must be just field names.
field_names = list(fields)
else:
self.default_cols = False
for f in fields:
if f in self.extra_select:
extra_names.append(f)
elif f in self.annotation_select:
annotation_names.append(f)
elif f in self.annotations:
raise FieldError(
f"Cannot select the '{f}' alias. Use annotate() to "
"promote it."
)
else:
                        # Call `names_to_path` so that, if `f` is not
                        # resolvable, the FieldError lists the annotations
                        # about to be masked among the valid choices.
if self.annotation_select:
self.names_to_path(f.split(LOOKUP_SEP), self.model._meta)
field_names.append(f)
self.set_extra_mask(extra_names)
self.set_annotation_mask(annotation_names)
selected = frozenset(field_names + extra_names + annotation_names)
else:
field_names = [f.attname for f in self.model._meta.concrete_fields]
selected = frozenset(field_names)
# Selected annotations must be known before setting the GROUP BY
# clause.
if self.group_by is True:
self.add_fields(
(f.attname for f in self.model._meta.concrete_fields), False
)
# Disable GROUP BY aliases to avoid orphaning references to the
# SELECT clause which is about to be cleared.
self.set_group_by(allow_aliases=False)
self.clear_select_fields()
elif self.group_by:
# Resolve GROUP BY annotation references if they are not part of
# the selected fields anymore.
group_by = []
for expr in self.group_by:
if isinstance(expr, Ref) and expr.refs not in selected:
expr = self.annotations[expr.refs]
group_by.append(expr)
self.group_by = tuple(group_by)
self.values_select = tuple(field_names)
self.add_fields(field_names, True)
@property
def annotation_select(self):
"""
Return the dictionary of aggregate columns that are not masked and
should be used in the SELECT clause. Cache this result for performance.
"""
if self._annotation_select_cache is not None:
return self._annotation_select_cache
elif not self.annotations:
return {}
elif self.annotation_select_mask is not None:
self._annotation_select_cache = {
k: self.annotations[k]
for k in self.annotation_select_mask
if k in self.annotations
}
return self._annotation_select_cache
else:
return self.annotations
@property
def extra_select(self):
if self._extra_select_cache is not None:
return self._extra_select_cache
if not self.extra:
return {}
elif self.extra_select_mask is not None:
self._extra_select_cache = {
k: v for k, v in self.extra.items() if k in self.extra_select_mask
}
return self._extra_select_cache
else:
return self.extra
def trim_start(self, names_with_path):
"""
Trim joins from the start of the join path. The candidates for trim
are the PathInfos in names_with_path structure that are m2m joins.
Also set the select column so the start matches the join.
This method is meant to be used for generating the subquery joins &
cols in split_exclude().
Return a lookup usable for doing outerq.filter(lookup=self) and a
boolean indicating if the joins in the prefix contain a LEFT OUTER join.
_"""
all_paths = []
for _, paths in names_with_path:
all_paths.extend(paths)
contains_louter = False
# Trim and operate only on tables that were generated for
# the lookup part of the query. That is, avoid trimming
# joins generated for F() expressions.
lookup_tables = [
t for t in self.alias_map if t in self._lookup_joins or t == self.base_table
]
for trimmed_paths, path in enumerate(all_paths):
if path.m2m:
break
if self.alias_map[lookup_tables[trimmed_paths + 1]].join_type == LOUTER:
contains_louter = True
alias = lookup_tables[trimmed_paths]
self.unref_alias(alias)
        # The path.join_field is a Rel; let's get the other side's field.
join_field = path.join_field.field
# Build the filter prefix.
paths_in_prefix = trimmed_paths
trimmed_prefix = []
for name, path in names_with_path:
if paths_in_prefix - len(path) < 0:
break
trimmed_prefix.append(name)
paths_in_prefix -= len(path)
trimmed_prefix.append(join_field.foreign_related_fields[0].name)
trimmed_prefix = LOOKUP_SEP.join(trimmed_prefix)
        # Let's still see if we can trim the first join from the inner query
# (that is, self). We can't do this for:
# - LEFT JOINs because we would miss those rows that have nothing on
# the outer side,
# - INNER JOINs from filtered relations because we would miss their
# filters.
first_join = self.alias_map[lookup_tables[trimmed_paths + 1]]
if first_join.join_type != LOUTER and not first_join.filtered_relation:
select_fields = [r[0] for r in join_field.related_fields]
select_alias = lookup_tables[trimmed_paths + 1]
self.unref_alias(lookup_tables[trimmed_paths])
extra_restriction = join_field.get_extra_restriction(
None, lookup_tables[trimmed_paths + 1]
)
if extra_restriction:
self.where.add(extra_restriction, AND)
else:
# TODO: It might be possible to trim more joins from the start of the
# inner query if it happens to have a longer join chain containing the
            # values in select_fields. Let's punt this one for now.
select_fields = [r[1] for r in join_field.related_fields]
select_alias = lookup_tables[trimmed_paths]
# The found starting point is likely a join_class instead of a
# base_table_class reference. But the first entry in the query's FROM
# clause must not be a JOIN.
for table in self.alias_map:
if self.alias_refcount[table] > 0:
self.alias_map[table] = self.base_table_class(
self.alias_map[table].table_name,
table,
)
break
self.set_select([f.get_col(select_alias) for f in select_fields])
return trimmed_prefix, contains_louter
def is_nullable(self, field):
"""
Check if the given field should be treated as nullable.
Some backends treat '' as null and Django treats such fields as
nullable for those backends. In such situations field.null can be
False even if we should treat the field as nullable.
"""
# We need to use DEFAULT_DB_ALIAS here, as QuerySet does not have
# (nor should it have) knowledge of which connection is going to be
# used. The proper fix would be to defer all decisions where
# is_nullable() is needed to the compiler stage, but that is not easy
# to do currently.
return field.null or (
field.empty_strings_allowed
and connections[DEFAULT_DB_ALIAS].features.interprets_empty_strings_as_nulls
)
def get_order_dir(field, default="ASC"):
"""
Return the field name and direction for an order specification. For
example, '-foo' is returned as ('foo', 'DESC').
The 'default' param is used to indicate which way no prefix (or a '+'
prefix) should sort. The '-' prefix always sorts the opposite way.
"""
dirn = ORDER_DIR[default]
if field[0] == "-":
return field[1:], dirn[1]
return field, dirn[0]
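# Illustrative (assuming the default ORDER_DIR mapping of
# {"ASC": ("ASC", "DESC"), "DESC": ("DESC", "ASC")}):
#   get_order_dir("-foo")        -> ("foo", "DESC")
#   get_order_dir("foo")         -> ("foo", "ASC")
#   get_order_dir("foo", "DESC") -> ("foo", "DESC")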
class JoinPromoter:
"""
A class to abstract away join promotion problems for complex filter
conditions.
"""
def __init__(self, connector, num_children, negated):
self.connector = connector
self.negated = negated
if self.negated:
if connector == AND:
self.effective_connector = OR
else:
self.effective_connector = AND
else:
self.effective_connector = self.connector
self.num_children = num_children
# Maps of table alias to how many times it is seen as required for
# inner and/or outer joins.
self.votes = Counter()
def __repr__(self):
return (
f"{self.__class__.__qualname__}(connector={self.connector!r}, "
f"num_children={self.num_children!r}, negated={self.negated!r})"
)
def add_votes(self, votes):
"""
Add single vote per item to self.votes. Parameter can be any
iterable.
"""
self.votes.update(votes)
def update_join_types(self, query):
"""
Change join types so that the generated query is as efficient as
possible, but still correct. So, change as many joins as possible
to INNER, but don't make OUTER joins INNER if that could remove
results from the query.
"""
to_promote = set()
to_demote = set()
# The effective_connector is used so that NOT (a AND b) is treated
# similarly to (a OR b) for join promotion.
for table, votes in self.votes.items():
# We must use outer joins in OR case when the join isn't contained
# in all of the joins. Otherwise the INNER JOIN itself could remove
# valid results. Consider the case where a model with rel_a and
# rel_b relations is queried with rel_a__col=1 | rel_b__col=2. Now,
            # if the rel_a join doesn't produce any rows (for example a
            # reverse foreign key with no matches, or a null value in a
            # direct foreign key), and
# there is a matching row in rel_b with col=2, then an INNER join
# to rel_a would remove a valid match from the query. So, we need
# to promote any existing INNER to LOUTER (it is possible this
# promotion in turn will be demoted later on).
if self.effective_connector == OR and votes < self.num_children:
to_promote.add(table)
# If connector is AND and there is a filter that can match only
# when there is a joinable row, then use INNER. For example, in
# rel_a__col=1 & rel_b__col=2, if either of the rels produce NULL
# as join output, then the col=1 or col=2 can't match (as
# NULL=anything is always false).
# For the OR case, if all children voted for a join to be inner,
            # then we can use INNER for the join. For example, with
            # (rel_a__col__icontains=Alex | rel_a__col__icontains=Russell),
            # if rel_a doesn't produce any rows, the whole condition can't
            # match. Hence we can safely use an INNER join.
if self.effective_connector == AND or (
self.effective_connector == OR and votes == self.num_children
):
to_demote.add(table)
# Finally, what happens in cases where we have:
# (rel_a__col=1|rel_b__col=2) & rel_a__col__gte=0
# Now, we first generate the OR clause, and promote joins for it
# in the first if branch above. Both rel_a and rel_b are promoted
# to LOUTER joins. After that we do the AND case. The OR case
# voted no inner joins but the rel_a__col__gte=0 votes inner join
# for rel_a. We demote it back to INNER join (in AND case a single
            # vote is enough). The demotion is OK: if rel_a doesn't produce
# rows, then the rel_a__col__gte=0 clause can't be true, and thus
# the whole clause must be false. So, it is safe to use INNER
# join.
# Note that in this example we could just as well have the __gte
# clause and the OR clause swapped. Or we could replace the __gte
# clause with an OR clause containing rel_a__col=1|rel_a__col=2,
# and again we could safely demote to INNER.
query.promote_joins(to_promote)
query.demote_joins(to_demote)
return to_demote
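# Illustrative walkthrough (not part of Django's public API; the alias names
# are hypothetical): for the condition rel_a__col=1 | rel_b__col=2 the
# compiler would do roughly:
#
#   promoter = JoinPromoter(OR, num_children=2, negated=False)
#   promoter.add_votes(["rel_a"])  # first child references only rel_a
#   promoter.add_votes(["rel_b"])  # second child references only rel_b
#   promoter.update_join_types(query)
#
# Each alias received 1 vote < num_children (2), so update_join_types()
# promotes both joins to LEFT OUTER; an INNER join could otherwise drop rows
# matching only one side of the OR.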
|
3f38b6857eba9d6625f4e8fa63ace2ed5f667f6528513b1852cc247d1f9811d8 | import keyword
import re
from django.core.management.base import BaseCommand, CommandError
from django.db import DEFAULT_DB_ALIAS, connections
from django.db.models.constants import LOOKUP_SEP
class Command(BaseCommand):
help = (
"Introspects the database tables in the given database and outputs a Django "
"model module."
)
requires_system_checks = []
stealth_options = ("table_name_filter",)
db_module = "django.db"
def add_arguments(self, parser):
parser.add_argument(
"table",
nargs="*",
type=str,
help="Selects what tables or views should be introspected.",
)
parser.add_argument(
"--database",
default=DEFAULT_DB_ALIAS,
help=(
'Nominates a database to introspect. Defaults to using the "default" '
"database."
),
)
parser.add_argument(
"--include-partitions",
action="store_true",
help="Also output models for partition tables.",
)
parser.add_argument(
"--include-views",
action="store_true",
help="Also output models for database views.",
)
def handle(self, **options):
try:
for line in self.handle_inspection(options):
self.stdout.write(line)
except NotImplementedError:
raise CommandError(
"Database inspection isn't supported for the currently selected "
"database backend."
)
def handle_inspection(self, options):
connection = connections[options["database"]]
# 'table_name_filter' is a stealth option
table_name_filter = options.get("table_name_filter")
with connection.cursor() as cursor:
yield "# This is an auto-generated Django model module."
yield "# You'll have to do the following manually to clean this up:"
yield "# * Rearrange models' order"
yield "# * Make sure each model has one field with primary_key=True"
yield (
"# * Make sure each ForeignKey and OneToOneField has `on_delete` set "
"to the desired behavior"
)
yield (
"# * Remove `managed = False` lines if you wish to allow "
"Django to create, modify, and delete the table"
)
yield (
"# Feel free to rename the models, but don't rename db_table values or "
"field names."
)
yield "from %s import models" % self.db_module
known_models = []
# Determine types of tables and/or views to be introspected.
types = {"t"}
if options["include_partitions"]:
types.add("p")
if options["include_views"]:
types.add("v")
table_info = connection.introspection.get_table_list(cursor)
table_info = {info.name: info for info in table_info if info.type in types}
for table_name in options["table"] or sorted(name for name in table_info):
if table_name_filter is not None and callable(table_name_filter):
if not table_name_filter(table_name):
continue
try:
try:
relations = connection.introspection.get_relations(
cursor, table_name
)
except NotImplementedError:
relations = {}
try:
constraints = connection.introspection.get_constraints(
cursor, table_name
)
except NotImplementedError:
constraints = {}
primary_key_columns = (
connection.introspection.get_primary_key_columns(
cursor, table_name
)
)
primary_key_column = (
primary_key_columns[0] if primary_key_columns else None
)
unique_columns = [
c["columns"][0]
for c in constraints.values()
if c["unique"] and len(c["columns"]) == 1
]
table_description = connection.introspection.get_table_description(
cursor, table_name
)
except Exception as e:
yield "# Unable to inspect table '%s'" % table_name
yield "# The error was: %s" % e
continue
model_name = self.normalize_table_name(table_name)
yield ""
yield ""
yield "class %s(models.Model):" % model_name
known_models.append(model_name)
used_column_names = [] # Holds column names used in the table so far
column_to_field_name = {} # Maps column names to names of model fields
used_relations = set() # Holds foreign relations used in the table.
for row in table_description:
                    comment_notes = []  # Holds Field notes, to be displayed in a Python comment.
extra_params = {} # Holds Field parameters such as 'db_column'.
column_name = row.name
is_relation = column_name in relations
att_name, params, notes = self.normalize_col_name(
column_name, used_column_names, is_relation
)
extra_params.update(params)
comment_notes.extend(notes)
used_column_names.append(att_name)
column_to_field_name[column_name] = att_name
# Add primary_key and unique, if necessary.
if column_name == primary_key_column:
extra_params["primary_key"] = True
if len(primary_key_columns) > 1:
comment_notes.append(
"The composite primary key (%s) found, that is not "
"supported. The first column is selected."
% ", ".join(primary_key_columns)
)
elif column_name in unique_columns:
extra_params["unique"] = True
if is_relation:
ref_db_column, ref_db_table = relations[column_name]
if extra_params.pop("unique", False) or extra_params.get(
"primary_key"
):
rel_type = "OneToOneField"
else:
rel_type = "ForeignKey"
ref_pk_column = (
connection.introspection.get_primary_key_column(
cursor, ref_db_table
)
)
if ref_pk_column and ref_pk_column != ref_db_column:
extra_params["to_field"] = ref_db_column
rel_to = (
"self"
if ref_db_table == table_name
else self.normalize_table_name(ref_db_table)
)
if rel_to in known_models:
field_type = "%s(%s" % (rel_type, rel_to)
else:
field_type = "%s('%s'" % (rel_type, rel_to)
if rel_to in used_relations:
extra_params["related_name"] = "%s_%s_set" % (
model_name.lower(),
att_name,
)
used_relations.add(rel_to)
else:
# Calling `get_field_type` to get the field type string and any
# additional parameters and notes.
field_type, field_params, field_notes = self.get_field_type(
connection, table_name, row
)
extra_params.update(field_params)
comment_notes.extend(field_notes)
field_type += "("
# Don't output 'id = meta.AutoField(primary_key=True)', because
# that's assumed if it doesn't exist.
if att_name == "id" and extra_params == {"primary_key": True}:
if field_type == "AutoField(":
continue
elif (
field_type
== connection.features.introspected_field_types["AutoField"]
+ "("
):
comment_notes.append("AutoField?")
# Add 'null' and 'blank', if the 'null_ok' flag was present in the
# table description.
if row.null_ok: # If it's NULL...
extra_params["blank"] = True
extra_params["null"] = True
field_desc = "%s = %s%s" % (
att_name,
# Custom fields will have a dotted path
"" if "." in field_type else "models.",
field_type,
)
if field_type.startswith(("ForeignKey(", "OneToOneField(")):
field_desc += ", models.DO_NOTHING"
# Add comment.
if connection.features.supports_comments and row.comment:
extra_params["db_comment"] = row.comment
if extra_params:
if not field_desc.endswith("("):
field_desc += ", "
field_desc += ", ".join(
"%s=%r" % (k, v) for k, v in extra_params.items()
)
field_desc += ")"
if comment_notes:
field_desc += " # " + " ".join(comment_notes)
yield " %s" % field_desc
comment = None
if info := table_info.get(table_name):
is_view = info.type == "v"
is_partition = info.type == "p"
if connection.features.supports_comments:
comment = info.comment
else:
is_view = False
is_partition = False
yield from self.get_meta(
table_name,
constraints,
column_to_field_name,
is_view,
is_partition,
comment,
)
def normalize_col_name(self, col_name, used_column_names, is_relation):
"""
        Modify the column name to make it Python-compatible as a field name.
"""
field_params = {}
field_notes = []
new_name = col_name.lower()
if new_name != col_name:
field_notes.append("Field name made lowercase.")
if is_relation:
if new_name.endswith("_id"):
new_name = new_name.removesuffix("_id")
else:
field_params["db_column"] = col_name
new_name, num_repl = re.subn(r"\W", "_", new_name)
if num_repl > 0:
field_notes.append("Field renamed to remove unsuitable characters.")
if new_name.find(LOOKUP_SEP) >= 0:
while new_name.find(LOOKUP_SEP) >= 0:
new_name = new_name.replace(LOOKUP_SEP, "_")
if col_name.lower().find(LOOKUP_SEP) >= 0:
# Only add the comment if the double underscore was in the original name
field_notes.append(
"Field renamed because it contained more than one '_' in a row."
)
if new_name.startswith("_"):
new_name = "field%s" % new_name
field_notes.append("Field renamed because it started with '_'.")
if new_name.endswith("_"):
new_name = "%sfield" % new_name
field_notes.append("Field renamed because it ended with '_'.")
if keyword.iskeyword(new_name):
new_name += "_field"
field_notes.append("Field renamed because it was a Python reserved word.")
if new_name[0].isdigit():
new_name = "number_%s" % new_name
field_notes.append(
"Field renamed because it wasn't a valid Python identifier."
)
if new_name in used_column_names:
num = 0
while "%s_%d" % (new_name, num) in used_column_names:
num += 1
new_name = "%s_%d" % (new_name, num)
field_notes.append("Field renamed because of name conflict.")
if col_name != new_name and field_notes:
field_params["db_column"] = col_name
return new_name, field_params, field_notes
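    # A few input/output sketches for normalize_col_name(), assuming
    # used_column_names=[] and is_relation=False:
    #
    #   "Name"    -> ("name", {"db_column": "Name"},
    #                 ["Field name made lowercase."])
    #   "user id" -> ("user_id", {"db_column": "user id"},
    #                 ["Field renamed to remove unsuitable characters."])
    #   "class"   -> ("class_field", {"db_column": "class"},
    #                 ["Field renamed because it was a Python reserved word."])
    #
    # With is_relation=True, a trailing "_id" is stripped instead:
    #   "author_id" -> ("author", {}, [])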
def normalize_table_name(self, table_name):
"""Translate the table name to a Python-compatible model name."""
return re.sub(r"[^a-zA-Z0-9]", "", table_name.title())
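    # For example: normalize_table_name("auth_user") returns "AuthUser", and
    # normalize_table_name("my-table 2") returns "MyTable2".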
def get_field_type(self, connection, table_name, row):
"""
Given the database connection, the table name, and the cursor row
description, this routine will return the given field type name, as
well as any additional keyword parameters and notes for the field.
"""
field_params = {}
field_notes = []
try:
field_type = connection.introspection.get_field_type(row.type_code, row)
except KeyError:
field_type = "TextField"
field_notes.append("This field type is a guess.")
# Add max_length for all CharFields.
if field_type == "CharField" and row.display_size:
if (size := int(row.display_size)) and size > 0:
field_params["max_length"] = size
if field_type in {"CharField", "TextField"} and row.collation:
field_params["db_collation"] = row.collation
if field_type == "DecimalField":
if row.precision is None or row.scale is None:
field_notes.append(
"max_digits and decimal_places have been guessed, as this "
"database handles decimal fields as float"
)
field_params["max_digits"] = (
row.precision if row.precision is not None else 10
)
field_params["decimal_places"] = (
row.scale if row.scale is not None else 5
)
else:
field_params["max_digits"] = row.precision
field_params["decimal_places"] = row.scale
return field_type, field_params, field_notes
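    # Output sketch, assuming the backend's introspection maps the column's
    # type_code to "CharField" and reports display_size=50 with no collation:
    #
    #   get_field_type(connection, "some_table", row)
    #   # -> ("CharField", {"max_length": 50}, [])
    #
    # An unrecognized type_code falls back to
    # ("TextField", {}, ["This field type is a guess."]).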
def get_meta(
self,
table_name,
constraints,
column_to_field_name,
is_view,
is_partition,
comment,
):
"""
Return a sequence comprising the lines of code necessary
to construct the inner Meta class for the model corresponding
to the given database table name.
"""
unique_together = []
has_unsupported_constraint = False
for params in constraints.values():
if params["unique"]:
columns = params["columns"]
if None in columns:
has_unsupported_constraint = True
columns = [
x for x in columns if x is not None and x in column_to_field_name
]
if len(columns) > 1:
unique_together.append(
str(tuple(column_to_field_name[c] for c in columns))
)
if is_view:
managed_comment = " # Created from a view. Don't remove."
elif is_partition:
managed_comment = " # Created from a partition. Don't remove."
else:
managed_comment = ""
meta = [""]
if has_unsupported_constraint:
meta.append(" # A unique constraint could not be introspected.")
meta += [
" class Meta:",
" managed = False%s" % managed_comment,
" db_table = %r" % table_name,
]
if unique_together:
tup = "(" + ", ".join(unique_together) + ",)"
meta += [" unique_together = %s" % tup]
if comment:
meta += [f" db_table_comment = {comment!r}"]
return meta
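    # For a view named "sales_summary" (hypothetical) with a unique
    # constraint on (region, period), get_meta() yields lines equivalent to:
    #
    #     class Meta:
    #         managed = False  # Created from a view. Don't remove.
    #         db_table = 'sales_summary'
    #         unique_together = (('region', 'period'),)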
|
d4acfd708f7338b43b396c01bd19af8fb4e614ff473df9b86444eaaa4a04577b | import warnings
from datetime import datetime, timedelta
from django import forms
from django.conf import settings
from django.contrib import messages
from django.contrib.admin import FieldListFilter
from django.contrib.admin.exceptions import (
DisallowedModelAdminLookup,
DisallowedModelAdminToField,
)
from django.contrib.admin.options import (
IS_FACETS_VAR,
IS_POPUP_VAR,
TO_FIELD_VAR,
IncorrectLookupParameters,
ShowFacets,
)
from django.contrib.admin.utils import (
build_q_object_from_lookup_parameters,
get_fields_from_path,
lookup_spawns_duplicates,
prepare_lookup_value,
quote,
)
from django.core.exceptions import (
FieldDoesNotExist,
ImproperlyConfigured,
SuspiciousOperation,
)
from django.core.paginator import InvalidPage
from django.db.models import Exists, F, Field, ManyToOneRel, OrderBy, OuterRef
from django.db.models.expressions import Combinable
from django.urls import reverse
from django.utils.deprecation import RemovedInDjango60Warning
from django.utils.http import urlencode
from django.utils.inspect import func_supports_parameter
from django.utils.timezone import make_aware
from django.utils.translation import gettext
# Changelist settings
ALL_VAR = "all"
ORDER_VAR = "o"
PAGE_VAR = "p"
SEARCH_VAR = "q"
ERROR_FLAG = "e"
IGNORED_PARAMS = (
ALL_VAR,
ORDER_VAR,
SEARCH_VAR,
IS_FACETS_VAR,
IS_POPUP_VAR,
TO_FIELD_VAR,
)
class ChangeListSearchForm(forms.Form):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# Populate "fields" dynamically because SEARCH_VAR is a variable:
self.fields = {
SEARCH_VAR: forms.CharField(required=False, strip=False),
}
class ChangeList:
search_form_class = ChangeListSearchForm
def __init__(
self,
request,
model,
list_display,
list_display_links,
list_filter,
date_hierarchy,
search_fields,
list_select_related,
list_per_page,
list_max_show_all,
list_editable,
model_admin,
sortable_by,
search_help_text,
):
self.model = model
self.opts = model._meta
self.lookup_opts = self.opts
self.root_queryset = model_admin.get_queryset(request)
self.list_display = list_display
self.list_display_links = list_display_links
self.list_filter = list_filter
self.has_filters = None
self.has_active_filters = None
self.clear_all_filters_qs = None
self.date_hierarchy = date_hierarchy
self.search_fields = search_fields
self.list_select_related = list_select_related
self.list_per_page = list_per_page
self.list_max_show_all = list_max_show_all
self.model_admin = model_admin
self.preserved_filters = model_admin.get_preserved_filters(request)
self.sortable_by = sortable_by
self.search_help_text = search_help_text
# Get search parameters from the query string.
_search_form = self.search_form_class(request.GET)
if not _search_form.is_valid():
for error in _search_form.errors.values():
messages.error(request, ", ".join(error))
self.query = _search_form.cleaned_data.get(SEARCH_VAR) or ""
try:
self.page_num = int(request.GET.get(PAGE_VAR, 1))
except ValueError:
self.page_num = 1
self.show_all = ALL_VAR in request.GET
self.is_popup = IS_POPUP_VAR in request.GET
self.add_facets = model_admin.show_facets is ShowFacets.ALWAYS or (
model_admin.show_facets is ShowFacets.ALLOW and IS_FACETS_VAR in request.GET
)
self.is_facets_optional = model_admin.show_facets is ShowFacets.ALLOW
to_field = request.GET.get(TO_FIELD_VAR)
if to_field and not model_admin.to_field_allowed(request, to_field):
raise DisallowedModelAdminToField(
"The field %s cannot be referenced." % to_field
)
self.to_field = to_field
self.params = dict(request.GET.items())
self.filter_params = dict(request.GET.lists())
if PAGE_VAR in self.params:
del self.params[PAGE_VAR]
del self.filter_params[PAGE_VAR]
if ERROR_FLAG in self.params:
del self.params[ERROR_FLAG]
del self.filter_params[ERROR_FLAG]
self.remove_facet_link = self.get_query_string(remove=[IS_FACETS_VAR])
self.add_facet_link = self.get_query_string({IS_FACETS_VAR: True})
if self.is_popup:
self.list_editable = ()
else:
self.list_editable = list_editable
self.queryset = self.get_queryset(request)
self.get_results(request)
if self.is_popup:
title = gettext("Select %s")
elif self.model_admin.has_change_permission(request):
title = gettext("Select %s to change")
else:
title = gettext("Select %s to view")
self.title = title % self.opts.verbose_name
self.pk_attname = self.lookup_opts.pk.attname
def __repr__(self):
return "<%s: model=%s model_admin=%s>" % (
self.__class__.__qualname__,
self.model.__qualname__,
self.model_admin.__class__.__qualname__,
)
def get_filters_params(self, params=None):
"""
Return all params except IGNORED_PARAMS.
"""
params = params or self.filter_params
lookup_params = params.copy() # a dictionary of the query string
# Remove all the parameters that are globally and systematically
# ignored.
for ignored in IGNORED_PARAMS:
if ignored in lookup_params:
del lookup_params[ignored]
return lookup_params
def get_filters(self, request):
lookup_params = self.get_filters_params()
may_have_duplicates = False
has_active_filters = False
supports_request = func_supports_parameter(
self.model_admin.lookup_allowed, "request"
)
if not supports_request:
warnings.warn(
f"`request` must be added to the signature of "
f"{self.model_admin.__class__.__qualname__}.lookup_allowed().",
RemovedInDjango60Warning,
)
for key, value_list in lookup_params.items():
for value in value_list:
params = (key, value, request) if supports_request else (key, value)
if not self.model_admin.lookup_allowed(*params):
raise DisallowedModelAdminLookup(f"Filtering by {key} not allowed")
filter_specs = []
for list_filter in self.list_filter:
lookup_params_count = len(lookup_params)
if callable(list_filter):
# This is simply a custom list filter class.
spec = list_filter(request, lookup_params, self.model, self.model_admin)
else:
field_path = None
if isinstance(list_filter, (tuple, list)):
# This is a custom FieldListFilter class for a given field.
field, field_list_filter_class = list_filter
else:
# This is simply a field name, so use the default
# FieldListFilter class that has been registered for the
# type of the given field.
field, field_list_filter_class = list_filter, FieldListFilter.create
if not isinstance(field, Field):
field_path = field
field = get_fields_from_path(self.model, field_path)[-1]
spec = field_list_filter_class(
field,
request,
lookup_params,
self.model,
self.model_admin,
field_path=field_path,
)
# field_list_filter_class removes any lookup_params it
# processes. If that happened, check if duplicates should be
# removed.
if lookup_params_count > len(lookup_params):
may_have_duplicates |= lookup_spawns_duplicates(
self.lookup_opts,
field_path,
)
if spec and spec.has_output():
filter_specs.append(spec)
if lookup_params_count > len(lookup_params):
has_active_filters = True
if self.date_hierarchy:
# Create bounded lookup parameters so that the query is more
# efficient.
year = lookup_params.pop("%s__year" % self.date_hierarchy, None)
if year is not None:
month = lookup_params.pop("%s__month" % self.date_hierarchy, None)
day = lookup_params.pop("%s__day" % self.date_hierarchy, None)
try:
from_date = datetime(
int(year[-1]),
int(month[-1] if month is not None else 1),
int(day[-1] if day is not None else 1),
)
except ValueError as e:
raise IncorrectLookupParameters(e) from e
if day:
to_date = from_date + timedelta(days=1)
elif month:
# In this branch, from_date will always be the first of a
# month, so advancing 32 days gives the next month.
to_date = (from_date + timedelta(days=32)).replace(day=1)
else:
to_date = from_date.replace(year=from_date.year + 1)
if settings.USE_TZ:
from_date = make_aware(from_date)
to_date = make_aware(to_date)
lookup_params.update(
{
"%s__gte" % self.date_hierarchy: [from_date],
"%s__lt" % self.date_hierarchy: [to_date],
}
)
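                # For example (field name hypothetical): with
                # date_hierarchy="pubdate" and ?pubdate__year=2008&
                # pubdate__month=3, this adds pubdate__gte=[datetime(2008, 3, 1)]
                # and pubdate__lt=[datetime(2008, 4, 1)] to lookup_params.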
# At this point, all the parameters used by the various ListFilters
# have been removed from lookup_params, which now only contains other
# parameters passed via the query string. We now loop through the
# remaining parameters both to ensure that all the parameters are valid
# fields and to determine if at least one of them spawns duplicates. If
# the lookup parameters aren't real fields, then bail out.
try:
for key, value in lookup_params.items():
lookup_params[key] = prepare_lookup_value(key, value)
may_have_duplicates |= lookup_spawns_duplicates(self.lookup_opts, key)
return (
filter_specs,
bool(filter_specs),
lookup_params,
may_have_duplicates,
has_active_filters,
)
except FieldDoesNotExist as e:
raise IncorrectLookupParameters(e) from e
def get_query_string(self, new_params=None, remove=None):
if new_params is None:
new_params = {}
if remove is None:
remove = []
p = self.filter_params.copy()
for r in remove:
for k in list(p):
if k.startswith(r):
del p[k]
for k, v in new_params.items():
if v is None:
if k in p:
del p[k]
else:
p[k] = v
return "?%s" % urlencode(sorted(p.items()), doseq=True)
def get_results(self, request):
paginator = self.model_admin.get_paginator(
request, self.queryset, self.list_per_page
)
# Get the number of objects, with admin filters applied.
result_count = paginator.count
# Get the total number of objects, with no admin filters applied.
# Note this isn't necessarily the same as result_count in the case of
        # no filtering. Filters defined in list_filter may still apply some
# default filtering which may be removed with query parameters.
if self.model_admin.show_full_result_count:
full_result_count = self.root_queryset.count()
else:
full_result_count = None
can_show_all = result_count <= self.list_max_show_all
multi_page = result_count > self.list_per_page
# Get the list of objects to display on this page.
if (self.show_all and can_show_all) or not multi_page:
result_list = self.queryset._clone()
else:
try:
result_list = paginator.page(self.page_num).object_list
except InvalidPage:
raise IncorrectLookupParameters
self.result_count = result_count
self.show_full_result_count = self.model_admin.show_full_result_count
# Admin actions are shown if there is at least one entry
# or if entries are not counted because show_full_result_count is disabled
self.show_admin_actions = not self.show_full_result_count or bool(
full_result_count
)
self.full_result_count = full_result_count
self.result_list = result_list
self.can_show_all = can_show_all
self.multi_page = multi_page
self.paginator = paginator
def _get_default_ordering(self):
ordering = []
if self.model_admin.ordering:
ordering = self.model_admin.ordering
elif self.lookup_opts.ordering:
ordering = self.lookup_opts.ordering
return ordering
def get_ordering_field(self, field_name):
"""
Return the proper model field name corresponding to the given
field_name to use for ordering. field_name may either be the name of a
proper model field or the name of a method (on the admin or model) or a
callable with the 'admin_order_field' attribute. Return None if no
proper model field name can be matched.
"""
try:
field = self.lookup_opts.get_field(field_name)
return field.name
except FieldDoesNotExist:
# See whether field_name is a name of a non-field
# that allows sorting.
if callable(field_name):
attr = field_name
elif hasattr(self.model_admin, field_name):
attr = getattr(self.model_admin, field_name)
else:
attr = getattr(self.model, field_name)
if isinstance(attr, property) and hasattr(attr, "fget"):
attr = attr.fget
return getattr(attr, "admin_order_field", None)
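    # Resolution sketch (names hypothetical): for a list_display entry
    # "colored_name" implemented as a ModelAdmin method with
    # colored_name.admin_order_field = "first_name", this returns
    # "first_name"; for a plain model field name it returns the field's own
    # name; if nothing sortable is found it returns None.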
def get_ordering(self, request, queryset):
"""
Return the list of ordering fields for the change list.
First check the get_ordering() method in model admin, then check
the object's default ordering. Then, any manually-specified ordering
from the query string overrides anything. Finally, a deterministic
order is guaranteed by calling _get_deterministic_ordering() with the
constructed ordering.
"""
params = self.params
ordering = list(
self.model_admin.get_ordering(request) or self._get_default_ordering()
)
if ORDER_VAR in params:
# Clear ordering and used params
ordering = []
order_params = params[ORDER_VAR].split(".")
for p in order_params:
try:
none, pfx, idx = p.rpartition("-")
field_name = self.list_display[int(idx)]
order_field = self.get_ordering_field(field_name)
if not order_field:
continue # No 'admin_order_field', skip it
if isinstance(order_field, OrderBy):
if pfx == "-":
order_field = order_field.copy()
order_field.reverse_ordering()
ordering.append(order_field)
elif hasattr(order_field, "resolve_expression"):
# order_field is an expression.
ordering.append(
order_field.desc() if pfx == "-" else order_field.asc()
)
# reverse order if order_field has already "-" as prefix
elif pfx == "-" and order_field.startswith(pfx):
ordering.append(order_field.removeprefix(pfx))
else:
ordering.append(pfx + order_field)
except (IndexError, ValueError):
continue # Invalid ordering specified, skip it.
# Add the given query's ordering fields, if any.
ordering.extend(queryset.query.order_by)
return self._get_deterministic_ordering(ordering)
def _get_deterministic_ordering(self, ordering):
"""
Ensure a deterministic order across all database backends. Search for a
single field or unique together set of fields providing a total
        ordering. If these are missing, augment the ordering with a descending
        primary key.
"""
ordering = list(ordering)
ordering_fields = set()
total_ordering_fields = {"pk"} | {
field.attname
for field in self.lookup_opts.fields
if field.unique and not field.null
}
for part in ordering:
# Search for single field providing a total ordering.
field_name = None
if isinstance(part, str):
field_name = part.lstrip("-")
elif isinstance(part, F):
field_name = part.name
elif isinstance(part, OrderBy) and isinstance(part.expression, F):
field_name = part.expression.name
if field_name:
# Normalize attname references by using get_field().
try:
field = self.lookup_opts.get_field(field_name)
except FieldDoesNotExist:
# Could be "?" for random ordering or a related field
# lookup. Skip this part of introspection for now.
continue
# Ordering by a related field name orders by the referenced
# model's ordering. Skip this part of introspection for now.
if field.remote_field and field_name == field.name:
continue
if field.attname in total_ordering_fields:
break
ordering_fields.add(field.attname)
else:
# No single total ordering field, try unique_together and total
# unique constraints.
constraint_field_names = (
*self.lookup_opts.unique_together,
*(
constraint.fields
for constraint in self.lookup_opts.total_unique_constraints
),
)
for field_names in constraint_field_names:
# Normalize attname references by using get_field().
fields = [
self.lookup_opts.get_field(field_name) for field_name in field_names
]
# Composite unique constraints containing a nullable column
# cannot ensure total ordering.
if any(field.null for field in fields):
continue
if ordering_fields.issuperset(field.attname for field in fields):
break
else:
# If no set of unique fields is present in the ordering, rely
# on the primary key to provide total ordering.
ordering.append("-pk")
return ordering
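    # Outcome sketch: an ordering of ["name"] on a non-unique field comes
    # back as ["name", "-pk"], while an ordering that already includes a
    # non-null unique field (e.g. ["id"]) is returned unchanged.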
def get_ordering_field_columns(self):
"""
Return a dictionary of ordering field column numbers and asc/desc.
"""
# We must cope with more than one column having the same underlying sort
# field, so we base things on column numbers.
ordering = self._get_default_ordering()
ordering_fields = {}
if ORDER_VAR not in self.params:
# for ordering specified on ModelAdmin or model Meta, we don't know
# the right column numbers absolutely, because there might be more
# than one column associated with that ordering, so we guess.
for field in ordering:
if isinstance(field, (Combinable, OrderBy)):
if not isinstance(field, OrderBy):
field = field.asc()
if isinstance(field.expression, F):
order_type = "desc" if field.descending else "asc"
field = field.expression.name
else:
continue
elif field.startswith("-"):
field = field.removeprefix("-")
order_type = "desc"
else:
order_type = "asc"
for index, attr in enumerate(self.list_display):
if self.get_ordering_field(attr) == field:
ordering_fields[index] = order_type
break
else:
for p in self.params[ORDER_VAR].split("."):
none, pfx, idx = p.rpartition("-")
try:
idx = int(idx)
except ValueError:
continue # skip it
ordering_fields[idx] = "desc" if pfx == "-" else "asc"
return ordering_fields
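    # Decoding sketch: with ORDER_VAR ("o") set to "2.-1" in the query
    # string, this returns {2: "asc", 1: "desc"}, i.e. column 2 of
    # list_display sorted ascending and column 1 descending.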
def get_queryset(self, request, exclude_parameters=None):
# First, we collect all the declared list filters.
(
self.filter_specs,
self.has_filters,
remaining_lookup_params,
filters_may_have_duplicates,
self.has_active_filters,
) = self.get_filters(request)
# Then, we let every list filter modify the queryset to its liking.
qs = self.root_queryset
for filter_spec in self.filter_specs:
if (
exclude_parameters is None
or filter_spec.expected_parameters() != exclude_parameters
):
new_qs = filter_spec.queryset(request, qs)
if new_qs is not None:
qs = new_qs
try:
# Finally, we apply the remaining lookup parameters from the query
# string (i.e. those that haven't already been processed by the
# filters).
q_object = build_q_object_from_lookup_parameters(remaining_lookup_params)
qs = qs.filter(q_object)
except (SuspiciousOperation, ImproperlyConfigured):
# Allow certain types of errors to be re-raised as-is so that the
# caller can treat them in a special way.
raise
except Exception as e:
            # Every other error is caught with a broad "except Exception",
            # because we don't have any other way of validating lookup
            # parameters. They might be invalid if the keyword arguments are
            # incorrect, or if the values are not of the correct type, so we
            # might get FieldError, ValueError, ValidationError, or some other
            # unexpected exception.
raise IncorrectLookupParameters(e)
# Apply search results
qs, search_may_have_duplicates = self.model_admin.get_search_results(
request,
qs,
self.query,
)
# Set query string for clearing all filters.
self.clear_all_filters_qs = self.get_query_string(
new_params=remaining_lookup_params,
remove=self.get_filters_params(),
)
# Remove duplicates from results, if necessary
if filters_may_have_duplicates | search_may_have_duplicates:
qs = qs.filter(pk=OuterRef("pk"))
qs = self.root_queryset.filter(Exists(qs))
# Set ordering.
ordering = self.get_ordering(request, qs)
qs = qs.order_by(*ordering)
if not qs.query.select_related:
qs = self.apply_select_related(qs)
return qs
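    # The duplicate elimination above restricts the filtered queryset to
    # pk=OuterRef("pk") and wraps it in Exists(), so each matching row of
    # root_queryset appears once without resorting to DISTINCT.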
def apply_select_related(self, qs):
if self.list_select_related is True:
return qs.select_related()
if self.list_select_related is False:
if self.has_related_field_in_list_display():
return qs.select_related()
if self.list_select_related:
return qs.select_related(*self.list_select_related)
return qs
def has_related_field_in_list_display(self):
for field_name in self.list_display:
try:
field = self.lookup_opts.get_field(field_name)
except FieldDoesNotExist:
pass
else:
if isinstance(field.remote_field, ManyToOneRel):
# <FK>_id field names don't require a join.
if field_name != field.get_attname():
return True
return False
def url_for_result(self, result):
pk = getattr(result, self.pk_attname)
return reverse(
"admin:%s_%s_change" % (self.opts.app_label, self.opts.model_name),
args=(quote(pk),),
current_app=self.model_admin.admin_site.name,
)
|
aa4bbf17ff6cc2ec9ee6c42c92832941be6c230b9452ed8f511725b09ddbb00b | import compileall
import os
from importlib import import_module
from django.db import connection, connections
from django.db.migrations.exceptions import (
AmbiguityError,
InconsistentMigrationHistory,
NodeNotFoundError,
)
from django.db.migrations.loader import MigrationLoader
from django.db.migrations.recorder import MigrationRecorder
from django.test import TestCase, modify_settings, override_settings
from .test_base import MigrationTestBase
class RecorderTests(TestCase):
"""
Tests recording migrations as applied or not.
"""
databases = {"default", "other"}
def test_apply(self):
"""
Tests marking migrations as applied/unapplied.
"""
recorder = MigrationRecorder(connection)
self.assertEqual(
{(x, y) for (x, y) in recorder.applied_migrations() if x == "myapp"},
set(),
)
recorder.record_applied("myapp", "0432_ponies")
self.assertEqual(
{(x, y) for (x, y) in recorder.applied_migrations() if x == "myapp"},
{("myapp", "0432_ponies")},
)
# That should not affect records of another database
recorder_other = MigrationRecorder(connections["other"])
self.assertEqual(
{(x, y) for (x, y) in recorder_other.applied_migrations() if x == "myapp"},
set(),
)
recorder.record_unapplied("myapp", "0432_ponies")
self.assertEqual(
{(x, y) for (x, y) in recorder.applied_migrations() if x == "myapp"},
set(),
)
def test_has_table_cached(self):
"""
        The has_table() method caches a positive result and doesn't
        continually query for the existence of the migrations table.
"""
recorder = MigrationRecorder(connection)
with self.assertNumQueries(1):
self.assertEqual(recorder.has_table(), True)
self.assertEqual(recorder.has_table(), True)
class LoaderTests(TestCase):
"""
Tests the disk and database loader, and running through migrations
in memory.
"""
def setUp(self):
self.applied_records = []
def tearDown(self):
# Unapply records on databases that don't roll back changes after each
# test method.
if not connection.features.supports_transactions:
for recorder, app, name in self.applied_records:
recorder.record_unapplied(app, name)
def record_applied(self, recorder, app, name):
recorder.record_applied(app, name)
self.applied_records.append((recorder, app, name))
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
@modify_settings(INSTALLED_APPS={"append": "basic"})
def test_load(self):
"""
Makes sure the loader can load the migrations for the test apps,
and then render them out to a new Apps.
"""
# Load and test the plan
migration_loader = MigrationLoader(connection)
self.assertEqual(
migration_loader.graph.forwards_plan(("migrations", "0002_second")),
[
("migrations", "0001_initial"),
("migrations", "0002_second"),
],
)
# Now render it out!
project_state = migration_loader.project_state(("migrations", "0002_second"))
self.assertEqual(len(project_state.models), 2)
author_state = project_state.models["migrations", "author"]
self.assertEqual(
list(author_state.fields), ["id", "name", "slug", "age", "rating"]
)
book_state = project_state.models["migrations", "book"]
self.assertEqual(list(book_state.fields), ["id", "author"])
# Ensure we've included unmigrated apps in there too
self.assertIn("basic", project_state.real_apps)
@override_settings(
MIGRATION_MODULES={
"migrations": "migrations.test_migrations",
"migrations2": "migrations2.test_migrations_2",
}
)
@modify_settings(INSTALLED_APPS={"append": "migrations2"})
def test_plan_handles_repeated_migrations(self):
"""
        _generate_plan() doesn't re-add migrations already in the plan (#29180).
"""
migration_loader = MigrationLoader(connection)
nodes = [("migrations", "0002_second"), ("migrations2", "0001_initial")]
self.assertEqual(
migration_loader.graph._generate_plan(nodes, at_end=True),
[
("migrations", "0001_initial"),
("migrations", "0002_second"),
("migrations2", "0001_initial"),
],
)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_unmigdep"}
)
def test_load_unmigrated_dependency(self):
"""
The loader can load migrations with a dependency on an unmigrated app.
"""
# Load and test the plan
migration_loader = MigrationLoader(connection)
self.assertEqual(
migration_loader.graph.forwards_plan(("migrations", "0001_initial")),
[
("contenttypes", "0001_initial"),
("auth", "0001_initial"),
("migrations", "0001_initial"),
],
)
# Now render it out!
project_state = migration_loader.project_state(("migrations", "0001_initial"))
self.assertEqual(
len([m for a, m in project_state.models if a == "migrations"]), 1
)
book_state = project_state.models["migrations", "book"]
self.assertEqual(list(book_state.fields), ["id", "user"])
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_run_before"}
)
def test_run_before(self):
"""
Makes sure the loader uses Migration.run_before.
"""
# Load and test the plan
migration_loader = MigrationLoader(connection)
self.assertEqual(
migration_loader.graph.forwards_plan(("migrations", "0002_second")),
[
("migrations", "0001_initial"),
("migrations", "0003_third"),
("migrations", "0002_second"),
],
)
@override_settings(
MIGRATION_MODULES={
"migrations": "migrations.test_migrations_first",
"migrations2": "migrations2.test_migrations_2_first",
}
)
@modify_settings(INSTALLED_APPS={"append": "migrations2"})
def test_first(self):
"""
Makes sure the '__first__' migrations build correctly.
"""
migration_loader = MigrationLoader(connection)
self.assertEqual(
migration_loader.graph.forwards_plan(("migrations", "second")),
[
("migrations", "thefirst"),
("migrations2", "0001_initial"),
("migrations2", "0002_second"),
("migrations", "second"),
],
)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_name_match(self):
"Tests prefix name matching"
migration_loader = MigrationLoader(connection)
self.assertEqual(
migration_loader.get_migration_by_prefix("migrations", "0001").name,
"0001_initial",
)
msg = "There is more than one migration for 'migrations' with the prefix '0'"
with self.assertRaisesMessage(AmbiguityError, msg):
migration_loader.get_migration_by_prefix("migrations", "0")
msg = "There is no migration for 'migrations' with the prefix 'blarg'"
with self.assertRaisesMessage(KeyError, msg):
migration_loader.get_migration_by_prefix("migrations", "blarg")
def test_load_import_error(self):
with override_settings(
MIGRATION_MODULES={"migrations": "import_error_package"}
):
with self.assertRaises(ImportError):
MigrationLoader(connection)
def test_load_module_file(self):
with override_settings(
MIGRATION_MODULES={"migrations": "migrations.faulty_migrations.file"}
):
loader = MigrationLoader(connection)
self.assertIn(
"migrations",
loader.unmigrated_apps,
"App with migrations module file not in unmigrated apps.",
)
def test_load_empty_dir(self):
with override_settings(
MIGRATION_MODULES={"migrations": "migrations.faulty_migrations.namespace"}
):
loader = MigrationLoader(connection)
self.assertIn(
"migrations",
loader.unmigrated_apps,
"App missing __init__.py in migrations module not in unmigrated apps.",
)
@override_settings(
INSTALLED_APPS=["migrations.migrations_test_apps.migrated_app"],
)
def test_marked_as_migrated(self):
"""
Undefined MIGRATION_MODULES implies default migration module.
"""
migration_loader = MigrationLoader(connection)
self.assertEqual(migration_loader.migrated_apps, {"migrated_app"})
self.assertEqual(migration_loader.unmigrated_apps, set())
@override_settings(
INSTALLED_APPS=["migrations.migrations_test_apps.migrated_app"],
MIGRATION_MODULES={"migrated_app": None},
)
def test_marked_as_unmigrated(self):
"""
MIGRATION_MODULES allows disabling of migrations for a particular app.
"""
migration_loader = MigrationLoader(connection)
self.assertEqual(migration_loader.migrated_apps, set())
self.assertEqual(migration_loader.unmigrated_apps, {"migrated_app"})
@override_settings(
INSTALLED_APPS=["migrations.migrations_test_apps.migrated_app"],
MIGRATION_MODULES={"migrated_app": "missing-module"},
)
def test_explicit_missing_module(self):
"""
If a MIGRATION_MODULES override points to a missing module, the error
        raised during the import attempt should be propagated unless
`ignore_no_migrations=True`.
"""
with self.assertRaisesMessage(ImportError, "missing-module"):
migration_loader = MigrationLoader(connection)
migration_loader = MigrationLoader(connection, ignore_no_migrations=True)
self.assertEqual(migration_loader.migrated_apps, set())
self.assertEqual(migration_loader.unmigrated_apps, {"migrated_app"})
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
)
def test_loading_squashed(self):
"Tests loading a squashed migration"
migration_loader = MigrationLoader(connection)
recorder = MigrationRecorder(connection)
self.addCleanup(recorder.flush)
# Loading with nothing applied should just give us the one node
self.assertEqual(
len([x for x in migration_loader.graph.nodes if x[0] == "migrations"]),
1,
)
# However, fake-apply one migration and it should now use the old two
self.record_applied(recorder, "migrations", "0001_initial")
migration_loader.build_graph()
self.assertEqual(
len([x for x in migration_loader.graph.nodes if x[0] == "migrations"]),
2,
)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed_complex"}
)
def test_loading_squashed_complex(self):
"Tests loading a complex set of squashed migrations"
loader = MigrationLoader(connection)
recorder = MigrationRecorder(connection)
self.addCleanup(recorder.flush)
def num_nodes():
plan = set(loader.graph.forwards_plan(("migrations", "7_auto")))
return len(plan - loader.applied_migrations.keys())
# Empty database: use squashed migration
loader.build_graph()
self.assertEqual(num_nodes(), 5)
# Starting at 1 or 2 should use the squashed migration too
self.record_applied(recorder, "migrations", "1_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 4)
self.record_applied(recorder, "migrations", "2_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 3)
# However, starting at 3 to 5 cannot use the squashed migration
self.record_applied(recorder, "migrations", "3_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 4)
self.record_applied(recorder, "migrations", "4_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 3)
# Starting at 5 to 7 we are past the squashed migrations.
self.record_applied(recorder, "migrations", "5_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 2)
self.record_applied(recorder, "migrations", "6_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 1)
self.record_applied(recorder, "migrations", "7_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 0)
@override_settings(
MIGRATION_MODULES={
"app1": "migrations.test_migrations_squashed_complex_multi_apps.app1",
"app2": "migrations.test_migrations_squashed_complex_multi_apps.app2",
}
)
@modify_settings(
INSTALLED_APPS={
"append": [
"migrations.test_migrations_squashed_complex_multi_apps.app1",
"migrations.test_migrations_squashed_complex_multi_apps.app2",
]
}
)
def test_loading_squashed_complex_multi_apps(self):
loader = MigrationLoader(connection)
loader.build_graph()
plan = set(loader.graph.forwards_plan(("app1", "4_auto")))
expected_plan = {
("app1", "1_auto"),
("app2", "1_squashed_2"),
("app1", "2_squashed_3"),
("app1", "4_auto"),
}
self.assertEqual(plan, expected_plan)
@override_settings(
MIGRATION_MODULES={
"app1": "migrations.test_migrations_squashed_complex_multi_apps.app1",
"app2": "migrations.test_migrations_squashed_complex_multi_apps.app2",
}
)
@modify_settings(
INSTALLED_APPS={
"append": [
"migrations.test_migrations_squashed_complex_multi_apps.app1",
"migrations.test_migrations_squashed_complex_multi_apps.app2",
]
}
)
def test_loading_squashed_complex_multi_apps_partially_applied(self):
loader = MigrationLoader(connection)
recorder = MigrationRecorder(connection)
self.record_applied(recorder, "app1", "1_auto")
self.record_applied(recorder, "app1", "2_auto")
loader.build_graph()
plan = set(loader.graph.forwards_plan(("app1", "4_auto")))
plan -= loader.applied_migrations.keys()
expected_plan = {
("app2", "1_squashed_2"),
("app1", "3_auto"),
("app1", "4_auto"),
}
self.assertEqual(plan, expected_plan)
@override_settings(
MIGRATION_MODULES={
"migrations": "migrations.test_migrations_squashed_erroneous"
}
)
def test_loading_squashed_erroneous(self):
"Tests loading a complex but erroneous set of squashed migrations"
loader = MigrationLoader(connection)
recorder = MigrationRecorder(connection)
self.addCleanup(recorder.flush)
def num_nodes():
plan = set(loader.graph.forwards_plan(("migrations", "7_auto")))
return len(plan - loader.applied_migrations.keys())
# Empty database: use squashed migration
loader.build_graph()
self.assertEqual(num_nodes(), 5)
# Starting at 1 or 2 should use the squashed migration too
self.record_applied(recorder, "migrations", "1_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 4)
self.record_applied(recorder, "migrations", "2_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 3)
# However, starting at 3 or 4, nonexistent migrations would be needed.
msg = (
"Migration migrations.6_auto depends on nonexistent node "
"('migrations', '5_auto'). Django tried to replace migration "
"migrations.5_auto with any of [migrations.3_squashed_5] but wasn't able "
"to because some of the replaced migrations are already applied."
)
self.record_applied(recorder, "migrations", "3_auto")
with self.assertRaisesMessage(NodeNotFoundError, msg):
loader.build_graph()
self.record_applied(recorder, "migrations", "4_auto")
with self.assertRaisesMessage(NodeNotFoundError, msg):
loader.build_graph()
        # Starting at 5 to 7 we are past the squashed migrations.
self.record_applied(recorder, "migrations", "5_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 2)
self.record_applied(recorder, "migrations", "6_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 1)
self.record_applied(recorder, "migrations", "7_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 0)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations"},
INSTALLED_APPS=["migrations"],
)
def test_check_consistent_history(self):
loader = MigrationLoader(connection=None)
loader.check_consistent_history(connection)
recorder = MigrationRecorder(connection)
self.record_applied(recorder, "migrations", "0002_second")
msg = (
"Migration migrations.0002_second is applied before its dependency "
"migrations.0001_initial on database 'default'."
)
with self.assertRaisesMessage(InconsistentMigrationHistory, msg):
loader.check_consistent_history(connection)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed_extra"},
INSTALLED_APPS=["migrations"],
)
def test_check_consistent_history_squashed(self):
"""
MigrationLoader.check_consistent_history() should ignore unapplied
squashed migrations that have all of their `replaces` applied.
"""
loader = MigrationLoader(connection=None)
recorder = MigrationRecorder(connection)
self.record_applied(recorder, "migrations", "0001_initial")
self.record_applied(recorder, "migrations", "0002_second")
loader.check_consistent_history(connection)
self.record_applied(recorder, "migrations", "0003_third")
loader.check_consistent_history(connection)
@override_settings(
MIGRATION_MODULES={
"app1": "migrations.test_migrations_squashed_ref_squashed.app1",
"app2": "migrations.test_migrations_squashed_ref_squashed.app2",
}
)
@modify_settings(
INSTALLED_APPS={
"append": [
"migrations.test_migrations_squashed_ref_squashed.app1",
"migrations.test_migrations_squashed_ref_squashed.app2",
]
}
)
def test_loading_squashed_ref_squashed(self):
"Tests loading a squashed migration with a new migration referencing it"
r"""
The sample migrations are structured like this:
app_1 1 --> 2 ---------------------*--> 3 *--> 4
\ / /
*-------------------*----/--> 2_sq_3 --*
\ / /
=============== \ ============= / == / ======================
app_2 *--> 1_sq_2 --* /
\ /
*--> 1 --> 2 --*
Where 2_sq_3 is a replacing migration for 2 and 3 in app_1,
as 1_sq_2 is a replacing migration for 1 and 2 in app_2.
"""
loader = MigrationLoader(connection)
recorder = MigrationRecorder(connection)
self.addCleanup(recorder.flush)
# Load with nothing applied: both migrations squashed.
loader.build_graph()
plan = set(loader.graph.forwards_plan(("app1", "4_auto")))
plan -= loader.applied_migrations.keys()
expected_plan = {
("app1", "1_auto"),
("app2", "1_squashed_2"),
("app1", "2_squashed_3"),
("app1", "4_auto"),
}
self.assertEqual(plan, expected_plan)
# Load with nothing applied and migrate to a replaced migration.
# Not possible if loader.replace_migrations is True (default).
loader.build_graph()
msg = "Node ('app1', '3_auto') not a valid node"
with self.assertRaisesMessage(NodeNotFoundError, msg):
loader.graph.forwards_plan(("app1", "3_auto"))
# Possible if loader.replace_migrations is False.
loader.replace_migrations = False
loader.build_graph()
plan = set(loader.graph.forwards_plan(("app1", "3_auto")))
plan -= loader.applied_migrations.keys()
expected_plan = {
("app1", "1_auto"),
("app2", "1_auto"),
("app2", "2_auto"),
("app1", "2_auto"),
("app1", "3_auto"),
}
self.assertEqual(plan, expected_plan)
loader.replace_migrations = True
# Fake-apply a few from app1: unsquashes migration in app1.
self.record_applied(recorder, "app1", "1_auto")
self.record_applied(recorder, "app1", "2_auto")
loader.build_graph()
plan = set(loader.graph.forwards_plan(("app1", "4_auto")))
plan -= loader.applied_migrations.keys()
expected_plan = {
("app2", "1_squashed_2"),
("app1", "3_auto"),
("app1", "4_auto"),
}
self.assertEqual(plan, expected_plan)
# Fake-apply one from app2: unsquashes migration in app2 too.
self.record_applied(recorder, "app2", "1_auto")
loader.build_graph()
plan = set(loader.graph.forwards_plan(("app1", "4_auto")))
plan -= loader.applied_migrations.keys()
expected_plan = {
("app2", "2_auto"),
("app1", "3_auto"),
("app1", "4_auto"),
}
self.assertEqual(plan, expected_plan)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_private"}
)
def test_ignore_files(self):
"""Files prefixed with underscore, tilde, or dot aren't loaded."""
loader = MigrationLoader(connection)
loader.load_disk()
migrations = [
name for app, name in loader.disk_migrations if app == "migrations"
]
self.assertEqual(migrations, ["0001_initial"])
@override_settings(
MIGRATION_MODULES={
"migrations": "migrations.test_migrations_namespace_package"
},
)
def test_loading_namespace_package(self):
"""Migration directories without an __init__.py file are ignored."""
loader = MigrationLoader(connection)
loader.load_disk()
migrations = [
name for app, name in loader.disk_migrations if app == "migrations"
]
self.assertEqual(migrations, [])
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_loading_package_without__file__(self):
"""
To support frozen environments, MigrationLoader loads migrations from
regular packages with no __file__ attribute.
"""
test_module = import_module("migrations.test_migrations")
loader = MigrationLoader(connection)
# __file__ == __spec__.origin or the latter is None and former is
# undefined.
module_file = test_module.__file__
module_origin = test_module.__spec__.origin
module_has_location = test_module.__spec__.has_location
try:
del test_module.__file__
test_module.__spec__.origin = None
test_module.__spec__.has_location = False
loader.load_disk()
migrations = [
name for app, name in loader.disk_migrations if app == "migrations"
]
self.assertCountEqual(migrations, ["0001_initial", "0002_second"])
finally:
test_module.__file__ = module_file
test_module.__spec__.origin = module_origin
test_module.__spec__.has_location = module_has_location
class PycLoaderTests(MigrationTestBase):
def test_valid(self):
"""
To support frozen environments, MigrationLoader loads .pyc migrations.
"""
with self.temporary_migration_module(
module="migrations.test_migrations"
) as migration_dir:
# Compile .py files to .pyc files and delete .py files.
compileall.compile_dir(migration_dir, force=True, quiet=1, legacy=True)
for name in os.listdir(migration_dir):
if name.endswith(".py"):
os.remove(os.path.join(migration_dir, name))
loader = MigrationLoader(connection)
self.assertIn(("migrations", "0001_initial"), loader.disk_migrations)
def test_invalid(self):
"""
MigrationLoader reraises ImportErrors caused by "bad magic number" pyc
files with a more helpful message.
"""
with self.temporary_migration_module(
module="migrations.test_migrations_bad_pyc"
) as migration_dir:
# The -tpl suffix is to avoid the pyc exclusion in MANIFEST.in.
os.rename(
os.path.join(migration_dir, "0001_initial.pyc-tpl"),
os.path.join(migration_dir, "0001_initial.pyc"),
)
msg = (
r"Couldn't import '\w+.migrations.0001_initial' as it appears "
"to be a stale .pyc file."
)
with self.assertRaisesRegex(ImportError, msg):
MigrationLoader(connection)
|
70587f06fecf695ac3c884041edd92fa93b7885dcf5ec6ced44a48d9d28acd17 | import datetime
import math
import re
from decimal import Decimal
from django.core.exceptions import FieldError
from django.db import connection
from django.db.models import (
Avg,
Case,
Count,
DateField,
DateTimeField,
DecimalField,
DurationField,
Exists,
F,
FloatField,
IntegerField,
Max,
Min,
OuterRef,
Q,
StdDev,
Subquery,
Sum,
TimeField,
Value,
Variance,
When,
)
from django.db.models.expressions import Func, RawSQL
from django.db.models.functions import (
Cast,
Coalesce,
Greatest,
Least,
Lower,
Mod,
Now,
Pi,
TruncDate,
TruncHour,
)
from django.test import TestCase
from django.test.testcases import skipUnlessDBFeature
from django.test.utils import Approximate, CaptureQueriesContext
from django.utils import timezone
from .models import Author, Book, Publisher, Store
class NowUTC(Now):
template = "CURRENT_TIMESTAMP"
output_field = DateTimeField()
def as_sql(self, compiler, connection, **extra_context):
if connection.features.test_now_utc_template:
extra_context["template"] = connection.features.test_now_utc_template
return super().as_sql(compiler, connection, **extra_context)
class AggregateTestCase(TestCase):
@classmethod
def setUpTestData(cls):
cls.a1 = Author.objects.create(name="Adrian Holovaty", age=34)
cls.a2 = Author.objects.create(name="Jacob Kaplan-Moss", age=35)
cls.a3 = Author.objects.create(name="Brad Dayley", age=45)
cls.a4 = Author.objects.create(name="James Bennett", age=29)
cls.a5 = Author.objects.create(name="Jeffrey Forcier", age=37)
cls.a6 = Author.objects.create(name="Paul Bissex", age=29)
cls.a7 = Author.objects.create(name="Wesley J. Chun", age=25)
cls.a8 = Author.objects.create(name="Peter Norvig", age=57)
cls.a9 = Author.objects.create(name="Stuart Russell", age=46)
cls.a1.friends.add(cls.a2, cls.a4)
cls.a2.friends.add(cls.a1, cls.a7)
cls.a4.friends.add(cls.a1)
cls.a5.friends.add(cls.a6, cls.a7)
cls.a6.friends.add(cls.a5, cls.a7)
cls.a7.friends.add(cls.a2, cls.a5, cls.a6)
cls.a8.friends.add(cls.a9)
cls.a9.friends.add(cls.a8)
cls.p1 = Publisher.objects.create(
name="Apress", num_awards=3, duration=datetime.timedelta(days=1)
)
cls.p2 = Publisher.objects.create(
name="Sams", num_awards=1, duration=datetime.timedelta(days=2)
)
cls.p3 = Publisher.objects.create(name="Prentice Hall", num_awards=7)
cls.p4 = Publisher.objects.create(name="Morgan Kaufmann", num_awards=9)
cls.p5 = Publisher.objects.create(name="Jonno's House of Books", num_awards=0)
cls.b1 = Book.objects.create(
isbn="159059725",
name="The Definitive Guide to Django: Web Development Done Right",
pages=447,
rating=4.5,
price=Decimal("30.00"),
contact=cls.a1,
publisher=cls.p1,
pubdate=datetime.date(2007, 12, 6),
)
cls.b2 = Book.objects.create(
isbn="067232959",
name="Sams Teach Yourself Django in 24 Hours",
pages=528,
rating=3.0,
price=Decimal("23.09"),
contact=cls.a3,
publisher=cls.p2,
pubdate=datetime.date(2008, 3, 3),
)
cls.b3 = Book.objects.create(
isbn="159059996",
name="Practical Django Projects",
pages=300,
rating=4.0,
price=Decimal("29.69"),
contact=cls.a4,
publisher=cls.p1,
pubdate=datetime.date(2008, 6, 23),
)
cls.b4 = Book.objects.create(
isbn="013235613",
name="Python Web Development with Django",
pages=350,
rating=4.0,
price=Decimal("29.69"),
contact=cls.a5,
publisher=cls.p3,
pubdate=datetime.date(2008, 11, 3),
)
cls.b5 = Book.objects.create(
isbn="013790395",
name="Artificial Intelligence: A Modern Approach",
pages=1132,
rating=4.0,
price=Decimal("82.80"),
contact=cls.a8,
publisher=cls.p3,
pubdate=datetime.date(1995, 1, 15),
)
cls.b6 = Book.objects.create(
isbn="155860191",
name=(
"Paradigms of Artificial Intelligence Programming: Case Studies in "
"Common Lisp"
),
pages=946,
rating=5.0,
price=Decimal("75.00"),
contact=cls.a8,
publisher=cls.p4,
pubdate=datetime.date(1991, 10, 15),
)
cls.b1.authors.add(cls.a1, cls.a2)
cls.b2.authors.add(cls.a3)
cls.b3.authors.add(cls.a4)
cls.b4.authors.add(cls.a5, cls.a6, cls.a7)
cls.b5.authors.add(cls.a8, cls.a9)
cls.b6.authors.add(cls.a8)
s1 = Store.objects.create(
name="Amazon.com",
original_opening=datetime.datetime(1994, 4, 23, 9, 17, 42),
friday_night_closing=datetime.time(23, 59, 59),
)
s2 = Store.objects.create(
name="Books.com",
original_opening=datetime.datetime(2001, 3, 15, 11, 23, 37),
friday_night_closing=datetime.time(23, 59, 59),
)
s3 = Store.objects.create(
name="Mamma and Pappa's Books",
original_opening=datetime.datetime(1945, 4, 25, 16, 24, 14),
friday_night_closing=datetime.time(21, 30),
)
s1.books.add(cls.b1, cls.b2, cls.b3, cls.b4, cls.b5, cls.b6)
s2.books.add(cls.b1, cls.b3, cls.b5, cls.b6)
s3.books.add(cls.b3, cls.b4, cls.b6)
def test_empty_aggregate(self):
self.assertEqual(Author.objects.aggregate(), {})
def test_aggregate_in_order_by(self):
msg = (
"Using an aggregate in order_by() without also including it in "
"annotate() is not allowed: Avg(F(book__rating)"
)
with self.assertRaisesMessage(FieldError, msg):
Author.objects.values("age").order_by(Avg("book__rating"))
def test_single_aggregate(self):
vals = Author.objects.aggregate(Avg("age"))
self.assertEqual(vals, {"age__avg": Approximate(37.4, places=1)})
def test_multiple_aggregates(self):
vals = Author.objects.aggregate(Sum("age"), Avg("age"))
self.assertEqual(
vals, {"age__sum": 337, "age__avg": Approximate(37.4, places=1)}
)
def test_filter_aggregate(self):
vals = Author.objects.filter(age__gt=29).aggregate(Sum("age"))
self.assertEqual(vals, {"age__sum": 254})
def test_related_aggregate(self):
vals = Author.objects.aggregate(Avg("friends__age"))
self.assertEqual(vals, {"friends__age__avg": Approximate(34.07, places=2)})
vals = Book.objects.filter(rating__lt=4.5).aggregate(Avg("authors__age"))
self.assertEqual(vals, {"authors__age__avg": Approximate(38.2857, places=2)})
vals = Author.objects.filter(name__contains="a").aggregate(Avg("book__rating"))
self.assertEqual(vals, {"book__rating__avg": 4.0})
vals = Book.objects.aggregate(Sum("publisher__num_awards"))
self.assertEqual(vals, {"publisher__num_awards__sum": 30})
vals = Publisher.objects.aggregate(Sum("book__price"))
self.assertEqual(vals, {"book__price__sum": Decimal("270.27")})
def test_aggregate_multi_join(self):
vals = Store.objects.aggregate(Max("books__authors__age"))
self.assertEqual(vals, {"books__authors__age__max": 57})
vals = Author.objects.aggregate(Min("book__publisher__num_awards"))
self.assertEqual(vals, {"book__publisher__num_awards__min": 1})
def test_aggregate_alias(self):
vals = Store.objects.filter(name="Amazon.com").aggregate(
amazon_mean=Avg("books__rating")
)
self.assertEqual(vals, {"amazon_mean": Approximate(4.08, places=2)})
def test_aggregate_transform(self):
vals = Store.objects.aggregate(min_month=Min("original_opening__month"))
self.assertEqual(vals, {"min_month": 3})
def test_aggregate_join_transform(self):
vals = Publisher.objects.aggregate(min_year=Min("book__pubdate__year"))
self.assertEqual(vals, {"min_year": 1991})
def test_annotate_basic(self):
self.assertQuerySetEqual(
Book.objects.annotate().order_by("pk"),
[
"The Definitive Guide to Django: Web Development Done Right",
"Sams Teach Yourself Django in 24 Hours",
"Practical Django Projects",
"Python Web Development with Django",
"Artificial Intelligence: A Modern Approach",
"Paradigms of Artificial Intelligence Programming: Case Studies in "
"Common Lisp",
],
lambda b: b.name,
)
books = Book.objects.annotate(mean_age=Avg("authors__age"))
b = books.get(pk=self.b1.pk)
self.assertEqual(
b.name, "The Definitive Guide to Django: Web Development Done Right"
)
self.assertEqual(b.mean_age, 34.5)
def test_annotate_defer(self):
qs = (
Book.objects.annotate(page_sum=Sum("pages"))
.defer("name")
.filter(pk=self.b1.pk)
)
rows = [
(
self.b1.id,
"159059725",
447,
"The Definitive Guide to Django: Web Development Done Right",
)
]
self.assertQuerySetEqual(
qs.order_by("pk"), rows, lambda r: (r.id, r.isbn, r.page_sum, r.name)
)
def test_annotate_defer_select_related(self):
qs = (
Book.objects.select_related("contact")
.annotate(page_sum=Sum("pages"))
.defer("name")
.filter(pk=self.b1.pk)
)
rows = [
(
self.b1.id,
"159059725",
447,
"Adrian Holovaty",
"The Definitive Guide to Django: Web Development Done Right",
)
]
self.assertQuerySetEqual(
qs.order_by("pk"),
rows,
lambda r: (r.id, r.isbn, r.page_sum, r.contact.name, r.name),
)
def test_annotate_m2m(self):
books = (
Book.objects.filter(rating__lt=4.5)
.annotate(Avg("authors__age"))
.order_by("name")
)
self.assertQuerySetEqual(
books,
[
("Artificial Intelligence: A Modern Approach", 51.5),
("Practical Django Projects", 29.0),
("Python Web Development with Django", Approximate(30.3, places=1)),
("Sams Teach Yourself Django in 24 Hours", 45.0),
],
lambda b: (b.name, b.authors__age__avg),
)
books = Book.objects.annotate(num_authors=Count("authors")).order_by("name")
self.assertQuerySetEqual(
books,
[
("Artificial Intelligence: A Modern Approach", 2),
(
"Paradigms of Artificial Intelligence Programming: Case Studies in "
"Common Lisp",
1,
),
("Practical Django Projects", 1),
("Python Web Development with Django", 3),
("Sams Teach Yourself Django in 24 Hours", 1),
("The Definitive Guide to Django: Web Development Done Right", 2),
],
lambda b: (b.name, b.num_authors),
)
def test_backwards_m2m_annotate(self):
authors = (
Author.objects.filter(name__contains="a")
.annotate(Avg("book__rating"))
.order_by("name")
)
self.assertQuerySetEqual(
authors,
[
("Adrian Holovaty", 4.5),
("Brad Dayley", 3.0),
("Jacob Kaplan-Moss", 4.5),
("James Bennett", 4.0),
("Paul Bissex", 4.0),
("Stuart Russell", 4.0),
],
lambda a: (a.name, a.book__rating__avg),
)
authors = Author.objects.annotate(num_books=Count("book")).order_by("name")
self.assertQuerySetEqual(
authors,
[
("Adrian Holovaty", 1),
("Brad Dayley", 1),
("Jacob Kaplan-Moss", 1),
("James Bennett", 1),
("Jeffrey Forcier", 1),
("Paul Bissex", 1),
("Peter Norvig", 2),
("Stuart Russell", 1),
("Wesley J. Chun", 1),
],
lambda a: (a.name, a.num_books),
)
def test_reverse_fkey_annotate(self):
books = Book.objects.annotate(Sum("publisher__num_awards")).order_by("name")
self.assertQuerySetEqual(
books,
[
("Artificial Intelligence: A Modern Approach", 7),
(
"Paradigms of Artificial Intelligence Programming: Case Studies in "
"Common Lisp",
9,
),
("Practical Django Projects", 3),
("Python Web Development with Django", 7),
("Sams Teach Yourself Django in 24 Hours", 1),
("The Definitive Guide to Django: Web Development Done Right", 3),
],
lambda b: (b.name, b.publisher__num_awards__sum),
)
publishers = Publisher.objects.annotate(Sum("book__price")).order_by("name")
self.assertQuerySetEqual(
publishers,
[
("Apress", Decimal("59.69")),
("Jonno's House of Books", None),
("Morgan Kaufmann", Decimal("75.00")),
("Prentice Hall", Decimal("112.49")),
("Sams", Decimal("23.09")),
],
lambda p: (p.name, p.book__price__sum),
)
def test_annotate_values(self):
books = list(
Book.objects.filter(pk=self.b1.pk)
.annotate(mean_age=Avg("authors__age"))
.values()
)
self.assertEqual(
books,
[
{
"contact_id": self.a1.id,
"id": self.b1.id,
"isbn": "159059725",
"mean_age": 34.5,
"name": (
"The Definitive Guide to Django: Web Development Done Right"
),
"pages": 447,
"price": Approximate(Decimal("30")),
"pubdate": datetime.date(2007, 12, 6),
"publisher_id": self.p1.id,
"rating": 4.5,
}
],
)
books = (
Book.objects.filter(pk=self.b1.pk)
.annotate(mean_age=Avg("authors__age"))
.values("pk", "isbn", "mean_age")
)
self.assertEqual(
list(books),
[
{
"pk": self.b1.pk,
"isbn": "159059725",
"mean_age": 34.5,
}
],
)
books = (
Book.objects.filter(pk=self.b1.pk)
.annotate(mean_age=Avg("authors__age"))
.values("name")
)
self.assertEqual(
list(books),
[{"name": "The Definitive Guide to Django: Web Development Done Right"}],
)
books = (
Book.objects.filter(pk=self.b1.pk)
.values()
.annotate(mean_age=Avg("authors__age"))
)
self.assertEqual(
list(books),
[
{
"contact_id": self.a1.id,
"id": self.b1.id,
"isbn": "159059725",
"mean_age": 34.5,
"name": (
"The Definitive Guide to Django: Web Development Done Right"
),
"pages": 447,
"price": Approximate(Decimal("30")),
"pubdate": datetime.date(2007, 12, 6),
"publisher_id": self.p1.id,
"rating": 4.5,
}
],
)
books = (
Book.objects.values("rating")
.annotate(n_authors=Count("authors__id"), mean_age=Avg("authors__age"))
.order_by("rating")
)
self.assertEqual(
list(books),
[
{
"rating": 3.0,
"n_authors": 1,
"mean_age": 45.0,
},
{
"rating": 4.0,
"n_authors": 6,
"mean_age": Approximate(37.16, places=1),
},
{
"rating": 4.5,
"n_authors": 2,
"mean_age": 34.5,
},
{
"rating": 5.0,
"n_authors": 1,
"mean_age": 57.0,
},
],
)
authors = Author.objects.annotate(Avg("friends__age")).order_by("name")
self.assertQuerySetEqual(
authors,
[
("Adrian Holovaty", 32.0),
("Brad Dayley", None),
("Jacob Kaplan-Moss", 29.5),
("James Bennett", 34.0),
("Jeffrey Forcier", 27.0),
("Paul Bissex", 31.0),
("Peter Norvig", 46.0),
("Stuart Russell", 57.0),
("Wesley J. Chun", Approximate(33.66, places=1)),
],
lambda a: (a.name, a.friends__age__avg),
)
def test_count(self):
vals = Book.objects.aggregate(Count("rating"))
self.assertEqual(vals, {"rating__count": 6})
def test_count_star(self):
with self.assertNumQueries(1) as ctx:
Book.objects.aggregate(n=Count("*"))
sql = ctx.captured_queries[0]["sql"]
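        # Count("*") must compile to a literal COUNT(*), not a quoted column.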
self.assertIn("SELECT COUNT(*) ", sql)
def test_count_distinct_expression(self):
aggs = Book.objects.aggregate(
distinct_ratings=Count(
Case(When(pages__gt=300, then="rating")), distinct=True
),
)
self.assertEqual(aggs["distinct_ratings"], 4)
def test_distinct_on_aggregate(self):
for aggregate, expected_result in (
(Avg, 4.125),
(Count, 4),
(Sum, 16.5),
):
with self.subTest(aggregate=aggregate.__name__):
books = Book.objects.aggregate(
ratings=aggregate("rating", distinct=True)
)
self.assertEqual(books["ratings"], expected_result)
def test_non_grouped_annotation_not_in_group_by(self):
"""
        An annotation not included in values() before an aggregate should be
        excluded from the GROUP BY clause.
"""
qs = (
Book.objects.annotate(xprice=F("price"))
.filter(rating=4.0)
.values("rating")
.annotate(count=Count("publisher_id", distinct=True))
.values("count", "rating")
.order_by("count")
)
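        # Books rated 4.0 (b3, b4, b5) span two distinct publishers, so a
        # single group with count=2 is expected.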
self.assertEqual(list(qs), [{"rating": 4.0, "count": 2}])
def test_grouped_annotation_in_group_by(self):
"""
        An annotation included in values() before an aggregate should be
        included in the GROUP BY clause.
"""
qs = (
Book.objects.annotate(xprice=F("price"))
.filter(rating=4.0)
.values("rating", "xprice")
.annotate(count=Count("publisher_id", distinct=True))
.values("count", "rating")
.order_by("count")
)
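        # Grouping additionally by xprice splits rating 4.0 into two groups:
        # 29.69 (b3, b4: two publishers) and 82.80 (b5: one publisher).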
self.assertEqual(
list(qs),
[
{"rating": 4.0, "count": 1},
{"rating": 4.0, "count": 2},
],
)
def test_fkey_aggregate(self):
explicit = list(Author.objects.annotate(Count("book__id")))
implicit = list(Author.objects.annotate(Count("book")))
self.assertCountEqual(explicit, implicit)
def test_annotate_ordering(self):
books = (
Book.objects.values("rating")
.annotate(oldest=Max("authors__age"))
.order_by("oldest", "rating")
)
self.assertEqual(
list(books),
[
{"rating": 4.5, "oldest": 35},
{"rating": 3.0, "oldest": 45},
{"rating": 4.0, "oldest": 57},
{"rating": 5.0, "oldest": 57},
],
)
books = (
Book.objects.values("rating")
.annotate(oldest=Max("authors__age"))
.order_by("-oldest", "-rating")
)
self.assertEqual(
list(books),
[
{"rating": 5.0, "oldest": 57},
{"rating": 4.0, "oldest": 57},
{"rating": 3.0, "oldest": 45},
{"rating": 4.5, "oldest": 35},
],
)
def test_aggregate_annotation(self):
vals = Book.objects.annotate(num_authors=Count("authors__id")).aggregate(
Avg("num_authors")
)
self.assertEqual(vals, {"num_authors__avg": Approximate(1.66, places=1)})
def test_avg_duration_field(self):
# Explicit `output_field`.
self.assertEqual(
Publisher.objects.aggregate(Avg("duration", output_field=DurationField())),
{"duration__avg": datetime.timedelta(days=1, hours=12)},
)
# Implicit `output_field`.
self.assertEqual(
Publisher.objects.aggregate(Avg("duration")),
{"duration__avg": datetime.timedelta(days=1, hours=12)},
)
def test_sum_duration_field(self):
self.assertEqual(
Publisher.objects.aggregate(Sum("duration", output_field=DurationField())),
{"duration__sum": datetime.timedelta(days=3)},
)
def test_sum_distinct_aggregate(self):
"""
Sum on a distinct() QuerySet should aggregate only the distinct items.
"""
authors = Author.objects.filter(book__in=[self.b5, self.b6])
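        # b5 was written by a8 and a9, and b6 by a8, so the join lists a8 twice.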
self.assertEqual(authors.count(), 3)
distinct_authors = authors.distinct()
self.assertEqual(distinct_authors.count(), 2)
# Selected author ages are 57 and 46
age_sum = distinct_authors.aggregate(Sum("age"))
self.assertEqual(age_sum["age__sum"], 103)
def test_filtering(self):
p = Publisher.objects.create(name="Expensive Publisher", num_awards=0)
Book.objects.create(
name="ExpensiveBook1",
pages=1,
isbn="111",
rating=3.5,
price=Decimal("1000"),
publisher=p,
contact_id=self.a1.id,
pubdate=datetime.date(2008, 12, 1),
)
Book.objects.create(
name="ExpensiveBook2",
pages=1,
isbn="222",
rating=4.0,
price=Decimal("1000"),
publisher=p,
contact_id=self.a1.id,
pubdate=datetime.date(2008, 12, 2),
)
Book.objects.create(
name="ExpensiveBook3",
pages=1,
isbn="333",
rating=4.5,
price=Decimal("35"),
publisher=p,
contact_id=self.a1.id,
pubdate=datetime.date(2008, 12, 3),
)
publishers = (
Publisher.objects.annotate(num_books=Count("book__id"))
.filter(num_books__gt=1)
.order_by("pk")
)
self.assertQuerySetEqual(
publishers,
["Apress", "Prentice Hall", "Expensive Publisher"],
lambda p: p.name,
)
publishers = Publisher.objects.filter(book__price__lt=Decimal("40.0")).order_by(
"pk"
)
self.assertQuerySetEqual(
publishers,
[
"Apress",
"Apress",
"Sams",
"Prentice Hall",
"Expensive Publisher",
],
lambda p: p.name,
)
publishers = (
Publisher.objects.annotate(num_books=Count("book__id"))
.filter(num_books__gt=1, book__price__lt=Decimal("40.0"))
.order_by("pk")
)
self.assertQuerySetEqual(
publishers,
["Apress", "Prentice Hall", "Expensive Publisher"],
lambda p: p.name,
)
publishers = (
Publisher.objects.filter(book__price__lt=Decimal("40.0"))
.annotate(num_books=Count("book__id"))
.filter(num_books__gt=1)
.order_by("pk")
)
self.assertQuerySetEqual(publishers, ["Apress"], lambda p: p.name)
publishers = (
Publisher.objects.annotate(num_books=Count("book"))
.filter(num_books__range=[1, 3])
.order_by("pk")
)
self.assertQuerySetEqual(
publishers,
[
"Apress",
"Sams",
"Prentice Hall",
"Morgan Kaufmann",
"Expensive Publisher",
],
lambda p: p.name,
)
publishers = (
Publisher.objects.annotate(num_books=Count("book"))
.filter(num_books__range=[1, 2])
.order_by("pk")
)
self.assertQuerySetEqual(
publishers,
["Apress", "Sams", "Prentice Hall", "Morgan Kaufmann"],
lambda p: p.name,
)
publishers = (
Publisher.objects.annotate(num_books=Count("book"))
.filter(num_books__in=[1, 3])
.order_by("pk")
)
self.assertQuerySetEqual(
publishers,
["Sams", "Morgan Kaufmann", "Expensive Publisher"],
lambda p: p.name,
)
publishers = Publisher.objects.annotate(num_books=Count("book")).filter(
num_books__isnull=True
)
self.assertEqual(len(publishers), 0)
def test_annotation(self):
vals = Author.objects.filter(pk=self.a1.pk).aggregate(Count("friends__id"))
self.assertEqual(vals, {"friends__id__count": 2})
books = (
Book.objects.annotate(num_authors=Count("authors__name"))
.filter(num_authors__exact=2)
.order_by("pk")
)
self.assertQuerySetEqual(
books,
[
"The Definitive Guide to Django: Web Development Done Right",
"Artificial Intelligence: A Modern Approach",
],
lambda b: b.name,
)
authors = (
Author.objects.annotate(num_friends=Count("friends__id", distinct=True))
.filter(num_friends=0)
.order_by("pk")
)
self.assertQuerySetEqual(authors, ["Brad Dayley"], lambda a: a.name)
publishers = (
Publisher.objects.annotate(num_books=Count("book__id"))
.filter(num_books__gt=1)
.order_by("pk")
)
self.assertQuerySetEqual(
publishers, ["Apress", "Prentice Hall"], lambda p: p.name
)
publishers = (
Publisher.objects.filter(book__price__lt=Decimal("40.0"))
.annotate(num_books=Count("book__id"))
.filter(num_books__gt=1)
)
self.assertQuerySetEqual(publishers, ["Apress"], lambda p: p.name)
books = Book.objects.annotate(num_authors=Count("authors__id")).filter(
authors__name__contains="Norvig", num_authors__gt=1
)
self.assertQuerySetEqual(
books, ["Artificial Intelligence: A Modern Approach"], lambda b: b.name
)
def test_more_aggregation(self):
a = Author.objects.get(name__contains="Norvig")
b = Book.objects.get(name__contains="Done Right")
b.authors.add(a)
b.save()
vals = (
Book.objects.annotate(num_authors=Count("authors__id"))
.filter(authors__name__contains="Norvig", num_authors__gt=1)
.aggregate(Avg("rating"))
)
self.assertEqual(vals, {"rating__avg": 4.25})
def test_even_more_aggregate(self):
publishers = (
Publisher.objects.annotate(
earliest_book=Min("book__pubdate"),
)
.exclude(earliest_book=None)
.order_by("earliest_book")
.values(
"earliest_book",
"num_awards",
"id",
"name",
)
)
self.assertEqual(
list(publishers),
[
{
"earliest_book": datetime.date(1991, 10, 15),
"num_awards": 9,
"id": self.p4.id,
"name": "Morgan Kaufmann",
},
{
"earliest_book": datetime.date(1995, 1, 15),
"num_awards": 7,
"id": self.p3.id,
"name": "Prentice Hall",
},
{
"earliest_book": datetime.date(2007, 12, 6),
"num_awards": 3,
"id": self.p1.id,
"name": "Apress",
},
{
"earliest_book": datetime.date(2008, 3, 3),
"num_awards": 1,
"id": self.p2.id,
"name": "Sams",
},
],
)
vals = Store.objects.aggregate(
Max("friday_night_closing"), Min("original_opening")
)
self.assertEqual(
vals,
{
"friday_night_closing__max": datetime.time(23, 59, 59),
"original_opening__min": datetime.datetime(1945, 4, 25, 16, 24, 14),
},
)
def test_annotate_values_list(self):
books = (
Book.objects.filter(pk=self.b1.pk)
.annotate(mean_age=Avg("authors__age"))
.values_list("pk", "isbn", "mean_age")
)
self.assertEqual(list(books), [(self.b1.id, "159059725", 34.5)])
books = (
Book.objects.filter(pk=self.b1.pk)
.annotate(mean_age=Avg("authors__age"))
.values_list("isbn")
)
self.assertEqual(list(books), [("159059725",)])
books = (
Book.objects.filter(pk=self.b1.pk)
.annotate(mean_age=Avg("authors__age"))
.values_list("mean_age")
)
self.assertEqual(list(books), [(34.5,)])
books = (
Book.objects.filter(pk=self.b1.pk)
.annotate(mean_age=Avg("authors__age"))
.values_list("mean_age", flat=True)
)
self.assertEqual(list(books), [34.5])
books = (
Book.objects.values_list("price")
.annotate(count=Count("price"))
.order_by("-count", "price")
)
self.assertEqual(
list(books),
[
(Decimal("29.69"), 2),
(Decimal("23.09"), 1),
(Decimal("30"), 1),
(Decimal("75"), 1),
(Decimal("82.8"), 1),
],
)
def test_dates_with_aggregation(self):
"""
.dates() returns a distinct set of dates when applied to a
QuerySet with aggregation.
Refs #18056. Previously, .dates() would return distinct (date_kind,
aggregation) sets, in this case (year, num_authors), so 2008 would be
returned twice because there are books from 2008 with a different
number of authors.
"""
dates = Book.objects.annotate(num_authors=Count("authors")).dates(
"pubdate", "year"
)
self.assertSequenceEqual(
dates,
[
datetime.date(1991, 1, 1),
datetime.date(1995, 1, 1),
datetime.date(2007, 1, 1),
datetime.date(2008, 1, 1),
],
)
def test_values_aggregation(self):
# Refs #20782
max_rating = Book.objects.values("rating").aggregate(max_rating=Max("rating"))
self.assertEqual(max_rating["max_rating"], 5)
max_books_per_rating = (
Book.objects.values("rating")
.annotate(books_per_rating=Count("id"))
.aggregate(Max("books_per_rating"))
)
self.assertEqual(max_books_per_rating, {"books_per_rating__max": 3})
def test_ticket17424(self):
"""
Doing exclude() on a foreign model after annotate() doesn't crash.
"""
all_books = list(Book.objects.values_list("pk", flat=True).order_by("pk"))
annotated_books = Book.objects.order_by("pk").annotate(one=Count("id"))
        # The value doesn't matter; we just need any negative constraint on a
        # related model that's a no-op.
excluded_books = annotated_books.exclude(publisher__name="__UNLIKELY_VALUE__")
# Try to generate query tree
str(excluded_books.query)
self.assertQuerySetEqual(excluded_books, all_books, lambda x: x.pk)
# Check internal state
self.assertIsNone(annotated_books.query.alias_map["aggregation_book"].join_type)
self.assertIsNone(excluded_books.query.alias_map["aggregation_book"].join_type)
def test_ticket12886(self):
"""
Aggregation over sliced queryset works correctly.
"""
qs = Book.objects.order_by("-rating")[0:3]
vals = qs.aggregate(average_top3_rating=Avg("rating"))["average_top3_rating"]
self.assertAlmostEqual(vals, 4.5, places=2)
def test_ticket11881(self):
"""
        Subqueries do not needlessly contain ORDER BY, SELECT FOR UPDATE, or
        select_related() joins.
"""
qs = (
Book.objects.select_for_update()
.order_by("pk")
.select_related("publisher")
.annotate(max_pk=Max("pk"))
)
with CaptureQueriesContext(connection) as captured_queries:
qs.aggregate(avg_pk=Avg("max_pk"))
self.assertEqual(len(captured_queries), 1)
qstr = captured_queries[0]["sql"].lower()
self.assertNotIn("for update", qstr)
forced_ordering = connection.ops.force_no_ordering()
if forced_ordering:
            # If the backend needs to force an ordering, make sure it's the
            # only "ORDER BY" clause present in the query.
self.assertEqual(
re.findall(r"order by (\w+)", qstr),
[", ".join(f[1][0] for f in forced_ordering).lower()],
)
else:
self.assertNotIn("order by", qstr)
self.assertEqual(qstr.count(" join "), 0)
def test_decimal_max_digits_has_no_effect(self):
Book.objects.all().delete()
a1 = Author.objects.first()
p1 = Publisher.objects.first()
thedate = timezone.now()
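        # Ten books at 9999.98 sum to 99999.80; the aggregate needs more digits
        # than any single stored price, and max_digits must not truncate it.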
for i in range(10):
Book.objects.create(
isbn="abcde{}".format(i),
name="none",
pages=10,
rating=4.0,
price=9999.98,
contact=a1,
publisher=p1,
pubdate=thedate,
)
book = Book.objects.aggregate(price_sum=Sum("price"))
self.assertEqual(book["price_sum"], Decimal("99999.80"))
def test_nonaggregate_aggregation_throws(self):
with self.assertRaisesMessage(TypeError, "fail is not an aggregate expression"):
Book.objects.aggregate(fail=F("price"))
def test_nonfield_annotation(self):
book = Book.objects.annotate(val=Max(Value(2))).first()
self.assertEqual(book.val, 2)
book = Book.objects.annotate(
val=Max(Value(2), output_field=IntegerField())
).first()
self.assertEqual(book.val, 2)
book = Book.objects.annotate(val=Max(2, output_field=IntegerField())).first()
self.assertEqual(book.val, 2)
def test_annotation_expressions(self):
authors = Author.objects.annotate(
combined_ages=Sum(F("age") + F("friends__age"))
).order_by("name")
authors2 = Author.objects.annotate(
combined_ages=Sum("age") + Sum("friends__age")
).order_by("name")
for qs in (authors, authors2):
self.assertQuerySetEqual(
qs,
[
("Adrian Holovaty", 132),
("Brad Dayley", None),
("Jacob Kaplan-Moss", 129),
("James Bennett", 63),
("Jeffrey Forcier", 128),
("Paul Bissex", 120),
("Peter Norvig", 103),
("Stuart Russell", 103),
("Wesley J. Chun", 176),
],
lambda a: (a.name, a.combined_ages),
)
def test_aggregation_expressions(self):
a1 = Author.objects.aggregate(av_age=Sum("age") / Count("*"))
a2 = Author.objects.aggregate(av_age=Sum("age") / Count("age"))
a3 = Author.objects.aggregate(av_age=Avg("age"))
self.assertEqual(a1, {"av_age": 37})
self.assertEqual(a2, {"av_age": 37})
self.assertEqual(a3, {"av_age": Approximate(37.4, places=1)})
def test_avg_decimal_field(self):
v = Book.objects.filter(rating=4).aggregate(avg_price=(Avg("price")))[
"avg_price"
]
self.assertIsInstance(v, Decimal)
self.assertEqual(v, Approximate(Decimal("47.39"), places=2))
def test_order_of_precedence(self):
p1 = Book.objects.filter(rating=4).aggregate(avg_price=(Avg("price") + 2) * 3)
self.assertEqual(p1, {"avg_price": Approximate(Decimal("148.18"), places=2)})
p2 = Book.objects.filter(rating=4).aggregate(avg_price=Avg("price") + 2 * 3)
self.assertEqual(p2, {"avg_price": Approximate(Decimal("53.39"), places=2)})
def test_combine_different_types(self):
msg = (
"Cannot infer type of '+' expression involving these types: FloatField, "
"DecimalField. You must set output_field."
)
qs = Book.objects.annotate(sums=Sum("rating") + Sum("pages") + Sum("price"))
with self.assertRaisesMessage(FieldError, msg):
qs.first()
with self.assertRaisesMessage(FieldError, msg):
qs.first()
b1 = Book.objects.annotate(
sums=Sum(F("rating") + F("pages") + F("price"), output_field=IntegerField())
).get(pk=self.b4.pk)
self.assertEqual(b1.sums, 383)
b2 = Book.objects.annotate(
sums=Sum(F("rating") + F("pages") + F("price"), output_field=FloatField())
).get(pk=self.b4.pk)
self.assertEqual(b2.sums, 383.69)
b3 = Book.objects.annotate(
sums=Sum(F("rating") + F("pages") + F("price"), output_field=DecimalField())
).get(pk=self.b4.pk)
self.assertEqual(b3.sums, Approximate(Decimal("383.69"), places=2))
def test_complex_aggregations_require_kwarg(self):
with self.assertRaisesMessage(
TypeError, "Complex annotations require an alias"
):
Author.objects.annotate(Sum(F("age") + F("friends__age")))
with self.assertRaisesMessage(TypeError, "Complex aggregates require an alias"):
Author.objects.aggregate(Sum("age") / Count("age"))
with self.assertRaisesMessage(TypeError, "Complex aggregates require an alias"):
Author.objects.aggregate(Sum(1))
def test_aggregate_over_complex_annotation(self):
qs = Author.objects.annotate(combined_ages=Sum(F("age") + F("friends__age")))
age = qs.aggregate(max_combined_age=Max("combined_ages"))
self.assertEqual(age["max_combined_age"], 176)
age = qs.aggregate(max_combined_age_doubled=Max("combined_ages") * 2)
self.assertEqual(age["max_combined_age_doubled"], 176 * 2)
age = qs.aggregate(
max_combined_age_doubled=Max("combined_ages") + Max("combined_ages")
)
self.assertEqual(age["max_combined_age_doubled"], 176 * 2)
age = qs.aggregate(
max_combined_age_doubled=Max("combined_ages") + Max("combined_ages"),
sum_combined_age=Sum("combined_ages"),
)
self.assertEqual(age["max_combined_age_doubled"], 176 * 2)
self.assertEqual(age["sum_combined_age"], 954)
age = qs.aggregate(
max_combined_age_doubled=Max("combined_ages") + Max("combined_ages"),
sum_combined_age_doubled=Sum("combined_ages") + Sum("combined_ages"),
)
self.assertEqual(age["max_combined_age_doubled"], 176 * 2)
self.assertEqual(age["sum_combined_age_doubled"], 954 * 2)
def test_values_annotation_with_expression(self):
        # Ensure the F() is promoted to the GROUP BY clause.
qs = Author.objects.values("name").annotate(another_age=Sum("age") + F("age"))
a = qs.get(name="Adrian Holovaty")
self.assertEqual(a["another_age"], 68)
qs = qs.annotate(friend_count=Count("friends"))
a = qs.get(name="Adrian Holovaty")
self.assertEqual(a["friend_count"], 2)
qs = (
qs.annotate(combined_age=Sum("age") + F("friends__age"))
.filter(name="Adrian Holovaty")
.order_by("-combined_age")
)
self.assertEqual(
list(qs),
[
{
"name": "Adrian Holovaty",
"another_age": 68,
"friend_count": 1,
"combined_age": 69,
},
{
"name": "Adrian Holovaty",
"another_age": 68,
"friend_count": 1,
"combined_age": 63,
},
],
)
vals = qs.values("name", "combined_age")
self.assertEqual(
list(vals),
[
{"name": "Adrian Holovaty", "combined_age": 69},
{"name": "Adrian Holovaty", "combined_age": 63},
],
)
def test_annotate_values_aggregate(self):
alias_age = (
Author.objects.annotate(age_alias=F("age"))
.values(
"age_alias",
)
.aggregate(sum_age=Sum("age_alias"))
)
age = Author.objects.values("age").aggregate(sum_age=Sum("age"))
self.assertEqual(alias_age["sum_age"], age["sum_age"])
def test_annotate_over_annotate(self):
author = (
Author.objects.annotate(age_alias=F("age"))
.annotate(sum_age=Sum("age_alias"))
.get(name="Adrian Holovaty")
)
other_author = Author.objects.annotate(sum_age=Sum("age")).get(
name="Adrian Holovaty"
)
self.assertEqual(author.sum_age, other_author.sum_age)
def test_aggregate_over_aggregate(self):
msg = "Cannot compute Avg('age_agg'): 'age_agg' is an aggregate"
with self.assertRaisesMessage(FieldError, msg):
Author.objects.aggregate(
age_agg=Sum(F("age")),
avg_age=Avg(F("age_agg")),
)
def test_annotated_aggregate_over_annotated_aggregate(self):
with self.assertRaisesMessage(
FieldError, "Cannot compute Sum('id__max'): 'id__max' is an aggregate"
):
Book.objects.annotate(Max("id")).annotate(Sum("id__max"))
class MyMax(Max):
def as_sql(self, compiler, connection):
self.set_source_expressions(self.get_source_expressions()[0:1])
return super().as_sql(compiler, connection)
with self.assertRaisesMessage(
FieldError, "Cannot compute Max('id__max'): 'id__max' is an aggregate"
):
Book.objects.annotate(Max("id")).annotate(my_max=MyMax("id__max", "price"))
def test_multi_arg_aggregate(self):
class MyMax(Max):
output_field = DecimalField()
def as_sql(self, compiler, connection):
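                # Compile only the first source expression; the second argument
                # exists purely to make the aggregate complex enough to require
                # an alias.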
copy = self.copy()
copy.set_source_expressions(copy.get_source_expressions()[0:1])
return super(MyMax, copy).as_sql(compiler, connection)
with self.assertRaisesMessage(TypeError, "Complex aggregates require an alias"):
Book.objects.aggregate(MyMax("pages", "price"))
with self.assertRaisesMessage(
TypeError, "Complex annotations require an alias"
):
Book.objects.annotate(MyMax("pages", "price"))
Book.objects.aggregate(max_field=MyMax("pages", "price"))
def test_add_implementation(self):
class MySum(Sum):
pass
# test completely changing how the output is rendered
def lower_case_function_override(self, compiler, connection):
sql, params = compiler.compile(self.source_expressions[0])
substitutions = {
"function": self.function.lower(),
"expressions": sql,
"distinct": "",
}
substitutions.update(self.extra)
return self.template % substitutions, params
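        # Install the override through the backend-specific as_<vendor> hook.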
setattr(MySum, "as_" + connection.vendor, lower_case_function_override)
qs = Book.objects.annotate(
sums=MySum(
F("rating") + F("pages") + F("price"), output_field=IntegerField()
)
)
self.assertEqual(str(qs.query).count("sum("), 1)
b1 = qs.get(pk=self.b4.pk)
self.assertEqual(b1.sums, 383)
# test changing the dict and delegating
def lower_case_function_super(self, compiler, connection):
self.extra["function"] = self.function.lower()
return super(MySum, self).as_sql(compiler, connection)
setattr(MySum, "as_" + connection.vendor, lower_case_function_super)
qs = Book.objects.annotate(
sums=MySum(
F("rating") + F("pages") + F("price"), output_field=IntegerField()
)
)
self.assertEqual(str(qs.query).count("sum("), 1)
b1 = qs.get(pk=self.b4.pk)
self.assertEqual(b1.sums, 383)
# test overriding all parts of the template
def be_evil(self, compiler, connection):
substitutions = {"function": "MAX", "expressions": "2", "distinct": ""}
substitutions.update(self.extra)
return self.template % substitutions, ()
setattr(MySum, "as_" + connection.vendor, be_evil)
qs = Book.objects.annotate(
sums=MySum(
F("rating") + F("pages") + F("price"), output_field=IntegerField()
)
)
self.assertEqual(str(qs.query).count("MAX("), 1)
b1 = qs.get(pk=self.b4.pk)
self.assertEqual(b1.sums, 2)
def test_complex_values_aggregation(self):
max_rating = Book.objects.values("rating").aggregate(
double_max_rating=Max("rating") + Max("rating")
)
self.assertEqual(max_rating["double_max_rating"], 5 * 2)
max_books_per_rating = (
Book.objects.values("rating")
.annotate(books_per_rating=Count("id") + 5)
.aggregate(Max("books_per_rating"))
)
self.assertEqual(max_books_per_rating, {"books_per_rating__max": 3 + 5})
def test_expression_on_aggregation(self):
qs = (
Publisher.objects.annotate(
price_or_median=Greatest(
Avg("book__rating", output_field=DecimalField()), Avg("book__price")
)
)
.filter(price_or_median__gte=F("num_awards"))
.order_by("num_awards")
)
self.assertQuerySetEqual(qs, [1, 3, 7, 9], lambda v: v.num_awards)
qs2 = (
Publisher.objects.annotate(
rating_or_num_awards=Greatest(
Avg("book__rating"), F("num_awards"), output_field=FloatField()
)
)
.filter(rating_or_num_awards__gt=F("num_awards"))
.order_by("num_awards")
)
self.assertQuerySetEqual(qs2, [1, 3], lambda v: v.num_awards)
def test_arguments_must_be_expressions(self):
msg = "QuerySet.aggregate() received non-expression(s): %s."
with self.assertRaisesMessage(TypeError, msg % FloatField()):
Book.objects.aggregate(FloatField())
with self.assertRaisesMessage(TypeError, msg % True):
Book.objects.aggregate(is_book=True)
with self.assertRaisesMessage(
TypeError, msg % ", ".join([str(FloatField()), "True"])
):
Book.objects.aggregate(FloatField(), Avg("price"), is_book=True)
def test_aggregation_subquery_annotation(self):
"""Subquery annotations are excluded from the GROUP BY if they are
not explicitly grouped against."""
latest_book_pubdate_qs = (
Book.objects.filter(publisher=OuterRef("pk"))
.order_by("-pubdate")
.values("pubdate")[:1]
)
publisher_qs = Publisher.objects.annotate(
latest_book_pubdate=Subquery(latest_book_pubdate_qs),
).annotate(count=Count("book"))
with self.assertNumQueries(1) as ctx:
list(publisher_qs)
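        # Exactly two SELECTs: the outer publisher query and the pubdate
        # subquery annotation.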
self.assertEqual(ctx[0]["sql"].count("SELECT"), 2)
# The GROUP BY should not be by alias either.
self.assertEqual(ctx[0]["sql"].lower().count("latest_book_pubdate"), 1)
def test_aggregation_subquery_annotation_exists(self):
latest_book_pubdate_qs = (
Book.objects.filter(publisher=OuterRef("pk"))
.order_by("-pubdate")
.values("pubdate")[:1]
)
publisher_qs = Publisher.objects.annotate(
latest_book_pubdate=Subquery(latest_book_pubdate_qs),
count=Count("book"),
)
self.assertTrue(publisher_qs.exists())
def test_aggregation_filter_exists(self):
publishers_having_more_than_one_book_qs = (
Book.objects.values("publisher")
.annotate(cnt=Count("isbn"))
.filter(cnt__gt=1)
)
query = publishers_having_more_than_one_book_qs.query.exists()
_, _, group_by = query.get_compiler(connection=connection).pre_sql_setup()
self.assertEqual(len(group_by), 1)
def test_aggregation_exists_annotation(self):
published_books = Book.objects.filter(publisher=OuterRef("pk"))
publisher_qs = Publisher.objects.annotate(
published_book=Exists(published_books),
count=Count("book"),
).values_list("name", flat=True)
self.assertCountEqual(
list(publisher_qs),
[
"Apress",
"Morgan Kaufmann",
"Jonno's House of Books",
"Prentice Hall",
"Sams",
],
)
def test_aggregation_subquery_annotation_values(self):
"""
Subquery annotations and external aliases are excluded from the GROUP
BY if they are not selected.
"""
books_qs = (
Book.objects.annotate(
first_author_the_same_age=Subquery(
Author.objects.filter(
age=OuterRef("contact__friends__age"),
)
.order_by("age")
.values("id")[:1],
)
)
.filter(
publisher=self.p1,
first_author_the_same_age__isnull=False,
)
.annotate(
min_age=Min("contact__friends__age"),
)
.values("name", "min_age")
.order_by("name")
)
self.assertEqual(
list(books_qs),
[
{"name": "Practical Django Projects", "min_age": 34},
{
"name": (
"The Definitive Guide to Django: Web Development Done Right"
),
"min_age": 29,
},
],
)
@skipUnlessDBFeature("supports_subqueries_in_group_by")
def test_aggregation_subquery_annotation_values_collision(self):
books_rating_qs = Book.objects.filter(
pk=OuterRef("book"),
).values("rating")
publisher_qs = (
Publisher.objects.filter(
book__contact__age__gt=20,
)
.annotate(
rating=Subquery(books_rating_qs),
)
.values("rating")
.annotate(total_count=Count("*"))
.order_by("rating")
)
self.assertEqual(
list(publisher_qs),
[
{"rating": 3.0, "total_count": 1},
{"rating": 4.0, "total_count": 3},
{"rating": 4.5, "total_count": 1},
{"rating": 5.0, "total_count": 1},
],
)
@skipUnlessDBFeature("supports_subqueries_in_group_by")
def test_aggregation_subquery_annotation_multivalued(self):
"""
Subquery annotations must be included in the GROUP BY if they use
potentially multivalued relations (contain the LOOKUP_SEP).
"""
subquery_qs = Author.objects.filter(
pk=OuterRef("pk"),
book__name=OuterRef("book__name"),
).values("pk")
author_qs = Author.objects.annotate(
subquery_id=Subquery(subquery_qs),
).annotate(count=Count("book"))
self.assertEqual(author_qs.count(), Author.objects.count())
def test_aggregation_order_by_not_selected_annotation_values(self):
result_asc = [
self.b4.pk,
self.b3.pk,
self.b1.pk,
self.b2.pk,
self.b5.pk,
self.b6.pk,
]
result_desc = result_asc[::-1]
tests = [
("min_related_age", result_asc),
("-min_related_age", result_desc),
(F("min_related_age"), result_asc),
(F("min_related_age").asc(), result_asc),
(F("min_related_age").desc(), result_desc),
]
for ordering, expected_result in tests:
with self.subTest(ordering=ordering):
books_qs = (
Book.objects.annotate(
min_age=Min("authors__age"),
)
.annotate(
min_related_age=Coalesce("min_age", "contact__age"),
)
.order_by(ordering)
.values_list("pk", flat=True)
)
self.assertEqual(list(books_qs), expected_result)
@skipUnlessDBFeature("supports_subqueries_in_group_by")
def test_group_by_subquery_annotation(self):
"""
Subquery annotations are included in the GROUP BY if they are
grouped against.
"""
long_books_count_qs = (
Book.objects.filter(
publisher=OuterRef("pk"),
pages__gt=400,
)
.values("publisher")
.annotate(count=Count("pk"))
.values("count")
)
groups = [
Subquery(long_books_count_qs),
long_books_count_qs,
long_books_count_qs.query,
]
for group in groups:
with self.subTest(group=group.__class__.__name__):
long_books_count_breakdown = Publisher.objects.values_list(
group,
).annotate(total=Count("*"))
self.assertEqual(dict(long_books_count_breakdown), {None: 1, 1: 4})
@skipUnlessDBFeature("supports_subqueries_in_group_by")
def test_group_by_exists_annotation(self):
"""
Exists annotations are included in the GROUP BY if they are
grouped against.
"""
long_books_qs = Book.objects.filter(
publisher=OuterRef("pk"),
pages__gt=800,
)
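        # Only b5 (1132 pages, Prentice Hall) and b6 (946 pages, Morgan
        # Kaufmann) exceed 800 pages, so exactly two publishers group as True.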
has_long_books_breakdown = Publisher.objects.values_list(
Exists(long_books_qs),
).annotate(total=Count("*"))
self.assertEqual(dict(has_long_books_breakdown), {True: 2, False: 3})
def test_group_by_nested_expression_with_params(self):
books_qs = (
Book.objects.annotate(greatest_pages=Greatest("pages", Value(600)))
.values(
"greatest_pages",
)
.annotate(
min_pages=Min("pages"),
least=Least("min_pages", "greatest_pages"),
)
.values_list("least", flat=True)
)
self.assertCountEqual(books_qs, [300, 946, 1132])
@skipUnlessDBFeature("supports_subqueries_in_group_by")
def test_aggregation_subquery_annotation_related_field(self):
publisher = Publisher.objects.create(name=self.a9.name, num_awards=2)
book = Book.objects.create(
isbn="159059999",
name="Test book.",
pages=819,
rating=2.5,
price=Decimal("14.44"),
contact=self.a9,
publisher=publisher,
pubdate=datetime.date(2019, 12, 6),
)
book.authors.add(self.a5, self.a6, self.a7)
books_qs = (
Book.objects.annotate(
contact_publisher=Subquery(
Publisher.objects.filter(
pk=OuterRef("publisher"),
name=OuterRef("contact__name"),
).values("name")[:1],
)
)
.filter(
contact_publisher__isnull=False,
)
.annotate(count=Count("authors"))
)
with self.assertNumQueries(1) as ctx:
self.assertSequenceEqual(books_qs, [book])
if connection.features.allows_group_by_select_index:
self.assertEqual(ctx[0]["sql"].count("SELECT"), 3)
@skipUnlessDBFeature("supports_subqueries_in_group_by")
def test_aggregation_nested_subquery_outerref(self):
publisher_with_same_name = Publisher.objects.filter(
id__in=Subquery(
Publisher.objects.filter(
name=OuterRef(OuterRef("publisher__name")),
).values("id"),
),
).values(publisher_count=Count("id"))[:1]
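        # The nested OuterRef resolves two levels up, against the Book
        # queryset being annotated below.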
books_breakdown = Book.objects.annotate(
publisher_count=Subquery(publisher_with_same_name),
authors_count=Count("authors"),
).values_list("publisher_count", flat=True)
self.assertSequenceEqual(books_breakdown, [1] * 6)
def test_aggregation_exists_multivalued_outeref(self):
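        # An Exists subquery referencing the multivalued book__publisher
        # relation must not change which publishers are returned.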
self.assertCountEqual(
Publisher.objects.annotate(
books_exists=Exists(
Book.objects.filter(publisher=OuterRef("book__publisher"))
),
books_count=Count("book"),
),
Publisher.objects.all(),
)
def test_filter_in_subquery_or_aggregation(self):
"""
        Filtering against an aggregate requires the use of the HAVING clause.
        If such a filter is OR'ed with a non-aggregate one, the latter must
        also be moved to the HAVING clause and have its grouping columns
        included in the GROUP BY. When the non-aggregate filter involves a
        subquery, the specialized logic that groups by the outer reference
        columns must be used instead of the subquery itself, as the latter
        might return multiple rows.
"""
authors = Author.objects.annotate(
Count("book"),
).filter(Q(book__count__gt=0) | Q(pk__in=Book.objects.values("authors")))
self.assertCountEqual(authors, Author.objects.all())
def test_aggregation_random_ordering(self):
"""Random() is not included in the GROUP BY when used for ordering."""
authors = Author.objects.annotate(contact_count=Count("book")).order_by("?")
self.assertQuerySetEqual(
authors,
[
("Adrian Holovaty", 1),
("Jacob Kaplan-Moss", 1),
("Brad Dayley", 1),
("James Bennett", 1),
("Jeffrey Forcier", 1),
("Paul Bissex", 1),
("Wesley J. Chun", 1),
("Stuart Russell", 1),
("Peter Norvig", 2),
],
lambda a: (a.name, a.contact_count),
ordered=False,
)
def test_empty_result_optimization(self):
with self.assertNumQueries(0):
self.assertEqual(
Publisher.objects.none().aggregate(
sum_awards=Sum("num_awards"),
books_count=Count("book"),
),
{
"sum_awards": None,
"books_count": 0,
},
)
# Expression without empty_result_set_value forces queries to be
# executed even if they would return an empty result set.
raw_books_count = Func("book", function="COUNT")
raw_books_count.contains_aggregate = True
with self.assertNumQueries(1):
self.assertEqual(
Publisher.objects.none().aggregate(
sum_awards=Sum("num_awards"),
books_count=raw_books_count,
),
{
"sum_awards": None,
"books_count": 0,
},
)
def test_coalesced_empty_result_set(self):
with self.assertNumQueries(0):
self.assertEqual(
Publisher.objects.none().aggregate(
sum_awards=Coalesce(Sum("num_awards"), 0),
)["sum_awards"],
0,
)
# Multiple expressions.
with self.assertNumQueries(0):
self.assertEqual(
Publisher.objects.none().aggregate(
sum_awards=Coalesce(Sum("num_awards"), None, 0),
)["sum_awards"],
0,
)
# Nested coalesce.
with self.assertNumQueries(0):
self.assertEqual(
Publisher.objects.none().aggregate(
sum_awards=Coalesce(Coalesce(Sum("num_awards"), None), 0),
)["sum_awards"],
0,
)
# Expression coalesce.
with self.assertNumQueries(1):
self.assertIsInstance(
Store.objects.none().aggregate(
latest_opening=Coalesce(
Max("original_opening"),
RawSQL("CURRENT_TIMESTAMP", []),
),
)["latest_opening"],
datetime.datetime,
)
def test_aggregation_default_unsupported_by_count(self):
msg = "Count does not allow default."
with self.assertRaisesMessage(TypeError, msg):
Count("age", default=0)
def test_aggregation_default_unset(self):
for Aggregate in [Avg, Max, Min, StdDev, Sum, Variance]:
with self.subTest(Aggregate):
result = Author.objects.filter(age__gt=100).aggregate(
value=Aggregate("age"),
)
self.assertIsNone(result["value"])
def test_aggregation_default_zero(self):
for Aggregate in [Avg, Max, Min, StdDev, Sum, Variance]:
with self.subTest(Aggregate):
result = Author.objects.filter(age__gt=100).aggregate(
value=Aggregate("age", default=0),
)
self.assertEqual(result["value"], 0)
def test_aggregation_default_integer(self):
for Aggregate in [Avg, Max, Min, StdDev, Sum, Variance]:
with self.subTest(Aggregate):
result = Author.objects.filter(age__gt=100).aggregate(
value=Aggregate("age", default=21),
)
self.assertEqual(result["value"], 21)
def test_aggregation_default_expression(self):
for Aggregate in [Avg, Max, Min, StdDev, Sum, Variance]:
with self.subTest(Aggregate):
result = Author.objects.filter(age__gt=100).aggregate(
value=Aggregate("age", default=Value(5) * Value(7)),
)
self.assertEqual(result["value"], 35)
def test_aggregation_default_group_by(self):
qs = (
Publisher.objects.values("name")
.annotate(
books=Count("book"),
pages=Sum("book__pages", default=0),
)
.filter(books=0)
)
self.assertSequenceEqual(
qs,
[{"name": "Jonno's House of Books", "books": 0, "pages": 0}],
)
def test_aggregation_default_compound_expression(self):
# Scale rating to a percentage; default to 50% if no books published.
formula = Avg("book__rating", default=2.5) * 20.0
queryset = Publisher.objects.annotate(rating=formula).order_by("name")
self.assertSequenceEqual(
queryset.values("name", "rating"),
[
{"name": "Apress", "rating": 85.0},
{"name": "Jonno's House of Books", "rating": 50.0},
{"name": "Morgan Kaufmann", "rating": 100.0},
{"name": "Prentice Hall", "rating": 80.0},
{"name": "Sams", "rating": 60.0},
],
)
def test_aggregation_default_using_time_from_python(self):
expr = Min(
"store__friday_night_closing",
filter=~Q(store__name="Amazon.com"),
default=datetime.time(17),
)
if connection.vendor == "mysql":
# Workaround for #30224 for MySQL & MariaDB.
expr.default = Cast(expr.default, TimeField())
queryset = Book.objects.annotate(oldest_store_opening=expr).order_by("isbn")
self.assertSequenceEqual(
queryset.values("isbn", "oldest_store_opening"),
[
{"isbn": "013235613", "oldest_store_opening": datetime.time(21, 30)},
{
"isbn": "013790395",
"oldest_store_opening": datetime.time(23, 59, 59),
},
{"isbn": "067232959", "oldest_store_opening": datetime.time(17)},
{"isbn": "155860191", "oldest_store_opening": datetime.time(21, 30)},
{
"isbn": "159059725",
"oldest_store_opening": datetime.time(23, 59, 59),
},
{"isbn": "159059996", "oldest_store_opening": datetime.time(21, 30)},
],
)
def test_aggregation_default_using_time_from_database(self):
now = timezone.now().astimezone(datetime.timezone.utc)
expr = Min(
"store__friday_night_closing",
filter=~Q(store__name="Amazon.com"),
default=TruncHour(NowUTC(), output_field=TimeField()),
)
queryset = Book.objects.annotate(oldest_store_opening=expr).order_by("isbn")
self.assertSequenceEqual(
queryset.values("isbn", "oldest_store_opening"),
[
{"isbn": "013235613", "oldest_store_opening": datetime.time(21, 30)},
{
"isbn": "013790395",
"oldest_store_opening": datetime.time(23, 59, 59),
},
{"isbn": "067232959", "oldest_store_opening": datetime.time(now.hour)},
{"isbn": "155860191", "oldest_store_opening": datetime.time(21, 30)},
{
"isbn": "159059725",
"oldest_store_opening": datetime.time(23, 59, 59),
},
{"isbn": "159059996", "oldest_store_opening": datetime.time(21, 30)},
],
)
def test_aggregation_default_using_date_from_python(self):
expr = Min("book__pubdate", default=datetime.date(1970, 1, 1))
if connection.vendor == "mysql":
# Workaround for #30224 for MySQL & MariaDB.
expr.default = Cast(expr.default, DateField())
queryset = Publisher.objects.annotate(earliest_pubdate=expr).order_by("name")
self.assertSequenceEqual(
queryset.values("name", "earliest_pubdate"),
[
{"name": "Apress", "earliest_pubdate": datetime.date(2007, 12, 6)},
{
"name": "Jonno's House of Books",
"earliest_pubdate": datetime.date(1970, 1, 1),
},
{
"name": "Morgan Kaufmann",
"earliest_pubdate": datetime.date(1991, 10, 15),
},
{
"name": "Prentice Hall",
"earliest_pubdate": datetime.date(1995, 1, 15),
},
{"name": "Sams", "earliest_pubdate": datetime.date(2008, 3, 3)},
],
)
def test_aggregation_default_using_date_from_database(self):
now = timezone.now().astimezone(datetime.timezone.utc)
expr = Min("book__pubdate", default=TruncDate(NowUTC()))
queryset = Publisher.objects.annotate(earliest_pubdate=expr).order_by("name")
self.assertSequenceEqual(
queryset.values("name", "earliest_pubdate"),
[
{"name": "Apress", "earliest_pubdate": datetime.date(2007, 12, 6)},
{"name": "Jonno's House of Books", "earliest_pubdate": now.date()},
{
"name": "Morgan Kaufmann",
"earliest_pubdate": datetime.date(1991, 10, 15),
},
{
"name": "Prentice Hall",
"earliest_pubdate": datetime.date(1995, 1, 15),
},
{"name": "Sams", "earliest_pubdate": datetime.date(2008, 3, 3)},
],
)
def test_aggregation_default_using_datetime_from_python(self):
expr = Min(
"store__original_opening",
filter=~Q(store__name="Amazon.com"),
default=datetime.datetime(1970, 1, 1),
)
if connection.vendor == "mysql":
# Workaround for #30224 for MySQL & MariaDB.
expr.default = Cast(expr.default, DateTimeField())
queryset = Book.objects.annotate(oldest_store_opening=expr).order_by("isbn")
self.assertSequenceEqual(
queryset.values("isbn", "oldest_store_opening"),
[
{
"isbn": "013235613",
"oldest_store_opening": datetime.datetime(1945, 4, 25, 16, 24, 14),
},
{
"isbn": "013790395",
"oldest_store_opening": datetime.datetime(2001, 3, 15, 11, 23, 37),
},
{
"isbn": "067232959",
"oldest_store_opening": datetime.datetime(1970, 1, 1),
},
{
"isbn": "155860191",
"oldest_store_opening": datetime.datetime(1945, 4, 25, 16, 24, 14),
},
{
"isbn": "159059725",
"oldest_store_opening": datetime.datetime(2001, 3, 15, 11, 23, 37),
},
{
"isbn": "159059996",
"oldest_store_opening": datetime.datetime(1945, 4, 25, 16, 24, 14),
},
],
)
def test_aggregation_default_using_datetime_from_database(self):
now = timezone.now().astimezone(datetime.timezone.utc)
expr = Min(
"store__original_opening",
filter=~Q(store__name="Amazon.com"),
default=TruncHour(NowUTC(), output_field=DateTimeField()),
)
queryset = Book.objects.annotate(oldest_store_opening=expr).order_by("isbn")
self.assertSequenceEqual(
queryset.values("isbn", "oldest_store_opening"),
[
{
"isbn": "013235613",
"oldest_store_opening": datetime.datetime(1945, 4, 25, 16, 24, 14),
},
{
"isbn": "013790395",
"oldest_store_opening": datetime.datetime(2001, 3, 15, 11, 23, 37),
},
{
"isbn": "067232959",
"oldest_store_opening": now.replace(
minute=0, second=0, microsecond=0, tzinfo=None
),
},
{
"isbn": "155860191",
"oldest_store_opening": datetime.datetime(1945, 4, 25, 16, 24, 14),
},
{
"isbn": "159059725",
"oldest_store_opening": datetime.datetime(2001, 3, 15, 11, 23, 37),
},
{
"isbn": "159059996",
"oldest_store_opening": datetime.datetime(1945, 4, 25, 16, 24, 14),
},
],
)
def test_aggregation_default_using_duration_from_python(self):
result = Publisher.objects.filter(num_awards__gt=3).aggregate(
value=Sum("duration", default=datetime.timedelta(0)),
)
self.assertEqual(result["value"], datetime.timedelta(0))
def test_aggregation_default_using_duration_from_database(self):
result = Publisher.objects.filter(num_awards__gt=3).aggregate(
value=Sum("duration", default=Now() - Now()),
)
self.assertEqual(result["value"], datetime.timedelta(0))
def test_aggregation_default_using_decimal_from_python(self):
result = Book.objects.filter(rating__lt=3.0).aggregate(
value=Sum("price", default=Decimal("0.00")),
)
self.assertEqual(result["value"], Decimal("0.00"))
def test_aggregation_default_using_decimal_from_database(self):
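        # No book is rated below 3.0, so the database-evaluated Pi() default
        # is returned.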
result = Book.objects.filter(rating__lt=3.0).aggregate(
value=Sum("price", default=Pi()),
)
self.assertAlmostEqual(result["value"], Decimal.from_float(math.pi), places=6)
def test_aggregation_default_passed_another_aggregate(self):
result = Book.objects.aggregate(
value=Sum("price", filter=Q(rating__lt=3.0), default=Avg("pages") / 10.0),
)
self.assertAlmostEqual(result["value"], Decimal("61.72"), places=2)
def test_aggregation_default_after_annotation(self):
result = Publisher.objects.annotate(
double_num_awards=F("num_awards") * 2,
).aggregate(value=Sum("double_num_awards", default=0))
self.assertEqual(result["value"], 40)
def test_aggregation_default_not_in_aggregate(self):
result = Publisher.objects.annotate(
avg_rating=Avg("book__rating", default=2.5),
).aggregate(Sum("num_awards"))
self.assertEqual(result["num_awards__sum"], 20)
def test_exists_none_with_aggregate(self):
qs = Book.objects.annotate(
count=Count("id"),
exists=Exists(Author.objects.none()),
)
self.assertEqual(len(qs), 6)
def test_alias_sql_injection(self):
crafted_alias = """injected_name" from "aggregation_author"; --"""
msg = (
"Column aliases cannot contain whitespace characters, quotation marks, "
"semicolons, or SQL comments."
)
with self.assertRaisesMessage(ValueError, msg):
Author.objects.aggregate(**{crafted_alias: Avg("age")})
def test_exists_extra_where_with_aggregate(self):
qs = Book.objects.annotate(
count=Count("id"),
exists=Exists(Author.objects.extra(where=["1=0"])),
)
self.assertEqual(len(qs), 6)
def test_multiple_aggregate_references(self):
aggregates = Author.objects.aggregate(
total_books=Count("book"),
coalesced_total_books=Coalesce("total_books", 0),
)
self.assertEqual(
aggregates,
{
"total_books": 10,
"coalesced_total_books": 10,
},
)
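# Annotations that a terminal aggregate() or count() never references should be
# pruned from the wrapped subquery; these tests inspect the generated SQL.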
class AggregateAnnotationPruningTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.a1 = Author.objects.create(age=1)
cls.a2 = Author.objects.create(age=2)
cls.p1 = Publisher.objects.create(num_awards=1)
cls.p2 = Publisher.objects.create(num_awards=0)
cls.b1 = Book.objects.create(
name="b1",
publisher=cls.p1,
pages=100,
rating=4.5,
price=10,
contact=cls.a1,
pubdate=datetime.date.today(),
)
cls.b1.authors.add(cls.a1)
cls.b2 = Book.objects.create(
name="b2",
publisher=cls.p2,
pages=1000,
rating=3.2,
price=50,
contact=cls.a2,
pubdate=datetime.date.today(),
)
cls.b2.authors.add(cls.a1, cls.a2)
def test_unused_aliased_aggregate_pruned(self):
with CaptureQueriesContext(connection) as ctx:
cnt = Book.objects.alias(
authors_count=Count("authors"),
).count()
self.assertEqual(cnt, 2)
sql = ctx.captured_queries[0]["sql"].lower()
self.assertEqual(sql.count("select"), 2, "Subquery wrapping required")
self.assertNotIn("authors_count", sql)
def test_non_aggregate_annotation_pruned(self):
with CaptureQueriesContext(connection) as ctx:
Book.objects.annotate(
name_lower=Lower("name"),
).count()
sql = ctx.captured_queries[0]["sql"].lower()
self.assertEqual(sql.count("select"), 1, "No subquery wrapping required")
self.assertNotIn("name_lower", sql)
def test_unreferenced_aggregate_annotation_pruned(self):
with CaptureQueriesContext(connection) as ctx:
cnt = Book.objects.annotate(
authors_count=Count("authors"),
).count()
self.assertEqual(cnt, 2)
sql = ctx.captured_queries[0]["sql"].lower()
self.assertEqual(sql.count("select"), 2, "Subquery wrapping required")
self.assertNotIn("authors_count", sql)
def test_referenced_aggregate_annotation_kept(self):
with CaptureQueriesContext(connection) as ctx:
Book.objects.annotate(
authors_count=Count("authors"),
).aggregate(Avg("authors_count"))
sql = ctx.captured_queries[0]["sql"].lower()
self.assertEqual(sql.count("select"), 2, "Subquery wrapping required")
self.assertEqual(sql.count("authors_count"), 2)
def test_referenced_group_by_annotation_kept(self):
queryset = Book.objects.values(pages_mod=Mod("pages", 10)).annotate(
mod_count=Count("*")
)
self.assertEqual(queryset.count(), 1)
def test_referenced_subquery_requires_wrapping(self):
total_books_qs = (
Author.book_set.through.objects.values("author")
.filter(author=OuterRef("pk"))
.annotate(total=Count("book"))
)
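        # Per-author book totals computed on the m2m through model.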
with self.assertNumQueries(1) as ctx:
aggregate = (
Author.objects.annotate(
total_books=Subquery(total_books_qs.values("total"))
)
.values("pk", "total_books")
.aggregate(
sum_total_books=Sum("total_books"),
)
)
sql = ctx.captured_queries[0]["sql"].lower()
self.assertEqual(sql.count("select"), 3, "Subquery wrapping required")
self.assertEqual(aggregate, {"sum_total_books": 3})
|
74619d0ce7125e6cd2748aefc4d7125106ecb3946b470749fc6264ef515987be | import datetime
import os
import re
import unittest
import zoneinfo
from unittest import mock
from urllib.parse import parse_qsl, urljoin, urlparse
from django.contrib import admin
from django.contrib.admin import AdminSite, ModelAdmin
from django.contrib.admin.helpers import ACTION_CHECKBOX_NAME
from django.contrib.admin.models import ADDITION, DELETION, LogEntry
from django.contrib.admin.options import TO_FIELD_VAR
from django.contrib.admin.templatetags.admin_urls import add_preserved_filters
from django.contrib.admin.tests import AdminSeleniumTestCase
from django.contrib.admin.utils import quote
from django.contrib.admin.views.main import IS_POPUP_VAR
from django.contrib.auth import REDIRECT_FIELD_NAME, get_permission_codename
from django.contrib.auth.models import Group, Permission, User
from django.contrib.contenttypes.models import ContentType
from django.core import mail
from django.core.checks import Error
from django.core.files import temp as tempfile
from django.db import connection
from django.forms.utils import ErrorList
from django.template.response import TemplateResponse
from django.test import (
TestCase,
ignore_warnings,
modify_settings,
override_settings,
skipUnlessDBFeature,
)
from django.test.utils import override_script_prefix
from django.urls import NoReverseMatch, resolve, reverse
from django.utils import formats, translation
from django.utils.cache import get_max_age
from django.utils.deprecation import RemovedInDjango60Warning
from django.utils.encoding import iri_to_uri
from django.utils.html import escape
from django.utils.http import urlencode
from . import customadmin
from .admin import CityAdmin, site, site2
from .models import (
Actor,
AdminOrderedAdminMethod,
AdminOrderedCallable,
AdminOrderedField,
AdminOrderedModelMethod,
Album,
Answer,
Answer2,
Article,
BarAccount,
Book,
Bookmark,
Box,
Category,
Chapter,
ChapterXtra1,
ChapterXtra2,
Character,
Child,
Choice,
City,
Collector,
Color,
ComplexSortedPerson,
CoverLetter,
CustomArticle,
CyclicOne,
CyclicTwo,
DooHickey,
Employee,
EmptyModel,
Fabric,
FancyDoodad,
FieldOverridePost,
FilteredManager,
FooAccount,
FoodDelivery,
FunkyTag,
Gallery,
Grommet,
Inquisition,
Language,
Link,
MainPrepopulated,
Media,
ModelWithStringPrimaryKey,
OtherStory,
Paper,
Parent,
ParentWithDependentChildren,
ParentWithUUIDPK,
Person,
Persona,
Picture,
Pizza,
Plot,
PlotDetails,
PluggableSearchPerson,
Podcast,
Post,
PrePopulatedPost,
Promo,
Question,
ReadablePizza,
ReadOnlyPizza,
ReadOnlyRelatedField,
Recommendation,
Recommender,
RelatedPrepopulated,
RelatedWithUUIDPKModel,
Report,
Restaurant,
RowLevelChangePermissionModel,
SecretHideout,
Section,
ShortMessage,
Simple,
Song,
State,
Story,
SuperSecretHideout,
SuperVillain,
Telegram,
TitleTranslation,
Topping,
Traveler,
UnchangeableObject,
UndeletableObject,
UnorderedObject,
UserProxy,
Villain,
Vodcast,
Whatsit,
Widget,
Worker,
WorkHour,
)
ERROR_MESSAGE = "Please enter the correct username and password \
for a staff account. Note that both fields may be case-sensitive."
MULTIPART_ENCTYPE = 'enctype="multipart/form-data"'
def make_aware_datetimes(dt, iana_key):
"""Makes one aware datetime for each supported time zone provider."""
yield dt.replace(tzinfo=zoneinfo.ZoneInfo(iana_key))
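# A minimal usage sketch (not part of the original tests): the helper is a
# generator, so callers iterate even though zoneinfo is currently the only
# provider yielded, e.g.:
#
#     for dt in make_aware_datetimes(datetime.datetime(2020, 1, 1), "UTC"):
#         assert dt.tzinfo == zoneinfo.ZoneInfo("UTC")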
class AdminFieldExtractionMixin:
"""
Helper methods for extracting data from AdminForm.
"""
def get_admin_form_fields(self, response):
"""
Return a list of AdminFields for the AdminForm in the response.
"""
fields = []
for fieldset in response.context["adminform"]:
for field_line in fieldset:
fields.extend(field_line)
return fields
def get_admin_readonly_fields(self, response):
"""
Return the readonly fields for the response's AdminForm.
"""
return [f for f in self.get_admin_form_fields(response) if f.is_readonly]
def get_admin_readonly_field(self, response, field_name):
"""
Return the readonly field for the given field_name.
"""
admin_readonly_fields = self.get_admin_readonly_fields(response)
for field in admin_readonly_fields:
if field.field["name"] == field_name:
return field
@override_settings(ROOT_URLCONF="admin_views.urls", USE_I18N=True, LANGUAGE_CODE="en")
class AdminViewBasicTestCase(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
title="Article 1",
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
title="Article 2",
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
cls.color1 = Color.objects.create(value="Red", warm=True)
cls.color2 = Color.objects.create(value="Orange", warm=True)
cls.color3 = Color.objects.create(value="Blue", warm=False)
cls.color4 = Color.objects.create(value="Green", warm=False)
cls.fab1 = Fabric.objects.create(surface="x")
cls.fab2 = Fabric.objects.create(surface="y")
cls.fab3 = Fabric.objects.create(surface="plain")
cls.b1 = Book.objects.create(name="Book 1")
cls.b2 = Book.objects.create(name="Book 2")
cls.pro1 = Promo.objects.create(name="Promo 1", book=cls.b1)
cls.pro1 = Promo.objects.create(name="Promo 2", book=cls.b2)
cls.chap1 = Chapter.objects.create(
title="Chapter 1", content="[ insert contents here ]", book=cls.b1
)
cls.chap2 = Chapter.objects.create(
title="Chapter 2", content="[ insert contents here ]", book=cls.b1
)
cls.chap3 = Chapter.objects.create(
title="Chapter 1", content="[ insert contents here ]", book=cls.b2
)
cls.chap4 = Chapter.objects.create(
title="Chapter 2", content="[ insert contents here ]", book=cls.b2
)
cls.cx1 = ChapterXtra1.objects.create(chap=cls.chap1, xtra="ChapterXtra1 1")
cls.cx2 = ChapterXtra1.objects.create(chap=cls.chap3, xtra="ChapterXtra1 2")
Actor.objects.create(name="Palin", age=27)
# Post data for edit inline
cls.inline_post_data = {
"name": "Test section",
# inline data
"article_set-TOTAL_FORMS": "6",
"article_set-INITIAL_FORMS": "3",
"article_set-MAX_NUM_FORMS": "0",
"article_set-0-id": cls.a1.pk,
# There is no title in the database; provide one here or the formset will fail.
"article_set-0-title": "Norske bostaver æøå skaper problemer",
"article_set-0-content": "<p>Middle content</p>",
"article_set-0-date_0": "2008-03-18",
"article_set-0-date_1": "11:54:58",
"article_set-0-section": cls.s1.pk,
"article_set-1-id": cls.a2.pk,
"article_set-1-title": "Need a title.",
"article_set-1-content": "<p>Oldest content</p>",
"article_set-1-date_0": "2000-03-18",
"article_set-1-date_1": "11:54:58",
"article_set-2-id": cls.a3.pk,
"article_set-2-title": "Need a title.",
"article_set-2-content": "<p>Newest content</p>",
"article_set-2-date_0": "2009-03-18",
"article_set-2-date_1": "11:54:58",
"article_set-3-id": "",
"article_set-3-title": "",
"article_set-3-content": "",
"article_set-3-date_0": "",
"article_set-3-date_1": "",
"article_set-4-id": "",
"article_set-4-title": "",
"article_set-4-content": "",
"article_set-4-date_0": "",
"article_set-4-date_1": "",
"article_set-5-id": "",
"article_set-5-title": "",
"article_set-5-content": "",
"article_set-5-date_0": "",
"article_set-5-date_1": "",
}
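# A note on the management-form keys above (a sketch of what the formset
# machinery expects, values mirroring this dict):
#
#     "article_set-TOTAL_FORMS": "6",    # number of forms rendered, bound or blank
#     "article_set-INITIAL_FORMS": "3",  # forms backed by existing database rows
#     "article_set-MAX_NUM_FORMS": "0",  # rendered max; consumed by client-side JS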
def setUp(self):
self.client.force_login(self.superuser)
def assertContentBefore(self, response, text1, text2, failing_msg=None):
"""
Testing utility asserting that text1 appears before text2 in response
content.
"""
self.assertEqual(response.status_code, 200)
self.assertLess(
response.content.index(text1.encode()),
response.content.index(text2.encode()),
(failing_msg or "")
+ "\nResponse:\n"
+ response.content.decode(response.charset),
)
class AdminViewBasicTest(AdminViewBasicTestCase):
def test_trailing_slash_required(self):
"""
If you leave off the trailing slash, the app should redirect and add it.
"""
add_url = reverse("admin:admin_views_article_add")
response = self.client.get(add_url[:-1])
self.assertRedirects(response, add_url, status_code=301)
def test_basic_add_GET(self):
"""
A smoke test to ensure GET on the add_view works.
"""
response = self.client.get(reverse("admin:admin_views_section_add"))
self.assertIsInstance(response, TemplateResponse)
self.assertEqual(response.status_code, 200)
def test_add_with_GET_args(self):
response = self.client.get(
reverse("admin:admin_views_section_add"), {"name": "My Section"}
)
self.assertContains(
response,
'value="My Section"',
msg_prefix="Couldn't find an input with the right value in the response",
)
def test_basic_edit_GET(self):
"""
A smoke test to ensure GET on the change_view works.
"""
response = self.client.get(
reverse("admin:admin_views_section_change", args=(self.s1.pk,))
)
self.assertIsInstance(response, TemplateResponse)
self.assertEqual(response.status_code, 200)
def test_basic_edit_GET_string_PK(self):
"""
GET on the change_view (when passing a string as the PK argument for a
model with an integer PK field) redirects to the index page with a
message saying the object doesn't exist.
"""
response = self.client.get(
reverse("admin:admin_views_section_change", args=(quote("abc/<b>"),)),
follow=True,
)
self.assertRedirects(response, reverse("admin:index"))
self.assertEqual(
[m.message for m in response.context["messages"]],
["section with ID “abc/<b>” doesn’t exist. Perhaps it was deleted?"],
)
def test_basic_edit_GET_old_url_redirect(self):
"""
The change URL changed in Django 1.9, but the old one still redirects.
"""
response = self.client.get(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)).replace(
"change/", ""
)
)
self.assertRedirects(
response, reverse("admin:admin_views_section_change", args=(self.s1.pk,))
)
def test_basic_inheritance_GET_string_PK(self):
"""
GET on the change_view (for inherited models) redirects to the index
page with a message saying the object doesn't exist.
"""
response = self.client.get(
reverse("admin:admin_views_supervillain_change", args=("abc",)), follow=True
)
self.assertRedirects(response, reverse("admin:index"))
self.assertEqual(
[m.message for m in response.context["messages"]],
["super villain with ID “abc” doesn’t exist. Perhaps it was deleted?"],
)
def test_basic_add_POST(self):
"""
A smoke test to ensure POST on add_view works.
"""
post_data = {
"name": "Another Section",
# inline data
"article_set-TOTAL_FORMS": "3",
"article_set-INITIAL_FORMS": "0",
"article_set-MAX_NUM_FORMS": "0",
}
response = self.client.post(reverse("admin:admin_views_section_add"), post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_popup_add_POST(self):
"""HTTP response from a popup is properly escaped."""
post_data = {
IS_POPUP_VAR: "1",
"title": "title with a new\nline",
"content": "some content",
"date_0": "2010-09-10",
"date_1": "14:55:39",
}
response = self.client.post(reverse("admin:admin_views_article_add"), post_data)
self.assertContains(response, "title with a new\\nline")
def test_basic_edit_POST(self):
"""
A smoke test to ensure POST on edit_view works.
"""
url = reverse("admin:admin_views_section_change", args=(self.s1.pk,))
response = self.client.post(url, self.inline_post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_edit_save_as(self):
"""
Test "save as".
"""
post_data = self.inline_post_data.copy()
post_data.update(
{
"_saveasnew": "Save+as+new",
"article_set-1-section": "1",
"article_set-2-section": "1",
"article_set-3-section": "1",
"article_set-4-section": "1",
"article_set-5-section": "1",
}
)
response = self.client.post(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)), post_data
)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_edit_save_as_delete_inline(self):
"""
Should be able to "Save as new" while also deleting an inline.
"""
post_data = self.inline_post_data.copy()
post_data.update(
{
"_saveasnew": "Save+as+new",
"article_set-1-section": "1",
"article_set-2-section": "1",
"article_set-2-DELETE": "1",
"article_set-3-section": "1",
}
)
response = self.client.post(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)), post_data
)
self.assertEqual(response.status_code, 302)
# started with 3 articles, one was deleted.
self.assertEqual(Section.objects.latest("id").article_set.count(), 2)
def test_change_list_column_field_classes(self):
response = self.client.get(reverse("admin:admin_views_article_changelist"))
# callables display the callable name.
self.assertContains(response, "column-callable_year")
self.assertContains(response, "field-callable_year")
# lambdas display as "lambda" + index that they appear in list_display.
self.assertContains(response, "column-lambda8")
self.assertContains(response, "field-lambda8")
def test_change_list_sorting_callable(self):
"""
Ensure we can sort on a list_display field that is a callable
(column 2 is callable_year in ArticleAdmin)
"""
response = self.client.get(
reverse("admin:admin_views_article_changelist"), {"o": 2}
)
self.assertContentBefore(
response,
"Oldest content",
"Middle content",
"Results of sorting on callable are out of order.",
)
self.assertContentBefore(
response,
"Middle content",
"Newest content",
"Results of sorting on callable are out of order.",
)
def test_change_list_sorting_property(self):
"""
Sort on a list_display field that is a property (column 10 is
a property in Article model).
"""
response = self.client.get(
reverse("admin:admin_views_article_changelist"), {"o": 10}
)
self.assertContentBefore(
response,
"Oldest content",
"Middle content",
"Results of sorting on property are out of order.",
)
self.assertContentBefore(
response,
"Middle content",
"Newest content",
"Results of sorting on property are out of order.",
)
def test_change_list_sorting_callable_query_expression(self):
"""Query expressions may be used for admin_order_field."""
tests = [
("order_by_expression", 9),
("order_by_f_expression", 12),
("order_by_orderby_expression", 13),
]
for admin_order_field, index in tests:
with self.subTest(admin_order_field):
response = self.client.get(
reverse("admin:admin_views_article_changelist"),
{"o": index},
)
self.assertContentBefore(
response,
"Oldest content",
"Middle content",
"Results of sorting on callable are out of order.",
)
self.assertContentBefore(
response,
"Middle content",
"Newest content",
"Results of sorting on callable are out of order.",
)
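# A rough sketch of how a query expression is wired to a list_display
# callable via admin_order_field (hypothetical method, not the actual
# ArticleAdmin definition):
#
#     from django.db.models.functions import Upper
#
#     @admin.display(ordering=Upper("title"))
#     def order_by_expression(self, obj):
#         return obj.title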
def test_change_list_sorting_callable_query_expression_reverse(self):
tests = [
("order_by_expression", -9),
("order_by_f_expression", -12),
("order_by_orderby_expression", -13),
]
for admin_order_field, index in tests:
with self.subTest(admin_order_field):
response = self.client.get(
reverse("admin:admin_views_article_changelist"),
{"o": index},
)
self.assertContentBefore(
response,
"Middle content",
"Oldest content",
"Results of sorting on callable are out of order.",
)
self.assertContentBefore(
response,
"Newest content",
"Middle content",
"Results of sorting on callable are out of order.",
)
def test_change_list_sorting_model(self):
"""
Ensure we can sort on a list_display field that is a Model method
(column 3 is 'model_year' in ArticleAdmin)
"""
response = self.client.get(
reverse("admin:admin_views_article_changelist"), {"o": "-3"}
)
self.assertContentBefore(
response,
"Newest content",
"Middle content",
"Results of sorting on Model method are out of order.",
)
self.assertContentBefore(
response,
"Middle content",
"Oldest content",
"Results of sorting on Model method are out of order.",
)
def test_change_list_sorting_model_admin(self):
"""
Ensure we can sort on a list_display field that is a ModelAdmin method
(column 4 is 'modeladmin_year' in ArticleAdmin)
"""
response = self.client.get(
reverse("admin:admin_views_article_changelist"), {"o": "4"}
)
self.assertContentBefore(
response,
"Oldest content",
"Middle content",
"Results of sorting on ModelAdmin method are out of order.",
)
self.assertContentBefore(
response,
"Middle content",
"Newest content",
"Results of sorting on ModelAdmin method are out of order.",
)
def test_change_list_sorting_model_admin_reverse(self):
"""
Ensure we can sort on a list_display field that is a ModelAdmin
method in reverse order (i.e. admin_order_field uses the '-' prefix)
(column 6 is 'model_year_reversed' in ArticleAdmin)
"""
td = '<td class="field-model_property_year">%s</td>'
td_2000, td_2008, td_2009 = td % 2000, td % 2008, td % 2009
response = self.client.get(
reverse("admin:admin_views_article_changelist"), {"o": "6"}
)
self.assertContentBefore(
response,
td_2009,
td_2008,
"Results of sorting on ModelAdmin method are out of order.",
)
self.assertContentBefore(
response,
td_2008,
td_2000,
"Results of sorting on ModelAdmin method are out of order.",
)
# Let's make sure the ordering is right and that we don't get a
# FieldError when we change to descending order
response = self.client.get(
reverse("admin:admin_views_article_changelist"), {"o": "-6"}
)
self.assertContentBefore(
response,
td_2000,
td_2008,
"Results of sorting on ModelAdmin method are out of order.",
)
self.assertContentBefore(
response,
td_2008,
td_2009,
"Results of sorting on ModelAdmin method are out of order.",
)
def test_change_list_sorting_multiple(self):
p1 = Person.objects.create(name="Chris", gender=1, alive=True)
p2 = Person.objects.create(name="Chris", gender=2, alive=True)
p3 = Person.objects.create(name="Bob", gender=1, alive=True)
link1 = reverse("admin:admin_views_person_change", args=(p1.pk,))
link2 = reverse("admin:admin_views_person_change", args=(p2.pk,))
link3 = reverse("admin:admin_views_person_change", args=(p3.pk,))
# Sort by name, gender
response = self.client.get(
reverse("admin:admin_views_person_changelist"), {"o": "1.2"}
)
self.assertContentBefore(response, link3, link1)
self.assertContentBefore(response, link1, link2)
# Sort by gender descending, name
response = self.client.get(
reverse("admin:admin_views_person_changelist"), {"o": "-2.1"}
)
self.assertContentBefore(response, link2, link3)
self.assertContentBefore(response, link3, link1)
def test_change_list_sorting_preserve_queryset_ordering(self):
"""
If no ordering is defined in `ModelAdmin.ordering` or in the query
string, then the underlying order of the queryset should not be
changed, even if it is defined in `ModelAdmin.get_queryset()`.
Refs #11868, #7309.
"""
p1 = Person.objects.create(name="Amy", gender=1, alive=True, age=80)
p2 = Person.objects.create(name="Bob", gender=1, alive=True, age=70)
p3 = Person.objects.create(name="Chris", gender=2, alive=False, age=60)
link1 = reverse("admin:admin_views_person_change", args=(p1.pk,))
link2 = reverse("admin:admin_views_person_change", args=(p2.pk,))
link3 = reverse("admin:admin_views_person_change", args=(p3.pk,))
response = self.client.get(reverse("admin:admin_views_person_changelist"), {})
self.assertContentBefore(response, link3, link2)
self.assertContentBefore(response, link2, link1)
def test_change_list_sorting_model_meta(self):
# Test ordering on Model Meta is respected
l1 = Language.objects.create(iso="ur", name="Urdu")
l2 = Language.objects.create(iso="ar", name="Arabic")
link1 = reverse("admin:admin_views_language_change", args=(quote(l1.pk),))
link2 = reverse("admin:admin_views_language_change", args=(quote(l2.pk),))
response = self.client.get(reverse("admin:admin_views_language_changelist"), {})
self.assertContentBefore(response, link2, link1)
# Test we can override with query string
response = self.client.get(
reverse("admin:admin_views_language_changelist"), {"o": "-1"}
)
self.assertContentBefore(response, link1, link2)
def test_change_list_sorting_override_model_admin(self):
# Test ordering on Model Admin is respected, and overrides Model Meta
dt = datetime.datetime.now()
p1 = Podcast.objects.create(name="A", release_date=dt)
p2 = Podcast.objects.create(name="B", release_date=dt - datetime.timedelta(10))
link1 = reverse("admin:admin_views_podcast_change", args=(p1.pk,))
link2 = reverse("admin:admin_views_podcast_change", args=(p2.pk,))
response = self.client.get(reverse("admin:admin_views_podcast_changelist"), {})
self.assertContentBefore(response, link1, link2)
def test_multiple_sort_same_field(self):
# The changelist displays the correct columns if two columns correspond
# to the same ordering field.
dt = datetime.datetime.now()
p1 = Podcast.objects.create(name="A", release_date=dt)
p2 = Podcast.objects.create(name="B", release_date=dt - datetime.timedelta(10))
link1 = reverse("admin:admin_views_podcast_change", args=(quote(p1.pk),))
link2 = reverse("admin:admin_views_podcast_change", args=(quote(p2.pk),))
response = self.client.get(reverse("admin:admin_views_podcast_changelist"), {})
self.assertContentBefore(response, link1, link2)
p1 = ComplexSortedPerson.objects.create(name="Bob", age=10)
p2 = ComplexSortedPerson.objects.create(name="Amy", age=20)
link1 = reverse("admin:admin_views_complexsortedperson_change", args=(p1.pk,))
link2 = reverse("admin:admin_views_complexsortedperson_change", args=(p2.pk,))
response = self.client.get(
reverse("admin:admin_views_complexsortedperson_changelist"), {}
)
# Should have 5 columns (including action checkbox col)
self.assertContains(response, '<th scope="col"', count=5)
self.assertContains(response, "Name")
self.assertContains(response, "Colored name")
# Check order
self.assertContentBefore(response, "Name", "Colored name")
# Check sorting - should be by name
self.assertContentBefore(response, link2, link1)
def test_sort_indicators_admin_order(self):
"""
The admin shows default sort indicators for all kinds of 'ordering'
fields: field names, method on the model admin and model itself, and
other callables. See #17252.
"""
models = [
(AdminOrderedField, "adminorderedfield"),
(AdminOrderedModelMethod, "adminorderedmodelmethod"),
(AdminOrderedAdminMethod, "adminorderedadminmethod"),
(AdminOrderedCallable, "adminorderedcallable"),
]
for model, url in models:
model.objects.create(stuff="The Last Item", order=3)
model.objects.create(stuff="The First Item", order=1)
model.objects.create(stuff="The Middle Item", order=2)
response = self.client.get(
reverse("admin:admin_views_%s_changelist" % url), {}
)
# Should have 3 columns including action checkbox col.
self.assertContains(response, '<th scope="col"', count=3, msg_prefix=url)
# Check if the correct column was selected. 2 is the index of the
# 'order' column in the model admin's 'list_display' with 0 being
# the implicit 'action_checkbox' and 1 being the column 'stuff'.
self.assertEqual(
response.context["cl"].get_ordering_field_columns(), {2: "asc"}
)
# Check order of records.
self.assertContentBefore(response, "The First Item", "The Middle Item")
self.assertContentBefore(response, "The Middle Item", "The Last Item")
def test_has_related_field_in_list_display_fk(self):
"""Joins shouldn't be performed for <FK>_id fields in list display."""
state = State.objects.create(name="Karnataka")
City.objects.create(state=state, name="Bangalore")
response = self.client.get(reverse("admin:admin_views_city_changelist"), {})
response.context["cl"].list_display = ["id", "name", "state"]
self.assertIs(response.context["cl"].has_related_field_in_list_display(), True)
response.context["cl"].list_display = ["id", "name", "state_id"]
self.assertIs(response.context["cl"].has_related_field_in_list_display(), False)
def test_has_related_field_in_list_display_o2o(self):
"""Joins shouldn't be performed for <O2O>_id fields in list display."""
media = Media.objects.create(name="Foo")
Vodcast.objects.create(media=media)
response = self.client.get(reverse("admin:admin_views_vodcast_changelist"), {})
response.context["cl"].list_display = ["media"]
self.assertIs(response.context["cl"].has_related_field_in_list_display(), True)
response.context["cl"].list_display = ["media_id"]
self.assertIs(response.context["cl"].has_related_field_in_list_display(), False)
def test_limited_filter(self):
"""
Admin changelist filters do not contain objects excluded via
limit_choices_to.
"""
response = self.client.get(reverse("admin:admin_views_thing_changelist"))
self.assertContains(
response,
'<div id="changelist-filter">',
msg_prefix="Expected filter not found in changelist view",
)
self.assertNotContains(
response,
'<a href="?color__id__exact=3">Blue</a>',
msg_prefix="Changelist filter not correctly limited by limit_choices_to",
)
def test_change_list_facet_toggle(self):
# Toggle is visible when show_facet is the default of
# admin.ShowFacets.ALLOW.
admin_url = reverse("admin:admin_views_album_changelist")
response = self.client.get(admin_url)
self.assertContains(
response,
'<a href="?_facets=True" class="viewlink">Show counts</a>',
msg_prefix="Expected facet filter toggle not found in changelist view",
)
response = self.client.get(f"{admin_url}?_facets=True")
self.assertContains(
response,
'<a href="?" class="hidelink">Hide counts</a>',
msg_prefix="Expected facet filter toggle not found in changelist view",
)
# Toggle is not visible when show_facet is admin.ShowFacets.ALWAYS.
response = self.client.get(reverse("admin:admin_views_workhour_changelist"))
self.assertNotContains(
response,
"Show counts",
msg_prefix="Expected not to find facet filter toggle in changelist view",
)
self.assertNotContains(
response,
"Hide counts",
msg_prefix="Expected not to find facet filter toggle in changelist view",
)
# Toggle is not visible when show_facet is admin.ShowFacets.NEVER.
response = self.client.get(reverse("admin:admin_views_fooddelivery_changelist"))
self.assertNotContains(
response,
"Show counts",
msg_prefix="Expected not to find facet filter toggle in changelist view",
)
self.assertNotContains(
response,
"Hide counts",
msg_prefix="Expected not to find facet filter toggle in changelist view",
)
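# Facet visibility is a per-ModelAdmin setting; a minimal sketch mirroring
# the three cases above (hypothetical admins):
#
#     class AllowFacetsAdmin(admin.ModelAdmin):
#         show_facets = admin.ShowFacets.ALLOW   # the default; renders the toggle
#
#     class AlwaysFacetsAdmin(admin.ModelAdmin):
#         show_facets = admin.ShowFacets.ALWAYS  # counts always on, no toggle
#
#     class NeverFacetsAdmin(admin.ModelAdmin):
#         show_facets = admin.ShowFacets.NEVER   # counts never shown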
def test_relation_spanning_filters(self):
changelist_url = reverse("admin:admin_views_chapterxtra1_changelist")
response = self.client.get(changelist_url)
self.assertContains(response, '<div id="changelist-filter">')
filters = {
"chap__id__exact": {
"values": [c.id for c in Chapter.objects.all()],
"test": lambda obj, value: obj.chap.id == value,
},
"chap__title": {
"values": [c.title for c in Chapter.objects.all()],
"test": lambda obj, value: obj.chap.title == value,
},
"chap__book__id__exact": {
"values": [b.id for b in Book.objects.all()],
"test": lambda obj, value: obj.chap.book.id == value,
},
"chap__book__name": {
"values": [b.name for b in Book.objects.all()],
"test": lambda obj, value: obj.chap.book.name == value,
},
"chap__book__promo__id__exact": {
"values": [p.id for p in Promo.objects.all()],
"test": lambda obj, value: obj.chap.book.promo_set.filter(
id=value
).exists(),
},
"chap__book__promo__name": {
"values": [p.name for p in Promo.objects.all()],
"test": lambda obj, value: obj.chap.book.promo_set.filter(
name=value
).exists(),
},
# A forward relation (book) after a reverse relation (promo).
"guest_author__promo__book__id__exact": {
"values": [p.id for p in Book.objects.all()],
"test": lambda obj, value: obj.guest_author.promo_set.filter(
book=value
).exists(),
},
}
for filter_path, params in filters.items():
for value in params["values"]:
query_string = urlencode({filter_path: value})
# ensure filter link exists
self.assertContains(response, '<a href="?%s"' % query_string)
# ensure link works
filtered_response = self.client.get(
"%s?%s" % (changelist_url, query_string)
)
self.assertEqual(filtered_response.status_code, 200)
# ensure changelist contains only valid objects
for obj in filtered_response.context["cl"].queryset.all():
self.assertTrue(params["test"](obj, value))
def test_incorrect_lookup_parameters(self):
"""Ensure incorrect lookup parameters are handled gracefully."""
changelist_url = reverse("admin:admin_views_thing_changelist")
response = self.client.get(changelist_url, {"notarealfield": "5"})
self.assertRedirects(response, "%s?e=1" % changelist_url)
# Spanning relationships through a nonexistent related object (Refs #16716)
response = self.client.get(changelist_url, {"notarealfield__whatever": "5"})
self.assertRedirects(response, "%s?e=1" % changelist_url)
response = self.client.get(
changelist_url, {"color__id__exact": "StringNotInteger!"}
)
self.assertRedirects(response, "%s?e=1" % changelist_url)
# Regression test for #18530
response = self.client.get(changelist_url, {"pub_date__gte": "foo"})
self.assertRedirects(response, "%s?e=1" % changelist_url)
def test_isnull_lookups(self):
"""Ensure is_null is handled correctly."""
Article.objects.create(
title="I Could Go Anywhere",
content="Versatile",
date=datetime.datetime.now(),
)
changelist_url = reverse("admin:admin_views_article_changelist")
response = self.client.get(changelist_url)
self.assertContains(response, "4 articles")
response = self.client.get(changelist_url, {"section__isnull": "false"})
self.assertContains(response, "3 articles")
response = self.client.get(changelist_url, {"section__isnull": "0"})
self.assertContains(response, "3 articles")
response = self.client.get(changelist_url, {"section__isnull": "true"})
self.assertContains(response, "1 article")
response = self.client.get(changelist_url, {"section__isnull": "1"})
self.assertContains(response, "1 article")
def test_logout_and_password_change_URLs(self):
response = self.client.get(reverse("admin:admin_views_article_changelist"))
self.assertContains(
response,
'<form id="logout-form" method="post" action="%s">'
% reverse("admin:logout"),
)
self.assertContains(
response, '<a href="%s">' % reverse("admin:password_change")
)
def test_named_group_field_choices_change_list(self):
"""
Ensures the admin changelist shows correct values in the relevant column
for rows corresponding to instances of a model in which a named group
has been used in the choices option of a field.
"""
link1 = reverse("admin:admin_views_fabric_change", args=(self.fab1.pk,))
link2 = reverse("admin:admin_views_fabric_change", args=(self.fab2.pk,))
response = self.client.get(reverse("admin:admin_views_fabric_changelist"))
fail_msg = (
"Changelist table isn't showing the right human-readable values "
"set by a model field 'choices' option named group."
)
self.assertContains(
response,
'<a href="%s">Horizontal</a>' % link1,
msg_prefix=fail_msg,
html=True,
)
self.assertContains(
response,
'<a href="%s">Vertical</a>' % link2,
msg_prefix=fail_msg,
html=True,
)
def test_named_group_field_choices_filter(self):
"""
Ensures the filter UI shows correctly when at least one named group has
been used in the choices option of a model field.
"""
response = self.client.get(reverse("admin:admin_views_fabric_changelist"))
fail_msg = (
"Changelist filter isn't showing options contained inside a model "
"field 'choices' option named group."
)
self.assertContains(response, '<div id="changelist-filter">')
self.assertContains(
response,
'<a href="?surface__exact=x">Horizontal</a>',
msg_prefix=fail_msg,
html=True,
)
self.assertContains(
response,
'<a href="?surface__exact=y">Vertical</a>',
msg_prefix=fail_msg,
html=True,
)
def test_change_list_null_boolean_display(self):
Post.objects.create(public=None)
response = self.client.get(reverse("admin:admin_views_post_changelist"))
self.assertContains(response, "icon-unknown.svg")
def test_display_decorator_with_boolean_and_empty_value(self):
msg = (
"The boolean and empty_value arguments to the @display decorator "
"are mutually exclusive."
)
with self.assertRaisesMessage(ValueError, msg):
class BookAdmin(admin.ModelAdmin):
@admin.display(boolean=True, empty_value="(Missing)")
def is_published(self, obj):
return obj.publish_date is not None
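# For contrast, a valid sketch passes the two arguments separately
# (hypothetical display methods):
#
#     @admin.display(boolean=True)
#     def is_published(self, obj):
#         return obj.publish_date is not None
#
#     @admin.display(empty_value="(Missing)")
#     def publish_date(self, obj):
#         return obj.publish_date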
def test_i18n_language_non_english_default(self):
"""
Check if the JavaScript i18n view returns an empty language catalog
if the default language is non-English but the selected language
is English. See #13388 and #3594 for more details.
"""
with self.settings(LANGUAGE_CODE="fr"), translation.override("en-us"):
response = self.client.get(reverse("admin:jsi18n"))
self.assertNotContains(response, "Choisir une heure")
def test_i18n_language_non_english_fallback(self):
"""
Makes sure that the fallback language is still working properly
in cases where the selected language cannot be found.
"""
with self.settings(LANGUAGE_CODE="fr"), translation.override("none"):
response = self.client.get(reverse("admin:jsi18n"))
self.assertContains(response, "Choisir une heure")
def test_jsi18n_with_context(self):
response = self.client.get(reverse("admin-extra-context:jsi18n"))
self.assertEqual(response.status_code, 200)
def test_jsi18n_format_fallback(self):
"""
The JavaScript i18n view doesn't return localized date/time formats
when the selected language cannot be found.
"""
with self.settings(LANGUAGE_CODE="ru"), translation.override("none"):
response = self.client.get(reverse("admin:jsi18n"))
self.assertNotContains(response, "%d.%m.%Y %H:%M:%S")
self.assertContains(response, "%Y-%m-%d %H:%M:%S")
def test_disallowed_filtering(self):
with self.assertLogs("django.security.DisallowedModelAdminLookup", "ERROR"):
response = self.client.get(
"%s?owner__email__startswith=fuzzy"
% reverse("admin:admin_views_album_changelist")
)
self.assertEqual(response.status_code, 400)
# Filters are allowed if explicitly included in list_filter
response = self.client.get(
"%s?color__value__startswith=red"
% reverse("admin:admin_views_thing_changelist")
)
self.assertEqual(response.status_code, 200)
response = self.client.get(
"%s?color__value=red" % reverse("admin:admin_views_thing_changelist")
)
self.assertEqual(response.status_code, 200)
# Filters should be allowed if they involve a local field without the
# need to allow them in list_filter or date_hierarchy.
response = self.client.get(
"%s?age__gt=30" % reverse("admin:admin_views_person_changelist")
)
self.assertEqual(response.status_code, 200)
e1 = Employee.objects.create(
name="Anonymous", gender=1, age=22, alive=True, code="123"
)
e2 = Employee.objects.create(
name="Visitor", gender=2, age=19, alive=True, code="124"
)
WorkHour.objects.create(datum=datetime.datetime.now(), employee=e1)
WorkHour.objects.create(datum=datetime.datetime.now(), employee=e2)
response = self.client.get(reverse("admin:admin_views_workhour_changelist"))
self.assertContains(response, "employee__person_ptr__exact")
response = self.client.get(
"%s?employee__person_ptr__exact=%d"
% (reverse("admin:admin_views_workhour_changelist"), e1.pk)
)
self.assertEqual(response.status_code, 200)
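# Sketch of the rule exercised above: relation-spanning lookups must be
# declared on the ModelAdmin (hypothetical admin):
#
#     class ExampleThingAdmin(admin.ModelAdmin):
#         list_filter = ["color__value"]  # permits ?color__value=red
#
# Local-field lookups such as ?age__gt=30 need no declaration.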
def test_disallowed_to_field(self):
url = reverse("admin:admin_views_section_changelist")
with self.assertLogs("django.security.DisallowedModelAdminToField", "ERROR"):
response = self.client.get(url, {TO_FIELD_VAR: "missing_field"})
self.assertEqual(response.status_code, 400)
# Specifying a field that is not referred by any other model registered
# to this admin site should raise an exception.
with self.assertLogs("django.security.DisallowedModelAdminToField", "ERROR"):
response = self.client.get(
reverse("admin:admin_views_section_changelist"), {TO_FIELD_VAR: "name"}
)
self.assertEqual(response.status_code, 400)
# Primary key should always be allowed, even if the referenced model
# isn't registered.
response = self.client.get(
reverse("admin:admin_views_notreferenced_changelist"), {TO_FIELD_VAR: "id"}
)
self.assertEqual(response.status_code, 200)
# Specifying a field referenced by another model through an m2m should be
# allowed.
response = self.client.get(
reverse("admin:admin_views_recipe_changelist"), {TO_FIELD_VAR: "rname"}
)
self.assertEqual(response.status_code, 200)
# Specifying a field referenced through a reverse m2m relationship
# should be allowed.
response = self.client.get(
reverse("admin:admin_views_ingredient_changelist"), {TO_FIELD_VAR: "iname"}
)
self.assertEqual(response.status_code, 200)
# Specifying a field that is not referred by any other model directly
# registered to this admin site but registered through inheritance
# should be allowed.
response = self.client.get(
reverse("admin:admin_views_referencedbyparent_changelist"),
{TO_FIELD_VAR: "name"},
)
self.assertEqual(response.status_code, 200)
# Specifying a field that is only referred to by an inline of a
# registered model should be allowed.
response = self.client.get(
reverse("admin:admin_views_referencedbyinline_changelist"),
{TO_FIELD_VAR: "name"},
)
self.assertEqual(response.status_code, 200)
# #25622 - Specifying a field of a model only referred by a generic
# relation should raise DisallowedModelAdminToField.
url = reverse("admin:admin_views_referencedbygenrel_changelist")
with self.assertLogs("django.security.DisallowedModelAdminToField", "ERROR"):
response = self.client.get(url, {TO_FIELD_VAR: "object_id"})
self.assertEqual(response.status_code, 400)
# We also want to prevent the add, change, and delete views from
# leaking a disallowed field value.
with self.assertLogs("django.security.DisallowedModelAdminToField", "ERROR"):
response = self.client.post(
reverse("admin:admin_views_section_add"), {TO_FIELD_VAR: "name"}
)
self.assertEqual(response.status_code, 400)
section = Section.objects.create()
url = reverse("admin:admin_views_section_change", args=(section.pk,))
with self.assertLogs("django.security.DisallowedModelAdminToField", "ERROR"):
response = self.client.post(url, {TO_FIELD_VAR: "name"})
self.assertEqual(response.status_code, 400)
url = reverse("admin:admin_views_section_delete", args=(section.pk,))
with self.assertLogs("django.security.DisallowedModelAdminToField", "ERROR"):
response = self.client.post(url, {TO_FIELD_VAR: "name"})
self.assertEqual(response.status_code, 400)
def test_allowed_filtering_15103(self):
"""
Regression test for ticket 15103 - filtering on fields defined in a
ForeignKey 'limit_choices_to' should be allowed, otherwise raw_id_fields
can break.
"""
# Filters should be allowed if they are defined on a ForeignKey
# pointing to this model.
url = "%s?leader__name=Palin&leader__age=27" % reverse(
"admin:admin_views_inquisition_changelist"
)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_popup_dismiss_related(self):
"""
Regression test for ticket 20664 - ensure the pk is properly quoted.
"""
actor = Actor.objects.create(name="Palin", age=27)
response = self.client.get(
"%s?%s" % (reverse("admin:admin_views_actor_changelist"), IS_POPUP_VAR)
)
self.assertContains(response, 'data-popup-opener="%s"' % actor.pk)
def test_hide_change_password(self):
"""
Tests if the "change password" link in the admin is hidden if the User
does not have a usable password set.
(against 9bea85795705d015cdadc82c68b99196a8554f5c)
"""
user = User.objects.get(username="super")
user.set_unusable_password()
user.save()
self.client.force_login(user)
response = self.client.get(reverse("admin:index"))
self.assertNotContains(
response,
reverse("admin:password_change"),
msg_prefix=(
'The "change password" link should not be displayed if a user does not '
"have a usable password."
),
)
def test_change_view_with_show_delete_extra_context(self):
"""
The 'show_delete' context variable in the admin's change view controls
the display of the delete button.
"""
instance = UndeletableObject.objects.create(name="foo")
response = self.client.get(
reverse("admin:admin_views_undeletableobject_change", args=(instance.pk,))
)
self.assertNotContains(response, "deletelink")
def test_change_view_logs_m2m_field_changes(self):
"""Changes to ManyToManyFields are included in the object's history."""
pizza = ReadablePizza.objects.create(name="Cheese")
cheese = Topping.objects.create(name="cheese")
post_data = {"name": pizza.name, "toppings": [cheese.pk]}
response = self.client.post(
reverse("admin:admin_views_readablepizza_change", args=(pizza.pk,)),
post_data,
)
self.assertRedirects(
response, reverse("admin:admin_views_readablepizza_changelist")
)
pizza_ctype = ContentType.objects.get_for_model(
ReadablePizza, for_concrete_model=False
)
log = LogEntry.objects.filter(
content_type=pizza_ctype, object_id=pizza.pk
).first()
self.assertEqual(log.get_change_message(), "Changed Toppings.")
def test_allows_attributeerror_to_bubble_up(self):
"""
AttributeErrors are allowed to bubble when raised inside a change list
view. Requires a model to be created so there's something to display.
Refs: #16655, #18593, and #18747
"""
Simple.objects.create()
with self.assertRaises(AttributeError):
self.client.get(reverse("admin:admin_views_simple_changelist"))
def test_changelist_with_no_change_url(self):
"""
ModelAdmin.changelist_view shouldn't result in a NoReverseMatch if url
for change_view is removed from get_urls (#20934).
"""
o = UnchangeableObject.objects.create()
response = self.client.get(
reverse("admin:admin_views_unchangeableobject_changelist")
)
# Check the format of the shown object -- shouldn't contain a change link
self.assertContains(
response, '<th class="field-__str__">%s</th>' % o, html=True
)
def test_invalid_appindex_url(self):
"""
#21056 -- URL reversing shouldn't work for nonexistent apps.
"""
good_url = "/test_admin/admin/admin_views/"
confirm_good_url = reverse(
"admin:app_list", kwargs={"app_label": "admin_views"}
)
self.assertEqual(good_url, confirm_good_url)
with self.assertRaises(NoReverseMatch):
reverse("admin:app_list", kwargs={"app_label": "this_should_fail"})
with self.assertRaises(NoReverseMatch):
reverse("admin:app_list", args=("admin_views2",))
def test_resolve_admin_views(self):
index_match = resolve("/test_admin/admin4/")
list_match = resolve("/test_admin/admin4/auth/user/")
self.assertIs(index_match.func.admin_site, customadmin.simple_site)
self.assertIsInstance(
list_match.func.model_admin, customadmin.CustomPwdTemplateUserAdmin
)
def test_adminsite_display_site_url(self):
"""
#13749 - Admin should display link to front-end site 'View site'
"""
url = reverse("admin:index")
response = self.client.get(url)
self.assertEqual(response.context["site_url"], "/my-site-url/")
self.assertContains(response, '<a href="/my-site-url/">View site</a>')
def test_date_hierarchy_empty_queryset(self):
self.assertIs(Question.objects.exists(), False)
response = self.client.get(reverse("admin:admin_views_answer2_changelist"))
self.assertEqual(response.status_code, 200)
@override_settings(TIME_ZONE="America/Sao_Paulo", USE_TZ=True)
def test_date_hierarchy_timezone_dst(self):
# This datetime doesn't exist in this timezone due to DST.
for date in make_aware_datetimes(
datetime.datetime(2016, 10, 16, 15), "America/Sao_Paulo"
):
with self.subTest(repr(date.tzinfo)):
q = Question.objects.create(question="Why?", expires=date)
Answer2.objects.create(question=q, answer="Because.")
response = self.client.get(
reverse("admin:admin_views_answer2_changelist")
)
self.assertContains(response, "question__expires__day=16")
self.assertContains(response, "question__expires__month=10")
self.assertContains(response, "question__expires__year=2016")
@override_settings(TIME_ZONE="America/Los_Angeles", USE_TZ=True)
def test_date_hierarchy_local_date_differ_from_utc(self):
# This datetime is 2017-01-01 in UTC.
for date in make_aware_datetimes(
datetime.datetime(2016, 12, 31, 16), "America/Los_Angeles"
):
with self.subTest(repr(date.tzinfo)):
q = Question.objects.create(question="Why?", expires=date)
Answer2.objects.create(question=q, answer="Because.")
response = self.client.get(
reverse("admin:admin_views_answer2_changelist")
)
self.assertContains(response, "question__expires__day=31")
self.assertContains(response, "question__expires__month=12")
self.assertContains(response, "question__expires__year=2016")
def test_sortable_by_columns_subset(self):
expected_sortable_fields = ("date", "callable_year")
expected_not_sortable_fields = (
"content",
"model_year",
"modeladmin_year",
"model_year_reversed",
"section",
)
response = self.client.get(reverse("admin6:admin_views_article_changelist"))
for field_name in expected_sortable_fields:
self.assertContains(
response, '<th scope="col" class="sortable column-%s">' % field_name
)
for field_name in expected_not_sortable_fields:
self.assertContains(
response, '<th scope="col" class="column-%s">' % field_name
)
def test_get_sortable_by_columns_subset(self):
response = self.client.get(reverse("admin6:admin_views_actor_changelist"))
self.assertContains(response, '<th scope="col" class="sortable column-age">')
self.assertContains(response, '<th scope="col" class="column-name">')
def test_sortable_by_no_column(self):
expected_not_sortable_fields = ("title", "book")
response = self.client.get(reverse("admin6:admin_views_chapter_changelist"))
for field_name in expected_not_sortable_fields:
self.assertContains(
response, '<th scope="col" class="column-%s">' % field_name
)
self.assertNotContains(response, '<th scope="col" class="sortable column')
def test_get_sortable_by_no_column(self):
response = self.client.get(reverse("admin6:admin_views_color_changelist"))
self.assertContains(response, '<th scope="col" class="column-value">')
self.assertNotContains(response, '<th scope="col" class="sortable column')
def test_app_index_context(self):
response = self.client.get(reverse("admin:app_list", args=("admin_views",)))
self.assertContains(
response,
"<title>Admin_Views administration | Django site admin</title>",
)
self.assertEqual(response.context["title"], "Admin_Views administration")
self.assertEqual(response.context["app_label"], "admin_views")
# Models are sorted alphabetically by default.
models = [model["name"] for model in response.context["app_list"][0]["models"]]
self.assertSequenceEqual(models, sorted(models))
def test_app_index_context_reordered(self):
self.client.force_login(self.superuser)
response = self.client.get(reverse("admin2:app_list", args=("admin_views",)))
self.assertContains(
response,
"<title>Admin_Views administration | Django site admin</title>",
)
# Models are in reverse order.
models = [model["name"] for model in response.context["app_list"][0]["models"]]
self.assertSequenceEqual(models, sorted(models, reverse=True))
def test_change_view_subtitle_per_object(self):
response = self.client.get(
reverse("admin:admin_views_article_change", args=(self.a1.pk,)),
)
self.assertContains(
response,
"<title>Article 1 | Change article | Django site admin</title>",
)
self.assertContains(response, "<h1>Change article</h1>")
self.assertContains(response, "<h2>Article 1</h2>")
response = self.client.get(
reverse("admin:admin_views_article_change", args=(self.a2.pk,)),
)
self.assertContains(
response,
"<title>Article 2 | Change article | Django site admin</title>",
)
self.assertContains(response, "<h1>Change article</h1>")
self.assertContains(response, "<h2>Article 2</h2>")
def test_view_subtitle_per_object(self):
viewuser = User.objects.create_user(
username="viewuser",
password="secret",
is_staff=True,
)
viewuser.user_permissions.add(
get_perm(Article, get_permission_codename("view", Article._meta)),
)
self.client.force_login(viewuser)
response = self.client.get(
reverse("admin:admin_views_article_change", args=(self.a1.pk,)),
)
self.assertContains(
response,
"<title>Article 1 | View article | Django site admin</title>",
)
self.assertContains(response, "<h1>View article</h1>")
self.assertContains(response, "<h2>Article 1</h2>")
response = self.client.get(
reverse("admin:admin_views_article_change", args=(self.a2.pk,)),
)
self.assertContains(
response,
"<title>Article 2 | View article | Django site admin</title>",
)
self.assertContains(response, "<h1>View article</h1>")
self.assertContains(response, "<h2>Article 2</h2>")
def test_formset_kwargs_can_be_overridden(self):
response = self.client.get(reverse("admin:admin_views_city_add"))
self.assertContains(response, "overridden_name")
def test_render_views_no_subtitle(self):
tests = [
reverse("admin:index"),
reverse("admin:password_change"),
reverse("admin:app_list", args=("admin_views",)),
reverse("admin:admin_views_article_delete", args=(self.a1.pk,)),
reverse("admin:admin_views_article_history", args=(self.a1.pk,)),
]
for url in tests:
with self.subTest(url=url):
with self.assertNoLogs("django.template", "DEBUG"):
self.client.get(url)
# Login must be after logout.
with self.assertNoLogs("django.template", "DEBUG"):
self.client.post(reverse("admin:logout"))
self.client.get(reverse("admin:login"))
def test_render_delete_selected_confirmation_no_subtitle(self):
post_data = {
"action": "delete_selected",
"selected_across": "0",
"index": "0",
"_selected_action": self.a1.pk,
}
with self.assertNoLogs("django.template", "DEBUG"):
self.client.post(reverse("admin:admin_views_article_changelist"), post_data)
@override_settings(
AUTH_PASSWORD_VALIDATORS=[
{
"NAME": (
"django.contrib.auth.password_validation."
"UserAttributeSimilarityValidator"
)
},
{
"NAME": (
"django.contrib.auth.password_validation."
"NumericPasswordValidator"
)
},
]
)
def test_password_change_helptext(self):
response = self.client.get(reverse("admin:password_change"))
self.assertContains(
response, '<div class="help" id="id_new_password1_helptext">'
)
@override_settings(
AUTH_PASSWORD_VALIDATORS=[
{
"NAME": (
"django.contrib.auth.password_validation."
"UserAttributeSimilarityValidator"
)
},
{
"NAME": (
"django.contrib.auth.password_validation." "NumericPasswordValidator"
)
},
],
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
# Put this app's and the shared tests templates dirs in DIRS to
# take precedence over the admin's templates dir.
"DIRS": [
os.path.join(os.path.dirname(__file__), "templates"),
os.path.join(os.path.dirname(os.path.dirname(__file__)), "templates"),
],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
],
},
}
],
)
class AdminCustomTemplateTests(AdminViewBasicTestCase):
def test_custom_model_admin_templates(self):
# Test custom change list template with custom extra context
response = self.client.get(
reverse("admin:admin_views_customarticle_changelist")
)
self.assertContains(response, "var hello = 'Hello!';")
self.assertTemplateUsed(response, "custom_admin/change_list.html")
# Test custom add form template
response = self.client.get(reverse("admin:admin_views_customarticle_add"))
self.assertTemplateUsed(response, "custom_admin/add_form.html")
# Add an article so we can test delete, change, and history views
post = self.client.post(
reverse("admin:admin_views_customarticle_add"),
{
"content": "<p>great article</p>",
"date_0": "2008-03-18",
"date_1": "10:54:39",
},
)
self.assertRedirects(
post, reverse("admin:admin_views_customarticle_changelist")
)
self.assertEqual(CustomArticle.objects.count(), 1)
article_pk = CustomArticle.objects.all()[0].pk
# Test custom delete, change, and object history templates
# Test custom change form template
response = self.client.get(
reverse("admin:admin_views_customarticle_change", args=(article_pk,))
)
self.assertTemplateUsed(response, "custom_admin/change_form.html")
response = self.client.get(
reverse("admin:admin_views_customarticle_delete", args=(article_pk,))
)
self.assertTemplateUsed(response, "custom_admin/delete_confirmation.html")
response = self.client.post(
reverse("admin:admin_views_customarticle_changelist"),
data={
"index": 0,
"action": ["delete_selected"],
"_selected_action": ["1"],
},
)
self.assertTemplateUsed(
response, "custom_admin/delete_selected_confirmation.html"
)
response = self.client.get(
reverse("admin:admin_views_customarticle_history", args=(article_pk,))
)
self.assertTemplateUsed(response, "custom_admin/object_history.html")
# A custom popup response template may be specified by
# ModelAdmin.popup_response_template.
response = self.client.post(
reverse("admin:admin_views_customarticle_add") + "?%s=1" % IS_POPUP_VAR,
{
"content": "<p>great article</p>",
"date_0": "2008-03-18",
"date_1": "10:54:39",
IS_POPUP_VAR: "1",
},
)
self.assertEqual(response.template_name, "custom_admin/popup_response.html")
def test_extended_bodyclass_template_change_form(self):
"""
The admin/change_form.html template uses block.super in the
bodyclass block.
"""
response = self.client.get(reverse("admin:admin_views_section_add"))
self.assertContains(response, "bodyclass_consistency_check ")
def test_change_password_template(self):
user = User.objects.get(username="super")
response = self.client.get(
reverse("admin:auth_user_password_change", args=(user.id,))
)
# The auth/user/change_password.html template uses super in the
# bodyclass block.
self.assertContains(response, "bodyclass_consistency_check ")
# When a site has multiple passwords in the browser's password manager,
# a browser pop-up asks which user the new password is for. To prevent
# this, the username is added to the change password form.
self.assertContains(
response, '<input type="text" name="username" value="super" class="hidden">'
)
# help text for passwords has an id.
self.assertContains(
response,
'<div class="help" id="id_password1_helptext"><ul><li>'
"Your password can’t be too similar to your other personal information."
"</li><li>Your password can’t be entirely numeric.</li></ul></div>",
)
self.assertContains(
response,
'<div class="help" id="id_password2_helptext">'
"Enter the same password as before, for verification.</div>",
)
def test_extended_bodyclass_template_index(self):
"""
The admin/index.html template uses block.super in the bodyclass block.
"""
response = self.client.get(reverse("admin:index"))
self.assertContains(response, "bodyclass_consistency_check ")
def test_extended_bodyclass_change_list(self):
"""
The admin/change_list.html' template uses block.super
in the bodyclass block.
"""
response = self.client.get(reverse("admin:admin_views_article_changelist"))
self.assertContains(response, "bodyclass_consistency_check ")
def test_extended_bodyclass_template_login(self):
"""
The admin/login.html template uses block.super in the
bodyclass block.
"""
self.client.logout()
response = self.client.get(reverse("admin:login"))
self.assertContains(response, "bodyclass_consistency_check ")
def test_extended_bodyclass_template_delete_confirmation(self):
"""
The admin/delete_confirmation.html template uses
block.super in the bodyclass block.
"""
group = Group.objects.create(name="foogroup")
response = self.client.get(reverse("admin:auth_group_delete", args=(group.id,)))
self.assertContains(response, "bodyclass_consistency_check ")
def test_extended_bodyclass_template_delete_selected_confirmation(self):
"""
The admin/delete_selected_confirmation.html template uses
block.super in bodyclass block.
"""
group = Group.objects.create(name="foogroup")
post_data = {
"action": "delete_selected",
"selected_across": "0",
"index": "0",
"_selected_action": group.id,
}
response = self.client.post(reverse("admin:auth_group_changelist"), post_data)
self.assertEqual(response.context["site_header"], "Django administration")
self.assertContains(response, "bodyclass_consistency_check ")
def test_filter_with_custom_template(self):
"""
A custom template can be used to render an admin filter.
"""
response = self.client.get(reverse("admin:admin_views_color2_changelist"))
self.assertTemplateUsed(response, "custom_filter_template.html")
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminViewFormUrlTest(TestCase):
current_app = "admin3"
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_change_form_URL_has_correct_value(self):
"""
change_view has form_url in response.context
"""
response = self.client.get(
reverse(
"admin:admin_views_section_change",
args=(self.s1.pk,),
current_app=self.current_app,
)
)
self.assertIn(
"form_url", response.context, msg="form_url not present in response.context"
)
self.assertEqual(response.context["form_url"], "pony")
def test_initial_data_can_be_overridden(self):
"""
The behavior for setting initial form data can be overridden in the
ModelAdmin class. Usually, the initial value is set via the GET params.
"""
response = self.client.get(
reverse("admin:admin_views_restaurant_add", current_app=self.current_app),
{"name": "test_value"},
)
# this would be the usual behaviour
self.assertNotContains(response, 'value="test_value"')
# this is the overridden behaviour
self.assertContains(response, 'value="overridden_value"')
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminJavaScriptTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_js_minified_only_if_debug_is_false(self):
"""
The minified versions of the JS files are only used when DEBUG is False.
"""
with override_settings(DEBUG=False):
response = self.client.get(reverse("admin:admin_views_section_add"))
self.assertNotContains(response, "vendor/jquery/jquery.js")
self.assertContains(response, "vendor/jquery/jquery.min.js")
self.assertContains(response, "prepopulate.js")
self.assertContains(response, "actions.js")
self.assertContains(response, "collapse.js")
self.assertContains(response, "inlines.js")
with override_settings(DEBUG=True):
response = self.client.get(reverse("admin:admin_views_section_add"))
self.assertContains(response, "vendor/jquery/jquery.js")
self.assertNotContains(response, "vendor/jquery/jquery.min.js")
self.assertContains(response, "prepopulate.js")
self.assertContains(response, "actions.js")
self.assertContains(response, "collapse.js")
self.assertContains(response, "inlines.js")
@override_settings(ROOT_URLCONF="admin_views.urls")
class SaveAsTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.per1 = Person.objects.create(name="John Mauchly", gender=1, alive=True)
def setUp(self):
self.client.force_login(self.superuser)
def test_save_as_duplication(self):
"""'save as' creates a new person"""
post_data = {"_saveasnew": "", "name": "John M", "gender": 1, "age": 42}
response = self.client.post(
reverse("admin:admin_views_person_change", args=(self.per1.pk,)), post_data
)
self.assertEqual(len(Person.objects.filter(name="John M")), 1)
self.assertEqual(len(Person.objects.filter(id=self.per1.pk)), 1)
new_person = Person.objects.latest("id")
self.assertRedirects(
response, reverse("admin:admin_views_person_change", args=(new_person.pk,))
)
def test_save_as_continue_false(self):
"""
Saving a new object using "Save as new" redirects to the changelist
instead of the change view when ModelAdmin.save_as_continue=False.
"""
post_data = {"_saveasnew": "", "name": "John M", "gender": 1, "age": 42}
url = reverse(
"admin:admin_views_person_change",
args=(self.per1.pk,),
current_app=site2.name,
)
response = self.client.post(url, post_data)
self.assertEqual(len(Person.objects.filter(name="John M")), 1)
self.assertEqual(len(Person.objects.filter(id=self.per1.pk)), 1)
self.assertRedirects(
response,
reverse("admin:admin_views_person_changelist", current_app=site2.name),
)
def test_save_as_new_with_validation_errors(self):
"""
When you click "Save as new" and have a validation error,
you only see the "Save as new" button and not the other save buttons,
and that only the "Save as" button is visible.
"""
response = self.client.post(
reverse("admin:admin_views_person_change", args=(self.per1.pk,)),
{
"_saveasnew": "",
"gender": "invalid",
"_addanother": "fail",
},
)
self.assertContains(response, "Please correct the errors below.")
self.assertFalse(response.context["show_save_and_add_another"])
self.assertFalse(response.context["show_save_and_continue"])
self.assertTrue(response.context["show_save_as_new"])
def test_save_as_new_with_validation_errors_with_inlines(self):
parent = Parent.objects.create(name="Father")
child = Child.objects.create(parent=parent, name="Child")
response = self.client.post(
reverse("admin:admin_views_parent_change", args=(parent.pk,)),
{
"_saveasnew": "Save as new",
"child_set-0-parent": parent.pk,
"child_set-0-id": child.pk,
"child_set-0-name": "Child",
"child_set-INITIAL_FORMS": 1,
"child_set-MAX_NUM_FORMS": 1000,
"child_set-MIN_NUM_FORMS": 0,
"child_set-TOTAL_FORMS": 4,
"name": "_invalid",
},
)
self.assertContains(response, "Please correct the error below.")
self.assertFalse(response.context["show_save_and_add_another"])
self.assertFalse(response.context["show_save_and_continue"])
self.assertTrue(response.context["show_save_as_new"])
def test_save_as_new_with_inlines_with_validation_errors(self):
parent = Parent.objects.create(name="Father")
child = Child.objects.create(parent=parent, name="Child")
response = self.client.post(
reverse("admin:admin_views_parent_change", args=(parent.pk,)),
{
"_saveasnew": "Save as new",
"child_set-0-parent": parent.pk,
"child_set-0-id": child.pk,
"child_set-0-name": "_invalid",
"child_set-INITIAL_FORMS": 1,
"child_set-MAX_NUM_FORMS": 1000,
"child_set-MIN_NUM_FORMS": 0,
"child_set-TOTAL_FORMS": 4,
"name": "Father",
},
)
self.assertContains(response, "Please correct the error below.")
self.assertFalse(response.context["show_save_and_add_another"])
self.assertFalse(response.context["show_save_and_continue"])
self.assertTrue(response.context["show_save_as_new"])
@override_settings(ROOT_URLCONF="admin_views.urls")
class CustomModelAdminTest(AdminViewBasicTestCase):
def test_custom_admin_site_login_form(self):
self.client.logout()
response = self.client.get(reverse("admin2:index"), follow=True)
self.assertIsInstance(response, TemplateResponse)
self.assertEqual(response.status_code, 200)
login = self.client.post(
reverse("admin2:login"),
{
REDIRECT_FIELD_NAME: reverse("admin2:index"),
"username": "customform",
"password": "secret",
},
follow=True,
)
self.assertIsInstance(login, TemplateResponse)
self.assertContains(login, "custom form error")
self.assertContains(login, "path/to/media.css")
def test_custom_admin_site_login_template(self):
self.client.logout()
response = self.client.get(reverse("admin2:index"), follow=True)
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, "custom_admin/login.html")
self.assertContains(response, "Hello from a custom login template")
def test_custom_admin_site_logout_template(self):
response = self.client.post(reverse("admin2:logout"))
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, "custom_admin/logout.html")
self.assertContains(response, "Hello from a custom logout template")
def test_custom_admin_site_index_view_and_template(self):
response = self.client.get(reverse("admin2:index"))
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, "custom_admin/index.html")
self.assertContains(response, "Hello from a custom index template *bar*")
def test_custom_admin_site_app_index_view_and_template(self):
response = self.client.get(reverse("admin2:app_list", args=("admin_views",)))
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, "custom_admin/app_index.html")
self.assertContains(response, "Hello from a custom app_index template")
def test_custom_admin_site_password_change_template(self):
response = self.client.get(reverse("admin2:password_change"))
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, "custom_admin/password_change_form.html")
self.assertContains(
response, "Hello from a custom password change form template"
)
def test_custom_admin_site_password_change_with_extra_context(self):
response = self.client.get(reverse("admin2:password_change"))
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, "custom_admin/password_change_form.html")
self.assertContains(response, "eggs")
def test_custom_admin_site_password_change_done_template(self):
response = self.client.get(reverse("admin2:password_change_done"))
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, "custom_admin/password_change_done.html")
self.assertContains(
response, "Hello from a custom password change done template"
)
def test_custom_admin_site_view(self):
self.client.force_login(self.superuser)
response = self.client.get(reverse("admin2:my_view"))
self.assertEqual(response.content, b"Django is a magical pony!")
def test_pwd_change_custom_template(self):
self.client.force_login(self.superuser)
su = User.objects.get(username="super")
response = self.client.get(
reverse("admin4:auth_user_password_change", args=(su.pk,))
)
self.assertEqual(response.status_code, 200)
def get_perm(Model, codename):
"""Return the permission object, for the Model"""
ct = ContentType.objects.get_for_model(Model, for_concrete_model=False)
return Permission.objects.get(content_type=ct, codename=codename)
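# Example usage, mirroring the calls in the test classes below:
#
#     perm = get_perm(Article, get_permission_codename("view", Article._meta))
#     user.user_permissions.add(perm)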
@override_settings(
ROOT_URLCONF="admin_views.urls",
# Test with the admin's documented list of required context processors.
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
],
},
}
],
)
class AdminViewPermissionsTest(TestCase):
"""Tests for Admin Views Permissions."""
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.viewuser = User.objects.create_user(
username="viewuser", password="secret", is_staff=True
)
cls.adduser = User.objects.create_user(
username="adduser", password="secret", is_staff=True
)
cls.changeuser = User.objects.create_user(
username="changeuser", password="secret", is_staff=True
)
cls.deleteuser = User.objects.create_user(
username="deleteuser", password="secret", is_staff=True
)
cls.joepublicuser = User.objects.create_user(
username="joepublic", password="secret"
)
cls.nostaffuser = User.objects.create_user(
username="nostaff", password="secret"
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
another_section=cls.s1,
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
# Set up permissions for our users who can view, add, change, and delete.
opts = Article._meta
# User who can view Articles
cls.viewuser.user_permissions.add(
get_perm(Article, get_permission_codename("view", opts))
)
# User who can add Articles
cls.adduser.user_permissions.add(
get_perm(Article, get_permission_codename("add", opts))
)
# User who can change Articles
cls.changeuser.user_permissions.add(
get_perm(Article, get_permission_codename("change", opts))
)
cls.nostaffuser.user_permissions.add(
get_perm(Article, get_permission_codename("change", opts))
)
# User who can delete Articles
cls.deleteuser.user_permissions.add(
get_perm(Article, get_permission_codename("delete", opts))
)
cls.deleteuser.user_permissions.add(
get_perm(Section, get_permission_codename("delete", Section._meta))
)
# login POST dicts
cls.index_url = reverse("admin:index")
cls.super_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"username": "super",
"password": "secret",
}
cls.super_email_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"username": "[email protected]",
"password": "secret",
}
cls.super_email_bad_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"username": "[email protected]",
"password": "notsecret",
}
cls.adduser_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"username": "adduser",
"password": "secret",
}
cls.changeuser_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"username": "changeuser",
"password": "secret",
}
cls.deleteuser_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"username": "deleteuser",
"password": "secret",
}
cls.nostaff_login = {
REDIRECT_FIELD_NAME: reverse("has_permission_admin:index"),
"username": "nostaff",
"password": "secret",
}
cls.joepublic_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"username": "joepublic",
"password": "secret",
}
cls.viewuser_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"username": "viewuser",
"password": "secret",
}
cls.no_username_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"password": "secret",
}
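# REDIRECT_FIELD_NAME is "next", so each of the login dicts above posts,
# e.g., {"next": cls.index_url, "username": ..., "password": ...}.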
def test_login(self):
"""
Make sure only staff members can log in.
Successful posts to the login page will redirect to the original URL.
Unsuccessful attempts will continue to render the login page with
a 200 status code.
"""
login_url = "%s?next=%s" % (reverse("admin:login"), reverse("admin:index"))
# Super User
response = self.client.get(self.index_url)
self.assertRedirects(response, login_url)
login = self.client.post(login_url, self.super_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
self.client.post(reverse("admin:logout"))
# Test a login attempt that uses an email address as the username.
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.super_email_login)
self.assertContains(login, ERROR_MESSAGE)
# only correct passwords get a username hint
login = self.client.post(login_url, self.super_email_bad_login)
self.assertContains(login, ERROR_MESSAGE)
new_user = User(username="jondoe", password="secret", email="[email protected]")
new_user.save()
# Ensure a login matching multiple users' email addresses doesn't cause a 500.
login = self.client.post(login_url, self.super_email_login)
self.assertContains(login, ERROR_MESSAGE)
# View User
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.viewuser_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
self.client.post(reverse("admin:logout"))
# Add User
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.adduser_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
self.client.post(reverse("admin:logout"))
# Change User
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.changeuser_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
self.client.post(reverse("admin:logout"))
# Delete User
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.deleteuser_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
self.client.post(reverse("admin:logout"))
# A regular user should not be able to log in.
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.joepublic_login)
self.assertContains(login, ERROR_MESSAGE)
# Requests without username should not return 500 errors.
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.no_username_login)
self.assertEqual(login.status_code, 200)
self.assertFormError(
login.context["form"], "username", ["This field is required."]
)
def test_login_redirect_for_direct_get(self):
"""
Login redirect should be to the admin index page when going directly to
/admin/login/.
"""
response = self.client.get(reverse("admin:login"))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context[REDIRECT_FIELD_NAME], reverse("admin:index"))
def test_login_has_permission(self):
# A regular user should not be able to log in.
response = self.client.get(reverse("has_permission_admin:index"))
self.assertEqual(response.status_code, 302)
login = self.client.post(
reverse("has_permission_admin:login"), self.joepublic_login
)
self.assertContains(login, "permission denied")
# A user with permissions should be able to log in.
response = self.client.get(reverse("has_permission_admin:index"))
self.assertEqual(response.status_code, 302)
login = self.client.post(
reverse("has_permission_admin:login"), self.nostaff_login
)
self.assertRedirects(login, reverse("has_permission_admin:index"))
self.assertFalse(login.context)
self.client.post(reverse("has_permission_admin:logout"))
# Staff should be able to log in.
response = self.client.get(reverse("has_permission_admin:index"))
self.assertEqual(response.status_code, 302)
login = self.client.post(
reverse("has_permission_admin:login"),
{
REDIRECT_FIELD_NAME: reverse("has_permission_admin:index"),
"username": "deleteuser",
"password": "secret",
},
)
self.assertRedirects(login, reverse("has_permission_admin:index"))
self.assertFalse(login.context)
self.client.post(reverse("has_permission_admin:logout"))
def test_login_successfully_redirects_to_original_URL(self):
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
query_string = "the-answer=42"
redirect_url = "%s?%s" % (self.index_url, query_string)
new_next = {REDIRECT_FIELD_NAME: redirect_url}
post_data = self.super_login.copy()
post_data.pop(REDIRECT_FIELD_NAME)
login = self.client.post(
"%s?%s" % (reverse("admin:login"), urlencode(new_next)), post_data
)
self.assertRedirects(login, redirect_url)
def test_double_login_is_not_allowed(self):
"""Regression test for #19327"""
login_url = "%s?next=%s" % (reverse("admin:login"), reverse("admin:index"))
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
# Establish a valid admin session
login = self.client.post(login_url, self.super_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
# Logging in with non-admin user fails
login = self.client.post(login_url, self.joepublic_login)
self.assertContains(login, ERROR_MESSAGE)
# Establish a valid admin session
login = self.client.post(login_url, self.super_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
# Logging in with admin user while already logged in
login = self.client.post(login_url, self.super_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
self.client.post(reverse("admin:logout"))
def test_login_page_notice_for_non_staff_users(self):
"""
A logged-in non-staff user trying to access the admin index should be
presented with the login page and a hint indicating that the current
user doesn't have access to it.
"""
hint_template = "You are authenticated as {}"
# Anonymous user should not be shown the hint
response = self.client.get(self.index_url, follow=True)
self.assertContains(response, "login-form")
self.assertNotContains(response, hint_template.format(""), status_code=200)
# Non-staff user should be shown the hint
self.client.force_login(self.nostaffuser)
response = self.client.get(self.index_url, follow=True)
self.assertContains(response, "login-form")
self.assertContains(
response, hint_template.format(self.nostaffuser.username), status_code=200
)
def test_add_view(self):
"""Test add view restricts access and actually adds items."""
add_dict = {
"title": "Døm ikke",
"content": "<p>great article</p>",
"date_0": "2008-03-18",
"date_1": "10:54:39",
"section": self.s1.pk,
}
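# date_0/date_1 are the two halves of the admin's split date/time widget:
# the date part and the time part, respectively.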
# Change User should not have access to add articles
self.client.force_login(self.changeuser)
# Make sure the view removes the test cookie.
self.assertIs(self.client.session.test_cookie_worked(), False)
response = self.client.get(reverse("admin:admin_views_article_add"))
self.assertEqual(response.status_code, 403)
# Try POST just to make sure
post = self.client.post(reverse("admin:admin_views_article_add"), add_dict)
self.assertEqual(post.status_code, 403)
self.assertEqual(Article.objects.count(), 3)
self.client.post(reverse("admin:logout"))
# View User should not have access to add articles
self.client.force_login(self.viewuser)
response = self.client.get(reverse("admin:admin_views_article_add"))
self.assertEqual(response.status_code, 403)
# Try POST just to make sure
post = self.client.post(reverse("admin:admin_views_article_add"), add_dict)
self.assertEqual(post.status_code, 403)
self.assertEqual(Article.objects.count(), 3)
# Now give the user permission to add but not change.
self.viewuser.user_permissions.add(
get_perm(Article, get_permission_codename("add", Article._meta))
)
response = self.client.get(reverse("admin:admin_views_article_add"))
self.assertEqual(response.context["title"], "Add article")
self.assertContains(response, "<title>Add article | Django site admin</title>")
self.assertContains(
response, '<input type="submit" value="Save and view" name="_continue">'
)
post = self.client.post(
reverse("admin:admin_views_article_add"), add_dict, follow=False
)
self.assertEqual(post.status_code, 302)
self.assertEqual(Article.objects.count(), 4)
article = Article.objects.latest("pk")
response = self.client.get(
reverse("admin:admin_views_article_change", args=(article.pk,))
)
self.assertContains(
response,
'<li class="success">The article “Døm ikke” was added successfully.</li>',
)
article.delete()
self.client.post(reverse("admin:logout"))
# The add user may log in and POST to the add view, and is then redirected
# to the admin root.
self.client.force_login(self.adduser)
addpage = self.client.get(reverse("admin:admin_views_article_add"))
change_list_link = '› <a href="%s">Articles</a>' % reverse(
"admin:admin_views_article_changelist"
)
self.assertNotContains(
addpage,
change_list_link,
msg_prefix=(
"User restricted to add permission is given link to change list view "
"in breadcrumbs."
),
)
post = self.client.post(reverse("admin:admin_views_article_add"), add_dict)
self.assertRedirects(post, self.index_url)
self.assertEqual(Article.objects.count(), 4)
self.assertEqual(len(mail.outbox), 2)
self.assertEqual(mail.outbox[0].subject, "Greetings from a created object")
self.client.post(reverse("admin:logout"))
# The addition was logged correctly
addition_log = LogEntry.objects.all()[0]
new_article = Article.objects.last()
article_ct = ContentType.objects.get_for_model(Article)
self.assertEqual(addition_log.user_id, self.adduser.pk)
self.assertEqual(addition_log.content_type_id, article_ct.pk)
self.assertEqual(addition_log.object_id, str(new_article.pk))
self.assertEqual(addition_log.object_repr, "Døm ikke")
self.assertEqual(addition_log.action_flag, ADDITION)
self.assertEqual(addition_log.get_change_message(), "Added.")
# Super can add too, but is redirected to the change list view
self.client.force_login(self.superuser)
addpage = self.client.get(reverse("admin:admin_views_article_add"))
self.assertContains(
addpage,
change_list_link,
msg_prefix=(
"Unrestricted user is not given link to change list view in "
"breadcrumbs."
),
)
post = self.client.post(reverse("admin:admin_views_article_add"), add_dict)
self.assertRedirects(post, reverse("admin:admin_views_article_changelist"))
self.assertEqual(Article.objects.count(), 5)
self.client.post(reverse("admin:logout"))
# Refs #8509 - if a normal user is already logged in, it's possible to
# switch to the superuser without error.
self.client.force_login(self.joepublicuser)
# Make sure that data still persists if the user session expires.
self.client.force_login(self.superuser)
# Make sure the view removes the test cookie.
self.assertIs(self.client.session.test_cookie_worked(), False)
@mock.patch("django.contrib.admin.options.InlineModelAdmin.has_change_permission")
def test_add_view_with_view_only_inlines(self, has_change_permission):
"""User with add permission to a section but view-only for inlines."""
self.viewuser.user_permissions.add(
get_perm(Section, get_permission_codename("add", Section._meta))
)
self.client.force_login(self.viewuser)
# Valid POST creates a new section.
data = {
"name": "New obj",
"article_set-TOTAL_FORMS": 0,
"article_set-INITIAL_FORMS": 0,
}
response = self.client.post(reverse("admin:admin_views_section_add"), data)
self.assertRedirects(response, reverse("admin:index"))
self.assertEqual(Section.objects.latest("id").name, data["name"])
# InlineModelAdmin.has_change_permission()'s obj argument is always
# None during object add.
self.assertEqual(
[obj for (request, obj), _ in has_change_permission.call_args_list],
[None, None],
)
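# Each entry in mock's call_args_list unpacks as (args, kwargs); the
# comprehension above extracts the obj positional argument of each call.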
def test_change_view(self):
"""Change view should restrict access and allow users to edit items."""
change_dict = {
"title": "Ikke fordømt",
"content": "<p>edited article</p>",
"date_0": "2008-03-18",
"date_1": "10:54:39",
"section": self.s1.pk,
}
article_change_url = reverse(
"admin:admin_views_article_change", args=(self.a1.pk,)
)
article_changelist_url = reverse("admin:admin_views_article_changelist")
# The add user should not be able to view the list of articles or change any of them.
self.client.force_login(self.adduser)
response = self.client.get(article_changelist_url)
self.assertEqual(response.status_code, 403)
response = self.client.get(article_change_url)
self.assertEqual(response.status_code, 403)
post = self.client.post(article_change_url, change_dict)
self.assertEqual(post.status_code, 403)
self.client.post(reverse("admin:logout"))
# view user can view articles but not make changes.
self.client.force_login(self.viewuser)
response = self.client.get(article_changelist_url)
self.assertContains(
response,
"<title>Select article to view | Django site admin</title>",
)
self.assertContains(response, "<h1>Select article to view</h1>")
self.assertEqual(response.context["title"], "Select article to view")
response = self.client.get(article_change_url)
self.assertContains(response, "<title>View article | Django site admin</title>")
self.assertContains(response, "<h1>View article</h1>")
self.assertContains(response, "<label>Extra form field:</label>")
self.assertContains(
response,
'<a href="/test_admin/admin/admin_views/article/" class="closelink">Close'
"</a>",
)
self.assertEqual(response.context["title"], "View article")
post = self.client.post(article_change_url, change_dict)
self.assertEqual(post.status_code, 403)
self.assertEqual(
Article.objects.get(pk=self.a1.pk).content, "<p>Middle content</p>"
)
self.client.post(reverse("admin:logout"))
# change user can view all items and edit them
self.client.force_login(self.changeuser)
response = self.client.get(article_changelist_url)
self.assertEqual(response.context["title"], "Select article to change")
self.assertContains(
response,
"<title>Select article to change | Django site admin</title>",
)
self.assertContains(response, "<h1>Select article to change</h1>")
response = self.client.get(article_change_url)
self.assertEqual(response.context["title"], "Change article")
self.assertContains(
response,
"<title>Change article | Django site admin</title>",
)
self.assertContains(response, "<h1>Change article</h1>")
post = self.client.post(article_change_url, change_dict)
self.assertRedirects(post, article_changelist_url)
self.assertEqual(
Article.objects.get(pk=self.a1.pk).content, "<p>edited article</p>"
)
# one error in form should produce singular error message, multiple
# errors plural.
change_dict["title"] = ""
post = self.client.post(article_change_url, change_dict)
self.assertContains(
post,
"Please correct the error below.",
msg_prefix=(
"Singular error message not found in response to post with one error"
),
)
change_dict["content"] = ""
post = self.client.post(article_change_url, change_dict)
self.assertContains(
post,
"Please correct the errors below.",
msg_prefix=(
"Plural error message not found in response to post with multiple "
"errors"
),
)
self.client.post(reverse("admin:logout"))
# Test redirection when using row-level change permissions. Refs #11513.
r1 = RowLevelChangePermissionModel.objects.create(id=1, name="odd id")
r2 = RowLevelChangePermissionModel.objects.create(id=2, name="even id")
r3 = RowLevelChangePermissionModel.objects.create(id=3, name="odd id mult 3")
r6 = RowLevelChangePermissionModel.objects.create(id=6, name="even id mult 3")
change_url_1 = reverse(
"admin:admin_views_rowlevelchangepermissionmodel_change", args=(r1.pk,)
)
change_url_2 = reverse(
"admin:admin_views_rowlevelchangepermissionmodel_change", args=(r2.pk,)
)
change_url_3 = reverse(
"admin:admin_views_rowlevelchangepermissionmodel_change", args=(r3.pk,)
)
change_url_6 = reverse(
"admin:admin_views_rowlevelchangepermissionmodel_change", args=(r6.pk,)
)
logins = [
self.superuser,
self.viewuser,
self.adduser,
self.changeuser,
self.deleteuser,
]
for login_user in logins:
with self.subTest(login_user.username):
self.client.force_login(login_user)
response = self.client.get(change_url_1)
self.assertEqual(response.status_code, 403)
response = self.client.post(change_url_1, {"name": "changed"})
self.assertEqual(
RowLevelChangePermissionModel.objects.get(id=1).name, "odd id"
)
self.assertEqual(response.status_code, 403)
response = self.client.get(change_url_2)
self.assertEqual(response.status_code, 200)
response = self.client.post(change_url_2, {"name": "changed"})
self.assertEqual(
RowLevelChangePermissionModel.objects.get(id=2).name, "changed"
)
self.assertRedirects(response, self.index_url)
response = self.client.get(change_url_3)
self.assertEqual(response.status_code, 200)
response = self.client.post(change_url_3, {"name": "changed"})
self.assertEqual(response.status_code, 403)
self.assertEqual(
RowLevelChangePermissionModel.objects.get(id=3).name,
"odd id mult 3",
)
response = self.client.get(change_url_6)
self.assertEqual(response.status_code, 200)
response = self.client.post(change_url_6, {"name": "changed"})
self.assertEqual(
RowLevelChangePermissionModel.objects.get(id=6).name, "changed"
)
self.assertRedirects(response, self.index_url)
self.client.post(reverse("admin:logout"))
for login_user in [self.joepublicuser, self.nostaffuser]:
with self.subTest(login_user.username):
self.client.force_login(login_user)
response = self.client.get(change_url_1, follow=True)
self.assertContains(response, "login-form")
response = self.client.post(
change_url_1, {"name": "changed"}, follow=True
)
self.assertEqual(
RowLevelChangePermissionModel.objects.get(id=1).name, "odd id"
)
self.assertContains(response, "login-form")
response = self.client.get(change_url_2, follow=True)
self.assertContains(response, "login-form")
response = self.client.post(
change_url_2, {"name": "changed again"}, follow=True
)
self.assertEqual(
RowLevelChangePermissionModel.objects.get(id=2).name, "changed"
)
self.assertContains(response, "login-form")
self.client.post(reverse("admin:logout"))
def test_change_view_without_object_change_permission(self):
"""
The object should be read-only if the user has permission to view it
and change objects of that type but not to change the current object.
"""
change_url = reverse("admin9:admin_views_article_change", args=(self.a1.pk,))
self.client.force_login(self.viewuser)
response = self.client.get(change_url)
self.assertEqual(response.context["title"], "View article")
self.assertContains(response, "<title>View article | Django site admin</title>")
self.assertContains(response, "<h1>View article</h1>")
self.assertContains(
response,
'<a href="/test_admin/admin9/admin_views/article/" class="closelink">Close'
"</a>",
)
def test_change_view_save_as_new(self):
"""
'Save as new' should raise PermissionDenied for users without the 'add'
permission.
"""
change_dict_save_as_new = {
"_saveasnew": "Save as new",
"title": "Ikke fordømt",
"content": "<p>edited article</p>",
"date_0": "2008-03-18",
"date_1": "10:54:39",
"section": self.s1.pk,
}
article_change_url = reverse(
"admin:admin_views_article_change", args=(self.a1.pk,)
)
# Add user can perform "Save as new".
article_count = Article.objects.count()
self.client.force_login(self.adduser)
post = self.client.post(article_change_url, change_dict_save_as_new)
self.assertRedirects(post, self.index_url)
self.assertEqual(Article.objects.count(), article_count + 1)
self.client.logout()
# Change user cannot perform "Save as new" (no 'add' permission).
article_count = Article.objects.count()
self.client.force_login(self.changeuser)
post = self.client.post(article_change_url, change_dict_save_as_new)
self.assertEqual(post.status_code, 403)
self.assertEqual(Article.objects.count(), article_count)
# User with both add and change permissions should be redirected to the
# change page for the newly created object.
article_count = Article.objects.count()
self.client.force_login(self.superuser)
post = self.client.post(article_change_url, change_dict_save_as_new)
self.assertEqual(Article.objects.count(), article_count + 1)
new_article = Article.objects.latest("id")
self.assertRedirects(
post, reverse("admin:admin_views_article_change", args=(new_article.pk,))
)
def test_change_view_with_view_only_inlines(self):
"""
User with change permission to a section but view-only for inlines.
"""
self.viewuser.user_permissions.add(
get_perm(Section, get_permission_codename("change", Section._meta))
)
self.client.force_login(self.viewuser)
# GET shows inlines.
response = self.client.get(
reverse("admin:admin_views_section_change", args=(self.s1.pk,))
)
self.assertEqual(len(response.context["inline_admin_formsets"]), 1)
formset = response.context["inline_admin_formsets"][0]
self.assertEqual(len(formset.forms), 3)
# Valid POST changes the name.
data = {
"name": "Can edit name with view-only inlines",
"article_set-TOTAL_FORMS": 3,
"article_set-INITIAL_FORMS": 3,
}
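# The *-TOTAL_FORMS / *-INITIAL_FORMS keys are the formset management
# form; without them the inline formset wouldn't validate at all.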
response = self.client.post(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)), data
)
self.assertRedirects(response, reverse("admin:admin_views_section_changelist"))
self.assertEqual(Section.objects.get(pk=self.s1.pk).name, data["name"])
# Invalid POST reshows inlines.
del data["name"]
response = self.client.post(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)), data
)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.context["inline_admin_formsets"]), 1)
formset = response.context["inline_admin_formsets"][0]
self.assertEqual(len(formset.forms), 3)
def test_change_view_with_view_only_last_inline(self):
self.viewuser.user_permissions.add(
get_perm(Section, get_permission_codename("view", Section._meta))
)
self.client.force_login(self.viewuser)
response = self.client.get(
reverse("admin:admin_views_section_change", args=(self.s1.pk,))
)
self.assertEqual(len(response.context["inline_admin_formsets"]), 1)
formset = response.context["inline_admin_formsets"][0]
self.assertEqual(len(formset.forms), 3)
# The last inline is not marked as empty.
self.assertContains(response, 'id="article_set-2"')
def test_change_view_with_view_and_add_inlines(self):
"""User has view and add permissions on the inline model."""
self.viewuser.user_permissions.add(
get_perm(Section, get_permission_codename("change", Section._meta))
)
self.viewuser.user_permissions.add(
get_perm(Article, get_permission_codename("add", Article._meta))
)
self.client.force_login(self.viewuser)
# GET shows inlines.
response = self.client.get(
reverse("admin:admin_views_section_change", args=(self.s1.pk,))
)
self.assertEqual(len(response.context["inline_admin_formsets"]), 1)
formset = response.context["inline_admin_formsets"][0]
self.assertEqual(len(formset.forms), 6)
# Valid POST creates a new article.
data = {
"name": "Can edit name with view-only inlines",
"article_set-TOTAL_FORMS": 6,
"article_set-INITIAL_FORMS": 3,
"article_set-3-id": [""],
"article_set-3-title": ["A title"],
"article_set-3-content": ["Added content"],
"article_set-3-date_0": ["2008-3-18"],
"article_set-3-date_1": ["11:54:58"],
"article_set-3-section": [str(self.s1.pk)],
}
response = self.client.post(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)), data
)
self.assertRedirects(response, reverse("admin:admin_views_section_changelist"))
self.assertEqual(Section.objects.get(pk=self.s1.pk).name, data["name"])
self.assertEqual(Article.objects.count(), 4)
# Invalid POST reshows inlines.
del data["name"]
response = self.client.post(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)), data
)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.context["inline_admin_formsets"]), 1)
formset = response.context["inline_admin_formsets"][0]
self.assertEqual(len(formset.forms), 6)
def test_change_view_with_view_and_delete_inlines(self):
"""User has view and delete permissions on the inline model."""
self.viewuser.user_permissions.add(
get_perm(Section, get_permission_codename("change", Section._meta))
)
self.client.force_login(self.viewuser)
data = {
"name": "Name is required.",
"article_set-TOTAL_FORMS": 6,
"article_set-INITIAL_FORMS": 3,
"article_set-0-id": [str(self.a1.pk)],
"article_set-0-DELETE": ["on"],
}
# Inline POST details are ignored without delete permission.
response = self.client.post(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)), data
)
self.assertRedirects(response, reverse("admin:admin_views_section_changelist"))
self.assertEqual(Article.objects.count(), 3)
# Deletion successful when delete permission is added.
self.viewuser.user_permissions.add(
get_perm(Article, get_permission_codename("delete", Article._meta))
)
data = {
"name": "Name is required.",
"article_set-TOTAL_FORMS": 6,
"article_set-INITIAL_FORMS": 3,
"article_set-0-id": [str(self.a1.pk)],
"article_set-0-DELETE": ["on"],
}
response = self.client.post(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)), data
)
self.assertRedirects(response, reverse("admin:admin_views_section_changelist"))
self.assertEqual(Article.objects.count(), 2)
def test_delete_view(self):
"""Delete view should restrict access and actually delete items."""
delete_dict = {"post": "yes"}
delete_url = reverse("admin:admin_views_article_delete", args=(self.a1.pk,))
# The add user should not be able to delete articles.
self.client.force_login(self.adduser)
response = self.client.get(delete_url)
self.assertEqual(response.status_code, 403)
post = self.client.post(delete_url, delete_dict)
self.assertEqual(post.status_code, 403)
self.assertEqual(Article.objects.count(), 3)
self.client.logout()
# The view user should not be able to delete articles.
self.client.force_login(self.viewuser)
response = self.client.get(delete_url)
self.assertEqual(response.status_code, 403)
post = self.client.post(delete_url, delete_dict)
self.assertEqual(post.status_code, 403)
self.assertEqual(Article.objects.count(), 3)
self.client.logout()
# Delete user can delete
self.client.force_login(self.deleteuser)
response = self.client.get(
reverse("admin:admin_views_section_delete", args=(self.s1.pk,))
)
self.assertContains(response, "<h2>Summary</h2>")
self.assertContains(response, "<li>Articles: 3</li>")
# The response should contain a link to the related Article.
self.assertContains(response, "admin_views/article/%s/" % self.a1.pk)
response = self.client.get(delete_url)
self.assertContains(response, "admin_views/article/%s/" % self.a1.pk)
self.assertContains(response, "<h2>Summary</h2>")
self.assertContains(response, "<li>Articles: 1</li>")
post = self.client.post(delete_url, delete_dict)
self.assertRedirects(post, self.index_url)
self.assertEqual(Article.objects.count(), 2)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, "Greetings from a deleted object")
article_ct = ContentType.objects.get_for_model(Article)
logged = LogEntry.objects.get(content_type=article_ct, action_flag=DELETION)
self.assertEqual(logged.object_id, str(self.a1.pk))
def test_delete_view_with_no_default_permissions(self):
"""
The delete view allows users to delete collected objects without a
'delete' permission (ReadOnlyPizza.Meta.default_permissions is empty).
"""
pizza = ReadOnlyPizza.objects.create(name="Double Cheese")
delete_url = reverse("admin:admin_views_readonlypizza_delete", args=(pizza.pk,))
self.client.force_login(self.adduser)
response = self.client.get(delete_url)
self.assertContains(response, "admin_views/readonlypizza/%s/" % pizza.pk)
self.assertContains(response, "<h2>Summary</h2>")
self.assertContains(response, "<li>Read only pizzas: 1</li>")
post = self.client.post(delete_url, {"post": "yes"})
self.assertRedirects(
post, reverse("admin:admin_views_readonlypizza_changelist")
)
self.assertEqual(ReadOnlyPizza.objects.count(), 0)
def test_delete_view_nonexistent_obj(self):
self.client.force_login(self.deleteuser)
url = reverse("admin:admin_views_article_delete", args=("nonexistent",))
response = self.client.get(url, follow=True)
self.assertRedirects(response, reverse("admin:index"))
self.assertEqual(
[m.message for m in response.context["messages"]],
["article with ID “nonexistent” doesn’t exist. Perhaps it was deleted?"],
)
def test_history_view(self):
"""History view should restrict access."""
# The add user should not be able to view the list of articles or change any of them.
self.client.force_login(self.adduser)
response = self.client.get(
reverse("admin:admin_views_article_history", args=(self.a1.pk,))
)
self.assertEqual(response.status_code, 403)
self.client.post(reverse("admin:logout"))
# view user can view all items
self.client.force_login(self.viewuser)
response = self.client.get(
reverse("admin:admin_views_article_history", args=(self.a1.pk,))
)
self.assertEqual(response.status_code, 200)
self.client.post(reverse("admin:logout"))
# change user can view all items and edit them
self.client.force_login(self.changeuser)
response = self.client.get(
reverse("admin:admin_views_article_history", args=(self.a1.pk,))
)
self.assertEqual(response.status_code, 200)
# Test redirection when using row-level change permissions. Refs #11513.
rl1 = RowLevelChangePermissionModel.objects.create(id=1, name="odd id")
rl2 = RowLevelChangePermissionModel.objects.create(id=2, name="even id")
logins = [
self.superuser,
self.viewuser,
self.adduser,
self.changeuser,
self.deleteuser,
]
for login_user in logins:
with self.subTest(login_user.username):
self.client.force_login(login_user)
url = reverse(
"admin:admin_views_rowlevelchangepermissionmodel_history",
args=(rl1.pk,),
)
response = self.client.get(url)
self.assertEqual(response.status_code, 403)
url = reverse(
"admin:admin_views_rowlevelchangepermissionmodel_history",
args=(rl2.pk,),
)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
self.client.post(reverse("admin:logout"))
for login_user in [self.joepublicuser, self.nostaffuser]:
with self.subTest(login_user.username):
self.client.force_login(login_user)
url = reverse(
"admin:admin_views_rowlevelchangepermissionmodel_history",
args=(rl1.pk,),
)
response = self.client.get(url, follow=True)
self.assertContains(response, "login-form")
url = reverse(
"admin:admin_views_rowlevelchangepermissionmodel_history",
args=(rl2.pk,),
)
response = self.client.get(url, follow=True)
self.assertContains(response, "login-form")
self.client.post(reverse("admin:logout"))
def test_history_view_bad_url(self):
self.client.force_login(self.changeuser)
response = self.client.get(
reverse("admin:admin_views_article_history", args=("foo",)), follow=True
)
self.assertRedirects(response, reverse("admin:index"))
self.assertEqual(
[m.message for m in response.context["messages"]],
["article with ID “foo” doesn’t exist. Perhaps it was deleted?"],
)
def test_conditionally_show_add_section_link(self):
"""
The foreign key widget should only show the "add related" button if the
user has permission to add that related item.
"""
self.client.force_login(self.adduser)
# The user can't add sections yet, so they shouldn't see the "add section" link.
url = reverse("admin:admin_views_article_add")
add_link_text = "add_id_section"
response = self.client.get(url)
self.assertNotContains(response, add_link_text)
# Allow the user to add sections too. Now they can see the "add section" link.
user = User.objects.get(username="adduser")
perm = get_perm(Section, get_permission_codename("add", Section._meta))
user.user_permissions.add(perm)
response = self.client.get(url)
self.assertContains(response, add_link_text)
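# The flag behind the add link is RelatedFieldWidgetWrapper.can_add_related;
# the two companion tests below read can_change_related and
# can_delete_related off the same widget wrapper.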
def test_conditionally_show_change_section_link(self):
"""
The foreign key widget should only show the "change related" button if
the user has permission to change that related item.
"""
def get_change_related(response):
return (
response.context["adminform"]
.form.fields["section"]
.widget.can_change_related
)
self.client.force_login(self.adduser)
# The user can't change sections yet, so they shouldn't see the
# "change section" link.
url = reverse("admin:admin_views_article_add")
change_link_text = "change_id_section"
response = self.client.get(url)
self.assertFalse(get_change_related(response))
self.assertNotContains(response, change_link_text)
# Allow the user to change sections too. Now they can see the
# "change section" link.
user = User.objects.get(username="adduser")
perm = get_perm(Section, get_permission_codename("change", Section._meta))
user.user_permissions.add(perm)
response = self.client.get(url)
self.assertTrue(get_change_related(response))
self.assertContains(response, change_link_text)
def test_conditionally_show_delete_section_link(self):
"""
The foreign key widget should only show the "delete related" button if
the user has permission to delete that related item.
"""
def get_delete_related(response):
return (
response.context["adminform"]
.form.fields["sub_section"]
.widget.can_delete_related
)
self.client.force_login(self.adduser)
# The user can't delete sections yet, so they shouldn't see the
# "delete section" link.
url = reverse("admin:admin_views_article_add")
delete_link_text = "delete_id_sub_section"
response = self.client.get(url)
self.assertFalse(get_delete_related(response))
self.assertNotContains(response, delete_link_text)
# Allow the user to delete sections too. Now they can see the
# "delete section" link.
user = User.objects.get(username="adduser")
perm = get_perm(Section, get_permission_codename("delete", Section._meta))
user.user_permissions.add(perm)
response = self.client.get(url)
self.assertTrue(get_delete_related(response))
self.assertContains(response, delete_link_text)
def test_disabled_permissions_when_logged_in(self):
self.client.force_login(self.superuser)
superuser = User.objects.get(username="super")
superuser.is_active = False
superuser.save()
response = self.client.get(self.index_url, follow=True)
self.assertContains(response, 'id="login-form"')
self.assertNotContains(response, "Log out")
response = self.client.get(reverse("secure_view"), follow=True)
self.assertContains(response, 'id="login-form"')
def test_disabled_staff_permissions_when_logged_in(self):
self.client.force_login(self.superuser)
superuser = User.objects.get(username="super")
superuser.is_staff = False
superuser.save()
response = self.client.get(self.index_url, follow=True)
self.assertContains(response, 'id="login-form"')
self.assertNotContains(response, "Log out")
response = self.client.get(reverse("secure_view"), follow=True)
self.assertContains(response, 'id="login-form"')
def test_app_list_permissions(self):
"""
If a user has no module perms, the app list returns a 404.
"""
opts = Article._meta
change_user = User.objects.get(username="changeuser")
permission = get_perm(Article, get_permission_codename("change", opts))
self.client.force_login(self.changeuser)
# the user has no module permissions
change_user.user_permissions.remove(permission)
response = self.client.get(reverse("admin:app_list", args=("admin_views",)))
self.assertEqual(response.status_code, 404)
# the user now has module permissions
change_user.user_permissions.add(permission)
response = self.client.get(reverse("admin:app_list", args=("admin_views",)))
self.assertEqual(response.status_code, 200)
def test_shortcut_view_only_available_to_staff(self):
"""
Only admin users should be able to use the admin shortcut view.
"""
model_ctype = ContentType.objects.get_for_model(ModelWithStringPrimaryKey)
obj = ModelWithStringPrimaryKey.objects.create(string_pk="foo")
shortcut_url = reverse("admin:view_on_site", args=(model_ctype.pk, obj.pk))
# Not logged in: we should see the login page.
response = self.client.get(shortcut_url, follow=True)
self.assertTemplateUsed(response, "admin/login.html")
# Logged in? Redirect.
self.client.force_login(self.superuser)
response = self.client.get(shortcut_url, follow=False)
# Can't use self.assertRedirects() because User.get_absolute_url() is silly.
self.assertEqual(response.status_code, 302)
# The domain may depend on whether the contrib.sites tests have also run.
self.assertRegex(response.url, "http://(testserver|example.com)/dummy/foo/")
def test_has_module_permission(self):
"""
has_module_permission() returns True for all users who have any
permission for that module (view, add, change, or delete), so that
the module is displayed on the admin index page.
"""
self.client.force_login(self.superuser)
response = self.client.get(self.index_url)
self.assertContains(response, "admin_views")
self.assertContains(response, "Articles")
self.client.logout()
self.client.force_login(self.viewuser)
response = self.client.get(self.index_url)
self.assertContains(response, "admin_views")
self.assertContains(response, "Articles")
self.client.logout()
self.client.force_login(self.adduser)
response = self.client.get(self.index_url)
self.assertContains(response, "admin_views")
self.assertContains(response, "Articles")
self.client.logout()
self.client.force_login(self.changeuser)
response = self.client.get(self.index_url)
self.assertContains(response, "admin_views")
self.assertContains(response, "Articles")
self.client.logout()
self.client.force_login(self.deleteuser)
response = self.client.get(self.index_url)
self.assertContains(response, "admin_views")
self.assertContains(response, "Articles")
def test_overriding_has_module_permission(self):
"""
If has_module_permission() always returns False, the module shouldn't
be displayed on the admin index page for any users.
"""
articles = Article._meta.verbose_name_plural.title()
sections = Section._meta.verbose_name_plural.title()
index_url = reverse("admin7:index")
self.client.force_login(self.superuser)
response = self.client.get(index_url)
self.assertContains(response, sections)
self.assertNotContains(response, articles)
self.client.logout()
self.client.force_login(self.viewuser)
response = self.client.get(index_url)
self.assertNotContains(response, "admin_views")
self.assertNotContains(response, articles)
self.client.logout()
self.client.force_login(self.adduser)
response = self.client.get(index_url)
self.assertNotContains(response, "admin_views")
self.assertNotContains(response, articles)
self.client.logout()
self.client.force_login(self.changeuser)
response = self.client.get(index_url)
self.assertNotContains(response, "admin_views")
self.assertNotContains(response, articles)
self.client.logout()
self.client.force_login(self.deleteuser)
response = self.client.get(index_url)
self.assertNotContains(response, articles)
# The app list displays Sections but not Articles as the latter has
# ModelAdmin.has_module_permission() = False.
self.client.force_login(self.superuser)
response = self.client.get(reverse("admin7:app_list", args=("admin_views",)))
self.assertContains(response, sections)
self.assertNotContains(response, articles)
def test_post_save_message_no_forbidden_links_visible(self):
"""
Post-save message shouldn't contain a link to the change form if the
user doesn't have the change permission.
"""
self.client.force_login(self.adduser)
# Emulate Article creation for user with add-only permission.
post_data = {
"title": "Fun & games",
"content": "Some content",
"date_0": "2015-10-31",
"date_1": "16:35:00",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_article_add"), post_data, follow=True
)
self.assertContains(
response,
'<li class="success">The article “Fun & games” was added successfully.'
"</li>",
html=True,
)
@override_settings(
ROOT_URLCONF="admin_views.urls",
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
],
},
}
],
)
class AdminViewProxyModelPermissionsTests(TestCase):
"""Tests for proxy models permissions in the admin."""
@classmethod
def setUpTestData(cls):
cls.viewuser = User.objects.create_user(
username="viewuser", password="secret", is_staff=True
)
cls.adduser = User.objects.create_user(
username="adduser", password="secret", is_staff=True
)
cls.changeuser = User.objects.create_user(
username="changeuser", password="secret", is_staff=True
)
cls.deleteuser = User.objects.create_user(
username="deleteuser", password="secret", is_staff=True
)
# Setup permissions.
opts = UserProxy._meta
cls.viewuser.user_permissions.add(
get_perm(UserProxy, get_permission_codename("view", opts))
)
cls.adduser.user_permissions.add(
get_perm(UserProxy, get_permission_codename("add", opts))
)
cls.changeuser.user_permissions.add(
get_perm(UserProxy, get_permission_codename("change", opts))
)
cls.deleteuser.user_permissions.add(
get_perm(UserProxy, get_permission_codename("delete", opts))
)
# UserProxy instances.
cls.user_proxy = UserProxy.objects.create(
username="user_proxy", password="secret"
)
def test_add(self):
self.client.force_login(self.adduser)
url = reverse("admin:admin_views_userproxy_add")
data = {
"username": "can_add",
"password": "secret",
"date_joined_0": "2019-01-15",
"date_joined_1": "16:59:10",
}
response = self.client.post(url, data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(UserProxy.objects.filter(username="can_add").exists())
def test_view(self):
self.client.force_login(self.viewuser)
response = self.client.get(reverse("admin:admin_views_userproxy_changelist"))
self.assertContains(response, "<h1>Select user proxy to view</h1>")
response = self.client.get(
reverse("admin:admin_views_userproxy_change", args=(self.user_proxy.pk,))
)
self.assertContains(response, "<h1>View user proxy</h1>")
self.assertContains(response, '<div class="readonly">user_proxy</div>')
def test_change(self):
self.client.force_login(self.changeuser)
data = {
"password": self.user_proxy.password,
"username": self.user_proxy.username,
"date_joined_0": self.user_proxy.date_joined.strftime("%Y-%m-%d"),
"date_joined_1": self.user_proxy.date_joined.strftime("%H:%M:%S"),
"first_name": "first_name",
}
url = reverse("admin:admin_views_userproxy_change", args=(self.user_proxy.pk,))
response = self.client.post(url, data)
self.assertRedirects(
response, reverse("admin:admin_views_userproxy_changelist")
)
self.assertEqual(
UserProxy.objects.get(pk=self.user_proxy.pk).first_name, "first_name"
)
def test_delete(self):
self.client.force_login(self.deleteuser)
url = reverse("admin:admin_views_userproxy_delete", args=(self.user_proxy.pk,))
response = self.client.post(url, {"post": "yes"}, follow=True)
self.assertEqual(response.status_code, 200)
self.assertFalse(UserProxy.objects.filter(pk=self.user_proxy.pk).exists())
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminViewsNoUrlTest(TestCase):
"""Regression test for #17333"""
@classmethod
def setUpTestData(cls):
# User who can change Reports
cls.changeuser = User.objects.create_user(
username="changeuser", password="secret", is_staff=True
)
cls.changeuser.user_permissions.add(
get_perm(Report, get_permission_codename("change", Report._meta))
)
def test_no_standard_modeladmin_urls(self):
"""Admin index views don't break when user's ModelAdmin removes standard urls"""
self.client.force_login(self.changeuser)
r = self.client.get(reverse("admin:index"))
# we shouldn't get a 500 error caused by a NoReverseMatch
self.assertEqual(r.status_code, 200)
self.client.post(reverse("admin:logout"))
@skipUnlessDBFeature("can_defer_constraint_checks")
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminViewDeletedObjectsTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.deleteuser = User.objects.create_user(
username="deleteuser", password="secret", is_staff=True
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
cls.v1 = Villain.objects.create(name="Adam")
cls.v2 = Villain.objects.create(name="Sue")
cls.sv1 = SuperVillain.objects.create(name="Bob")
cls.pl1 = Plot.objects.create(
name="World Domination", team_leader=cls.v1, contact=cls.v2
)
cls.pl2 = Plot.objects.create(
name="World Peace", team_leader=cls.v2, contact=cls.v2
)
cls.pl3 = Plot.objects.create(
name="Corn Conspiracy", team_leader=cls.v1, contact=cls.v1
)
cls.pd1 = PlotDetails.objects.create(details="almost finished", plot=cls.pl1)
cls.sh1 = SecretHideout.objects.create(
location="underground bunker", villain=cls.v1
)
cls.sh2 = SecretHideout.objects.create(
location="floating castle", villain=cls.sv1
)
cls.ssh1 = SuperSecretHideout.objects.create(
location="super floating castle!", supervillain=cls.sv1
)
cls.cy1 = CyclicOne.objects.create(pk=1, name="I am recursive", two_id=1)
cls.cy2 = CyclicTwo.objects.create(pk=1, name="I am recursive too", one_id=1)
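# cy1 and cy2 deliberately reference each other, which is why this test
# case is guarded by skipUnlessDBFeature("can_defer_constraint_checks").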
def setUp(self):
self.client.force_login(self.superuser)
def test_nesting(self):
"""
Objects should be nested to display the relationships that
cause them to be scheduled for deletion.
"""
pattern = re.compile(
r'<li>Plot: <a href="%s">World Domination</a>\s*<ul>\s*'
r'<li>Plot details: <a href="%s">almost finished</a>'
% (
reverse("admin:admin_views_plot_change", args=(self.pl1.pk,)),
reverse("admin:admin_views_plotdetails_change", args=(self.pd1.pk,)),
)
)
response = self.client.get(
reverse("admin:admin_views_villain_delete", args=(self.v1.pk,))
)
self.assertRegex(response.content.decode(), pattern)
def test_cyclic(self):
"""
Cyclic relationships should still cause each object to only be
listed once.
"""
one = '<li>Cyclic one: <a href="%s">I am recursive</a>' % (
reverse("admin:admin_views_cyclicone_change", args=(self.cy1.pk,)),
)
two = '<li>Cyclic two: <a href="%s">I am recursive too</a>' % (
reverse("admin:admin_views_cyclictwo_change", args=(self.cy2.pk,)),
)
response = self.client.get(
reverse("admin:admin_views_cyclicone_delete", args=(self.cy1.pk,))
)
self.assertContains(response, one, 1)
self.assertContains(response, two, 1)
def test_perms_needed(self):
self.client.logout()
delete_user = User.objects.get(username="deleteuser")
delete_user.user_permissions.add(
get_perm(Plot, get_permission_codename("delete", Plot._meta))
)
self.client.force_login(self.deleteuser)
response = self.client.get(
reverse("admin:admin_views_plot_delete", args=(self.pl1.pk,))
)
self.assertContains(
response,
"your account doesn't have permission to delete the following types of "
"objects",
)
self.assertContains(response, "<li>plot details</li>")
def test_protected(self):
q = Question.objects.create(question="Why?")
a1 = Answer.objects.create(question=q, answer="Because.")
a2 = Answer.objects.create(question=q, answer="Yes.")
response = self.client.get(
reverse("admin:admin_views_question_delete", args=(q.pk,))
)
self.assertContains(
response, "would require deleting the following protected related objects"
)
self.assertContains(
response,
'<li>Answer: <a href="%s">Because.</a></li>'
% reverse("admin:admin_views_answer_change", args=(a1.pk,)),
)
self.assertContains(
response,
'<li>Answer: <a href="%s">Yes.</a></li>'
% reverse("admin:admin_views_answer_change", args=(a2.pk,)),
)
def test_post_delete_protected(self):
"""
A POST request to delete protected objects should display the page
which says the deletion is prohibited.
"""
q = Question.objects.create(question="Why?")
Answer.objects.create(question=q, answer="Because.")
response = self.client.post(
reverse("admin:admin_views_question_delete", args=(q.pk,)), {"post": "yes"}
)
self.assertEqual(Question.objects.count(), 1)
self.assertContains(
response, "would require deleting the following protected related objects"
)
def test_restricted(self):
album = Album.objects.create(title="Amaryllis")
song = Song.objects.create(album=album, name="Unity")
response = self.client.get(
reverse("admin:admin_views_album_delete", args=(album.pk,))
)
self.assertContains(
response,
"would require deleting the following protected related objects",
)
self.assertContains(
response,
'<li>Song: <a href="%s">Unity</a></li>'
% reverse("admin:admin_views_song_change", args=(song.pk,)),
)
def test_post_delete_restricted(self):
album = Album.objects.create(title="Amaryllis")
Song.objects.create(album=album, name="Unity")
response = self.client.post(
reverse("admin:admin_views_album_delete", args=(album.pk,)),
{"post": "yes"},
)
self.assertEqual(Album.objects.count(), 1)
self.assertContains(
response,
"would require deleting the following protected related objects",
)
def test_not_registered(self):
should_contain = """<li>Secret hideout: underground bunker"""
response = self.client.get(
reverse("admin:admin_views_villain_delete", args=(self.v1.pk,))
)
self.assertContains(response, should_contain, 1)
def test_multiple_fkeys_to_same_model(self):
"""
If a deleted object has two relationships from another model,
both of those should be followed in looking for related
objects to delete.
"""
should_contain = '<li>Plot: <a href="%s">World Domination</a>' % reverse(
"admin:admin_views_plot_change", args=(self.pl1.pk,)
)
response = self.client.get(
reverse("admin:admin_views_villain_delete", args=(self.v1.pk,))
)
self.assertContains(response, should_contain)
response = self.client.get(
reverse("admin:admin_views_villain_delete", args=(self.v2.pk,))
)
self.assertContains(response, should_contain)
def test_multiple_fkeys_to_same_instance(self):
"""
If a deleted object has two relationships pointing to it from
another object, the other object should still only be listed
once.
"""
should_contain = '<li>Plot: <a href="%s">World Peace</a></li>' % reverse(
"admin:admin_views_plot_change", args=(self.pl2.pk,)
)
response = self.client.get(
reverse("admin:admin_views_villain_delete", args=(self.v2.pk,))
)
self.assertContains(response, should_contain, 1)
def test_inheritance(self):
"""
In the case of an inherited model, if either the child or
parent-model instance is deleted, both instances are listed
for deletion, as well as any relationships they have.
"""
should_contain = [
'<li>Villain: <a href="%s">Bob</a>'
% reverse("admin:admin_views_villain_change", args=(self.sv1.pk,)),
'<li>Super villain: <a href="%s">Bob</a>'
% reverse("admin:admin_views_supervillain_change", args=(self.sv1.pk,)),
"<li>Secret hideout: floating castle",
"<li>Super secret hideout: super floating castle!",
]
response = self.client.get(
reverse("admin:admin_views_villain_delete", args=(self.sv1.pk,))
)
for should in should_contain:
self.assertContains(response, should, 1)
response = self.client.get(
reverse("admin:admin_views_supervillain_delete", args=(self.sv1.pk,))
)
for should in should_contain:
self.assertContains(response, should, 1)
def test_generic_relations(self):
"""
If a deleted object has GenericForeignKeys pointing to it,
those objects should be listed for deletion.
"""
plot = self.pl3
tag = FunkyTag.objects.create(content_object=plot, name="hott")
should_contain = '<li>Funky tag: <a href="%s">hott' % reverse(
"admin:admin_views_funkytag_change", args=(tag.id,)
)
response = self.client.get(
reverse("admin:admin_views_plot_delete", args=(plot.pk,))
)
self.assertContains(response, should_contain)
def test_generic_relations_with_related_query_name(self):
"""
If a deleted object has GenericForeignKey with
GenericRelation(related_query_name='...') pointing to it, those objects
should be listed for deletion.
"""
bookmark = Bookmark.objects.create(name="djangoproject")
tag = FunkyTag.objects.create(content_object=bookmark, name="django")
tag_url = reverse("admin:admin_views_funkytag_change", args=(tag.id,))
should_contain = '<li>Funky tag: <a href="%s">django' % tag_url
response = self.client.get(
reverse("admin:admin_views_bookmark_delete", args=(bookmark.pk,))
)
self.assertContains(response, should_contain)
def test_delete_view_uses_get_deleted_objects(self):
"""The delete view uses ModelAdmin.get_deleted_objects()."""
book = Book.objects.create(name="Test Book")
response = self.client.get(
reverse("admin2:admin_views_book_delete", args=(book.pk,))
)
# BookAdmin.get_deleted_objects() returns custom text.
self.assertContains(response, "a deletable object")
@override_settings(ROOT_URLCONF="admin_views.urls")
class TestGenericRelations(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.v1 = Villain.objects.create(name="Adam")
cls.pl3 = Plot.objects.create(
name="Corn Conspiracy", team_leader=cls.v1, contact=cls.v1
)
def setUp(self):
self.client.force_login(self.superuser)
def test_generic_content_object_in_list_display(self):
FunkyTag.objects.create(content_object=self.pl3, name="hott")
response = self.client.get(reverse("admin:admin_views_funkytag_changelist"))
self.assertContains(response, "%s</td>" % self.pl3)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminViewStringPrimaryKeyTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
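# A primary key stuffed with every character that needs URL quoting and
# HTML escaping, to exercise the admin's handling of string PKs.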
cls.pk = (
"abcdefghijklmnopqrstuvwxyz ABCDEFGHIJKLMNOPQRSTUVWXYZ 1234567890 "
r"""-_.!~*'() ;/?:@&=+$, <>#%" {}|\^[]`"""
)
cls.m1 = ModelWithStringPrimaryKey.objects.create(string_pk=cls.pk)
content_type_pk = ContentType.objects.get_for_model(
ModelWithStringPrimaryKey
).pk
user_pk = cls.superuser.pk
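# log_action() positional arguments: user_id, content_type_id, object_id,
# object_repr, action_flag (2 is CHANGE).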
LogEntry.objects.log_action(
user_pk,
content_type_pk,
cls.pk,
cls.pk,
2,
change_message="Changed something",
)
def setUp(self):
self.client.force_login(self.superuser)
def test_get_history_view(self):
"""
Retrieving the history for an object using the urlencoded form of its
primary key should work.
Refs #12349, #18550.
"""
response = self.client.get(
reverse(
"admin:admin_views_modelwithstringprimarykey_history", args=(self.pk,)
)
)
self.assertContains(response, escape(self.pk))
self.assertContains(response, "Changed something")
def test_get_change_view(self):
"Retrieving the object using urlencoded form of primary key should work"
response = self.client.get(
reverse(
"admin:admin_views_modelwithstringprimarykey_change", args=(self.pk,)
)
)
self.assertContains(response, escape(self.pk))
def test_changelist_to_changeform_link(self):
"""
The link to the changeform of the object in the changelist should use
reverse() and be quoted.
"""
response = self.client.get(
reverse("admin:admin_views_modelwithstringprimarykey_changelist")
)
# This URL comes through reverse(), so the PK is url-quoted and
# iri_to_uri encoded.
pk_final_url = escape(iri_to_uri(quote(self.pk)))
change_url = reverse(
"admin:admin_views_modelwithstringprimarykey_change", args=("__fk__",)
).replace("__fk__", pk_final_url)
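# reverse() is called with a "__fk__" placeholder and the fully encoded PK
# is substituted afterwards, so the expected href matches the rendered
# link exactly.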
should_contain = '<th class="field-__str__"><a href="%s">%s</a></th>' % (
change_url,
escape(self.pk),
)
self.assertContains(response, should_contain)
def test_recentactions_link(self):
"""
The link from the recent actions list referring to the changeform of
the object should be quoted.
"""
response = self.client.get(reverse("admin:index"))
link = reverse(
"admin:admin_views_modelwithstringprimarykey_change", args=(quote(self.pk),)
)
should_contain = """<a href="%s">%s</a>""" % (escape(link), escape(self.pk))
self.assertContains(response, should_contain)
def test_deleteconfirmation_link(self):
""" "
The link from the delete confirmation page referring back to the
changeform of the object should be quoted.
"""
url = reverse(
"admin:admin_views_modelwithstringprimarykey_delete", args=(quote(self.pk),)
)
response = self.client.get(url)
# This URL comes through reverse(), so the PK is url-quoted and
# iri_to_uri encoded.
change_url = reverse(
"admin:admin_views_modelwithstringprimarykey_change", args=("__fk__",)
).replace("__fk__", escape(iri_to_uri(quote(self.pk))))
should_contain = '<a href="%s">%s</a>' % (change_url, escape(self.pk))
self.assertContains(response, should_contain)
def test_url_conflicts_with_add(self):
"A model with a primary key that ends with add or is `add` should be visible"
add_model = ModelWithStringPrimaryKey.objects.create(
pk="i have something to add"
)
add_model.save()
response = self.client.get(
reverse(
"admin:admin_views_modelwithstringprimarykey_change",
args=(quote(add_model.pk),),
)
)
should_contain = """<h1>Change model with string primary key</h1>"""
self.assertContains(response, should_contain)
add_model2 = ModelWithStringPrimaryKey.objects.create(pk="add")
add_url = reverse("admin:admin_views_modelwithstringprimarykey_add")
change_url = reverse(
"admin:admin_views_modelwithstringprimarykey_change",
args=(quote(add_model2.pk),),
)
self.assertNotEqual(add_url, change_url)
def test_url_conflicts_with_delete(self):
"A model with a primary key that ends with delete should be visible"
delete_model = ModelWithStringPrimaryKey(pk="delete")
delete_model.save()
response = self.client.get(
reverse(
"admin:admin_views_modelwithstringprimarykey_change",
args=(quote(delete_model.pk),),
)
)
should_contain = """<h1>Change model with string primary key</h1>"""
self.assertContains(response, should_contain)
def test_url_conflicts_with_history(self):
"A model with a primary key that ends with history should be visible"
history_model = ModelWithStringPrimaryKey(pk="history")
history_model.save()
response = self.client.get(
reverse(
"admin:admin_views_modelwithstringprimarykey_change",
args=(quote(history_model.pk),),
)
)
should_contain = """<h1>Change model with string primary key</h1>"""
self.assertContains(response, should_contain)
def test_shortcut_view_with_escaping(self):
"'View on site should' work properly with char fields"
model = ModelWithStringPrimaryKey(pk="abc_123")
model.save()
response = self.client.get(
reverse(
"admin:admin_views_modelwithstringprimarykey_change",
args=(quote(model.pk),),
)
)
should_contain = '/%s/" class="viewsitelink">' % model.pk
self.assertContains(response, should_contain)
def test_change_view_history_link(self):
"""Object history button link should work and contain the pk value quoted."""
url = reverse(
"admin:%s_modelwithstringprimarykey_change"
% ModelWithStringPrimaryKey._meta.app_label,
args=(quote(self.pk),),
)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
expected_link = reverse(
"admin:%s_modelwithstringprimarykey_history"
% ModelWithStringPrimaryKey._meta.app_label,
args=(quote(self.pk),),
)
self.assertContains(
response, '<a href="%s" class="historylink"' % escape(expected_link)
)
def test_redirect_on_add_view_continue_button(self):
"""As soon as an object is added using "Save and continue editing"
button, the user should be redirected to the object's change_view.
In case primary key is a string containing some special characters
like slash or underscore, these characters must be escaped (see #22266)
"""
response = self.client.post(
reverse("admin:admin_views_modelwithstringprimarykey_add"),
{
"string_pk": "123/history",
"_continue": "1", # Save and continue editing
},
)
self.assertEqual(response.status_code, 302) # temporary redirect
self.assertIn("/123_2Fhistory/", response.headers["location"]) # PK is quoted
@override_settings(ROOT_URLCONF="admin_views.urls")
class SecureViewTests(TestCase):
"""
Test behavior of a view protected by the staff_member_required decorator.
"""
def test_secure_view_shows_login_if_not_logged_in(self):
secure_url = reverse("secure_view")
response = self.client.get(secure_url)
self.assertRedirects(
response, "%s?next=%s" % (reverse("admin:login"), secure_url)
)
response = self.client.get(secure_url, follow=True)
self.assertTemplateUsed(response, "admin/login.html")
self.assertEqual(response.context[REDIRECT_FIELD_NAME], secure_url)
def test_staff_member_required_decorator_works_with_argument(self):
"""
The staff_member_required decorator works with an argument
(redirect_field_name).
"""
secure_url = "/test_admin/admin/secure-view2/"
response = self.client.get(secure_url)
self.assertRedirects(
response, "%s?myfield=%s" % (reverse("admin:login"), secure_url)
)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminViewUnicodeTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.b1 = Book.objects.create(name="Lærdommer")
cls.p1 = Promo.objects.create(name="<Promo for Lærdommer>", book=cls.b1)
cls.chap1 = Chapter.objects.create(
title="Norske bostaver æøå skaper problemer",
content="<p>Svært frustrerende med UnicodeDecodeErro</p>",
book=cls.b1,
)
cls.chap2 = Chapter.objects.create(
title="Kjærlighet",
content="<p>La kjærligheten til de lidende seire.</p>",
book=cls.b1,
)
cls.chap3 = Chapter.objects.create(
title="Kjærlighet", content="<p>Noe innhold</p>", book=cls.b1
)
cls.chap4 = ChapterXtra1.objects.create(
chap=cls.chap1, xtra="<Xtra(1) Norske bostaver æøå skaper problemer>"
)
cls.chap5 = ChapterXtra1.objects.create(
chap=cls.chap2, xtra="<Xtra(1) Kjærlighet>"
)
cls.chap6 = ChapterXtra1.objects.create(
chap=cls.chap3, xtra="<Xtra(1) Kjærlighet>"
)
cls.chap7 = ChapterXtra2.objects.create(
chap=cls.chap1, xtra="<Xtra(2) Norske bostaver æøå skaper problemer>"
)
cls.chap8 = ChapterXtra2.objects.create(
chap=cls.chap2, xtra="<Xtra(2) Kjærlighet>"
)
cls.chap9 = ChapterXtra2.objects.create(
chap=cls.chap3, xtra="<Xtra(2) Kjærlighet>"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_unicode_edit(self):
"""
A test to ensure that POST on edit_view handles non-ASCII characters.
"""
post_data = {
"name": "Test lærdommer",
# inline data
"chapter_set-TOTAL_FORMS": "6",
"chapter_set-INITIAL_FORMS": "3",
"chapter_set-MAX_NUM_FORMS": "0",
"chapter_set-0-id": self.chap1.pk,
"chapter_set-0-title": "Norske bostaver æøå skaper problemer",
"chapter_set-0-content": (
"<p>Svært frustrerende med UnicodeDecodeError</p>"
),
"chapter_set-1-id": self.chap2.id,
"chapter_set-1-title": "Kjærlighet.",
"chapter_set-1-content": (
"<p>La kjærligheten til de lidende seire.</p>"
),
"chapter_set-2-id": self.chap3.id,
"chapter_set-2-title": "Need a title.",
"chapter_set-2-content": "<p>Newest content</p>",
"chapter_set-3-id": "",
"chapter_set-3-title": "",
"chapter_set-3-content": "",
"chapter_set-4-id": "",
"chapter_set-4-title": "",
"chapter_set-4-content": "",
"chapter_set-5-id": "",
"chapter_set-5-title": "",
"chapter_set-5-content": "",
}
response = self.client.post(
reverse("admin:admin_views_book_change", args=(self.b1.pk,)), post_data
)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_unicode_delete(self):
"""
The delete_view handles non-ASCII characters
"""
delete_dict = {"post": "yes"}
delete_url = reverse("admin:admin_views_book_delete", args=(self.b1.pk,))
response = self.client.get(delete_url)
self.assertEqual(response.status_code, 200)
response = self.client.post(delete_url, delete_dict)
self.assertRedirects(response, reverse("admin:admin_views_book_changelist"))
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminViewListEditable(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
cls.per1 = Person.objects.create(name="John Mauchly", gender=1, alive=True)
cls.per2 = Person.objects.create(name="Grace Hopper", gender=1, alive=False)
cls.per3 = Person.objects.create(name="Guido van Rossum", gender=1, alive=True)
def setUp(self):
self.client.force_login(self.superuser)
def test_inheritance(self):
Podcast.objects.create(
name="This Week in Django", release_date=datetime.date.today()
)
response = self.client.get(reverse("admin:admin_views_podcast_changelist"))
self.assertEqual(response.status_code, 200)
def test_inheritance_2(self):
Vodcast.objects.create(name="This Week in Django", released=True)
response = self.client.get(reverse("admin:admin_views_vodcast_changelist"))
self.assertEqual(response.status_code, 200)
def test_custom_pk(self):
Language.objects.create(iso="en", name="English", english_name="English")
response = self.client.get(reverse("admin:admin_views_language_changelist"))
self.assertEqual(response.status_code, 200)
def test_changelist_input_html(self):
response = self.client.get(reverse("admin:admin_views_person_changelist"))
# 2 inputs per object (the field and the hidden id field) = 6
# 4 management hidden fields = 4
# 4 action inputs (3 regular checkboxes, 1 checkbox to select all)
# main form submit button = 1
# search field and search submit button = 2
# CSRF field = 2
# field to track 'select all' across paginated views = 1
# nav sidebar quick filter field = 1 (presumably the remaining input)
# 6 + 4 + 4 + 1 + 2 + 2 + 1 + 1 = 21 inputs
self.assertContains(response, "<input", count=21)
# 1 select per object (3) plus the action dropdown = 4 selects
self.assertContains(response, "<select", count=4)
def test_post_messages(self):
# Ticket 12707: Saving inline editable should not show admin
# action warnings
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": str(self.per1.pk),
"form-1-gender": "2",
"form-1-id": str(self.per2.pk),
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": str(self.per3.pk),
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_person_changelist"), data, follow=True
)
self.assertEqual(len(response.context["messages"]), 1)
def test_post_submission(self):
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": str(self.per1.pk),
"form-1-gender": "2",
"form-1-id": str(self.per2.pk),
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": str(self.per3.pk),
"_save": "Save",
}
self.client.post(reverse("admin:admin_views_person_changelist"), data)
self.assertIs(Person.objects.get(name="John Mauchly").alive, False)
self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 2)
# test a filtered page
data = {
"form-TOTAL_FORMS": "2",
"form-INITIAL_FORMS": "2",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(self.per1.pk),
"form-0-gender": "1",
"form-0-alive": "checked",
"form-1-id": str(self.per3.pk),
"form-1-gender": "1",
"form-1-alive": "checked",
"_save": "Save",
}
self.client.post(
reverse("admin:admin_views_person_changelist") + "?gender__exact=1", data
)
self.assertIs(Person.objects.get(name="John Mauchly").alive, True)
# test a searched page
data = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "1",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(self.per1.pk),
"form-0-gender": "1",
"_save": "Save",
}
self.client.post(
reverse("admin:admin_views_person_changelist") + "?q=john", data
)
self.assertIs(Person.objects.get(name="John Mauchly").alive, False)
def test_non_field_errors(self):
"""
Non-field errors are displayed for each of the forms in the
changelist's formset.
"""
fd1 = FoodDelivery.objects.create(
reference="123", driver="bill", restaurant="thai"
)
fd2 = FoodDelivery.objects.create(
reference="456", driver="bill", restaurant="india"
)
fd3 = FoodDelivery.objects.create(
reference="789", driver="bill", restaurant="pizza"
)
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(fd1.id),
"form-0-reference": "123",
"form-0-driver": "bill",
"form-0-restaurant": "thai",
# Same data as above: Forbidden because of unique_together!
"form-1-id": str(fd2.id),
"form-1-reference": "456",
"form-1-driver": "bill",
"form-1-restaurant": "thai",
"form-2-id": str(fd3.id),
"form-2-reference": "789",
"form-2-driver": "bill",
"form-2-restaurant": "pizza",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_fooddelivery_changelist"), data
)
self.assertContains(
response,
'<tr><td colspan="4"><ul class="errorlist nonfield"><li>Food delivery '
"with this Driver and Restaurant already exists.</li></ul></td></tr>",
1,
html=True,
)
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(fd1.id),
"form-0-reference": "123",
"form-0-driver": "bill",
"form-0-restaurant": "thai",
# Same data as above: Forbidden because of unique_together!
"form-1-id": str(fd2.id),
"form-1-reference": "456",
"form-1-driver": "bill",
"form-1-restaurant": "thai",
# Same data also.
"form-2-id": str(fd3.id),
"form-2-reference": "789",
"form-2-driver": "bill",
"form-2-restaurant": "thai",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_fooddelivery_changelist"), data
)
self.assertContains(
response,
'<tr><td colspan="4"><ul class="errorlist nonfield"><li>Food delivery '
"with this Driver and Restaurant already exists.</li></ul></td></tr>",
2,
html=True,
)
def test_non_form_errors(self):
# test if non-form errors are handled; ticket #12716
data = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "1",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(self.per2.pk),
"form-0-alive": "1",
"form-0-gender": "2",
# The form processing understands this as a list_editable "Save"
# and not an action "Go".
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_person_changelist"), data
)
self.assertContains(response, "Grace is not a Zombie")
def test_non_form_errors_is_errorlist(self):
# test if non-form errors are correctly handled; ticket #12878
data = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "1",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(self.per2.pk),
"form-0-alive": "1",
"form-0-gender": "2",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_person_changelist"), data
)
non_form_errors = response.context["cl"].formset.non_form_errors()
self.assertIsInstance(non_form_errors, ErrorList)
self.assertEqual(
str(non_form_errors),
str(ErrorList(["Grace is not a Zombie"], error_class="nonform")),
)
def test_list_editable_ordering(self):
collector = Collector.objects.create(id=1, name="Frederick Clegg")
Category.objects.create(id=1, order=1, collector=collector)
Category.objects.create(id=2, order=2, collector=collector)
Category.objects.create(id=3, order=0, collector=collector)
Category.objects.create(id=4, order=0, collector=collector)
# NB: The order values must be changed so that the items are reordered.
data = {
"form-TOTAL_FORMS": "4",
"form-INITIAL_FORMS": "4",
"form-MAX_NUM_FORMS": "0",
"form-0-order": "14",
"form-0-id": "1",
"form-0-collector": "1",
"form-1-order": "13",
"form-1-id": "2",
"form-1-collector": "1",
"form-2-order": "1",
"form-2-id": "3",
"form-2-collector": "1",
"form-3-order": "0",
"form-3-id": "4",
"form-3-collector": "1",
# The form processing understands this as a list_editable "Save"
# and not an action "Go".
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_category_changelist"), data
)
# Successful post will redirect
self.assertEqual(response.status_code, 302)
# The order values have been applied to the right objects
self.assertEqual(Category.objects.get(id=1).order, 14)
self.assertEqual(Category.objects.get(id=2).order, 13)
self.assertEqual(Category.objects.get(id=3).order, 1)
self.assertEqual(Category.objects.get(id=4).order, 0)
def test_list_editable_pagination(self):
"""
Pagination works for list_editable items.
"""
UnorderedObject.objects.create(id=1, name="Unordered object #1")
UnorderedObject.objects.create(id=2, name="Unordered object #2")
UnorderedObject.objects.create(id=3, name="Unordered object #3")
response = self.client.get(
reverse("admin:admin_views_unorderedobject_changelist")
)
self.assertContains(response, "Unordered object #3")
self.assertContains(response, "Unordered object #2")
self.assertNotContains(response, "Unordered object #1")
response = self.client.get(
reverse("admin:admin_views_unorderedobject_changelist") + "?p=2"
)
self.assertNotContains(response, "Unordered object #3")
self.assertNotContains(response, "Unordered object #2")
self.assertContains(response, "Unordered object #1")
def test_list_editable_action_submit(self):
# List editable changes should not be executed if the action "Go" button is
# used to submit the form.
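# The payload includes action-form fields ("index", "action") but no
# "_save" key, so the admin should treat this as an action submission and
# discard the list_editable changes.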
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": "1",
"form-1-gender": "2",
"form-1-id": "2",
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": "3",
"index": "0",
"_selected_action": ["3"],
"action": ["", "delete_selected"],
}
self.client.post(reverse("admin:admin_views_person_changelist"), data)
self.assertIs(Person.objects.get(name="John Mauchly").alive, True)
self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 1)
def test_list_editable_action_choices(self):
# List editable changes should be executed if the "Save" button is
# used to submit the form - any action choices should be ignored.
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": str(self.per1.pk),
"form-1-gender": "2",
"form-1-id": str(self.per2.pk),
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": str(self.per3.pk),
"_save": "Save",
"_selected_action": ["1"],
"action": ["", "delete_selected"],
}
self.client.post(reverse("admin:admin_views_person_changelist"), data)
self.assertIs(Person.objects.get(name="John Mauchly").alive, False)
self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 2)
def test_list_editable_popup(self):
"""
Fields should not be list-editable in popups.
"""
response = self.client.get(reverse("admin:admin_views_person_changelist"))
self.assertNotEqual(response.context["cl"].list_editable, ())
response = self.client.get(
reverse("admin:admin_views_person_changelist") + "?%s" % IS_POPUP_VAR
)
self.assertEqual(response.context["cl"].list_editable, ())
def test_pk_hidden_fields(self):
"""
hidden pk fields aren't displayed in the table body and their
corresponding human-readable value is displayed instead. The hidden pk
fields are displayed but separately (not in the table) and only once.
"""
story1 = Story.objects.create(
title="The adventures of Guido", content="Once upon a time in Djangoland..."
)
story2 = Story.objects.create(
title="Crouching Tiger, Hidden Python",
content="The Python was sneaking into...",
)
response = self.client.get(reverse("admin:admin_views_story_changelist"))
# Only one hidden field each, placed separately from the table.
self.assertContains(response, 'id="id_form-0-id"', 1)
self.assertContains(response, 'id="id_form-1-id"', 1)
self.assertContains(
response,
'<div class="hiddenfields">\n'
'<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id">'
'<input type="hidden" name="form-1-id" value="%d" id="id_form-1-id">\n'
"</div>" % (story2.id, story1.id),
html=True,
)
self.assertContains(response, '<td class="field-id">%d</td>' % story1.id, 1)
self.assertContains(response, '<td class="field-id">%d</td>' % story2.id, 1)
def test_pk_hidden_fields_with_list_display_links(self):
"""Similarly as test_pk_hidden_fields, but when the hidden pk fields are
referenced in list_display_links.
Refs #12475.
"""
story1 = OtherStory.objects.create(
title="The adventures of Guido",
content="Once upon a time in Djangoland...",
)
story2 = OtherStory.objects.create(
title="Crouching Tiger, Hidden Python",
content="The Python was sneaking into...",
)
link1 = reverse("admin:admin_views_otherstory_change", args=(story1.pk,))
link2 = reverse("admin:admin_views_otherstory_change", args=(story2.pk,))
response = self.client.get(reverse("admin:admin_views_otherstory_changelist"))
# Only one hidden field each, placed separately from the table.
self.assertContains(response, 'id="id_form-0-id"', 1)
self.assertContains(response, 'id="id_form-1-id"', 1)
self.assertContains(
response,
'<div class="hiddenfields">\n'
'<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id">'
'<input type="hidden" name="form-1-id" value="%d" id="id_form-1-id">\n'
"</div>" % (story2.id, story1.id),
html=True,
)
self.assertContains(
response,
'<th class="field-id"><a href="%s">%d</a></th>' % (link1, story1.id),
1,
)
self.assertContains(
response,
'<th class="field-id"><a href="%s">%d</a></th>' % (link2, story2.id),
1,
)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminSearchTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.joepublicuser = User.objects.create_user(
username="joepublic", password="secret"
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
cls.per1 = Person.objects.create(name="John Mauchly", gender=1, alive=True)
cls.per2 = Person.objects.create(name="Grace Hopper", gender=1, alive=False)
cls.per3 = Person.objects.create(name="Guido van Rossum", gender=1, alive=True)
Person.objects.create(name="John Doe", gender=1)
Person.objects.create(name='John O"Hara', gender=1)
Person.objects.create(name="John O'Hara", gender=1)
cls.t1 = Recommender.objects.create()
cls.t2 = Recommendation.objects.create(the_recommender=cls.t1)
cls.t3 = Recommender.objects.create()
cls.t4 = Recommendation.objects.create(the_recommender=cls.t3)
cls.tt1 = TitleTranslation.objects.create(title=cls.t1, text="Bar")
cls.tt2 = TitleTranslation.objects.create(title=cls.t2, text="Foo")
cls.tt3 = TitleTranslation.objects.create(title=cls.t3, text="Few")
cls.tt4 = TitleTranslation.objects.create(title=cls.t4, text="Bas")
def setUp(self):
self.client.force_login(self.superuser)
def test_search_on_sibling_models(self):
"A search that mentions sibling models"
response = self.client.get(
reverse("admin:admin_views_recommendation_changelist") + "?q=bar"
)
# confirm the search returned 1 object
self.assertContains(response, "\n1 recommendation\n")
def test_with_fk_to_field(self):
"""
The to_field GET parameter is preserved when a search is performed.
Refs #10918.
"""
response = self.client.get(
reverse("admin:auth_user_changelist") + "?q=joe&%s=id" % TO_FIELD_VAR
)
self.assertContains(response, "\n1 user\n")
self.assertContains(
response,
'<input type="hidden" name="%s" value="id">' % TO_FIELD_VAR,
html=True,
)
def test_exact_matches(self):
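# RecommendationAdmin presumably uses "="-prefixed search_fields, which
# restrict matching to exact (iexact) values, so the "ba" prefix below
# finds nothing.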
response = self.client.get(
reverse("admin:admin_views_recommendation_changelist") + "?q=bar"
)
# confirm the search returned one object
self.assertContains(response, "\n1 recommendation\n")
response = self.client.get(
reverse("admin:admin_views_recommendation_changelist") + "?q=ba"
)
# confirm the search returned zero objects
self.assertContains(response, "\n0 recommendations\n")
def test_beginning_matches(self):
response = self.client.get(
reverse("admin:admin_views_person_changelist") + "?q=Gui"
)
# confirm the search returned one object
self.assertContains(response, "\n1 person\n")
self.assertContains(response, "Guido")
response = self.client.get(
reverse("admin:admin_views_person_changelist") + "?q=uido"
)
# confirm the search returned zero objects
self.assertContains(response, "\n0 persons\n")
self.assertNotContains(response, "Guido")
def test_pluggable_search(self):
PluggableSearchPerson.objects.create(name="Bob", age=10)
PluggableSearchPerson.objects.create(name="Amy", age=20)
response = self.client.get(
reverse("admin:admin_views_pluggablesearchperson_changelist") + "?q=Bob"
)
# confirm the search returned one object
self.assertContains(response, "\n1 pluggable search person\n")
self.assertContains(response, "Bob")
response = self.client.get(
reverse("admin:admin_views_pluggablesearchperson_changelist") + "?q=20"
)
# confirm the search returned one object
self.assertContains(response, "\n1 pluggable search person\n")
self.assertContains(response, "Amy")
def test_reset_link(self):
"""
Test presence of reset link in search bar ("1 result (_x total_)").
"""
# 1 query for session + 1 for fetching user
# + 1 for filtered result + 1 for filtered count
# + 1 for total count
with self.assertNumQueries(5):
response = self.client.get(
reverse("admin:admin_views_person_changelist") + "?q=Gui"
)
self.assertContains(
response,
"""<span class="small quiet">1 result (<a href="?">6 total</a>)</span>""",
html=True,
)
def test_no_total_count(self):
"""
#8408 -- "Show all" should be displayed instead of the total count if
ModelAdmin.show_full_result_count is False.
"""
# 1 query for session + 1 for fetching user
# + 1 for filtered result + 1 for filtered count
with self.assertNumQueries(4):
response = self.client.get(
reverse("admin:admin_views_recommendation_changelist") + "?q=bar"
)
self.assertContains(
response,
"""<span class="small quiet">1 result (<a href="?">Show all</a>)</span>""",
html=True,
)
self.assertTrue(response.context["cl"].show_admin_actions)
def test_search_with_spaces(self):
url = reverse("admin:admin_views_person_changelist") + "?q=%s"
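# (search term, expected hit count): double- or single-quoted phrases must
# match as a unit, and embedded quotes have to be backslash-escaped.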
tests = [
('"John Doe"', 1),
("'John Doe'", 1),
("John Doe", 0),
('"John Doe" John', 1),
("'John Doe' John", 1),
("John Doe John", 0),
('"John Do"', 1),
("'John Do'", 1),
("'John O'Hara'", 0),
("'John O\\'Hara'", 1),
('"John O"Hara"', 0),
('"John O\\"Hara"', 1),
]
for search, hits in tests:
with self.subTest(search=search):
response = self.client.get(url % search)
self.assertContains(response, "\n%s person" % hits)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminInheritedInlinesTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_inline(self):
"""
Inline models which inherit from a common parent are correctly handled.
"""
foo_user = "foo username"
bar_user = "bar username"
name_re = re.compile(b'name="(.*?)"')
# test the add case
response = self.client.get(reverse("admin:admin_views_persona_add"))
names = name_re.findall(response.content)
names.remove(b"csrfmiddlewaretoken")
# make sure we have no duplicate HTML names
self.assertEqual(len(names), len(set(names)))
# test the add case
post_data = {
"name": "Test Name",
# inline data
"accounts-TOTAL_FORMS": "1",
"accounts-INITIAL_FORMS": "0",
"accounts-MAX_NUM_FORMS": "0",
"accounts-0-username": foo_user,
"accounts-2-TOTAL_FORMS": "1",
"accounts-2-INITIAL_FORMS": "0",
"accounts-2-MAX_NUM_FORMS": "0",
"accounts-2-0-username": bar_user,
}
response = self.client.post(reverse("admin:admin_views_persona_add"), post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
self.assertEqual(Persona.objects.count(), 1)
self.assertEqual(FooAccount.objects.count(), 1)
self.assertEqual(BarAccount.objects.count(), 1)
self.assertEqual(FooAccount.objects.all()[0].username, foo_user)
self.assertEqual(BarAccount.objects.all()[0].username, bar_user)
self.assertEqual(Persona.objects.all()[0].accounts.count(), 2)
persona_id = Persona.objects.all()[0].id
foo_id = FooAccount.objects.all()[0].id
bar_id = BarAccount.objects.all()[0].id
# test the edit case
response = self.client.get(
reverse("admin:admin_views_persona_change", args=(persona_id,))
)
names = name_re.findall(response.content)
names.remove(b"csrfmiddlewaretoken")
# make sure we have no duplicate HTML names
self.assertEqual(len(names), len(set(names)))
post_data = {
"name": "Test Name",
"accounts-TOTAL_FORMS": "2",
"accounts-INITIAL_FORMS": "1",
"accounts-MAX_NUM_FORMS": "0",
"accounts-0-username": "%s-1" % foo_user,
"accounts-0-account_ptr": str(foo_id),
"accounts-0-persona": str(persona_id),
"accounts-2-TOTAL_FORMS": "2",
"accounts-2-INITIAL_FORMS": "1",
"accounts-2-MAX_NUM_FORMS": "0",
"accounts-2-0-username": "%s-1" % bar_user,
"accounts-2-0-account_ptr": str(bar_id),
"accounts-2-0-persona": str(persona_id),
}
response = self.client.post(
reverse("admin:admin_views_persona_change", args=(persona_id,)), post_data
)
self.assertEqual(response.status_code, 302)
self.assertEqual(Persona.objects.count(), 1)
self.assertEqual(FooAccount.objects.count(), 1)
self.assertEqual(BarAccount.objects.count(), 1)
self.assertEqual(FooAccount.objects.all()[0].username, "%s-1" % foo_user)
self.assertEqual(BarAccount.objects.all()[0].username, "%s-1" % bar_user)
self.assertEqual(Persona.objects.all()[0].accounts.count(), 2)
@override_settings(ROOT_URLCONF="admin_views.urls")
class TestCustomChangeList(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_custom_changelist(self):
"""
Validate that a custom ChangeList class can be used (#9749)
"""
# Insert some data
post_data = {"name": "First Gadget"}
response = self.client.post(reverse("admin:admin_views_gadget_add"), post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
# Hit the page once to get messages out of the queue message list
response = self.client.get(reverse("admin:admin_views_gadget_changelist"))
# Data is still not visible on the page
response = self.client.get(reverse("admin:admin_views_gadget_changelist"))
self.assertNotContains(response, "First Gadget")
@override_settings(ROOT_URLCONF="admin_views.urls")
class TestInlineNotEditable(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_GET_parent_add(self):
"""
InlineModelAdmin broken?
"""
response = self.client.get(reverse("admin:admin_views_parent_add"))
self.assertEqual(response.status_code, 200)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminCustomQuerysetTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.pks = [EmptyModel.objects.create().id for i in range(3)]
def setUp(self):
self.client.force_login(self.superuser)
self.super_login = {
REDIRECT_FIELD_NAME: reverse("admin:index"),
"username": "super",
"password": "secret",
}
def test_changelist_view(self):
response = self.client.get(reverse("admin:admin_views_emptymodel_changelist"))
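# EmptyModelAdmin presumably overrides get_queryset() to hide pk <= 1, so
# only the later objects may appear in the changelist.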
for i in self.pks:
if i > 1:
self.assertContains(response, "Primary key = %s" % i)
else:
self.assertNotContains(response, "Primary key = %s" % i)
def test_changelist_view_count_queries(self):
# create 2 Person objects
Person.objects.create(name="person1", gender=1)
Person.objects.create(name="person2", gender=2)
changelist_url = reverse("admin:admin_views_person_changelist")
# 5 queries are expected: 1 for the session, 1 for the user,
# 2 for the counts and 1 for the objects on the page
with self.assertNumQueries(5):
resp = self.client.get(changelist_url)
self.assertEqual(resp.context["selection_note"], "0 of 2 selected")
self.assertEqual(resp.context["selection_note_all"], "All 2 selected")
with self.assertNumQueries(5):
extra = {"q": "not_in_name"}
resp = self.client.get(changelist_url, extra)
self.assertEqual(resp.context["selection_note"], "0 of 0 selected")
self.assertEqual(resp.context["selection_note_all"], "All 0 selected")
with self.assertNumQueries(5):
extra = {"q": "person"}
resp = self.client.get(changelist_url, extra)
self.assertEqual(resp.context["selection_note"], "0 of 2 selected")
self.assertEqual(resp.context["selection_note_all"], "All 2 selected")
with self.assertNumQueries(5):
extra = {"gender__exact": "1"}
resp = self.client.get(changelist_url, extra)
self.assertEqual(resp.context["selection_note"], "0 of 1 selected")
self.assertEqual(resp.context["selection_note_all"], "1 selected")
def test_change_view(self):
for i in self.pks:
url = reverse("admin:admin_views_emptymodel_change", args=(i,))
response = self.client.get(url, follow=True)
if i > 1:
self.assertEqual(response.status_code, 200)
else:
self.assertRedirects(response, reverse("admin:index"))
self.assertEqual(
[m.message for m in response.context["messages"]],
["empty model with ID “1” doesn’t exist. Perhaps it was deleted?"],
)
def test_add_model_modeladmin_defer_qs(self):
# Test for #14529. defer() is used in ModelAdmin.get_queryset()
# model has __str__ method
self.assertEqual(CoverLetter.objects.count(), 0)
# Emulate model instance creation via the admin
post_data = {
"author": "Candidate, Best",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_coverletter_add"), post_data, follow=True
)
self.assertEqual(response.status_code, 200)
self.assertEqual(CoverLetter.objects.count(), 1)
# Message should contain non-ugly model verbose name
pk = CoverLetter.objects.all()[0].pk
self.assertContains(
response,
'<li class="success">The cover letter “<a href="%s">'
"Candidate, Best</a>” was added successfully.</li>"
% reverse("admin:admin_views_coverletter_change", args=(pk,)),
html=True,
)
# model has no __str__ method
self.assertEqual(ShortMessage.objects.count(), 0)
# Emulate model instance creation via the admin
post_data = {
"content": "What's this SMS thing?",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_shortmessage_add"), post_data, follow=True
)
self.assertEqual(response.status_code, 200)
self.assertEqual(ShortMessage.objects.count(), 1)
# Message should contain non-ugly model verbose name
sm = ShortMessage.objects.all()[0]
self.assertContains(
response,
'<li class="success">The short message “<a href="%s">'
"%s</a>” was added successfully.</li>"
% (reverse("admin:admin_views_shortmessage_change", args=(sm.pk,)), sm),
html=True,
)
def test_add_model_modeladmin_only_qs(self):
# Test for #14529. only() is used in ModelAdmin.get_queryset()
# model has __str__ method
self.assertEqual(Telegram.objects.count(), 0)
# Emulate model instance creation via the admin
post_data = {
"title": "Urgent telegram",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_telegram_add"), post_data, follow=True
)
self.assertEqual(response.status_code, 200)
self.assertEqual(Telegram.objects.count(), 1)
# Message should contain non-ugly model verbose name
pk = Telegram.objects.all()[0].pk
self.assertContains(
response,
'<li class="success">The telegram “<a href="%s">'
"Urgent telegram</a>” was added successfully.</li>"
% reverse("admin:admin_views_telegram_change", args=(pk,)),
html=True,
)
# model has no __str__ method
self.assertEqual(Paper.objects.count(), 0)
# Emulate model instance creation via the admin
post_data = {
"title": "My Modified Paper Title",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_paper_add"), post_data, follow=True
)
self.assertEqual(response.status_code, 200)
self.assertEqual(Paper.objects.count(), 1)
# Message should contain non-ugly model verbose name
p = Paper.objects.all()[0]
self.assertContains(
response,
'<li class="success">The paper “<a href="%s">'
"%s</a>” was added successfully.</li>"
% (reverse("admin:admin_views_paper_change", args=(p.pk,)), p),
html=True,
)
def test_edit_model_modeladmin_defer_qs(self):
# Test for #14529. defer() is used in ModelAdmin.get_queryset()
# model has __str__ method
cl = CoverLetter.objects.create(author="John Doe")
self.assertEqual(CoverLetter.objects.count(), 1)
response = self.client.get(
reverse("admin:admin_views_coverletter_change", args=(cl.pk,))
)
self.assertEqual(response.status_code, 200)
# Emulate model instance edit via the admin
post_data = {
"author": "John Doe II",
"_save": "Save",
}
url = reverse("admin:admin_views_coverletter_change", args=(cl.pk,))
response = self.client.post(url, post_data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(CoverLetter.objects.count(), 1)
# Message should contain non-ugly model verbose name. Instance
# representation is set by model's __str__()
self.assertContains(
response,
'<li class="success">The cover letter “<a href="%s">'
"John Doe II</a>” was changed successfully.</li>"
% reverse("admin:admin_views_coverletter_change", args=(cl.pk,)),
html=True,
)
# model has no __str__ method
sm = ShortMessage.objects.create(content="This is expensive")
self.assertEqual(ShortMessage.objects.count(), 1)
response = self.client.get(
reverse("admin:admin_views_shortmessage_change", args=(sm.pk,))
)
self.assertEqual(response.status_code, 200)
# Emulate model instance edit via the admin
post_data = {
"content": "Too expensive",
"_save": "Save",
}
url = reverse("admin:admin_views_shortmessage_change", args=(sm.pk,))
response = self.client.post(url, post_data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(ShortMessage.objects.count(), 1)
# Message should contain non-ugly model verbose name. The ugly(!)
# instance representation is set by __str__().
self.assertContains(
response,
'<li class="success">The short message “<a href="%s">'
"%s</a>” was changed successfully.</li>"
% (reverse("admin:admin_views_shortmessage_change", args=(sm.pk,)), sm),
html=True,
)
def test_edit_model_modeladmin_only_qs(self):
# Test for #14529. only() is used in ModelAdmin.get_queryset()
# model has __str__ method
t = Telegram.objects.create(title="First Telegram")
self.assertEqual(Telegram.objects.count(), 1)
response = self.client.get(
reverse("admin:admin_views_telegram_change", args=(t.pk,))
)
self.assertEqual(response.status_code, 200)
# Emulate model instance edit via the admin
post_data = {
"title": "Telegram without typo",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_telegram_change", args=(t.pk,)),
post_data,
follow=True,
)
self.assertEqual(response.status_code, 200)
self.assertEqual(Telegram.objects.count(), 1)
# Message should contain non-ugly model verbose name. The instance
# representation is set by model's __str__()
self.assertContains(
response,
'<li class="success">The telegram “<a href="%s">'
"Telegram without typo</a>” was changed successfully.</li>"
% reverse("admin:admin_views_telegram_change", args=(t.pk,)),
html=True,
)
# model has no __str__ method
p = Paper.objects.create(title="My Paper Title")
self.assertEqual(Paper.objects.count(), 1)
response = self.client.get(
reverse("admin:admin_views_paper_change", args=(p.pk,))
)
self.assertEqual(response.status_code, 200)
# Emulate model instance edit via the admin
post_data = {
"title": "My Modified Paper Title",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_paper_change", args=(p.pk,)),
post_data,
follow=True,
)
self.assertEqual(response.status_code, 200)
self.assertEqual(Paper.objects.count(), 1)
# Message should contain non-ugly model verbose name. The ugly(!)
# instance representation is set by __str__().
self.assertContains(
response,
'<li class="success">The paper “<a href="%s">'
"%s</a>” was changed successfully.</li>"
% (reverse("admin:admin_views_paper_change", args=(p.pk,)), p),
html=True,
)
def test_history_view_custom_qs(self):
"""
Custom querysets are considered for the admin history view.
"""
self.client.post(reverse("admin:login"), self.super_login)
FilteredManager.objects.create(pk=1)
FilteredManager.objects.create(pk=2)
response = self.client.get(
reverse("admin:admin_views_filteredmanager_changelist")
)
self.assertContains(response, "PK=1")
self.assertContains(response, "PK=2")
self.assertEqual(
self.client.get(
reverse("admin:admin_views_filteredmanager_history", args=(1,))
).status_code,
200,
)
self.assertEqual(
self.client.get(
reverse("admin:admin_views_filteredmanager_history", args=(2,))
).status_code,
200,
)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminInlineFileUploadTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
file1 = tempfile.NamedTemporaryFile(suffix=".file1")
file1.write(b"a" * (2**21))
filename = file1.name
file1.close()
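# NamedTemporaryFile deletes the file on close; only its path is stored on
# the Picture, which is enough since these tests never read the image back.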
cls.gallery = Gallery.objects.create(name="Test Gallery")
cls.picture = Picture.objects.create(
name="Test Picture",
image=filename,
gallery=cls.gallery,
)
def setUp(self):
self.client.force_login(self.superuser)
def test_form_has_multipart_enctype(self):
response = self.client.get(
reverse("admin:admin_views_gallery_change", args=(self.gallery.id,))
)
self.assertIs(response.context["has_file_field"], True)
self.assertContains(response, MULTIPART_ENCTYPE)
def test_inline_file_upload_edit_validation_error_post(self):
"""
Inline file uploads correctly display prior data (#10002).
"""
post_data = {
"name": "Test Gallery",
"pictures-TOTAL_FORMS": "2",
"pictures-INITIAL_FORMS": "1",
"pictures-MAX_NUM_FORMS": "0",
"pictures-0-id": str(self.picture.id),
"pictures-0-gallery": str(self.gallery.id),
"pictures-0-name": "Test Picture",
"pictures-0-image": "",
"pictures-1-id": "",
"pictures-1-gallery": str(self.gallery.id),
"pictures-1-name": "Test Picture 2",
"pictures-1-image": "",
}
response = self.client.post(
reverse("admin:admin_views_gallery_change", args=(self.gallery.id,)),
post_data,
)
self.assertContains(response, b"Currently")
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminInlineTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.collector = Collector.objects.create(pk=1, name="John Fowles")
def setUp(self):
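# A formset payload covering every inline on the Collector change form;
# each test below fills in only the rows it exercises.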
self.post_data = {
"name": "Test Name",
"widget_set-TOTAL_FORMS": "3",
"widget_set-INITIAL_FORMS": "0",
"widget_set-MAX_NUM_FORMS": "0",
"widget_set-0-id": "",
"widget_set-0-owner": "1",
"widget_set-0-name": "",
"widget_set-1-id": "",
"widget_set-1-owner": "1",
"widget_set-1-name": "",
"widget_set-2-id": "",
"widget_set-2-owner": "1",
"widget_set-2-name": "",
"doohickey_set-TOTAL_FORMS": "3",
"doohickey_set-INITIAL_FORMS": "0",
"doohickey_set-MAX_NUM_FORMS": "0",
"doohickey_set-0-owner": "1",
"doohickey_set-0-code": "",
"doohickey_set-0-name": "",
"doohickey_set-1-owner": "1",
"doohickey_set-1-code": "",
"doohickey_set-1-name": "",
"doohickey_set-2-owner": "1",
"doohickey_set-2-code": "",
"doohickey_set-2-name": "",
"grommet_set-TOTAL_FORMS": "3",
"grommet_set-INITIAL_FORMS": "0",
"grommet_set-MAX_NUM_FORMS": "0",
"grommet_set-0-code": "",
"grommet_set-0-owner": "1",
"grommet_set-0-name": "",
"grommet_set-1-code": "",
"grommet_set-1-owner": "1",
"grommet_set-1-name": "",
"grommet_set-2-code": "",
"grommet_set-2-owner": "1",
"grommet_set-2-name": "",
"whatsit_set-TOTAL_FORMS": "3",
"whatsit_set-INITIAL_FORMS": "0",
"whatsit_set-MAX_NUM_FORMS": "0",
"whatsit_set-0-owner": "1",
"whatsit_set-0-index": "",
"whatsit_set-0-name": "",
"whatsit_set-1-owner": "1",
"whatsit_set-1-index": "",
"whatsit_set-1-name": "",
"whatsit_set-2-owner": "1",
"whatsit_set-2-index": "",
"whatsit_set-2-name": "",
"fancydoodad_set-TOTAL_FORMS": "3",
"fancydoodad_set-INITIAL_FORMS": "0",
"fancydoodad_set-MAX_NUM_FORMS": "0",
"fancydoodad_set-0-doodad_ptr": "",
"fancydoodad_set-0-owner": "1",
"fancydoodad_set-0-name": "",
"fancydoodad_set-0-expensive": "on",
"fancydoodad_set-1-doodad_ptr": "",
"fancydoodad_set-1-owner": "1",
"fancydoodad_set-1-name": "",
"fancydoodad_set-1-expensive": "on",
"fancydoodad_set-2-doodad_ptr": "",
"fancydoodad_set-2-owner": "1",
"fancydoodad_set-2-name": "",
"fancydoodad_set-2-expensive": "on",
"category_set-TOTAL_FORMS": "3",
"category_set-INITIAL_FORMS": "0",
"category_set-MAX_NUM_FORMS": "0",
"category_set-0-order": "",
"category_set-0-id": "",
"category_set-0-collector": "1",
"category_set-1-order": "",
"category_set-1-id": "",
"category_set-1-collector": "1",
"category_set-2-order": "",
"category_set-2-id": "",
"category_set-2-collector": "1",
}
self.client.force_login(self.superuser)
def test_simple_inline(self):
"A simple model can be saved as inlines"
# First add a new inline
self.post_data["widget_set-0-name"] = "Widget 1"
collector_url = reverse(
"admin:admin_views_collector_change", args=(self.collector.pk,)
)
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Widget.objects.count(), 1)
self.assertEqual(Widget.objects.all()[0].name, "Widget 1")
widget_id = Widget.objects.all()[0].id
# The PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="widget_set-0-id"')
# No file or image fields, no enctype on the forms
self.assertIs(response.context["has_file_field"], False)
self.assertNotContains(response, MULTIPART_ENCTYPE)
# Now resave that inline
self.post_data["widget_set-INITIAL_FORMS"] = "1"
self.post_data["widget_set-0-id"] = str(widget_id)
self.post_data["widget_set-0-name"] = "Widget 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Widget.objects.count(), 1)
self.assertEqual(Widget.objects.all()[0].name, "Widget 1")
# Now modify that inline
self.post_data["widget_set-INITIAL_FORMS"] = "1"
self.post_data["widget_set-0-id"] = str(widget_id)
self.post_data["widget_set-0-name"] = "Widget 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Widget.objects.count(), 1)
self.assertEqual(Widget.objects.all()[0].name, "Widget 1 Updated")
def test_explicit_autofield_inline(self):
"""
A model with an explicit autofield primary key can be saved as inlines.
"""
# First add a new inline
self.post_data["grommet_set-0-name"] = "Grommet 1"
collector_url = reverse(
"admin:admin_views_collector_change", args=(self.collector.pk,)
)
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Grommet.objects.count(), 1)
self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1")
# The PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="grommet_set-0-code"')
# Now resave that inline
self.post_data["grommet_set-INITIAL_FORMS"] = "1"
self.post_data["grommet_set-0-code"] = str(Grommet.objects.all()[0].code)
self.post_data["grommet_set-0-name"] = "Grommet 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Grommet.objects.count(), 1)
self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1")
# Now modify that inline
self.post_data["grommet_set-INITIAL_FORMS"] = "1"
self.post_data["grommet_set-0-code"] = str(Grommet.objects.all()[0].code)
self.post_data["grommet_set-0-name"] = "Grommet 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Grommet.objects.count(), 1)
self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1 Updated")
def test_char_pk_inline(self):
"A model with a character PK can be saved as inlines. Regression for #10992"
# First add a new inline
self.post_data["doohickey_set-0-code"] = "DH1"
self.post_data["doohickey_set-0-name"] = "Doohickey 1"
collector_url = reverse(
"admin:admin_views_collector_change", args=(self.collector.pk,)
)
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(DooHickey.objects.count(), 1)
self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1")
# The PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="doohickey_set-0-code"')
# Now resave that inline
self.post_data["doohickey_set-INITIAL_FORMS"] = "1"
self.post_data["doohickey_set-0-code"] = "DH1"
self.post_data["doohickey_set-0-name"] = "Doohickey 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(DooHickey.objects.count(), 1)
self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1")
# Now modify that inline
self.post_data["doohickey_set-INITIAL_FORMS"] = "1"
self.post_data["doohickey_set-0-code"] = "DH1"
self.post_data["doohickey_set-0-name"] = "Doohickey 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(DooHickey.objects.count(), 1)
self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1 Updated")
def test_integer_pk_inline(self):
"A model with an integer PK can be saved as inlines. Regression for #10992"
# First add a new inline
self.post_data["whatsit_set-0-index"] = "42"
self.post_data["whatsit_set-0-name"] = "Whatsit 1"
collector_url = reverse(
"admin:admin_views_collector_change", args=(self.collector.pk,)
)
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Whatsit.objects.count(), 1)
self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1")
# The PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="whatsit_set-0-index"')
# Now resave that inline
self.post_data["whatsit_set-INITIAL_FORMS"] = "1"
self.post_data["whatsit_set-0-index"] = "42"
self.post_data["whatsit_set-0-name"] = "Whatsit 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Whatsit.objects.count(), 1)
self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1")
# Now modify that inline
self.post_data["whatsit_set-INITIAL_FORMS"] = "1"
self.post_data["whatsit_set-0-index"] = "42"
self.post_data["whatsit_set-0-name"] = "Whatsit 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Whatsit.objects.count(), 1)
self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1 Updated")
def test_inherited_inline(self):
"An inherited model can be saved as inlines. Regression for #11042"
# First add a new inline
self.post_data["fancydoodad_set-0-name"] = "Fancy Doodad 1"
collector_url = reverse(
"admin:admin_views_collector_change", args=(self.collector.pk,)
)
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(FancyDoodad.objects.count(), 1)
self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1")
doodad_pk = FancyDoodad.objects.all()[0].pk
# The PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="fancydoodad_set-0-doodad_ptr"')
# Now resave that inline
self.post_data["fancydoodad_set-INITIAL_FORMS"] = "1"
self.post_data["fancydoodad_set-0-doodad_ptr"] = str(doodad_pk)
self.post_data["fancydoodad_set-0-name"] = "Fancy Doodad 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(FancyDoodad.objects.count(), 1)
self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1")
# Now modify that inline
self.post_data["fancydoodad_set-INITIAL_FORMS"] = "1"
self.post_data["fancydoodad_set-0-doodad_ptr"] = str(doodad_pk)
self.post_data["fancydoodad_set-0-name"] = "Fancy Doodad 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(FancyDoodad.objects.count(), 1)
self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1 Updated")
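# Note: FancyDoodad inherits from Doodad through multi-table inheritance,
# so its primary key is the implicit "doodad_ptr" parent link rather than
# an "id" AutoField; that is why the hidden PK input in this formset is
# named "fancydoodad_set-0-doodad_ptr".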
def test_ordered_inline(self):
"""
An inline with an editable ordering field is updated correctly.
"""
# Create some objects with an initial ordering
Category.objects.create(id=1, order=1, collector=self.collector)
Category.objects.create(id=2, order=2, collector=self.collector)
Category.objects.create(id=3, order=0, collector=self.collector)
Category.objects.create(id=4, order=0, collector=self.collector)
# NB: The order values must be changed so that the items are reordered.
self.post_data.update(
{
"name": "Frederick Clegg",
"category_set-TOTAL_FORMS": "7",
"category_set-INITIAL_FORMS": "4",
"category_set-MAX_NUM_FORMS": "0",
"category_set-0-order": "14",
"category_set-0-id": "1",
"category_set-0-collector": "1",
"category_set-1-order": "13",
"category_set-1-id": "2",
"category_set-1-collector": "1",
"category_set-2-order": "1",
"category_set-2-id": "3",
"category_set-2-collector": "1",
"category_set-3-order": "0",
"category_set-3-id": "4",
"category_set-3-collector": "1",
"category_set-4-order": "",
"category_set-4-id": "",
"category_set-4-collector": "1",
"category_set-5-order": "",
"category_set-5-id": "",
"category_set-5-collector": "1",
"category_set-6-order": "",
"category_set-6-id": "",
"category_set-6-collector": "1",
}
)
collector_url = reverse(
"admin:admin_views_collector_change", args=(self.collector.pk,)
)
response = self.client.post(collector_url, self.post_data)
# Successful post will redirect
self.assertEqual(response.status_code, 302)
# The order values have been applied to the right objects
self.assertEqual(self.collector.category_set.count(), 4)
self.assertEqual(Category.objects.get(id=1).order, 14)
self.assertEqual(Category.objects.get(id=2).order, 13)
self.assertEqual(Category.objects.get(id=3).order, 1)
self.assertEqual(Category.objects.get(id=4).order, 0)
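# The formset matches each submitted block to an existing row through its
# hidden "-id" field, not through its position in the payload, so the
# mapping the assertions above verify is, in sketch form:
#
#     category_set-0-id=1, category_set-0-order=14  ->  Category(pk=1).order == 14
#     category_set-1-id=2, category_set-1-order=13  ->  Category(pk=2).order == 13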
@override_settings(ROOT_URLCONF="admin_views.urls")
class NeverCacheTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.s1 = Section.objects.create(name="Test section")
def setUp(self):
self.client.force_login(self.superuser)
def test_admin_index(self):
"Check the never-cache status of the main index"
response = self.client.get(reverse("admin:index"))
self.assertEqual(get_max_age(response), 0)
def test_app_index(self):
"Check the never-cache status of an application index"
response = self.client.get(reverse("admin:app_list", args=("admin_views",)))
self.assertEqual(get_max_age(response), 0)
def test_model_index(self):
"Check the never-cache status of a model index"
response = self.client.get(reverse("admin:admin_views_fabric_changelist"))
self.assertEqual(get_max_age(response), 0)
def test_model_add(self):
"Check the never-cache status of a model add page"
response = self.client.get(reverse("admin:admin_views_fabric_add"))
self.assertEqual(get_max_age(response), 0)
def test_model_view(self):
"Check the never-cache status of a model edit page"
response = self.client.get(
reverse("admin:admin_views_section_change", args=(self.s1.pk,))
)
self.assertEqual(get_max_age(response), 0)
def test_model_history(self):
"Check the never-cache status of a model history page"
response = self.client.get(
reverse("admin:admin_views_section_history", args=(self.s1.pk,))
)
self.assertEqual(get_max_age(response), 0)
def test_model_delete(self):
"Check the never-cache status of a model delete page"
response = self.client.get(
reverse("admin:admin_views_section_delete", args=(self.s1.pk,))
)
self.assertEqual(get_max_age(response), 0)
def test_login(self):
"Check the never-cache status of login views"
self.client.logout()
response = self.client.get(reverse("admin:index"))
self.assertEqual(get_max_age(response), 0)
def test_logout(self):
"Check the never-cache status of logout view"
response = self.client.post(reverse("admin:logout"))
self.assertEqual(get_max_age(response), 0)
def test_password_change(self):
"Check the never-cache status of the password change view"
self.client.logout()
response = self.client.get(reverse("admin:password_change"))
self.assertIsNone(get_max_age(response))
def test_password_change_done(self):
"Check the never-cache status of the password change done view"
response = self.client.get(reverse("admin:password_change_done"))
self.assertIsNone(get_max_age(response))
def test_JS_i18n(self):
"Check the never-cache status of the JavaScript i18n view"
response = self.client.get(reverse("admin:jsi18n"))
self.assertIsNone(get_max_age(response))
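# get_max_age() (from django.utils.cache) parses the response's
# Cache-Control header and returns its max-age directive as an int, or
# None when no max-age is present. Admin views are wrapped in never_cache,
# which emits max-age=0; the password-change and jsi18n views above are
# registered as cacheable and send no max-age, hence assertIsNone(). A
# minimal sketch of what the assertions inspect, assuming a test client
# response:
#
#     response.headers["Cache-Control"]  # e.g. "max-age=0, no-cache, ..."
#     get_max_age(response)              # -> 0, or None without a max-age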
@override_settings(ROOT_URLCONF="admin_views.urls")
class PrePopulatedTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_prepopulated_on(self):
response = self.client.get(reverse("admin:admin_views_prepopulatedpost_add"))
self.assertContains(response, ""id": "#id_slug"")
self.assertContains(
response, ""dependency_ids": ["#id_title"]"
)
self.assertContains(
response,
""id": "#id_prepopulatedsubpost_set-0-subslug"",
)
def test_prepopulated_off(self):
response = self.client.get(
reverse("admin:admin_views_prepopulatedpost_change", args=(self.p1.pk,))
)
self.assertContains(response, "A Long Title")
self.assertNotContains(response, "&quot;id&quot;: &quot;#id_slug&quot;")
self.assertNotContains(
response, "&quot;dependency_ids&quot;: [&quot;#id_title&quot;]"
)
self.assertNotContains(
response,
"&quot;id&quot;: &quot;#id_prepopulatedsubpost_set-0-subslug&quot;",
)
@override_settings(USE_THOUSAND_SEPARATOR=True)
def test_prepopulated_maxlength_localized(self):
"""
Regression test for #15938: if USE_THOUSAND_SEPARATOR is set, make sure
that maxLength (in the JavaScript) is rendered without separators.
"""
response = self.client.get(
reverse("admin:admin_views_prepopulatedpostlargeslug_add")
)
self.assertContains(response, ""maxLength": 1000") # instead of 1,000
def test_view_only_add_form(self):
"""
PrePopulatedPostReadOnlyAdmin.prepopulated_fields includes 'slug'
which is present in the add view, even if the
ModelAdmin.has_change_permission() returns False.
"""
response = self.client.get(reverse("admin7:admin_views_prepopulatedpost_add"))
self.assertContains(response, "data-prepopulated-fields=")
self.assertContains(response, ""id": "#id_slug"")
def test_view_only_change_form(self):
"""
PrePopulatedPostReadOnlyAdmin.prepopulated_fields includes 'slug'. That
doesn't break a view-only change view.
"""
response = self.client.get(
reverse("admin7:admin_views_prepopulatedpost_change", args=(self.p1.pk,))
)
self.assertContains(response, 'data-prepopulated-fields="[]"')
self.assertContains(response, '<div class="readonly">%s</div>' % self.p1.slug)
@override_settings(ROOT_URLCONF="admin_views.urls")
class SeleniumTests(AdminSeleniumTestCase):
available_apps = ["admin_views"] + AdminSeleniumTestCase.available_apps
def setUp(self):
self.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
self.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
def test_login_button_centered(self):
from selenium.webdriver.common.by import By
self.selenium.get(self.live_server_url + reverse("admin:login"))
button = self.selenium.find_element(By.CSS_SELECTOR, ".submit-row input")
offset_left = button.get_property("offsetLeft")
offset_right = button.get_property("offsetParent").get_property(
"offsetWidth"
) - (offset_left + button.get_property("offsetWidth"))
# Use assertAlmostEqual to avoid pixel rounding errors.
self.assertAlmostEqual(offset_left, offset_right, delta=3)
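# The geometry above: the right-hand gap is the parent's width minus the
# button's left offset and its own width. For example, a 100px-wide button
# at offsetLeft=150 inside a 400px-wide parent leaves
# 400 - (150 + 100) = 150px on the right, equal to the left gap, so the
# button is centered.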
def test_prepopulated_fields(self):
"""
The JavaScript-automated prepopulated fields work with the main form
and with stacked and tabular inlines.
Refs #13068, #9264, #9983, #9784.
"""
from selenium.webdriver.common.by import By
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
self.selenium.get(
self.live_server_url + reverse("admin:admin_views_mainprepopulated_add")
)
self.wait_for(".select2")
# Main form ----------------------------------------------------------
self.selenium.find_element(By.ID, "id_pubdate").send_keys("2012-02-18")
self.select_option("#id_status", "option two")
self.selenium.find_element(By.ID, "id_name").send_keys(
" the mAin nÀMë and it's awεšomeıııİ"
)
slug1 = self.selenium.find_element(By.ID, "id_slug1").get_attribute("value")
slug2 = self.selenium.find_element(By.ID, "id_slug2").get_attribute("value")
slug3 = self.selenium.find_element(By.ID, "id_slug3").get_attribute("value")
self.assertEqual(slug1, "the-main-name-and-its-awesomeiiii-2012-02-18")
self.assertEqual(slug2, "option-two-the-main-name-and-its-awesomeiiii")
self.assertEqual(
slug3, "the-main-n\xe0m\xeb-and-its-aw\u03b5\u0161ome\u0131\u0131\u0131i"
)
# Stacked inlines with fieldsets -------------------------------------
# Initial inline
self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-0-pubdate"
).send_keys("2011-12-17")
self.select_option("#id_relatedprepopulated_set-0-status", "option one")
self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-0-name"
).send_keys(" here is a sŤāÇkeð inline ! ")
slug1 = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-0-slug1"
).get_attribute("value")
slug2 = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-0-slug2"
).get_attribute("value")
self.assertEqual(slug1, "here-is-a-stacked-inline-2011-12-17")
self.assertEqual(slug2, "option-one-here-is-a-stacked-inline")
initial_select2_inputs = self.selenium.find_elements(
By.CLASS_NAME, "select2-selection"
)
# Inline formsets have empty/invisible forms.
# Only the 4 visible select2 inputs are initialized.
num_initial_select2_inputs = len(initial_select2_inputs)
self.assertEqual(num_initial_select2_inputs, 4)
# Add an inline
self.selenium.find_elements(By.LINK_TEXT, "Add another Related prepopulated")[
0
].click()
self.assertEqual(
len(self.selenium.find_elements(By.CLASS_NAME, "select2-selection")),
num_initial_select2_inputs + 2,
)
self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-1-pubdate"
).send_keys("1999-01-25")
self.select_option("#id_relatedprepopulated_set-1-status", "option two")
self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-1-name"
).send_keys(
" now you haVe anöther sŤāÇkeð inline with a very ... "
"loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooog "
"text... "
)
slug1 = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-1-slug1"
).get_attribute("value")
slug2 = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-1-slug2"
).get_attribute("value")
# 50 characters maximum for slug1 field
self.assertEqual(slug1, "now-you-have-another-stacked-inline-with-a-very-lo")
# 60 characters maximum for slug2 field
self.assertEqual(
slug2, "option-two-now-you-have-another-stacked-inline-with-a-very-l"
)
# Tabular inlines ----------------------------------------------------
# Initial inline
element = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-0-status"
)
self.selenium.execute_script("window.scrollTo(0, %s);" % element.location["y"])
self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-0-pubdate"
).send_keys("1234-12-07")
self.select_option("#id_relatedprepopulated_set-2-0-status", "option two")
self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-0-name"
).send_keys("And now, with a tÃbűlaŘ inline !!!")
slug1 = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-0-slug1"
).get_attribute("value")
slug2 = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-0-slug2"
).get_attribute("value")
self.assertEqual(slug1, "and-now-with-a-tabular-inline-1234-12-07")
self.assertEqual(slug2, "option-two-and-now-with-a-tabular-inline")
# Add an inline
# Button may be outside the browser frame.
element = self.selenium.find_elements(
By.LINK_TEXT, "Add another Related prepopulated"
)[1]
self.selenium.execute_script("window.scrollTo(0, %s);" % element.location["y"])
element.click()
self.assertEqual(
len(self.selenium.find_elements(By.CLASS_NAME, "select2-selection")),
num_initial_select2_inputs + 4,
)
self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-1-pubdate"
).send_keys("1981-08-22")
self.select_option("#id_relatedprepopulated_set-2-1-status", "option one")
self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-1-name"
).send_keys(r'tÃbűlaŘ inline with ignored ;"&*^\%$#@-/`~ characters')
slug1 = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-1-slug1"
).get_attribute("value")
slug2 = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-1-slug2"
).get_attribute("value")
self.assertEqual(slug1, "tabular-inline-with-ignored-characters-1981-08-22")
self.assertEqual(slug2, "option-one-tabular-inline-with-ignored-characters")
# Add an inline without an initial inline.
# The button is outside of the browser frame.
self.selenium.execute_script("window.scrollTo(0, document.body.scrollHeight);")
self.selenium.find_elements(By.LINK_TEXT, "Add another Related prepopulated")[
2
].click()
self.assertEqual(
len(self.selenium.find_elements(By.CLASS_NAME, "select2-selection")),
num_initial_select2_inputs + 6,
)
# Stacked Inlines without fieldsets ----------------------------------
# Initial inline.
row_id = "id_relatedprepopulated_set-4-0-"
self.selenium.find_element(By.ID, f"{row_id}pubdate").send_keys("2011-12-12")
self.select_option(f"#{row_id}status", "option one")
self.selenium.find_element(By.ID, f"{row_id}name").send_keys(
" sŤāÇkeð inline ! "
)
slug1 = self.selenium.find_element(By.ID, f"{row_id}slug1").get_attribute(
"value"
)
slug2 = self.selenium.find_element(By.ID, f"{row_id}slug2").get_attribute(
"value"
)
self.assertEqual(slug1, "stacked-inline-2011-12-12")
self.assertEqual(slug2, "option-one")
# Add inline.
self.selenium.find_elements(
By.LINK_TEXT,
"Add another Related prepopulated",
)[3].click()
row_id = "id_relatedprepopulated_set-4-1-"
self.selenium.find_element(By.ID, f"{row_id}pubdate").send_keys("1999-01-20")
self.select_option(f"#{row_id}status", "option two")
self.selenium.find_element(By.ID, f"{row_id}name").send_keys(
" now you haVe anöther sŤāÇkeð inline with a very loooong "
)
slug1 = self.selenium.find_element(By.ID, f"{row_id}slug1").get_attribute(
"value"
)
slug2 = self.selenium.find_element(By.ID, f"{row_id}slug2").get_attribute(
"value"
)
self.assertEqual(slug1, "now-you-have-another-stacked-inline-with-a-very-lo")
self.assertEqual(slug2, "option-two")
# Save and check that everything is properly stored in the database
with self.wait_page_loaded():
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.assertEqual(MainPrepopulated.objects.count(), 1)
MainPrepopulated.objects.get(
name=" the mAin nÀMë and it's awεšomeıııİ",
pubdate="2012-02-18",
status="option two",
slug1="the-main-name-and-its-awesomeiiii-2012-02-18",
slug2="option-two-the-main-name-and-its-awesomeiiii",
slug3="the-main-nàmë-and-its-awεšomeıııi",
)
self.assertEqual(RelatedPrepopulated.objects.count(), 6)
RelatedPrepopulated.objects.get(
name=" here is a sŤāÇkeð inline ! ",
pubdate="2011-12-17",
status="option one",
slug1="here-is-a-stacked-inline-2011-12-17",
slug2="option-one-here-is-a-stacked-inline",
)
RelatedPrepopulated.objects.get(
# 75 characters in name field
name=(
" now you haVe anöther sŤāÇkeð inline with a very ... "
"loooooooooooooooooo"
),
pubdate="1999-01-25",
status="option two",
slug1="now-you-have-another-stacked-inline-with-a-very-lo",
slug2="option-two-now-you-have-another-stacked-inline-with-a-very-l",
)
RelatedPrepopulated.objects.get(
name="And now, with a tÃbűlaŘ inline !!!",
pubdate="1234-12-07",
status="option two",
slug1="and-now-with-a-tabular-inline-1234-12-07",
slug2="option-two-and-now-with-a-tabular-inline",
)
RelatedPrepopulated.objects.get(
name=r'tÃbűlaŘ inline with ignored ;"&*^\%$#@-/`~ characters',
pubdate="1981-08-22",
status="option one",
slug1="tabular-inline-with-ignored-characters-1981-08-22",
slug2="option-one-tabular-inline-with-ignored-characters",
)
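# The expected slugs above follow the admin's prepopulate JavaScript: each
# dependency value is URLified (lowercased, accents transliterated,
# punctuation dropped), the pieces are joined with "-", and the result is
# truncated to the slug field's max_length. A rough Python-side
# equivalent, sketched with django.utils.text.slugify (similar, though not
# byte-for-byte identical, to the client-side URLify):
#
#     from django.utils.text import slugify
#     parts = [slugify("option two"), slugify("And now, with a tÃbűlaŘ inline !!!")]
#     "-".join(parts)[:60]  # -> "option-two-and-now-with-a-tabular-inline"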
def test_populate_existing_object(self):
"""
The prepopulation works for existing objects too, as long as
the original field is empty (#19082).
"""
from selenium.webdriver.common.by import By
# Slugs are empty to start with.
item = MainPrepopulated.objects.create(
name=" this is the mAin nÀMë",
pubdate="2012-02-18",
status="option two",
slug1="",
slug2="",
)
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
object_url = self.live_server_url + reverse(
"admin:admin_views_mainprepopulated_change", args=(item.id,)
)
self.selenium.get(object_url)
self.selenium.find_element(By.ID, "id_name").send_keys(" the best")
# The slugs got prepopulated since they were originally empty
slug1 = self.selenium.find_element(By.ID, "id_slug1").get_attribute("value")
slug2 = self.selenium.find_element(By.ID, "id_slug2").get_attribute("value")
self.assertEqual(slug1, "this-is-the-main-name-the-best-2012-02-18")
self.assertEqual(slug2, "option-two-this-is-the-main-name-the-best")
# Save the object
with self.wait_page_loaded():
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.selenium.get(object_url)
self.selenium.find_element(By.ID, "id_name").send_keys(" hello")
# The slugs didn't change since they were no longer empty.
slug1 = self.selenium.find_element(By.ID, "id_slug1").get_attribute("value")
slug2 = self.selenium.find_element(By.ID, "id_slug2").get_attribute("value")
self.assertEqual(slug1, "this-is-the-main-name-the-best-2012-02-18")
self.assertEqual(slug2, "option-two-this-is-the-main-name-the-best")
def test_collapsible_fieldset(self):
"""
The 'collapse' class in a fieldsets definition allows showing/hiding
the appropriate field section.
"""
from selenium.webdriver.common.by import By
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
self.selenium.get(
self.live_server_url + reverse("admin:admin_views_article_add")
)
self.assertFalse(self.selenium.find_element(By.ID, "id_title").is_displayed())
self.selenium.find_elements(By.LINK_TEXT, "Show")[0].click()
self.assertTrue(self.selenium.find_element(By.ID, "id_title").is_displayed())
self.assertEqual(
self.selenium.find_element(By.ID, "fieldsetcollapser0").text, "Hide"
)
def test_selectbox_height_collapsible_fieldset(self):
from selenium.webdriver.common.by import By
self.admin_login(
username="super",
password="secret",
login_url=reverse("admin7:index"),
)
url = self.live_server_url + reverse("admin7:admin_views_pizza_add")
self.selenium.get(url)
self.selenium.find_elements(By.LINK_TEXT, "Show")[0].click()
from_filter_box = self.selenium.find_element(By.ID, "id_toppings_filter")
from_box = self.selenium.find_element(By.ID, "id_toppings_from")
to_filter_box = self.selenium.find_element(By.ID, "id_toppings_filter_selected")
to_box = self.selenium.find_element(By.ID, "id_toppings_to")
self.assertEqual(
(
to_filter_box.get_property("offsetHeight")
+ to_box.get_property("offsetHeight")
),
(
from_filter_box.get_property("offsetHeight")
+ from_box.get_property("offsetHeight")
),
)
def test_selectbox_height_not_collapsible_fieldset(self):
from selenium.webdriver.common.by import By
self.admin_login(
username="super",
password="secret",
login_url=reverse("admin7:index"),
)
url = self.live_server_url + reverse("admin7:admin_views_question_add")
self.selenium.get(url)
from_filter_box = self.selenium.find_element(
By.ID, "id_related_questions_filter"
)
from_box = self.selenium.find_element(By.ID, "id_related_questions_from")
to_filter_box = self.selenium.find_element(
By.ID, "id_related_questions_filter_selected"
)
to_box = self.selenium.find_element(By.ID, "id_related_questions_to")
self.assertEqual(
(
to_filter_box.get_property("offsetHeight")
+ to_box.get_property("offsetHeight")
),
(
from_filter_box.get_property("offsetHeight")
+ from_box.get_property("offsetHeight")
),
)
def test_first_field_focus(self):
"""JavaScript-assisted auto-focus on first usable form field."""
from selenium.webdriver.common.by import By
# First form field has a single widget
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
with self.wait_page_loaded():
self.selenium.get(
self.live_server_url + reverse("admin:admin_views_picture_add")
)
self.assertEqual(
self.selenium.switch_to.active_element,
self.selenium.find_element(By.ID, "id_name"),
)
# First form field has a MultiWidget
with self.wait_page_loaded():
self.selenium.get(
self.live_server_url + reverse("admin:admin_views_reservation_add")
)
self.assertEqual(
self.selenium.switch_to.active_element,
self.selenium.find_element(By.ID, "id_start_date_0"),
)
def test_cancel_delete_confirmation(self):
"Cancelling the deletion of an object takes the user back one page."
from selenium.webdriver.common.by import By
pizza = Pizza.objects.create(name="Double Cheese")
url = reverse("admin:admin_views_pizza_change", args=(pizza.id,))
full_url = self.live_server_url + url
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
self.selenium.get(full_url)
self.selenium.find_element(By.CLASS_NAME, "deletelink").click()
# Click 'cancel' on the delete page.
self.selenium.find_element(By.CLASS_NAME, "cancel-link").click()
# Wait until we're back on the change page.
self.wait_for_text("#content h1", "Change pizza")
self.assertEqual(self.selenium.current_url, full_url)
self.assertEqual(Pizza.objects.count(), 1)
def test_cancel_delete_related_confirmation(self):
"""
Cancelling the deletion of an object with relations takes the user back
one page.
"""
from selenium.webdriver.common.by import By
pizza = Pizza.objects.create(name="Double Cheese")
topping1 = Topping.objects.create(name="Cheddar")
topping2 = Topping.objects.create(name="Mozzarella")
pizza.toppings.add(topping1, topping2)
url = reverse("admin:admin_views_pizza_change", args=(pizza.id,))
full_url = self.live_server_url + url
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
self.selenium.get(full_url)
self.selenium.find_element(By.CLASS_NAME, "deletelink").click()
# Click 'cancel' on the delete page.
self.selenium.find_element(By.CLASS_NAME, "cancel-link").click()
# Wait until we're back on the change page.
self.wait_for_text("#content h1", "Change pizza")
self.assertEqual(self.selenium.current_url, full_url)
self.assertEqual(Pizza.objects.count(), 1)
self.assertEqual(Topping.objects.count(), 2)
def test_list_editable_popups(self):
"""
list_editable foreign keys have add/change popups.
"""
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import Select
s1 = Section.objects.create(name="Test section")
Article.objects.create(
title="foo",
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=s1,
)
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
self.selenium.get(
self.live_server_url + reverse("admin:admin_views_article_changelist")
)
# Change popup
self.selenium.find_element(By.ID, "change_id_form-0-section").click()
self.wait_for_and_switch_to_popup()
self.wait_for_text("#content h1", "Change section")
name_input = self.selenium.find_element(By.ID, "id_name")
name_input.clear()
name_input.send_keys("<i>edited section</i>")
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
# Hide sidebar.
toggle_button = self.selenium.find_element(
By.CSS_SELECTOR, "#toggle-nav-sidebar"
)
toggle_button.click()
select = Select(self.selenium.find_element(By.ID, "id_form-0-section"))
self.assertEqual(select.first_selected_option.text, "<i>edited section</i>")
# Rendered select2 input.
select2_display = self.selenium.find_element(
By.CLASS_NAME, "select2-selection__rendered"
)
# Clear button (×\n) is included in text.
self.assertEqual(select2_display.text, "×\n<i>edited section</i>")
# Add popup
self.selenium.find_element(By.ID, "add_id_form-0-section").click()
self.wait_for_and_switch_to_popup()
self.wait_for_text("#content h1", "Add section")
self.selenium.find_element(By.ID, "id_name").send_keys("new section")
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
select = Select(self.selenium.find_element(By.ID, "id_form-0-section"))
self.assertEqual(select.first_selected_option.text, "new section")
select2_display = self.selenium.find_element(
By.CLASS_NAME, "select2-selection__rendered"
)
# Clear button (×\n) is included in text.
self.assertEqual(select2_display.text, "×\nnew section")
def test_inline_uuid_pk_edit_with_popup(self):
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import Select
parent = ParentWithUUIDPK.objects.create(title="test")
related_with_parent = RelatedWithUUIDPKModel.objects.create(parent=parent)
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
change_url = reverse(
"admin:admin_views_relatedwithuuidpkmodel_change",
args=(related_with_parent.id,),
)
self.selenium.get(self.live_server_url + change_url)
self.selenium.find_element(By.ID, "change_id_parent").click()
self.wait_for_and_switch_to_popup()
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
select = Select(self.selenium.find_element(By.ID, "id_parent"))
self.assertEqual(select.first_selected_option.text, str(parent.id))
self.assertEqual(
select.first_selected_option.get_attribute("value"), str(parent.id)
)
def test_inline_uuid_pk_add_with_popup(self):
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import Select
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
self.selenium.get(
self.live_server_url
+ reverse("admin:admin_views_relatedwithuuidpkmodel_add")
)
self.selenium.find_element(By.ID, "add_id_parent").click()
self.wait_for_and_switch_to_popup()
self.selenium.find_element(By.ID, "id_title").send_keys("test")
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
select = Select(self.selenium.find_element(By.ID, "id_parent"))
uuid_id = str(ParentWithUUIDPK.objects.first().id)
self.assertEqual(select.first_selected_option.text, uuid_id)
self.assertEqual(select.first_selected_option.get_attribute("value"), uuid_id)
def test_inline_uuid_pk_delete_with_popup(self):
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import Select
parent = ParentWithUUIDPK.objects.create(title="test")
related_with_parent = RelatedWithUUIDPKModel.objects.create(parent=parent)
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
change_url = reverse(
"admin:admin_views_relatedwithuuidpkmodel_change",
args=(related_with_parent.id,),
)
self.selenium.get(self.live_server_url + change_url)
self.selenium.find_element(By.ID, "delete_id_parent").click()
self.wait_for_and_switch_to_popup()
self.selenium.find_element(By.XPATH, '//input[@value="Yes, I’m sure"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
select = Select(self.selenium.find_element(By.ID, "id_parent"))
self.assertEqual(ParentWithUUIDPK.objects.count(), 0)
self.assertEqual(select.first_selected_option.text, "---------")
self.assertEqual(select.first_selected_option.get_attribute("value"), "")
def test_inline_with_popup_cancel_delete(self):
"""Clicking ""No, take me back" on a delete popup closes the window."""
from selenium.webdriver.common.by import By
parent = ParentWithUUIDPK.objects.create(title="test")
related_with_parent = RelatedWithUUIDPKModel.objects.create(parent=parent)
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
change_url = reverse(
"admin:admin_views_relatedwithuuidpkmodel_change",
args=(related_with_parent.id,),
)
self.selenium.get(self.live_server_url + change_url)
self.selenium.find_element(By.ID, "delete_id_parent").click()
self.wait_for_and_switch_to_popup()
self.selenium.find_element(By.XPATH, '//a[text()="No, take me back"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
self.assertEqual(len(self.selenium.window_handles), 1)
def test_list_editable_raw_id_fields(self):
from selenium.webdriver.common.by import By
parent = ParentWithUUIDPK.objects.create(title="test")
parent2 = ParentWithUUIDPK.objects.create(title="test2")
RelatedWithUUIDPKModel.objects.create(parent=parent)
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
change_url = reverse(
"admin:admin_views_relatedwithuuidpkmodel_changelist",
current_app=site2.name,
)
self.selenium.get(self.live_server_url + change_url)
self.selenium.find_element(By.ID, "lookup_id_form-0-parent").click()
self.wait_for_and_switch_to_popup()
# Select "parent2" in the popup.
self.selenium.find_element(By.LINK_TEXT, str(parent2.pk)).click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
# The newly selected pk should appear in the raw id input.
value = self.selenium.find_element(By.ID, "id_form-0-parent").get_attribute(
"value"
)
self.assertEqual(value, str(parent2.pk))
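# A raw_id_fields lookup opens the target changelist in a popup; clicking
# a row's primary-key link invokes the admin's dismissRelatedLookupPopup()
# handler, which writes the chosen pk back into the originating text input
# and closes the window; that written-back value is what the final
# assertEqual above reads.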
def test_input_element_font(self):
"""
Browsers' default stylesheets override the font of inputs. The admin
adds additional CSS to handle this.
"""
from selenium.webdriver.common.by import By
self.selenium.get(self.live_server_url + reverse("admin:login"))
element = self.selenium.find_element(By.ID, "id_username")
# Some browsers quote the fonts, some don't.
fonts = [
font.strip().strip('"')
for font in element.value_of_css_property("font-family").split(",")
]
self.assertEqual(
fonts,
[
"Segoe UI",
"system-ui",
"Roboto",
"Helvetica Neue",
"Arial",
"sans-serif",
"Apple Color Emoji",
"Segoe UI Emoji",
"Segoe UI Symbol",
"Noto Color Emoji",
],
)
def test_search_input_filtered_page(self):
from selenium.webdriver.common.by import By
Person.objects.create(name="Guido van Rossum", gender=1, alive=True)
Person.objects.create(name="Grace Hopper", gender=1, alive=False)
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
person_url = reverse("admin:admin_views_person_changelist") + "?q=Gui"
self.selenium.get(self.live_server_url + person_url)
self.assertGreater(
self.selenium.find_element(By.ID, "searchbar").rect["width"],
50,
)
def test_related_popup_index(self):
"""
Create a chain of 'self' related objects via popups.
"""
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import Select
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
add_url = reverse("admin:admin_views_box_add", current_app=site.name)
self.selenium.get(self.live_server_url + add_url)
base_window = self.selenium.current_window_handle
self.selenium.find_element(By.ID, "add_id_next_box").click()
self.wait_for_and_switch_to_popup()
popup_window_test = self.selenium.current_window_handle
self.selenium.find_element(By.ID, "id_title").send_keys("test")
self.selenium.find_element(By.ID, "add_id_next_box").click()
self.wait_for_and_switch_to_popup(num_windows=3)
popup_window_test2 = self.selenium.current_window_handle
self.selenium.find_element(By.ID, "id_title").send_keys("test2")
self.selenium.find_element(By.ID, "add_id_next_box").click()
self.wait_for_and_switch_to_popup(num_windows=4)
self.selenium.find_element(By.ID, "id_title").send_keys("test3")
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.selenium.switch_to.window(popup_window_test2)
select = Select(self.selenium.find_element(By.ID, "id_next_box"))
next_box_id = str(Box.objects.get(title="test3").id)
self.assertEqual(
select.first_selected_option.get_attribute("value"), next_box_id
)
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.selenium.switch_to.window(popup_window_test)
select = Select(self.selenium.find_element(By.ID, "id_next_box"))
next_box_id = str(Box.objects.get(title="test2").id)
self.assertEqual(
select.first_selected_option.get_attribute("value"), next_box_id
)
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.selenium.switch_to.window(base_window)
select = Select(self.selenium.find_element(By.ID, "id_next_box"))
next_box_id = str(Box.objects.get(title="test").id)
self.assertEqual(
select.first_selected_option.get_attribute("value"), next_box_id
)
def test_related_popup_incorrect_close(self):
"""
Clean up child popups when closing a parent popup.
"""
from selenium.webdriver.common.by import By
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
add_url = reverse("admin:admin_views_box_add", current_app=site.name)
self.selenium.get(self.live_server_url + add_url)
self.selenium.find_element(By.ID, "add_id_next_box").click()
self.wait_for_and_switch_to_popup()
test_window = self.selenium.current_window_handle
self.selenium.find_element(By.ID, "id_title").send_keys("test")
self.selenium.find_element(By.ID, "add_id_next_box").click()
self.wait_for_and_switch_to_popup(num_windows=3)
test2_window = self.selenium.current_window_handle
self.selenium.find_element(By.ID, "id_title").send_keys("test2")
self.selenium.find_element(By.ID, "add_id_next_box").click()
self.wait_for_and_switch_to_popup(num_windows=4)
self.assertEqual(len(self.selenium.window_handles), 4)
self.selenium.switch_to.window(test2_window)
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.wait_until(lambda d: len(d.window_handles) == 2, 1)
self.assertEqual(len(self.selenium.window_handles), 2)
# Close final popup to clean up test.
self.selenium.switch_to.window(test_window)
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.wait_until(lambda d: len(d.window_handles) == 1, 1)
self.selenium.switch_to.window(self.selenium.window_handles[-1])
def test_hidden_fields_small_window(self):
from selenium.webdriver.common.by import By
self.admin_login(
username="super",
password="secret",
login_url=reverse("admin:index"),
)
self.selenium.get(self.live_server_url + reverse("admin:admin_views_story_add"))
field_title = self.selenium.find_element(By.CLASS_NAME, "field-title")
current_size = self.selenium.get_window_size()
try:
self.selenium.set_window_size(1024, 768)
self.assertIs(field_title.is_displayed(), False)
self.selenium.set_window_size(767, 575)
self.assertIs(field_title.is_displayed(), False)
finally:
self.selenium.set_window_size(current_size["width"], current_size["height"])
def test_updating_related_objects_updates_fk_selects_except_autocompletes(self):
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import Select
born_country_select_id = "id_born_country"
living_country_select_id = "id_living_country"
living_country_select2_textbox_id = "select2-id_living_country-container"
favorite_country_to_vacation_select_id = "id_favorite_country_to_vacation"
continent_select_id = "id_continent"
def _get_HTML_inside_element_by_id(id_):
return self.selenium.find_element(By.ID, id_).get_attribute("innerHTML")
def _get_text_inside_element_by_selector(selector):
return self.selenium.find_element(By.CSS_SELECTOR, selector).get_attribute(
"innerText"
)
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
add_url = reverse("admin:admin_views_traveler_add")
self.selenium.get(self.live_server_url + add_url)
# Add new Country from the born_country select.
self.selenium.find_element(By.ID, f"add_{born_country_select_id}").click()
self.wait_for_and_switch_to_popup()
self.selenium.find_element(By.ID, "id_name").send_keys("Argentina")
continent_select = Select(
self.selenium.find_element(By.ID, continent_select_id)
)
continent_select.select_by_visible_text("South America")
self.selenium.find_element(By.CSS_SELECTOR, '[type="submit"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
self.assertHTMLEqual(
_get_HTML_inside_element_by_id(born_country_select_id),
"""
<option value="" selected="">---------</option>
<option value="1" selected="">Argentina</option>
""",
)
# Argentina isn't added to the living_country select nor selected by
# the select2 widget.
self.assertEqual(
_get_text_inside_element_by_selector(f"#{living_country_select_id}"), ""
)
self.assertEqual(
_get_text_inside_element_by_selector(
f"#{living_country_select2_textbox_id}"
),
"",
)
# Argentina won't appear because favorite_country_to_vacation field has
# limit_choices_to.
self.assertHTMLEqual(
_get_HTML_inside_element_by_id(favorite_country_to_vacation_select_id),
'<option value="" selected="">---------</option>',
)
# Add new Country from the living_country select.
self.selenium.find_element(By.ID, f"add_{living_country_select_id}").click()
self.wait_for_and_switch_to_popup()
self.selenium.find_element(By.ID, "id_name").send_keys("Spain")
continent_select = Select(
self.selenium.find_element(By.ID, continent_select_id)
)
continent_select.select_by_visible_text("Europe")
self.selenium.find_element(By.CSS_SELECTOR, '[type="submit"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
self.assertHTMLEqual(
_get_HTML_inside_element_by_id(born_country_select_id),
"""
<option value="" selected="">---------</option>
<option value="1" selected="">Argentina</option>
<option value="2">Spain</option>
""",
)
# Spain is added to the living_country select and it's also selected by
# the select2 widget.
self.assertEqual(
_get_text_inside_element_by_selector(f"#{living_country_select_id} option"),
"Spain",
)
self.assertEqual(
_get_text_inside_element_by_selector(
f"#{living_country_select2_textbox_id}"
),
"Spain",
)
# Spain won't appear because favorite_country_to_vacation field has
# limit_choices_to.
self.assertHTMLEqual(
_get_HTML_inside_element_by_id(favorite_country_to_vacation_select_id),
'<option value="" selected="">---------</option>',
)
# Edit second Country created from living_country select.
favorite_select = Select(
self.selenium.find_element(By.ID, living_country_select_id)
)
favorite_select.select_by_visible_text("Spain")
self.selenium.find_element(By.ID, f"change_{living_country_select_id}").click()
self.wait_for_and_switch_to_popup()
favorite_name_input = self.selenium.find_element(By.ID, "id_name")
favorite_name_input.clear()
favorite_name_input.send_keys("Italy")
self.selenium.find_element(By.CSS_SELECTOR, '[type="submit"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
self.assertHTMLEqual(
_get_HTML_inside_element_by_id(born_country_select_id),
"""
<option value="" selected="">---------</option>
<option value="1" selected="">Argentina</option>
<option value="2">Italy</option>
""",
)
# Italy is added to the living_country select and it's also selected by
# the select2 widget.
self.assertEqual(
_get_text_inside_element_by_selector(f"#{living_country_select_id} option"),
"Italy",
)
self.assertEqual(
_get_text_inside_element_by_selector(
f"#{living_country_select2_textbox_id}"
),
"Italy",
)
# favorite_country_to_vacation field has no options.
self.assertHTMLEqual(
_get_HTML_inside_element_by_id(favorite_country_to_vacation_select_id),
'<option value="" selected="">---------</option>',
)
# Add a new Asian country.
self.selenium.find_element(
By.ID, f"add_{favorite_country_to_vacation_select_id}"
).click()
self.wait_for_and_switch_to_popup()
favorite_name_input = self.selenium.find_element(By.ID, "id_name")
favorite_name_input.send_keys("Qatar")
continent_select = Select(
self.selenium.find_element(By.ID, continent_select_id)
)
continent_select.select_by_visible_text("Asia")
self.selenium.find_element(By.CSS_SELECTOR, '[type="submit"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
# Submit the new Traveler.
self.selenium.find_element(By.CSS_SELECTOR, '[name="_save"]').click()
traveler = Traveler.objects.get()
self.assertEqual(traveler.born_country.name, "Argentina")
self.assertEqual(traveler.living_country.name, "Italy")
self.assertEqual(traveler.favorite_country_to_vacation.name, "Qatar")
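# Saving from a related-object popup triggers the admin's
# dismissAddRelatedObjectPopup()/dismissChangeRelatedObjectPopup()
# handlers, which add or rename the corresponding <option> in the other
# plain <select> widgets for the same model on the page. As the assertions
# above walk through, the select2-backed autocomplete widget only reflects
# an object chosen through its own popup, and fields restricted by
# limit_choices_to never pick up the new value.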
def test_redirect_on_add_view_add_another_button(self):
from selenium.webdriver.common.by import By
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
add_url = reverse("admin7:admin_views_section_add")
self.selenium.get(self.live_server_url + add_url)
name_input = self.selenium.find_element(By.ID, "id_name")
name_input.send_keys("Test section 1")
self.selenium.find_element(
By.XPATH, '//input[@value="Save and add another"]'
).click()
self.assertEqual(Section.objects.count(), 1)
name_input = self.selenium.find_element(By.ID, "id_name")
name_input.send_keys("Test section 2")
self.selenium.find_element(
By.XPATH, '//input[@value="Save and add another"]'
).click()
self.assertEqual(Section.objects.count(), 2)
def test_redirect_on_add_view_continue_button(self):
from selenium.webdriver.common.by import By
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
add_url = reverse("admin7:admin_views_section_add")
self.selenium.get(self.live_server_url + add_url)
name_input = self.selenium.find_element(By.ID, "id_name")
name_input.send_keys("Test section 1")
self.selenium.find_element(
By.XPATH, '//input[@value="Save and continue editing"]'
).click()
self.assertEqual(Section.objects.count(), 1)
name_input = self.selenium.find_element(By.ID, "id_name")
name_input_value = name_input.get_attribute("value")
self.assertEqual(name_input_value, "Test section 1")
@override_settings(ROOT_URLCONF="admin_views.urls")
class ReadonlyTest(AdminFieldExtractionMixin, TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
@ignore_warnings(category=RemovedInDjango60Warning)
def test_readonly_get(self):
response = self.client.get(reverse("admin:admin_views_post_add"))
self.assertNotContains(response, 'name="posted"')
# 3 fields + 2 submit buttons + 5 inline management form fields + 2
# hidden fields for inlines + 1 field for the inline + 2 empty forms
# + 1 logout form.
self.assertContains(response, "<input", count=17)
self.assertContains(response, formats.localize(datetime.date.today()))
self.assertContains(response, "<label>Awesomeness level:</label>")
self.assertContains(response, "Very awesome.")
self.assertContains(response, "Unknown coolness.")
self.assertContains(response, "foo")
# Multiline text in a readonly field gets <br> tags
self.assertContains(response, "Multiline<br>test<br>string")
self.assertContains(
response,
'<div class="readonly">Multiline<br>html<br>content</div>',
html=True,
)
self.assertContains(response, "InlineMultiline<br>test<br>string")
self.assertContains(
response,
formats.localize(datetime.date.today() - datetime.timedelta(days=7)),
)
self.assertContains(response, '<div class="form-row field-coolness">')
self.assertContains(response, '<div class="form-row field-awesomeness_level">')
self.assertContains(response, '<div class="form-row field-posted">')
self.assertContains(response, '<div class="form-row field-value">')
self.assertContains(response, '<div class="form-row">')
self.assertContains(response, '<div class="help"', 3)
self.assertContains(
response,
'<div class="help" id="id_title_helptext"><div>Some help text for the '
"title (with Unicode ŠĐĆŽćžšđ)</div></div>",
html=True,
)
self.assertContains(
response,
'<div class="help" id="id_content_helptext"><div>Some help text for the '
"content (with Unicode ŠĐĆŽćžšđ)</div></div>",
html=True,
)
self.assertContains(
response,
'<div class="help"><div>Some help text for the date (with Unicode ŠĐĆŽćžšđ)'
"</div></div>",
html=True,
)
p = Post.objects.create(
title="I worked on readonly_fields", content="Its good stuff"
)
response = self.client.get(
reverse("admin:admin_views_post_change", args=(p.pk,))
)
self.assertContains(response, "%d amount of cool" % p.pk)
@ignore_warnings(category=RemovedInDjango60Warning)
def test_readonly_text_field(self):
p = Post.objects.create(
title="Readonly test",
content="test",
readonly_content="test\r\n\r\ntest\r\n\r\ntest\r\n\r\ntest",
)
Link.objects.create(
url="http://www.djangoproject.com",
post=p,
readonly_link_content="test\r\nlink",
)
response = self.client.get(
reverse("admin:admin_views_post_change", args=(p.pk,))
)
# Checking readonly field.
self.assertContains(response, "test<br><br>test<br><br>test<br><br>test")
# Checking readonly field in inline.
self.assertContains(response, "test<br>link")
@ignore_warnings(category=RemovedInDjango60Warning)
def test_readonly_post(self):
data = {
"title": "Django Got Readonly Fields",
"content": "This is an incredible development.",
"link_set-TOTAL_FORMS": "1",
"link_set-INITIAL_FORMS": "0",
"link_set-MAX_NUM_FORMS": "0",
}
response = self.client.post(reverse("admin:admin_views_post_add"), data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Post.objects.count(), 1)
p = Post.objects.get()
self.assertEqual(p.posted, datetime.date.today())
data["posted"] = "10-8-1990" # some date that's not today
response = self.client.post(reverse("admin:admin_views_post_add"), data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Post.objects.count(), 2)
p = Post.objects.order_by("-id")[0]
self.assertEqual(p.posted, datetime.date.today())
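# Since "posted" is listed in the admin's readonly fields, it is excluded
# from the underlying ModelForm entirely: the POSTed "posted" value above
# is never bound, and the model's default (today) wins both times. A
# minimal sketch of the relevant admin option (assumed shape, not this
# suite's actual admin class):
#
#     class PostAdmin(admin.ModelAdmin):
#         readonly_fields = ["posted"]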
def test_readonly_manytomany(self):
"Regression test for #13004"
response = self.client.get(reverse("admin:admin_views_pizza_add"))
self.assertEqual(response.status_code, 200)
def test_user_password_change_limited_queryset(self):
su = User.objects.filter(is_superuser=True)[0]
response = self.client.get(
reverse("admin2:auth_user_password_change", args=(su.pk,))
)
self.assertEqual(response.status_code, 404)
def test_change_form_renders_correct_null_choice_value(self):
"""
Regression test for #17911.
"""
choice = Choice.objects.create(choice=None)
response = self.client.get(
reverse("admin:admin_views_choice_change", args=(choice.pk,))
)
self.assertContains(
response, '<div class="readonly">No opinion</div>', html=True
)
def _test_readonly_foreignkey_links(self, admin_site):
"""
ForeignKey readonly fields render as links if the target model is
registered in admin.
"""
chapter = Chapter.objects.create(
title="Chapter 1",
content="content",
book=Book.objects.create(name="Book 1"),
)
language = Language.objects.create(iso="_40", name="Test")
obj = ReadOnlyRelatedField.objects.create(
chapter=chapter,
language=language,
user=self.superuser,
)
response = self.client.get(
reverse(
f"{admin_site}:admin_views_readonlyrelatedfield_change", args=(obj.pk,)
),
)
# Related ForeignKey object registered in admin.
user_url = reverse(f"{admin_site}:auth_user_change", args=(self.superuser.pk,))
self.assertContains(
response,
'<div class="readonly"><a href="%s">super</a></div>' % user_url,
html=True,
)
# Related ForeignKey with the string primary key registered in admin.
language_url = reverse(
f"{admin_site}:admin_views_language_change",
args=(quote(language.pk),),
)
self.assertContains(
response,
'<div class="readonly"><a href="%s">_40</a></div>' % language_url,
html=True,
)
# Related ForeignKey object not registered in admin.
self.assertContains(
response, '<div class="readonly">Chapter 1</div>', html=True
)
def test_readonly_foreignkey_links_default_admin_site(self):
self._test_readonly_foreignkey_links("admin")
def test_readonly_foreignkey_links_custom_admin_site(self):
self._test_readonly_foreignkey_links("namespaced_admin")
def test_readonly_manytomany_backwards_ref(self):
"""
Regression test for #16433 - backwards references for related objects
broke if the related field is read-only due to the help_text attribute
"""
topping = Topping.objects.create(name="Salami")
pizza = Pizza.objects.create(name="Americano")
pizza.toppings.add(topping)
response = self.client.get(reverse("admin:admin_views_topping_add"))
self.assertEqual(response.status_code, 200)
def test_readonly_manytomany_forwards_ref(self):
topping = Topping.objects.create(name="Salami")
pizza = Pizza.objects.create(name="Americano")
pizza.toppings.add(topping)
response = self.client.get(
reverse("admin:admin_views_pizza_change", args=(pizza.pk,))
)
self.assertContains(response, "<label>Toppings:</label>", html=True)
self.assertContains(response, '<div class="readonly">Salami</div>', html=True)
def test_readonly_onetoone_backwards_ref(self):
"""
Can reference a reverse OneToOneField in ModelAdmin.readonly_fields.
"""
v1 = Villain.objects.create(name="Adam")
pl = Plot.objects.create(name="Test Plot", team_leader=v1, contact=v1)
pd = PlotDetails.objects.create(details="Brand New Plot", plot=pl)
response = self.client.get(
reverse("admin:admin_views_plotproxy_change", args=(pl.pk,))
)
field = self.get_admin_readonly_field(response, "plotdetails")
pd_url = reverse("admin:admin_views_plotdetails_change", args=(pd.pk,))
self.assertEqual(field.contents(), '<a href="%s">Brand New Plot</a>' % pd_url)
# The reverse relation also works if the OneToOneField is null.
pd.plot = None
pd.save()
response = self.client.get(
reverse("admin:admin_views_plotproxy_change", args=(pl.pk,))
)
field = self.get_admin_readonly_field(response, "plotdetails")
self.assertEqual(field.contents(), "-") # default empty value
@ignore_warnings(category=RemovedInDjango60Warning)
def test_readonly_field_overrides(self):
"""
Regression test for #22087 - ModelForm Meta overrides are ignored by
AdminReadonlyField
"""
p = FieldOverridePost.objects.create(title="Test Post", content="Test Content")
response = self.client.get(
reverse("admin:admin_views_fieldoverridepost_change", args=(p.pk,))
)
self.assertContains(
response,
'<div class="help"><div>Overridden help text for the date</div></div>',
html=True,
)
self.assertContains(
response,
'<label for="id_public">Overridden public label:</label>',
html=True,
)
self.assertNotContains(
response, "Some help text for the date (with Unicode ŠĐĆŽćžšđ)"
)
def test_correct_autoescaping(self):
"""
Make sure that non-field readonly elements are properly autoescaped (#24461)
"""
section = Section.objects.create(name="<a>evil</a>")
response = self.client.get(
reverse("admin:admin_views_section_change", args=(section.pk,))
)
self.assertNotContains(response, "<a>evil</a>", status_code=200)
self.assertContains(response, "<a>evil</a>", status_code=200)
def test_label_suffix_translated(self):
pizza = Pizza.objects.create(name="Americano")
url = reverse("admin:admin_views_pizza_change", args=(pizza.pk,))
with self.settings(LANGUAGE_CODE="fr"):
response = self.client.get(url)
self.assertContains(response, "<label>Toppings\u00A0:</label>", html=True)
@override_settings(ROOT_URLCONF="admin_views.urls")
class LimitChoicesToInAdminTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_limit_choices_to_as_callable(self):
"""Test for ticket 2445 changes to admin."""
threepwood = Character.objects.create(
username="threepwood",
last_action=datetime.datetime.today() + datetime.timedelta(days=1),
)
marley = Character.objects.create(
username="marley",
last_action=datetime.datetime.today() - datetime.timedelta(days=1),
)
response = self.client.get(reverse("admin:admin_views_stumpjoke_add"))
# The allowed option should appear twice; the limited option should not appear.
self.assertContains(response, threepwood.username, count=2)
self.assertNotContains(response, marley.username)
@override_settings(ROOT_URLCONF="admin_views.urls")
class RawIdFieldsTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_limit_choices_to(self):
"""Regression test for 14880"""
actor = Actor.objects.create(name="Palin", age=27)
Inquisition.objects.create(expected=True, leader=actor, country="England")
Inquisition.objects.create(expected=False, leader=actor, country="Spain")
response = self.client.get(reverse("admin:admin_views_sketch_add"))
# Find the link
m = re.search(
rb'<a href="([^"]*)"[^>]* id="lookup_id_inquisition"', response.content
)
self.assertTrue(m) # Got a match
popup_url = m[1].decode().replace("&amp;", "&")
# Handle relative links
popup_url = urljoin(response.request["PATH_INFO"], popup_url)
# Get the popup and verify the correct objects show up in the resulting
# page. This step also tests integers, strings and booleans in the
# lookup query string; in the model, the inquisition field has a
# limit_choices_to option that includes a filter on a string field
# (inquisition__actor__name), a filter on an integer field
# (inquisition__actor__age), and a filter on a boolean field
# (inquisition__expected).
response2 = self.client.get(popup_url)
self.assertContains(response2, "Spain")
self.assertNotContains(response2, "England")
def test_limit_choices_to_isnull_false(self):
"""Regression test for 20182"""
Actor.objects.create(name="Palin", age=27)
Actor.objects.create(name="Kilbraken", age=50, title="Judge")
response = self.client.get(reverse("admin:admin_views_sketch_add"))
# Find the link
m = re.search(
rb'<a href="([^"]*)"[^>]* id="lookup_id_defendant0"', response.content
)
self.assertTrue(m) # Got a match
popup_url = m[1].decode().replace("&amp;", "&")
# Handle relative links
popup_url = urljoin(response.request["PATH_INFO"], popup_url)
# Get the popup and verify the correct objects show up in the resulting
# page. This step tests field__isnull=0 gets parsed correctly from the
# lookup query string; in model we define defendant0 field to have a
# limit_choices_to option that includes "actor__title__isnull=False".
response2 = self.client.get(popup_url)
self.assertContains(response2, "Kilbraken")
self.assertNotContains(response2, "Palin")
def test_limit_choices_to_isnull_true(self):
"""Regression test for 20182"""
Actor.objects.create(name="Palin", age=27)
Actor.objects.create(name="Kilbraken", age=50, title="Judge")
response = self.client.get(reverse("admin:admin_views_sketch_add"))
# Find the link
m = re.search(
rb'<a href="([^"]*)"[^>]* id="lookup_id_defendant1"', response.content
)
self.assertTrue(m) # Got a match
popup_url = m[1].decode().replace("&amp;", "&")
# Handle relative links
popup_url = urljoin(response.request["PATH_INFO"], popup_url)
# Get the popup and verify the correct objects show up in the resulting
# page. This step tests field__isnull=1 gets parsed correctly from the
# lookup query string; in model we define defendant1 field to have a
# limit_choices_to option that includes "actor__title__isnull=True".
response2 = self.client.get(popup_url)
self.assertNotContains(response2, "Kilbraken")
self.assertContains(response2, "Palin")
def test_list_display_method_same_name_as_reverse_accessor(self):
"""
Should be able to use a ModelAdmin method in list_display that has the
same name as a reverse model field ("sketch" in this case).
"""
actor = Actor.objects.create(name="Palin", age=27)
Inquisition.objects.create(expected=True, leader=actor, country="England")
response = self.client.get(reverse("admin:admin_views_inquisition_changelist"))
self.assertContains(response, "list-display-sketch")
@override_settings(ROOT_URLCONF="admin_views.urls")
class UserAdminTest(TestCase):
"""
Tests user CRUD functionality.
"""
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.adduser = User.objects.create_user(
username="adduser", password="secret", is_staff=True
)
cls.changeuser = User.objects.create_user(
username="changeuser", password="secret", is_staff=True
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
cls.per1 = Person.objects.create(name="John Mauchly", gender=1, alive=True)
cls.per2 = Person.objects.create(name="Grace Hopper", gender=1, alive=False)
cls.per3 = Person.objects.create(name="Guido van Rossum", gender=1, alive=True)
def setUp(self):
self.client.force_login(self.superuser)
def test_save_button(self):
user_count = User.objects.count()
response = self.client.post(
reverse("admin:auth_user_add"),
{
"username": "newuser",
"password1": "newpassword",
"password2": "newpassword",
},
)
new_user = User.objects.get(username="newuser")
self.assertRedirects(
response, reverse("admin:auth_user_change", args=(new_user.pk,))
)
self.assertEqual(User.objects.count(), user_count + 1)
self.assertTrue(new_user.has_usable_password())
def test_save_continue_editing_button(self):
user_count = User.objects.count()
response = self.client.post(
reverse("admin:auth_user_add"),
{
"username": "newuser",
"password1": "newpassword",
"password2": "newpassword",
"_continue": "1",
},
)
new_user = User.objects.get(username="newuser")
new_user_url = reverse("admin:auth_user_change", args=(new_user.pk,))
self.assertRedirects(response, new_user_url, fetch_redirect_response=False)
self.assertEqual(User.objects.count(), user_count + 1)
self.assertTrue(new_user.has_usable_password())
response = self.client.get(new_user_url)
self.assertContains(
response,
'<li class="success">The user “<a href="%s">'
"%s</a>” was added successfully. You may edit it again below.</li>"
% (new_user_url, new_user),
html=True,
)
def test_password_mismatch(self):
response = self.client.post(
reverse("admin:auth_user_add"),
{
"username": "newuser",
"password1": "newpassword",
"password2": "mismatch",
},
)
self.assertEqual(response.status_code, 200)
self.assertFormError(response.context["adminform"], "password1", [])
self.assertFormError(
response.context["adminform"],
"password2",
["The two password fields didn’t match."],
)
def test_user_fk_add_popup(self):
"""
User addition through a FK popup should return the appropriate
JavaScript response.
"""
response = self.client.get(reverse("admin:admin_views_album_add"))
self.assertContains(response, reverse("admin:auth_user_add"))
self.assertContains(
response,
'class="related-widget-wrapper-link add-related" id="add_id_owner"',
)
response = self.client.get(
reverse("admin:auth_user_add") + "?%s=1" % IS_POPUP_VAR
)
self.assertNotContains(response, 'name="_continue"')
self.assertNotContains(response, 'name="_addanother"')
data = {
"username": "newuser",
"password1": "newpassword",
"password2": "newpassword",
IS_POPUP_VAR: "1",
"_save": "1",
}
response = self.client.post(
reverse("admin:auth_user_add") + "?%s=1" % IS_POPUP_VAR, data, follow=True
)
self.assertContains(response, ""obj": "newuser"")
def test_user_fk_change_popup(self):
"""
User change through a FK popup should return the appropriate JavaScript
response.
"""
response = self.client.get(reverse("admin:admin_views_album_add"))
self.assertContains(
response, reverse("admin:auth_user_change", args=("__fk__",))
)
self.assertContains(
response,
'class="related-widget-wrapper-link change-related" id="change_id_owner"',
)
user = User.objects.get(username="changeuser")
url = (
reverse("admin:auth_user_change", args=(user.pk,)) + "?%s=1" % IS_POPUP_VAR
)
response = self.client.get(url)
self.assertNotContains(response, 'name="_continue"')
self.assertNotContains(response, 'name="_addanother"')
data = {
"username": "newuser",
"password1": "newpassword",
"password2": "newpassword",
"last_login_0": "2007-05-30",
"last_login_1": "13:20:10",
"date_joined_0": "2007-05-30",
"date_joined_1": "13:20:10",
IS_POPUP_VAR: "1",
"_save": "1",
}
response = self.client.post(url, data, follow=True)
self.assertContains(response, ""obj": "newuser"")
self.assertContains(response, ""action": "change"")
def test_user_fk_delete_popup(self):
"""
User deletion through a FK popup should return the appropriate
JavaScript response.
"""
response = self.client.get(reverse("admin:admin_views_album_add"))
self.assertContains(
response, reverse("admin:auth_user_delete", args=("__fk__",))
)
self.assertContains(
response,
'class="related-widget-wrapper-link change-related" id="change_id_owner"',
)
user = User.objects.get(username="changeuser")
url = (
reverse("admin:auth_user_delete", args=(user.pk,)) + "?%s=1" % IS_POPUP_VAR
)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
data = {
"post": "yes",
IS_POPUP_VAR: "1",
}
response = self.client.post(url, data, follow=True)
self.assertContains(response, ""action": "delete"")
def test_save_add_another_button(self):
user_count = User.objects.count()
response = self.client.post(
reverse("admin:auth_user_add"),
{
"username": "newuser",
"password1": "newpassword",
"password2": "newpassword",
"_addanother": "1",
},
)
new_user = User.objects.order_by("-id")[0]
self.assertRedirects(response, reverse("admin:auth_user_add"))
self.assertEqual(User.objects.count(), user_count + 1)
self.assertTrue(new_user.has_usable_password())
def test_user_permission_performance(self):
u = User.objects.all()[0]
# Don't depend on a warm cache, see #17377.
ContentType.objects.clear_cache()
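# Backends using savepoints wrap atomic blocks in extra SAVEPOINT/RELEASE queries, raising the count.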
expected_num_queries = 10 if connection.features.uses_savepoints else 8
with self.assertNumQueries(expected_num_queries):
response = self.client.get(reverse("admin:auth_user_change", args=(u.pk,)))
self.assertEqual(response.status_code, 200)
def test_form_url_present_in_context(self):
u = User.objects.all()[0]
response = self.client.get(
reverse("admin3:auth_user_password_change", args=(u.pk,))
)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context["form_url"], "pony")
@override_settings(ROOT_URLCONF="admin_views.urls")
class GroupAdminTest(TestCase):
"""
Tests group CRUD functionality.
"""
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_save_button(self):
group_count = Group.objects.count()
response = self.client.post(
reverse("admin:auth_group_add"),
{
"name": "newgroup",
},
)
Group.objects.order_by("-id")[0]
self.assertRedirects(response, reverse("admin:auth_group_changelist"))
self.assertEqual(Group.objects.count(), group_count + 1)
def test_group_permission_performance(self):
g = Group.objects.create(name="test_group")
# Ensure no queries are skipped due to cached content type for Group.
ContentType.objects.clear_cache()
expected_num_queries = 8 if connection.features.uses_savepoints else 6
with self.assertNumQueries(expected_num_queries):
response = self.client.get(reverse("admin:auth_group_change", args=(g.pk,)))
self.assertEqual(response.status_code, 200)
@override_settings(ROOT_URLCONF="admin_views.urls")
class CSSTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
def setUp(self):
self.client.force_login(self.superuser)
@ignore_warnings(category=RemovedInDjango60Warning)
def test_field_prefix_css_classes(self):
"""
Fields have a CSS class name with a 'field-' prefix.
"""
response = self.client.get(reverse("admin:admin_views_post_add"))
# The main form
self.assertContains(response, 'class="form-row field-title"')
self.assertContains(response, 'class="form-row field-content"')
self.assertContains(response, 'class="form-row field-public"')
self.assertContains(response, 'class="form-row field-awesomeness_level"')
self.assertContains(response, 'class="form-row field-coolness"')
self.assertContains(response, 'class="form-row field-value"')
self.assertContains(response, 'class="form-row"') # The lambda function
# The tabular inline
self.assertContains(response, '<td class="field-url">')
self.assertContains(response, '<td class="field-posted">')
def test_index_css_classes(self):
"""
CSS class names are used for each app and model on the admin index
pages (#17050).
"""
# General index page
response = self.client.get(reverse("admin:index"))
self.assertContains(response, '<div class="app-admin_views module')
self.assertContains(response, '<tr class="model-actor">')
self.assertContains(response, '<tr class="model-album">')
# App index page
response = self.client.get(reverse("admin:app_list", args=("admin_views",)))
self.assertContains(response, '<div class="app-admin_views module')
self.assertContains(response, '<tr class="model-actor">')
self.assertContains(response, '<tr class="model-album">')
def test_app_model_in_form_body_class(self):
"""
Ensure app and model tag are correctly read by change_form template
"""
response = self.client.get(reverse("admin:admin_views_section_add"))
self.assertContains(response, '<body class=" app-admin_views model-section ')
def test_app_model_in_list_body_class(self):
"""
Ensure app and model tag are correctly read by change_list template
"""
response = self.client.get(reverse("admin:admin_views_section_changelist"))
self.assertContains(response, '<body class=" app-admin_views model-section ')
def test_app_model_in_delete_confirmation_body_class(self):
"""
Ensure app and model tag are correctly read by delete_confirmation
template
"""
response = self.client.get(
reverse("admin:admin_views_section_delete", args=(self.s1.pk,))
)
self.assertContains(response, '<body class=" app-admin_views model-section ')
def test_app_model_in_app_index_body_class(self):
"""
Ensure app and model tag are correctly read by app_index template
"""
response = self.client.get(reverse("admin:app_list", args=("admin_views",)))
self.assertContains(response, '<body class=" dashboard app-admin_views')
def test_app_model_in_delete_selected_confirmation_body_class(self):
"""
Ensure app and model tag are correctly read by
delete_selected_confirmation template
"""
action_data = {
ACTION_CHECKBOX_NAME: [self.s1.pk],
"action": "delete_selected",
"index": 0,
}
response = self.client.post(
reverse("admin:admin_views_section_changelist"), action_data
)
self.assertContains(response, '<body class=" app-admin_views model-section ')
def test_changelist_field_classes(self):
"""
Cells of the change list table should contain the field name in their
class attribute.
"""
Podcast.objects.create(name="Django Dose", release_date=datetime.date.today())
response = self.client.get(reverse("admin:admin_views_podcast_changelist"))
self.assertContains(response, '<th class="field-name">')
self.assertContains(response, '<td class="field-release_date nowrap">')
self.assertContains(response, '<td class="action-checkbox">')
try:
import docutils
except ImportError:
docutils = None
@unittest.skipUnless(docutils, "no docutils installed.")
@override_settings(ROOT_URLCONF="admin_views.urls")
@modify_settings(
INSTALLED_APPS={"append": ["django.contrib.admindocs", "django.contrib.flatpages"]}
)
class AdminDocsTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_tags(self):
response = self.client.get(reverse("django-admindocs-tags"))
# The builtin tag group exists
self.assertContains(response, "<h2>Built-in tags</h2>", count=2, html=True)
# A builtin tag exists in both the index and detail
self.assertContains(
response, '<h3 id="built_in-autoescape">autoescape</h3>', html=True
)
self.assertContains(
response,
'<li><a href="#built_in-autoescape">autoescape</a></li>',
html=True,
)
# An app tag exists in both the index and detail
self.assertContains(
response, '<h3 id="flatpages-get_flatpages">get_flatpages</h3>', html=True
)
self.assertContains(
response,
'<li><a href="#flatpages-get_flatpages">get_flatpages</a></li>',
html=True,
)
# The admin list tag group exists
self.assertContains(response, "<h2>admin_list</h2>", count=2, html=True)
# An admin list tag exists in both the index and detail
self.assertContains(
response, '<h3 id="admin_list-admin_actions">admin_actions</h3>', html=True
)
self.assertContains(
response,
'<li><a href="#admin_list-admin_actions">admin_actions</a></li>',
html=True,
)
def test_filters(self):
response = self.client.get(reverse("django-admindocs-filters"))
# The builtin filter group exists
self.assertContains(response, "<h2>Built-in filters</h2>", count=2, html=True)
# A builtin filter exists in both the index and detail
self.assertContains(response, '<h3 id="built_in-add">add</h3>', html=True)
self.assertContains(
response, '<li><a href="#built_in-add">add</a></li>', html=True
)
@override_settings(
ROOT_URLCONF="admin_views.urls",
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
],
},
}
],
)
class ValidXHTMLTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_lang_name_present(self):
with translation.override(None):
response = self.client.get(reverse("admin:app_list", args=("admin_views",)))
self.assertNotContains(response, ' lang=""')
self.assertNotContains(response, ' xml:lang=""')
@override_settings(ROOT_URLCONF="admin_views.urls", USE_THOUSAND_SEPARATOR=True)
class DateHierarchyTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def assert_non_localized_year(self, response, year):
"""
The year is not localized with USE_THOUSAND_SEPARATOR (#15234).
"""
self.assertNotContains(response, formats.number_format(year))
def assert_contains_year_link(self, response, date):
self.assertContains(response, '?release_date__year=%d"' % date.year)
def assert_contains_month_link(self, response, date):
self.assertContains(
response,
'?release_date__month=%d&amp;release_date__year=%d"'
% (date.month, date.year),
)
def assert_contains_day_link(self, response, date):
self.assertContains(
response,
"?release_date__day=%d&"
'release_date__month=%d&release_date__year=%d"'
% (date.day, date.month, date.year),
)
def test_empty(self):
"""
No date hierarchy links display with empty changelist.
"""
response = self.client.get(reverse("admin:admin_views_podcast_changelist"))
self.assertNotContains(response, "release_date__year=")
self.assertNotContains(response, "release_date__month=")
self.assertNotContains(response, "release_date__day=")
def test_single(self):
"""
Single day-level date hierarchy appears for single object.
"""
DATE = datetime.date(2000, 6, 30)
Podcast.objects.create(release_date=DATE)
url = reverse("admin:admin_views_podcast_changelist")
response = self.client.get(url)
self.assert_contains_day_link(response, DATE)
self.assert_non_localized_year(response, 2000)
def test_within_month(self):
"""
day-level links appear for changelist within single month.
"""
DATES = (
datetime.date(2000, 6, 30),
datetime.date(2000, 6, 15),
datetime.date(2000, 6, 3),
)
for date in DATES:
Podcast.objects.create(release_date=date)
url = reverse("admin:admin_views_podcast_changelist")
response = self.client.get(url)
for date in DATES:
self.assert_contains_day_link(response, date)
self.assert_non_localized_year(response, 2000)
def test_within_year(self):
"""
month-level links appear for changelist within single year.
"""
DATES = (
datetime.date(2000, 1, 30),
datetime.date(2000, 3, 15),
datetime.date(2000, 5, 3),
)
for date in DATES:
Podcast.objects.create(release_date=date)
url = reverse("admin:admin_views_podcast_changelist")
response = self.client.get(url)
# no day-level links
self.assertNotContains(response, "release_date__day=")
for date in DATES:
self.assert_contains_month_link(response, date)
self.assert_non_localized_year(response, 2000)
def test_multiple_years(self):
"""
year-level links appear for year-spanning changelist.
"""
DATES = (
datetime.date(2001, 1, 30),
datetime.date(2003, 3, 15),
datetime.date(2005, 5, 3),
)
for date in DATES:
Podcast.objects.create(release_date=date)
response = self.client.get(reverse("admin:admin_views_podcast_changelist"))
# no day/month-level links
self.assertNotContains(response, "release_date__day=")
self.assertNotContains(response, "release_date__month=")
for date in DATES:
self.assert_contains_year_link(response, date)
# and make sure GET parameters still behave correctly
for date in DATES:
url = "%s?release_date__year=%d" % (
reverse("admin:admin_views_podcast_changelist"),
date.year,
)
response = self.client.get(url)
self.assert_contains_month_link(response, date)
self.assert_non_localized_year(response, 2000)
self.assert_non_localized_year(response, 2003)
self.assert_non_localized_year(response, 2005)
url = "%s?release_date__year=%d&release_date__month=%d" % (
reverse("admin:admin_views_podcast_changelist"),
date.year,
date.month,
)
response = self.client.get(url)
self.assert_contains_day_link(response, date)
self.assert_non_localized_year(response, 2000)
self.assert_non_localized_year(response, 2003)
self.assert_non_localized_year(response, 2005)
def test_related_field(self):
questions_data = (
# (posted data, number of answers),
(datetime.date(2001, 1, 30), 0),
(datetime.date(2003, 3, 15), 1),
(datetime.date(2005, 5, 3), 2),
)
for date, answer_count in questions_data:
question = Question.objects.create(posted=date)
for i in range(answer_count):
question.answer_set.create()
response = self.client.get(reverse("admin:admin_views_answer_changelist"))
for date, answer_count in questions_data:
link = '?question__posted__year=%d"' % date.year
if answer_count > 0:
self.assertContains(response, link)
else:
self.assertNotContains(response, link)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminCustomSaveRelatedTests(TestCase):
"""
One can easily customize the way related objects are saved.
Refs #16115.
"""
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_should_be_able_to_edit_related_objects_on_add_view(self):
post = {
"child_set-TOTAL_FORMS": "3",
"child_set-INITIAL_FORMS": "0",
"name": "Josh Stone",
"child_set-0-name": "Paul",
"child_set-1-name": "Catherine",
}
self.client.post(reverse("admin:admin_views_parent_add"), post)
self.assertEqual(1, Parent.objects.count())
self.assertEqual(2, Child.objects.count())
children_names = list(
Child.objects.order_by("name").values_list("name", flat=True)
)
self.assertEqual("Josh Stone", Parent.objects.latest("id").name)
self.assertEqual(["Catherine Stone", "Paul Stone"], children_names)
def test_should_be_able_to_edit_related_objects_on_change_view(self):
parent = Parent.objects.create(name="Josh Stone")
paul = Child.objects.create(parent=parent, name="Paul")
catherine = Child.objects.create(parent=parent, name="Catherine")
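# Management-form data: 2 initial forms for the existing children plus 3 extra blank forms.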
post = {
"child_set-TOTAL_FORMS": "5",
"child_set-INITIAL_FORMS": "2",
"name": "Josh Stone",
"child_set-0-name": "Paul",
"child_set-0-id": paul.id,
"child_set-1-name": "Catherine",
"child_set-1-id": catherine.id,
}
self.client.post(
reverse("admin:admin_views_parent_change", args=(parent.id,)), post
)
children_names = list(
Child.objects.order_by("name").values_list("name", flat=True)
)
self.assertEqual("Josh Stone", Parent.objects.latest("id").name)
self.assertEqual(["Catherine Stone", "Paul Stone"], children_names)
def test_should_be_able_to_edit_related_objects_on_changelist_view(self):
parent = Parent.objects.create(name="Josh Rock")
Child.objects.create(parent=parent, name="Paul")
Child.objects.create(parent=parent, name="Catherine")
post = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "1",
"form-MAX_NUM_FORMS": "0",
"form-0-id": parent.id,
"form-0-name": "Josh Stone",
"_save": "Save",
}
self.client.post(reverse("admin:admin_views_parent_changelist"), post)
children_names = list(
Child.objects.order_by("name").values_list("name", flat=True)
)
self.assertEqual("Josh Stone", Parent.objects.latest("id").name)
self.assertEqual(["Catherine Stone", "Paul Stone"], children_names)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminViewLogoutTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def test_logout(self):
self.client.force_login(self.superuser)
response = self.client.post(reverse("admin:logout"))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "registration/logged_out.html")
self.assertEqual(response.request["PATH_INFO"], reverse("admin:logout"))
self.assertFalse(response.context["has_permission"])
self.assertNotContains(
response, "user-tools"
) # The user-tools div shouldn't be visible.
def test_client_logout_url_can_be_used_to_login(self):
response = self.client.post(reverse("admin:logout"))
self.assertEqual(
response.status_code, 302
) # we should be redirected to the login page.
# follow the redirect and test results.
response = self.client.post(reverse("admin:logout"), follow=True)
self.assertContains(
response,
'<input type="hidden" name="next" value="%s">' % reverse("admin:index"),
)
self.assertTemplateUsed(response, "admin/login.html")
self.assertEqual(response.request["PATH_INFO"], reverse("admin:login"))
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminUserMessageTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def send_message(self, level):
"""
Helper that sends a post to the dummy test methods and asserts that a
message with the level has appeared in the response.
"""
action_data = {
ACTION_CHECKBOX_NAME: [1],
"action": "message_%s" % level,
"index": 0,
}
response = self.client.post(
reverse("admin:admin_views_usermessenger_changelist"),
action_data,
follow=True,
)
self.assertContains(
response, '<li class="%s">Test %s</li>' % (level, level), html=True
)
@override_settings(MESSAGE_LEVEL=10) # Set to DEBUG for this request
def test_message_debug(self):
self.send_message("debug")
def test_message_info(self):
self.send_message("info")
def test_message_success(self):
self.send_message("success")
def test_message_warning(self):
self.send_message("warning")
def test_message_error(self):
self.send_message("error")
def test_message_extra_tags(self):
action_data = {
ACTION_CHECKBOX_NAME: [1],
"action": "message_extra_tags",
"index": 0,
}
response = self.client.post(
reverse("admin:admin_views_usermessenger_changelist"),
action_data,
follow=True,
)
self.assertContains(
response, '<li class="extra_tag info">Test tags</li>', html=True
)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminKeepChangeListFiltersTests(TestCase):
admin_site = site
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.joepublicuser = User.objects.create_user(
username="joepublic", password="secret"
)
def setUp(self):
self.client.force_login(self.superuser)
def assertURLEqual(self, url1, url2, msg_prefix=""):
"""
Assert that two URLs are equal despite the ordering
of their querystring. Refs #22360.
"""
parsed_url1 = urlparse(url1)
path1 = parsed_url1.path
parsed_qs1 = dict(parse_qsl(parsed_url1.query))
parsed_url2 = urlparse(url2)
path2 = parsed_url2.path
parsed_qs2 = dict(parse_qsl(parsed_url2.query))
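# _changelist_filters is itself a urlencoded querystring; parse it so the
# ordering of its inner parameters is ignored too.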
for parsed_qs in [parsed_qs1, parsed_qs2]:
if "_changelist_filters" in parsed_qs:
changelist_filters = parsed_qs["_changelist_filters"]
parsed_filters = dict(parse_qsl(changelist_filters))
parsed_qs["_changelist_filters"] = parsed_filters
self.assertEqual(path1, path2)
self.assertEqual(parsed_qs1, parsed_qs2)
def test_assert_url_equal(self):
# Test equality.
change_user_url = reverse(
"admin:auth_user_change", args=(self.joepublicuser.pk,)
)
self.assertURLEqual(
"http://testserver{}?_changelist_filters="
"is_staff__exact%3D0%26is_superuser__exact%3D0".format(change_user_url),
"http://testserver{}?_changelist_filters="
"is_staff__exact%3D0%26is_superuser__exact%3D0".format(change_user_url),
)
# Test inequality.
with self.assertRaises(AssertionError):
self.assertURLEqual(
"http://testserver{}?_changelist_filters="
"is_staff__exact%3D0%26is_superuser__exact%3D0".format(change_user_url),
"http://testserver{}?_changelist_filters="
"is_staff__exact%3D1%26is_superuser__exact%3D1".format(change_user_url),
)
# Ignore scheme and host.
self.assertURLEqual(
"http://testserver{}?_changelist_filters="
"is_staff__exact%3D0%26is_superuser__exact%3D0".format(change_user_url),
"{}?_changelist_filters="
"is_staff__exact%3D0%26is_superuser__exact%3D0".format(change_user_url),
)
# Ignore ordering of querystring.
self.assertURLEqual(
"{}?is_staff__exact=0&is_superuser__exact=0".format(
reverse("admin:auth_user_changelist")
),
"{}?is_superuser__exact=0&is_staff__exact=0".format(
reverse("admin:auth_user_changelist")
),
)
# Ignore ordering of _changelist_filters.
self.assertURLEqual(
"{}?_changelist_filters="
"is_staff__exact%3D0%26is_superuser__exact%3D0".format(change_user_url),
"{}?_changelist_filters="
"is_superuser__exact%3D0%26is_staff__exact%3D0".format(change_user_url),
)
def get_changelist_filters(self):
return {
"is_superuser__exact": 0,
"is_staff__exact": 0,
}
def get_changelist_filters_querystring(self):
return urlencode(self.get_changelist_filters())
def get_preserved_filters_querystring(self):
return urlencode(
{"_changelist_filters": self.get_changelist_filters_querystring()}
)
def get_sample_user_id(self):
return self.joepublicuser.pk
def get_changelist_url(self):
return "%s?%s" % (
reverse("admin:auth_user_changelist", current_app=self.admin_site.name),
self.get_changelist_filters_querystring(),
)
def get_add_url(self, add_preserved_filters=True):
url = reverse("admin:auth_user_add", current_app=self.admin_site.name)
if add_preserved_filters:
url = "%s?%s" % (url, self.get_preserved_filters_querystring())
return url
def get_change_url(self, user_id=None, add_preserved_filters=True):
if user_id is None:
user_id = self.get_sample_user_id()
url = reverse(
"admin:auth_user_change", args=(user_id,), current_app=self.admin_site.name
)
if add_preserved_filters:
url = "%s?%s" % (url, self.get_preserved_filters_querystring())
return url
def get_history_url(self, user_id=None):
if user_id is None:
user_id = self.get_sample_user_id()
return "%s?%s" % (
reverse(
"admin:auth_user_history",
args=(user_id,),
current_app=self.admin_site.name,
),
self.get_preserved_filters_querystring(),
)
def get_delete_url(self, user_id=None):
if user_id is None:
user_id = self.get_sample_user_id()
return "%s?%s" % (
reverse(
"admin:auth_user_delete",
args=(user_id,),
current_app=self.admin_site.name,
),
self.get_preserved_filters_querystring(),
)
def test_changelist_view(self):
response = self.client.get(self.get_changelist_url())
self.assertEqual(response.status_code, 200)
# Check the `change_view` link has the correct querystring.
detail_link = re.search(
'<a href="(.*?)">{}</a>'.format(self.joepublicuser.username),
response.content.decode(),
)
self.assertURLEqual(detail_link[1], self.get_change_url())
def test_change_view(self):
# Get the `change_view`.
response = self.client.get(self.get_change_url())
self.assertEqual(response.status_code, 200)
# Check the form action.
form_action = re.search(
'<form action="(.*?)" method="post" id="user_form" novalidate>',
response.content.decode(),
)
self.assertURLEqual(
form_action[1], "?%s" % self.get_preserved_filters_querystring()
)
# Check the history link.
history_link = re.search(
'<a href="(.*?)" class="historylink">History</a>', response.content.decode()
)
self.assertURLEqual(history_link[1], self.get_history_url())
# Check the delete link.
delete_link = re.search(
'<a href="(.*?)" class="deletelink">Delete</a>', response.content.decode()
)
self.assertURLEqual(delete_link[1], self.get_delete_url())
# Test redirect on "Save".
post_data = {
"username": "joepublic",
"last_login_0": "2007-05-30",
"last_login_1": "13:20:10",
"date_joined_0": "2007-05-30",
"date_joined_1": "13:20:10",
}
post_data["_save"] = 1
response = self.client.post(self.get_change_url(), data=post_data)
self.assertRedirects(response, self.get_changelist_url())
post_data.pop("_save")
# Test redirect on "Save and continue".
post_data["_continue"] = 1
response = self.client.post(self.get_change_url(), data=post_data)
self.assertRedirects(response, self.get_change_url())
post_data.pop("_continue")
# Test redirect on "Save and add new".
post_data["_addanother"] = 1
response = self.client.post(self.get_change_url(), data=post_data)
self.assertRedirects(response, self.get_add_url())
post_data.pop("_addanother")
def test_change_view_close_link(self):
viewuser = User.objects.create_user(
username="view", password="secret", is_staff=True
)
viewuser.user_permissions.add(
get_perm(User, get_permission_codename("view", User._meta))
)
self.client.force_login(viewuser)
response = self.client.get(self.get_change_url())
close_link = re.search(
'<a href="(.*?)" class="closelink">Close</a>', response.content.decode()
)
close_link = close_link[1].replace("&amp;", "&")
self.assertURLEqual(close_link, self.get_changelist_url())
def test_change_view_without_preserved_filters(self):
response = self.client.get(self.get_change_url(add_preserved_filters=False))
# The action attribute is omitted.
self.assertContains(response, '<form method="post" id="user_form" novalidate>')
def test_add_view(self):
# Get the `add_view`.
response = self.client.get(self.get_add_url())
self.assertEqual(response.status_code, 200)
# Check the form action.
form_action = re.search(
'<form action="(.*?)" method="post" id="user_form" novalidate>',
response.content.decode(),
)
self.assertURLEqual(
form_action[1], "?%s" % self.get_preserved_filters_querystring()
)
post_data = {
"username": "dummy",
"password1": "test",
"password2": "test",
}
# Test redirect on "Save".
post_data["_save"] = 1
response = self.client.post(self.get_add_url(), data=post_data)
self.assertRedirects(
response, self.get_change_url(User.objects.get(username="dummy").pk)
)
post_data.pop("_save")
# Test redirect on "Save and continue".
post_data["username"] = "dummy2"
post_data["_continue"] = 1
response = self.client.post(self.get_add_url(), data=post_data)
self.assertRedirects(
response, self.get_change_url(User.objects.get(username="dummy2").pk)
)
post_data.pop("_continue")
# Test redirect on "Save and add new".
post_data["username"] = "dummy3"
post_data["_addanother"] = 1
response = self.client.post(self.get_add_url(), data=post_data)
self.assertRedirects(response, self.get_add_url())
post_data.pop("_addanother")
def test_add_view_without_preserved_filters(self):
response = self.client.get(self.get_add_url(add_preserved_filters=False))
# The action attribute is omitted.
self.assertContains(response, '<form method="post" id="user_form" novalidate>')
def test_delete_view(self):
# Test redirect on "Delete".
response = self.client.post(self.get_delete_url(), {"post": "yes"})
self.assertRedirects(response, self.get_changelist_url())
def test_url_prefix(self):
context = {
"preserved_filters": self.get_preserved_filters_querystring(),
"opts": User._meta,
}
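# Include a non-ASCII script prefix to check that the prefix is handled correctly when building URLs.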
prefixes = ("", "/prefix/", "/後台/")
for prefix in prefixes:
with self.subTest(prefix=prefix), override_script_prefix(prefix):
url = reverse(
"admin:auth_user_changelist", current_app=self.admin_site.name
)
self.assertURLEqual(
self.get_changelist_url(),
add_preserved_filters(context, url),
)
class NamespacedAdminKeepChangeListFiltersTests(AdminKeepChangeListFiltersTests):
admin_site = site2
@override_settings(ROOT_URLCONF="admin_views.urls")
class TestLabelVisibility(TestCase):
"""#11277 -Labels of hidden fields in admin were not hidden."""
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_all_fields_visible(self):
response = self.client.get(reverse("admin:admin_views_emptymodelvisible_add"))
self.assert_fieldline_visible(response)
self.assert_field_visible(response, "first")
self.assert_field_visible(response, "second")
def test_all_fields_hidden(self):
response = self.client.get(reverse("admin:admin_views_emptymodelhidden_add"))
self.assert_fieldline_hidden(response)
self.assert_field_hidden(response, "first")
self.assert_field_hidden(response, "second")
def test_mixin(self):
response = self.client.get(reverse("admin:admin_views_emptymodelmixin_add"))
self.assert_fieldline_visible(response)
self.assert_field_hidden(response, "first")
self.assert_field_visible(response, "second")
def assert_field_visible(self, response, field_name):
self.assertContains(
response, f'<div class="flex-container fieldBox field-{field_name}">'
)
def assert_field_hidden(self, response, field_name):
self.assertContains(
response, f'<div class="flex-container fieldBox field-{field_name} hidden">'
)
def assert_fieldline_visible(self, response):
self.assertContains(response, '<div class="form-row field-first field-second">')
def assert_fieldline_hidden(self, response):
self.assertContains(response, '<div class="form-row hidden')
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminViewOnSiteTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.s1 = State.objects.create(name="New York")
cls.s2 = State.objects.create(name="Illinois")
cls.s3 = State.objects.create(name="California")
cls.c1 = City.objects.create(state=cls.s1, name="New York")
cls.c2 = City.objects.create(state=cls.s2, name="Chicago")
cls.c3 = City.objects.create(state=cls.s3, name="San Francisco")
cls.r1 = Restaurant.objects.create(city=cls.c1, name="Italian Pizza")
cls.r2 = Restaurant.objects.create(city=cls.c1, name="Boulevard")
cls.r3 = Restaurant.objects.create(city=cls.c2, name="Chinese Dinner")
cls.r4 = Restaurant.objects.create(city=cls.c2, name="Angels")
cls.r5 = Restaurant.objects.create(city=cls.c2, name="Take Away")
cls.r6 = Restaurant.objects.create(city=cls.c3, name="The Unknown Restaurant")
cls.w1 = Worker.objects.create(work_at=cls.r1, name="Mario", surname="Rossi")
cls.w2 = Worker.objects.create(
work_at=cls.r1, name="Antonio", surname="Bianchi"
)
cls.w3 = Worker.objects.create(work_at=cls.r1, name="John", surname="Doe")
def setUp(self):
self.client.force_login(self.superuser)
def test_add_view_form_and_formsets_run_validation(self):
"""
Issue #20522
Verifying that if the parent form fails validation, the inlines also
run validation even if validation is contingent on parent form data.
Also, assertFormError() and assertFormSetError() is usable for admin
forms and formsets.
"""
# The form validation should fail because 'some_required_info' is
# not included on the parent form, and the family_name of the parent
# does not match that of the child
post_data = {
"family_name": "Test1",
"dependentchild_set-TOTAL_FORMS": "1",
"dependentchild_set-INITIAL_FORMS": "0",
"dependentchild_set-MAX_NUM_FORMS": "1",
"dependentchild_set-0-id": "",
"dependentchild_set-0-parent": "",
"dependentchild_set-0-family_name": "Test2",
}
response = self.client.post(
reverse("admin:admin_views_parentwithdependentchildren_add"), post_data
)
self.assertFormError(
response.context["adminform"],
"some_required_info",
["This field is required."],
)
self.assertFormError(response.context["adminform"], None, [])
self.assertFormSetError(
response.context["inline_admin_formset"],
0,
None,
[
"Children must share a family name with their parents in this "
"contrived test case"
],
)
self.assertFormSetError(
response.context["inline_admin_formset"], None, None, []
)
def test_change_view_form_and_formsets_run_validation(self):
"""
Issue #20522
Verifying that if the parent form fails validation, the inlines also
run validation even if validation is contingent on parent form data
"""
pwdc = ParentWithDependentChildren.objects.create(
some_required_info=6, family_name="Test1"
)
# The form validation should fail because 'some_required_info' is
# not included on the parent form, and the family_name of the parent
# does not match that of the child
post_data = {
"family_name": "Test2",
"dependentchild_set-TOTAL_FORMS": "1",
"dependentchild_set-INITIAL_FORMS": "0",
"dependentchild_set-MAX_NUM_FORMS": "1",
"dependentchild_set-0-id": "",
"dependentchild_set-0-parent": str(pwdc.id),
"dependentchild_set-0-family_name": "Test1",
}
response = self.client.post(
reverse(
"admin:admin_views_parentwithdependentchildren_change", args=(pwdc.id,)
),
post_data,
)
self.assertFormError(
response.context["adminform"],
"some_required_info",
["This field is required."],
)
self.assertFormSetError(
response.context["inline_admin_formset"],
0,
None,
[
"Children must share a family name with their parents in this "
"contrived test case"
],
)
def test_check(self):
"The view_on_site value is either a boolean or a callable"
try:
admin = CityAdmin(City, AdminSite())
CityAdmin.view_on_site = True
self.assertEqual(admin.check(), [])
CityAdmin.view_on_site = False
self.assertEqual(admin.check(), [])
CityAdmin.view_on_site = lambda obj: obj.get_absolute_url()
self.assertEqual(admin.check(), [])
CityAdmin.view_on_site = []
self.assertEqual(
admin.check(),
[
Error(
"The value of 'view_on_site' must be a callable or a boolean "
"value.",
obj=CityAdmin,
id="admin.E025",
),
],
)
finally:
# Restore the original values for the benefit of other tests.
CityAdmin.view_on_site = True
def test_false(self):
"The 'View on site' button is not displayed if view_on_site is False"
response = self.client.get(
reverse("admin:admin_views_restaurant_change", args=(self.r1.pk,))
)
content_type_pk = ContentType.objects.get_for_model(Restaurant).pk
self.assertNotContains(
response, reverse("admin:view_on_site", args=(content_type_pk, 1))
)
def test_true(self):
"The default behavior is followed if view_on_site is True"
response = self.client.get(
reverse("admin:admin_views_city_change", args=(self.c1.pk,))
)
content_type_pk = ContentType.objects.get_for_model(City).pk
self.assertContains(
response, reverse("admin:view_on_site", args=(content_type_pk, self.c1.pk))
)
def test_callable(self):
"The right link is displayed if view_on_site is a callable"
response = self.client.get(
reverse("admin:admin_views_worker_change", args=(self.w1.pk,))
)
self.assertContains(
response, '"/worker/%s/%s/"' % (self.w1.surname, self.w1.name)
)
def test_missing_get_absolute_url(self):
"None is returned if model doesn't have get_absolute_url"
model_admin = ModelAdmin(Worker, None)
self.assertIsNone(model_admin.get_view_on_site_url(Worker()))
def test_custom_admin_site(self):
model_admin = ModelAdmin(City, customadmin.site)
content_type_pk = ContentType.objects.get_for_model(City).pk
redirect_url = model_admin.get_view_on_site_url(self.c1)
self.assertEqual(
redirect_url,
reverse(
f"{customadmin.site.name}:view_on_site",
kwargs={
"content_type_id": content_type_pk,
"object_id": self.c1.pk,
},
),
)
@override_settings(ROOT_URLCONF="admin_views.urls")
class InlineAdminViewOnSiteTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.s1 = State.objects.create(name="New York")
cls.s2 = State.objects.create(name="Illinois")
cls.s3 = State.objects.create(name="California")
cls.c1 = City.objects.create(state=cls.s1, name="New York")
cls.c2 = City.objects.create(state=cls.s2, name="Chicago")
cls.c3 = City.objects.create(state=cls.s3, name="San Francisco")
cls.r1 = Restaurant.objects.create(city=cls.c1, name="Italian Pizza")
cls.r2 = Restaurant.objects.create(city=cls.c1, name="Boulevard")
cls.r3 = Restaurant.objects.create(city=cls.c2, name="Chinese Dinner")
cls.r4 = Restaurant.objects.create(city=cls.c2, name="Angels")
cls.r5 = Restaurant.objects.create(city=cls.c2, name="Take Away")
cls.r6 = Restaurant.objects.create(city=cls.c3, name="The Unknown Restaurant")
cls.w1 = Worker.objects.create(work_at=cls.r1, name="Mario", surname="Rossi")
cls.w2 = Worker.objects.create(
work_at=cls.r1, name="Antonio", surname="Bianchi"
)
cls.w3 = Worker.objects.create(work_at=cls.r1, name="John", surname="Doe")
def setUp(self):
self.client.force_login(self.superuser)
def test_false(self):
"The 'View on site' button is not displayed if view_on_site is False"
response = self.client.get(
reverse("admin:admin_views_state_change", args=(self.s1.pk,))
)
content_type_pk = ContentType.objects.get_for_model(City).pk
self.assertNotContains(
response, reverse("admin:view_on_site", args=(content_type_pk, self.c1.pk))
)
def test_true(self):
"The 'View on site' button is displayed if view_on_site is True"
response = self.client.get(
reverse("admin:admin_views_city_change", args=(self.c1.pk,))
)
content_type_pk = ContentType.objects.get_for_model(Restaurant).pk
self.assertContains(
response, reverse("admin:view_on_site", args=(content_type_pk, self.r1.pk))
)
def test_callable(self):
"The right link is displayed if view_on_site is a callable"
response = self.client.get(
reverse("admin:admin_views_restaurant_change", args=(self.r1.pk,))
)
self.assertContains(
response, '"/worker_inline/%s/%s/"' % (self.w1.surname, self.w1.name)
)
@override_settings(ROOT_URLCONF="admin_views.urls")
class GetFormsetsWithInlinesArgumentTest(TestCase):
"""
#23934 - When adding a new model instance in the admin, the 'obj' argument
of get_formsets_with_inlines() should be None. When changing, it should be
equal to the existing model instance.
The GetFormsetsArgumentCheckingAdmin ModelAdmin throws an exception
if obj is not None during add_view or obj is None during change_view.
"""
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_explicitly_provided_pk(self):
post_data = {"name": "1"}
response = self.client.post(
reverse("admin:admin_views_explicitlyprovidedpk_add"), post_data
)
self.assertEqual(response.status_code, 302)
post_data = {"name": "2"}
response = self.client.post(
reverse("admin:admin_views_explicitlyprovidedpk_change", args=(1,)),
post_data,
)
self.assertEqual(response.status_code, 302)
def test_implicitly_generated_pk(self):
post_data = {"name": "1"}
response = self.client.post(
reverse("admin:admin_views_implicitlygeneratedpk_add"), post_data
)
self.assertEqual(response.status_code, 302)
post_data = {"name": "2"}
response = self.client.post(
reverse("admin:admin_views_implicitlygeneratedpk_change", args=(1,)),
post_data,
)
self.assertEqual(response.status_code, 302)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminSiteFinalCatchAllPatternTests(TestCase):
"""
Verifies the behavior of the admin catch-all view.
* Anonymous/non-staff users are redirected to login for all URLs, whether
otherwise valid or not.
* APPEND_SLASH is applied for staff if needed.
* Otherwise Http404.
* Catch-all view disabled via AdminSite.final_catch_all_view.
"""
@classmethod
def setUpTestData(cls):
cls.staff_user = User.objects.create_user(
username="staff",
password="secret",
email="[email protected]",
is_staff=True,
)
cls.non_staff_user = User.objects.create_user(
username="user",
password="secret",
email="[email protected]",
is_staff=False,
)
def test_unknown_url_redirects_login_if_not_authenticated(self):
unknown_url = "/test_admin/admin/unknown/"
response = self.client.get(unknown_url)
self.assertRedirects(
response, "%s?next=%s" % (reverse("admin:login"), unknown_url)
)
def test_unknown_url_404_if_authenticated(self):
self.client.force_login(self.staff_user)
unknown_url = "/test_admin/admin/unknown/"
response = self.client.get(unknown_url)
self.assertEqual(response.status_code, 404)
def test_known_url_redirects_login_if_not_authenticated(self):
known_url = reverse("admin:admin_views_article_changelist")
response = self.client.get(known_url)
self.assertRedirects(
response, "%s?next=%s" % (reverse("admin:login"), known_url)
)
def test_known_url_missing_slash_redirects_login_if_not_authenticated(self):
known_url = reverse("admin:admin_views_article_changelist")[:-1]
response = self.client.get(known_url)
# Redirects with the next URL also missing the slash.
self.assertRedirects(
response, "%s?next=%s" % (reverse("admin:login"), known_url)
)
def test_non_admin_url_shares_url_prefix(self):
url = reverse("non_admin")[:-1]
response = self.client.get(url)
# Redirects with the next URL also missing the slash.
self.assertRedirects(response, "%s?next=%s" % (reverse("admin:login"), url))
def test_url_without_trailing_slash_if_not_authenticated(self):
url = reverse("admin:article_extra_json")
response = self.client.get(url)
self.assertRedirects(response, "%s?next=%s" % (reverse("admin:login"), url))
def test_unknown_url_without_trailing_slash_if_not_authenticated(self):
url = reverse("admin:article_extra_json")[:-1]
response = self.client.get(url)
self.assertRedirects(response, "%s?next=%s" % (reverse("admin:login"), url))
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true_unknown_url(self):
self.client.force_login(self.staff_user)
unknown_url = "/test_admin/admin/unknown/"
response = self.client.get(unknown_url[:-1])
self.assertEqual(response.status_code, 404)
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true(self):
self.client.force_login(self.staff_user)
known_url = reverse("admin:admin_views_article_changelist")
response = self.client.get(known_url[:-1])
self.assertRedirects(
response, known_url, status_code=301, target_status_code=403
)
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true_query_string(self):
self.client.force_login(self.staff_user)
known_url = reverse("admin:admin_views_article_changelist")
response = self.client.get("%s?id=1" % known_url[:-1])
self.assertRedirects(
response,
f"{known_url}?id=1",
status_code=301,
fetch_redirect_response=False,
)
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true_script_name(self):
self.client.force_login(self.staff_user)
known_url = reverse("admin:admin_views_article_changelist")
response = self.client.get(known_url[:-1], SCRIPT_NAME="/prefix/")
self.assertRedirects(
response,
"/prefix" + known_url,
status_code=301,
fetch_redirect_response=False,
)
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true_script_name_query_string(self):
self.client.force_login(self.staff_user)
known_url = reverse("admin:admin_views_article_changelist")
response = self.client.get("%s?id=1" % known_url[:-1], SCRIPT_NAME="/prefix/")
self.assertRedirects(
response,
f"/prefix{known_url}?id=1",
status_code=301,
fetch_redirect_response=False,
)
@override_settings(APPEND_SLASH=True, FORCE_SCRIPT_NAME="/prefix/")
def test_missing_slash_append_slash_true_force_script_name(self):
self.client.force_login(self.staff_user)
known_url = reverse("admin:admin_views_article_changelist")
response = self.client.get(known_url[:-1])
self.assertRedirects(
response,
"/prefix" + known_url,
status_code=301,
fetch_redirect_response=False,
)
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true_non_staff_user(self):
self.client.force_login(self.non_staff_user)
known_url = reverse("admin:admin_views_article_changelist")
response = self.client.get(known_url[:-1])
self.assertRedirects(
response,
"/test_admin/admin/login/?next=/test_admin/admin/admin_views/article",
)
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true_non_staff_user_query_string(self):
self.client.force_login(self.non_staff_user)
known_url = reverse("admin:admin_views_article_changelist")
response = self.client.get("%s?id=1" % known_url[:-1])
self.assertRedirects(
response,
"/test_admin/admin/login/?next=/test_admin/admin/admin_views/article"
"%3Fid%3D1",
)
@override_settings(APPEND_SLASH=False)
def test_missing_slash_append_slash_false(self):
self.client.force_login(self.staff_user)
known_url = reverse("admin:admin_views_article_changelist")
response = self.client.get(known_url[:-1])
self.assertEqual(response.status_code, 404)
@override_settings(APPEND_SLASH=True)
def test_single_model_no_append_slash(self):
self.client.force_login(self.staff_user)
known_url = reverse("admin9:admin_views_actor_changelist")
response = self.client.get(known_url[:-1])
self.assertEqual(response.status_code, 404)
# Same tests above with final_catch_all_view=False.
def test_unknown_url_404_if_not_authenticated_without_final_catch_all_view(self):
unknown_url = "/test_admin/admin10/unknown/"
response = self.client.get(unknown_url)
self.assertEqual(response.status_code, 404)
def test_unknown_url_404_if_authenticated_without_final_catch_all_view(self):
self.client.force_login(self.staff_user)
unknown_url = "/test_admin/admin10/unknown/"
response = self.client.get(unknown_url)
self.assertEqual(response.status_code, 404)
def test_known_url_redirects_login_if_not_auth_without_final_catch_all_view(
self,
):
known_url = reverse("admin10:admin_views_article_changelist")
response = self.client.get(known_url)
self.assertRedirects(
response, "%s?next=%s" % (reverse("admin10:login"), known_url)
)
def test_known_url_missing_slash_redirects_with_slash_if_not_auth_no_catch_all_view(
self,
):
known_url = reverse("admin10:admin_views_article_changelist")
response = self.client.get(known_url[:-1])
self.assertRedirects(
response, known_url, status_code=301, fetch_redirect_response=False
)
def test_non_admin_url_shares_url_prefix_without_final_catch_all_view(self):
url = reverse("non_admin10")
response = self.client.get(url[:-1])
self.assertRedirects(response, url, status_code=301)
def test_url_no_trailing_slash_if_not_auth_without_final_catch_all_view(
self,
):
url = reverse("admin10:article_extra_json")
response = self.client.get(url)
self.assertRedirects(response, "%s?next=%s" % (reverse("admin10:login"), url))
def test_unknown_url_no_trailing_slash_if_not_auth_without_final_catch_all_view(
self,
):
url = reverse("admin10:article_extra_json")[:-1]
response = self.client.get(url)
# Matches test_admin/admin10/admin_views/article/<path:object_id>/
self.assertRedirects(
response, url + "/", status_code=301, fetch_redirect_response=False
)
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true_unknown_url_without_final_catch_all_view(
self,
):
self.client.force_login(self.staff_user)
unknown_url = "/test_admin/admin10/unknown/"
response = self.client.get(unknown_url[:-1])
self.assertEqual(response.status_code, 404)
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true_without_final_catch_all_view(self):
self.client.force_login(self.staff_user)
known_url = reverse("admin10:admin_views_article_changelist")
response = self.client.get(known_url[:-1])
self.assertRedirects(
response, known_url, status_code=301, target_status_code=403
)
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true_query_without_final_catch_all_view(self):
self.client.force_login(self.staff_user)
known_url = reverse("admin10:admin_views_article_changelist")
response = self.client.get("%s?id=1" % known_url[:-1])
self.assertRedirects(
response,
f"{known_url}?id=1",
status_code=301,
fetch_redirect_response=False,
)
@override_settings(APPEND_SLASH=False)
def test_missing_slash_append_slash_false_without_final_catch_all_view(self):
self.client.force_login(self.staff_user)
known_url = reverse("admin10:admin_views_article_changelist")
response = self.client.get(known_url[:-1])
self.assertEqual(response.status_code, 404)
# Outside admin.
def test_non_admin_url_404_if_not_authenticated(self):
unknown_url = "/unknown/"
response = self.client.get(unknown_url)
# Does not redirect to the admin login.
self.assertEqual(response.status_code, 404)
979dc10aa5d8c3b2af555af69a3048219d07b5e0d213031469785493233da001
import re
from io import StringIO
from unittest import mock, skipUnless
from django.core.management import call_command
from django.core.management.commands import inspectdb
from django.db import connection
from django.db.backends.base.introspection import TableInfo
from django.test import TestCase, TransactionTestCase, skipUnlessDBFeature
from .models import PeopleMoreData, test_collation
def inspectdb_tables_only(table_name):
"""
Limit introspection to tables created for models of this app.
Some databases such as Oracle are extremely slow at introspection.
"""
return table_name.startswith("inspectdb_")
def inspectdb_views_only(table_name):
return table_name.startswith("inspectdb_") and table_name.endswith(
("_materialized", "_view")
)
def special_table_only(table_name):
return table_name.startswith("inspectdb_special")
class InspectDBTestCase(TestCase):
unique_re = re.compile(r".*unique_together = \((.+),\).*")
def test_stealth_table_name_filter_option(self):
out = StringIO()
call_command("inspectdb", table_name_filter=inspectdb_tables_only, stdout=out)
error_message = (
"inspectdb has examined a table that should have been filtered out."
)
        # contrib.contenttypes is one of the apps always installed when running
        # the Django test suite; check that one of its tables hasn't been
        # inspected.
self.assertNotIn(
"class DjangoContentType(models.Model):", out.getvalue(), msg=error_message
)
def test_table_option(self):
"""
inspectdb can inspect a subset of tables by passing the table names as
arguments.
"""
out = StringIO()
call_command("inspectdb", "inspectdb_people", stdout=out)
output = out.getvalue()
self.assertIn("class InspectdbPeople(models.Model):", output)
self.assertNotIn("InspectdbPeopledata", output)
def make_field_type_asserter(self):
"""
Call inspectdb and return a function to validate a field type in its
output.
"""
out = StringIO()
call_command("inspectdb", "inspectdb_columntypes", stdout=out)
output = out.getvalue()
def assertFieldType(name, definition):
out_def = re.search(r"^\s*%s = (models.*)$" % name, output, re.MULTILINE)[1]
self.assertEqual(definition, out_def)
return assertFieldType
def test_field_types(self):
"""Test introspection of various Django field types"""
assertFieldType = self.make_field_type_asserter()
introspected_field_types = connection.features.introspected_field_types
char_field_type = introspected_field_types["CharField"]
# Inspecting Oracle DB doesn't produce correct results (#19884):
# - it reports fields as blank=True when they aren't.
if (
not connection.features.interprets_empty_strings_as_nulls
and char_field_type == "CharField"
):
assertFieldType("char_field", "models.CharField(max_length=10)")
assertFieldType(
"null_char_field",
"models.CharField(max_length=10, blank=True, null=True)",
)
assertFieldType("email_field", "models.CharField(max_length=254)")
assertFieldType("file_field", "models.CharField(max_length=100)")
assertFieldType("file_path_field", "models.CharField(max_length=100)")
assertFieldType("slug_field", "models.CharField(max_length=50)")
assertFieldType("text_field", "models.TextField()")
assertFieldType("url_field", "models.CharField(max_length=200)")
if char_field_type == "TextField":
assertFieldType("char_field", "models.TextField()")
assertFieldType(
"null_char_field", "models.TextField(blank=True, null=True)"
)
assertFieldType("email_field", "models.TextField()")
assertFieldType("file_field", "models.TextField()")
assertFieldType("file_path_field", "models.TextField()")
assertFieldType("slug_field", "models.TextField()")
assertFieldType("text_field", "models.TextField()")
assertFieldType("url_field", "models.TextField()")
assertFieldType("date_field", "models.DateField()")
assertFieldType("date_time_field", "models.DateTimeField()")
if introspected_field_types["GenericIPAddressField"] == "GenericIPAddressField":
assertFieldType("gen_ip_address_field", "models.GenericIPAddressField()")
elif not connection.features.interprets_empty_strings_as_nulls:
assertFieldType("gen_ip_address_field", "models.CharField(max_length=39)")
assertFieldType(
"time_field", "models.%s()" % introspected_field_types["TimeField"]
)
if connection.features.has_native_uuid_field:
assertFieldType("uuid_field", "models.UUIDField()")
elif not connection.features.interprets_empty_strings_as_nulls:
assertFieldType("uuid_field", "models.CharField(max_length=32)")
@skipUnlessDBFeature("can_introspect_json_field", "supports_json_field")
def test_json_field(self):
out = StringIO()
call_command("inspectdb", "inspectdb_jsonfieldcolumntype", stdout=out)
output = out.getvalue()
if not connection.features.interprets_empty_strings_as_nulls:
self.assertIn("json_field = models.JSONField()", output)
self.assertIn(
"null_json_field = models.JSONField(blank=True, null=True)", output
)
@skipUnlessDBFeature("supports_comments")
def test_db_comments(self):
out = StringIO()
call_command("inspectdb", "inspectdb_dbcomment", stdout=out)
output = out.getvalue()
integer_field_type = connection.features.introspected_field_types[
"IntegerField"
]
self.assertIn(
f"rank = models.{integer_field_type}("
f"db_comment=\"'Rank' column comment\")",
output,
)
self.assertIn(
" db_table_comment = 'Custom table comment'",
output,
)
@skipUnlessDBFeature("supports_collation_on_charfield")
@skipUnless(test_collation, "Language collations are not supported.")
def test_char_field_db_collation(self):
out = StringIO()
call_command("inspectdb", "inspectdb_charfielddbcollation", stdout=out)
output = out.getvalue()
if not connection.features.interprets_empty_strings_as_nulls:
self.assertIn(
"char_field = models.CharField(max_length=10, "
"db_collation='%s')" % test_collation,
output,
)
else:
self.assertIn(
"char_field = models.CharField(max_length=10, "
"db_collation='%s', blank=True, null=True)" % test_collation,
output,
)
@skipUnlessDBFeature("supports_collation_on_textfield")
@skipUnless(test_collation, "Language collations are not supported.")
def test_text_field_db_collation(self):
out = StringIO()
call_command("inspectdb", "inspectdb_textfielddbcollation", stdout=out)
output = out.getvalue()
if not connection.features.interprets_empty_strings_as_nulls:
self.assertIn(
"text_field = models.TextField(db_collation='%s')" % test_collation,
output,
)
else:
self.assertIn(
"text_field = models.TextField(db_collation='%s, blank=True, "
"null=True)" % test_collation,
output,
)
@skipUnlessDBFeature("supports_unlimited_charfield")
def test_char_field_unlimited(self):
out = StringIO()
call_command("inspectdb", "inspectdb_charfieldunlimited", stdout=out)
output = out.getvalue()
self.assertIn("char_field = models.CharField()", output)
def test_number_field_types(self):
"""Test introspection of various Django field types"""
assertFieldType = self.make_field_type_asserter()
introspected_field_types = connection.features.introspected_field_types
auto_field_type = connection.features.introspected_field_types["AutoField"]
if auto_field_type != "AutoField":
assertFieldType(
"id", "models.%s(primary_key=True) # AutoField?" % auto_field_type
)
assertFieldType(
"big_int_field", "models.%s()" % introspected_field_types["BigIntegerField"]
)
bool_field_type = introspected_field_types["BooleanField"]
assertFieldType("bool_field", "models.{}()".format(bool_field_type))
assertFieldType(
"null_bool_field",
"models.{}(blank=True, null=True)".format(bool_field_type),
)
if connection.vendor != "sqlite":
assertFieldType(
"decimal_field", "models.DecimalField(max_digits=6, decimal_places=1)"
)
else: # Guessed arguments on SQLite, see #5014
assertFieldType(
"decimal_field",
"models.DecimalField(max_digits=10, decimal_places=5) "
"# max_digits and decimal_places have been guessed, "
"as this database handles decimal fields as float",
)
assertFieldType("float_field", "models.FloatField()")
assertFieldType(
"int_field", "models.%s()" % introspected_field_types["IntegerField"]
)
assertFieldType(
"pos_int_field",
"models.%s()" % introspected_field_types["PositiveIntegerField"],
)
assertFieldType(
"pos_big_int_field",
"models.%s()" % introspected_field_types["PositiveBigIntegerField"],
)
assertFieldType(
"pos_small_int_field",
"models.%s()" % introspected_field_types["PositiveSmallIntegerField"],
)
assertFieldType(
"small_int_field",
"models.%s()" % introspected_field_types["SmallIntegerField"],
)
@skipUnlessDBFeature("can_introspect_foreign_keys")
def test_attribute_name_not_python_keyword(self):
out = StringIO()
call_command("inspectdb", table_name_filter=inspectdb_tables_only, stdout=out)
output = out.getvalue()
error_message = (
"inspectdb generated an attribute name which is a Python keyword"
)
# Recursive foreign keys should be set to 'self'
self.assertIn("parent = models.ForeignKey('self', models.DO_NOTHING)", output)
self.assertNotIn(
"from = models.ForeignKey(InspectdbPeople, models.DO_NOTHING)",
output,
msg=error_message,
)
# As InspectdbPeople model is defined after InspectdbMessage, it should
# be quoted.
self.assertIn(
"from_field = models.ForeignKey('InspectdbPeople', models.DO_NOTHING, "
"db_column='from_id')",
output,
)
self.assertIn(
"people_pk = models.OneToOneField(InspectdbPeople, models.DO_NOTHING, "
"primary_key=True)",
output,
)
self.assertIn(
"people_unique = models.OneToOneField(InspectdbPeople, models.DO_NOTHING)",
output,
)
@skipUnlessDBFeature("can_introspect_foreign_keys")
def test_foreign_key_to_field(self):
out = StringIO()
call_command("inspectdb", "inspectdb_foreignkeytofield", stdout=out)
self.assertIn(
"to_field_fk = models.ForeignKey('InspectdbPeoplemoredata', "
"models.DO_NOTHING, to_field='people_unique_id')",
out.getvalue(),
)
def test_digits_column_name_introspection(self):
"""Introspection of column names consist/start with digits (#16536/#17676)"""
char_field_type = connection.features.introspected_field_types["CharField"]
out = StringIO()
call_command("inspectdb", "inspectdb_digitsincolumnname", stdout=out)
output = out.getvalue()
error_message = "inspectdb generated a model field name which is a number"
self.assertNotIn(
" 123 = models.%s" % char_field_type, output, msg=error_message
)
self.assertIn("number_123 = models.%s" % char_field_type, output)
error_message = (
"inspectdb generated a model field name which starts with a digit"
)
self.assertNotIn(
" 4extra = models.%s" % char_field_type, output, msg=error_message
)
self.assertIn("number_4extra = models.%s" % char_field_type, output)
self.assertNotIn(
" 45extra = models.%s" % char_field_type, output, msg=error_message
)
self.assertIn("number_45extra = models.%s" % char_field_type, output)
def test_special_column_name_introspection(self):
"""
Introspection of column names containing special characters,
unsuitable for Python identifiers
"""
out = StringIO()
call_command("inspectdb", table_name_filter=special_table_only, stdout=out)
output = out.getvalue()
base_name = connection.introspection.identifier_converter("Field")
integer_field_type = connection.features.introspected_field_types[
"IntegerField"
]
self.assertIn("field = models.%s()" % integer_field_type, output)
self.assertIn(
"field_field = models.%s(db_column='%s_')"
% (integer_field_type, base_name),
output,
)
self.assertIn(
"field_field_0 = models.%s(db_column='%s__')"
% (integer_field_type, base_name),
output,
)
self.assertIn(
"field_field_1 = models.%s(db_column='__field')" % integer_field_type,
output,
)
self.assertIn(
"prc_x = models.{}(db_column='prc(%) x')".format(integer_field_type), output
)
self.assertIn("tamaño = models.%s()" % integer_field_type, output)
def test_table_name_introspection(self):
"""
Introspection of table names containing special characters,
unsuitable for Python identifiers
"""
out = StringIO()
call_command("inspectdb", table_name_filter=special_table_only, stdout=out)
output = out.getvalue()
self.assertIn("class InspectdbSpecialTableName(models.Model):", output)
def test_custom_normalize_table_name(self):
def pascal_case_table_only(table_name):
return table_name.startswith("inspectdb_pascal")
class MyCommand(inspectdb.Command):
def normalize_table_name(self, table_name):
normalized_name = table_name.split(".")[1]
if connection.features.ignores_table_name_case:
normalized_name = normalized_name.lower()
return normalized_name
out = StringIO()
call_command(MyCommand(), table_name_filter=pascal_case_table_only, stdout=out)
if connection.features.ignores_table_name_case:
expected_model_name = "pascalcase"
else:
expected_model_name = "PascalCase"
self.assertIn(f"class {expected_model_name}(models.Model):", out.getvalue())
@skipUnlessDBFeature("supports_expression_indexes")
def test_table_with_func_unique_constraint(self):
out = StringIO()
call_command("inspectdb", "inspectdb_funcuniqueconstraint", stdout=out)
output = out.getvalue()
self.assertIn("class InspectdbFuncuniqueconstraint(models.Model):", output)
def test_managed_models(self):
"""
By default the command generates models with `Meta.managed = False`.
"""
out = StringIO()
call_command("inspectdb", "inspectdb_columntypes", stdout=out)
output = out.getvalue()
self.longMessage = False
self.assertIn(
" managed = False",
output,
msg="inspectdb should generate unmanaged models.",
)
def test_unique_together_meta(self):
out = StringIO()
call_command("inspectdb", "inspectdb_uniquetogether", stdout=out)
output = out.getvalue()
self.assertIn(" unique_together = (('", output)
unique_together_match = self.unique_re.findall(output)
# There should be one unique_together tuple.
self.assertEqual(len(unique_together_match), 1)
fields = unique_together_match[0]
# Fields with db_column = field name.
self.assertIn("('field1', 'field2')", fields)
# Fields from columns whose names are Python keywords.
self.assertIn("('field1', 'field2')", fields)
# Fields whose names normalize to the same Python field name and hence
# are given an integer suffix.
self.assertIn("('non_unique_column', 'non_unique_column_0')", fields)
@skipUnless(connection.vendor == "postgresql", "PostgreSQL specific SQL")
def test_unsupported_unique_together(self):
"""Unsupported index types (COALESCE here) are skipped."""
with connection.cursor() as c:
c.execute(
"CREATE UNIQUE INDEX Findex ON %s "
"(id, people_unique_id, COALESCE(message_id, -1))"
% PeopleMoreData._meta.db_table
)
try:
out = StringIO()
call_command(
"inspectdb",
table_name_filter=lambda tn: tn.startswith(
PeopleMoreData._meta.db_table
),
stdout=out,
)
output = out.getvalue()
self.assertIn("# A unique constraint could not be introspected.", output)
self.assertEqual(
self.unique_re.findall(output), ["('id', 'people_unique')"]
)
finally:
with connection.cursor() as c:
c.execute("DROP INDEX Findex")
@skipUnless(
connection.vendor == "sqlite",
"Only patched sqlite's DatabaseIntrospection.data_types_reverse for this test",
)
def test_custom_fields(self):
"""
Introspection of columns with a custom field (#21090)
"""
out = StringIO()
with mock.patch(
"django.db.connection.introspection.data_types_reverse."
"base_data_types_reverse",
{
"text": "myfields.TextField",
"bigint": "BigIntegerField",
},
):
call_command("inspectdb", "inspectdb_columntypes", stdout=out)
output = out.getvalue()
self.assertIn("text_field = myfields.TextField()", output)
self.assertIn("big_int_field = models.BigIntegerField()", output)
def test_introspection_errors(self):
"""
Introspection errors should not crash the command, and the error should
be visible in the output.
"""
out = StringIO()
with mock.patch(
"django.db.connection.introspection.get_table_list",
return_value=[TableInfo(name="nonexistent", type="t")],
):
call_command("inspectdb", stdout=out)
output = out.getvalue()
self.assertIn("# Unable to inspect table 'nonexistent'", output)
# The error message depends on the backend
self.assertIn("# The error was:", output)
def test_same_relations(self):
out = StringIO()
call_command("inspectdb", "inspectdb_message", stdout=out)
self.assertIn(
"author = models.ForeignKey('InspectdbPeople', models.DO_NOTHING, "
"related_name='inspectdbmessage_author_set')",
out.getvalue(),
)
class InspectDBTransactionalTests(TransactionTestCase):
available_apps = ["inspectdb"]
def test_include_views(self):
"""inspectdb --include-views creates models for database views."""
with connection.cursor() as cursor:
cursor.execute(
"CREATE VIEW inspectdb_people_view AS "
"SELECT id, name FROM inspectdb_people"
)
out = StringIO()
view_model = "class InspectdbPeopleView(models.Model):"
view_managed = "managed = False # Created from a view."
try:
call_command(
"inspectdb",
table_name_filter=inspectdb_views_only,
stdout=out,
)
no_views_output = out.getvalue()
self.assertNotIn(view_model, no_views_output)
self.assertNotIn(view_managed, no_views_output)
call_command(
"inspectdb",
table_name_filter=inspectdb_views_only,
include_views=True,
stdout=out,
)
with_views_output = out.getvalue()
self.assertIn(view_model, with_views_output)
self.assertIn(view_managed, with_views_output)
finally:
with connection.cursor() as cursor:
cursor.execute("DROP VIEW inspectdb_people_view")
@skipUnlessDBFeature("can_introspect_materialized_views")
def test_include_materialized_views(self):
"""inspectdb --include-views creates models for materialized views."""
with connection.cursor() as cursor:
cursor.execute(
"CREATE MATERIALIZED VIEW inspectdb_people_materialized AS "
"SELECT id, name FROM inspectdb_people"
)
out = StringIO()
view_model = "class InspectdbPeopleMaterialized(models.Model):"
view_managed = "managed = False # Created from a view."
try:
call_command(
"inspectdb",
table_name_filter=inspectdb_views_only,
stdout=out,
)
no_views_output = out.getvalue()
self.assertNotIn(view_model, no_views_output)
self.assertNotIn(view_managed, no_views_output)
call_command(
"inspectdb",
table_name_filter=inspectdb_views_only,
include_views=True,
stdout=out,
)
with_views_output = out.getvalue()
self.assertIn(view_model, with_views_output)
self.assertIn(view_managed, with_views_output)
finally:
with connection.cursor() as cursor:
cursor.execute("DROP MATERIALIZED VIEW inspectdb_people_materialized")
@skipUnless(connection.vendor == "postgresql", "PostgreSQL specific SQL")
def test_include_partitions(self):
"""inspectdb --include-partitions creates models for partitions."""
with connection.cursor() as cursor:
cursor.execute(
"""\
CREATE TABLE inspectdb_partition_parent (name text not null)
PARTITION BY LIST (left(upper(name), 1))
"""
)
cursor.execute(
"""\
CREATE TABLE inspectdb_partition_child
PARTITION OF inspectdb_partition_parent
FOR VALUES IN ('A', 'B', 'C')
"""
)
out = StringIO()
partition_model_parent = "class InspectdbPartitionParent(models.Model):"
partition_model_child = "class InspectdbPartitionChild(models.Model):"
partition_managed = "managed = False # Created from a partition."
try:
call_command(
"inspectdb", table_name_filter=inspectdb_tables_only, stdout=out
)
no_partitions_output = out.getvalue()
self.assertIn(partition_model_parent, no_partitions_output)
self.assertNotIn(partition_model_child, no_partitions_output)
self.assertNotIn(partition_managed, no_partitions_output)
call_command(
"inspectdb",
table_name_filter=inspectdb_tables_only,
include_partitions=True,
stdout=out,
)
with_partitions_output = out.getvalue()
self.assertIn(partition_model_parent, with_partitions_output)
self.assertIn(partition_model_child, with_partitions_output)
self.assertIn(partition_managed, with_partitions_output)
finally:
with connection.cursor() as cursor:
cursor.execute("DROP TABLE IF EXISTS inspectdb_partition_child")
cursor.execute("DROP TABLE IF EXISTS inspectdb_partition_parent")
@skipUnless(connection.vendor == "postgresql", "PostgreSQL specific SQL")
def test_foreign_data_wrapper(self):
with connection.cursor() as cursor:
cursor.execute("CREATE EXTENSION IF NOT EXISTS file_fdw")
cursor.execute(
"CREATE SERVER inspectdb_server FOREIGN DATA WRAPPER file_fdw"
)
cursor.execute(
"""
CREATE FOREIGN TABLE inspectdb_iris_foreign_table (
petal_length real,
petal_width real,
sepal_length real,
sepal_width real
) SERVER inspectdb_server OPTIONS (
program 'echo 1,2,3,4',
format 'csv'
)
"""
)
out = StringIO()
foreign_table_model = "class InspectdbIrisForeignTable(models.Model):"
foreign_table_managed = "managed = False"
try:
call_command(
"inspectdb",
table_name_filter=inspectdb_tables_only,
stdout=out,
)
output = out.getvalue()
self.assertIn(foreign_table_model, output)
self.assertIn(foreign_table_managed, output)
finally:
with connection.cursor() as cursor:
cursor.execute(
"DROP FOREIGN TABLE IF EXISTS inspectdb_iris_foreign_table"
)
cursor.execute("DROP SERVER IF EXISTS inspectdb_server")
cursor.execute("DROP EXTENSION IF EXISTS file_fdw")
@skipUnlessDBFeature("create_test_table_with_composite_primary_key")
def test_composite_primary_key(self):
table_name = "test_table_composite_pk"
with connection.cursor() as cursor:
cursor.execute(
connection.features.create_test_table_with_composite_primary_key
)
out = StringIO()
if connection.vendor == "sqlite":
field_type = connection.features.introspected_field_types["AutoField"]
else:
field_type = connection.features.introspected_field_types["IntegerField"]
try:
call_command("inspectdb", table_name, stdout=out)
output = out.getvalue()
self.assertIn(
f"column_1 = models.{field_type}(primary_key=True) # The composite "
f"primary key (column_1, column_2) found, that is not supported. The "
f"first column is selected.",
output,
)
self.assertIn(
"column_2 = models.%s()"
% connection.features.introspected_field_types["IntegerField"],
output,
)
finally:
with connection.cursor() as cursor:
cursor.execute("DROP TABLE %s" % table_name)
|
a49e41d22f33829e23e14f4555d265267d8b019535d46b3a5749eedf3b91f014 | from django.db import connection, models
from django.db.models.functions import Lower
class People(models.Model):
name = models.CharField(max_length=255)
parent = models.ForeignKey("self", models.CASCADE)
class Message(models.Model):
from_field = models.ForeignKey(People, models.CASCADE, db_column="from_id")
author = models.ForeignKey(People, models.CASCADE, related_name="message_authors")
class PeopleData(models.Model):
people_pk = models.ForeignKey(People, models.CASCADE, primary_key=True)
ssn = models.CharField(max_length=11)
class PeopleMoreData(models.Model):
people_unique = models.ForeignKey(People, models.CASCADE, unique=True)
message = models.ForeignKey(Message, models.CASCADE, blank=True, null=True)
license = models.CharField(max_length=255)
class ForeignKeyToField(models.Model):
to_field_fk = models.ForeignKey(
PeopleMoreData,
models.CASCADE,
to_field="people_unique",
)
class DigitsInColumnName(models.Model):
all_digits = models.CharField(max_length=11, db_column="123")
leading_digit = models.CharField(max_length=11, db_column="4extra")
leading_digits = models.CharField(max_length=11, db_column="45extra")
class SpecialName(models.Model):
field = models.IntegerField(db_column="field")
# Underscores
field_field_0 = models.IntegerField(db_column="Field_")
field_field_1 = models.IntegerField(db_column="Field__")
field_field_2 = models.IntegerField(db_column="__field")
# Other chars
prc_x = models.IntegerField(db_column="prc(%) x")
non_ascii = models.IntegerField(db_column="tamaño")
class Meta:
db_table = "inspectdb_special.table name"
class PascalCaseName(models.Model):
class Meta:
db_table = "inspectdb_pascal.PascalCase"
class ColumnTypes(models.Model):
id = models.AutoField(primary_key=True)
big_int_field = models.BigIntegerField()
bool_field = models.BooleanField(default=False)
null_bool_field = models.BooleanField(null=True)
char_field = models.CharField(max_length=10)
null_char_field = models.CharField(max_length=10, blank=True, null=True)
date_field = models.DateField()
date_time_field = models.DateTimeField()
decimal_field = models.DecimalField(max_digits=6, decimal_places=1)
email_field = models.EmailField()
file_field = models.FileField(upload_to="unused")
file_path_field = models.FilePathField()
float_field = models.FloatField()
int_field = models.IntegerField()
gen_ip_address_field = models.GenericIPAddressField(protocol="ipv4")
pos_big_int_field = models.PositiveBigIntegerField()
pos_int_field = models.PositiveIntegerField()
pos_small_int_field = models.PositiveSmallIntegerField()
slug_field = models.SlugField()
small_int_field = models.SmallIntegerField()
text_field = models.TextField()
time_field = models.TimeField()
url_field = models.URLField()
uuid_field = models.UUIDField()
class JSONFieldColumnType(models.Model):
json_field = models.JSONField()
null_json_field = models.JSONField(blank=True, null=True)
class Meta:
required_db_features = {
"can_introspect_json_field",
"supports_json_field",
}
test_collation = connection.features.test_collations.get("non_default")
class CharFieldDbCollation(models.Model):
char_field = models.CharField(max_length=10, db_collation=test_collation)
class Meta:
required_db_features = {"supports_collation_on_charfield"}
class TextFieldDbCollation(models.Model):
text_field = models.TextField(db_collation=test_collation)
class Meta:
required_db_features = {"supports_collation_on_textfield"}
class CharFieldUnlimited(models.Model):
char_field = models.CharField(max_length=None)
class Meta:
required_db_features = {"supports_unlimited_charfield"}
class UniqueTogether(models.Model):
field1 = models.IntegerField()
field2 = models.CharField(max_length=10)
from_field = models.IntegerField(db_column="from")
non_unique = models.IntegerField(db_column="non__unique_column")
non_unique_0 = models.IntegerField(db_column="non_unique__column")
class Meta:
unique_together = [
("field1", "field2"),
("from_field", "field1"),
("non_unique", "non_unique_0"),
]
class FuncUniqueConstraint(models.Model):
name = models.CharField(max_length=255)
rank = models.IntegerField()
class Meta:
constraints = [
models.UniqueConstraint(
Lower("name"), models.F("rank"), name="index_lower_name"
)
]
required_db_features = {"supports_expression_indexes"}
class DbComment(models.Model):
rank = models.IntegerField(db_comment="'Rank' column comment")
class Meta:
db_table_comment = "Custom table comment"
required_db_features = {"supports_comments"}
|
2ef5eca197fe34625908ba1053563b03c8cac177bc090817543f47a8b9f628e7 | from unittest import mock
from django.test import SimpleTestCase
from django.utils.functional import cached_property, classproperty, lazy
from django.utils.version import PY312
class FunctionalTests(SimpleTestCase):
def test_lazy(self):
t = lazy(lambda: tuple(range(3)), list, tuple)
for a, b in zip(t(), range(3)):
self.assertEqual(a, b)
def test_lazy_base_class(self):
"""lazy also finds base class methods in the proxy object"""
class Base:
def base_method(self):
pass
class Klazz(Base):
pass
t = lazy(lambda: Klazz(), Klazz)()
self.assertIn("base_method", dir(t))
def test_lazy_base_class_override(self):
"""lazy finds the correct (overridden) method implementation"""
class Base:
def method(self):
return "Base"
class Klazz(Base):
def method(self):
return "Klazz"
t = lazy(lambda: Klazz(), Base)()
self.assertEqual(t.method(), "Klazz")
def test_lazy_object_to_string(self):
class Klazz:
def __str__(self):
return "Î am ā Ǩlâzz."
def __bytes__(self):
return b"\xc3\x8e am \xc4\x81 binary \xc7\xa8l\xc3\xa2zz."
t = lazy(lambda: Klazz(), Klazz)()
self.assertEqual(str(t), "Î am ā Ǩlâzz.")
self.assertEqual(bytes(t), b"\xc3\x8e am \xc4\x81 binary \xc7\xa8l\xc3\xa2zz.")
def assertCachedPropertyWorks(self, attr, Class):
with self.subTest(attr=attr):
def get(source):
return getattr(source, attr)
obj = Class()
class SubClass(Class):
pass
subobj = SubClass()
# Docstring is preserved.
self.assertEqual(get(Class).__doc__, "Here is the docstring...")
self.assertEqual(get(SubClass).__doc__, "Here is the docstring...")
# It's cached.
self.assertEqual(get(obj), get(obj))
self.assertEqual(get(subobj), get(subobj))
# The correct value is returned.
self.assertEqual(get(obj)[0], 1)
self.assertEqual(get(subobj)[0], 1)
# State isn't shared between instances.
obj2 = Class()
subobj2 = SubClass()
self.assertNotEqual(get(obj), get(obj2))
self.assertNotEqual(get(subobj), get(subobj2))
# It behaves like a property when there's no instance.
self.assertIsInstance(get(Class), cached_property)
self.assertIsInstance(get(SubClass), cached_property)
# 'other_value' doesn't become a property.
self.assertTrue(callable(obj.other_value))
self.assertTrue(callable(subobj.other_value))
def test_cached_property(self):
"""cached_property caches its value and behaves like a property."""
class Class:
@cached_property
def value(self):
"""Here is the docstring..."""
return 1, object()
@cached_property
def __foo__(self):
"""Here is the docstring..."""
return 1, object()
def other_value(self):
"""Here is the docstring..."""
return 1, object()
other = cached_property(other_value)
attrs = ["value", "other", "__foo__"]
for attr in attrs:
self.assertCachedPropertyWorks(attr, Class)
def test_cached_property_auto_name(self):
"""
cached_property caches its value and behaves like a property
on mangled methods or when the name kwarg isn't set.
"""
class Class:
@cached_property
def __value(self):
"""Here is the docstring..."""
return 1, object()
def other_value(self):
"""Here is the docstring..."""
return 1, object()
other = cached_property(other_value)
attrs = ["_Class__value", "other"]
for attr in attrs:
self.assertCachedPropertyWorks(attr, Class)
def test_cached_property_reuse_different_names(self):
"""Disallow this case because the decorated function wouldn't be cached."""
type_msg = (
"Cannot assign the same cached_property to two different names ('a' and "
"'b')."
)
if PY312:
error_type = TypeError
msg = type_msg
else:
error_type = RuntimeError
msg = "Error calling __set_name__"
with self.assertRaisesMessage(error_type, msg) as ctx:
class ReusedCachedProperty:
@cached_property
def a(self):
pass
b = a
if not PY312:
self.assertEqual(str(ctx.exception.__context__), str(TypeError(type_msg)))
def test_cached_property_reuse_same_name(self):
"""
Reusing a cached_property on different classes under the same name is
allowed.
"""
counter = 0
@cached_property
def _cp(_self):
nonlocal counter
counter += 1
return counter
class A:
cp = _cp
class B:
cp = _cp
a = A()
b = B()
self.assertEqual(a.cp, 1)
self.assertEqual(b.cp, 2)
self.assertEqual(a.cp, 1)
def test_cached_property_set_name_not_called(self):
cp = cached_property(lambda s: None)
class Foo:
pass
Foo.cp = cp
msg = (
"Cannot use cached_property instance without calling __set_name__() on it."
)
with self.assertRaisesMessage(TypeError, msg):
Foo().cp
def test_lazy_add(self):
lazy_4 = lazy(lambda: 4, int)
lazy_5 = lazy(lambda: 5, int)
self.assertEqual(lazy_4() + lazy_5(), 9)
def test_lazy_equality(self):
"""
== and != work correctly for Promises.
"""
lazy_a = lazy(lambda: 4, int)
lazy_b = lazy(lambda: 4, int)
lazy_c = lazy(lambda: 5, int)
self.assertEqual(lazy_a(), lazy_b())
self.assertNotEqual(lazy_b(), lazy_c())
def test_lazy_repr_text(self):
original_object = "Lazy translation text"
lazy_obj = lazy(lambda: original_object, str)
self.assertEqual(repr(original_object), repr(lazy_obj()))
def test_lazy_repr_int(self):
original_object = 15
lazy_obj = lazy(lambda: original_object, int)
self.assertEqual(repr(original_object), repr(lazy_obj()))
def test_lazy_repr_bytes(self):
original_object = b"J\xc3\xbcst a str\xc3\xadng"
lazy_obj = lazy(lambda: original_object, bytes)
self.assertEqual(repr(original_object), repr(lazy_obj()))
def test_lazy_class_preparation_caching(self):
        # lazy() should prepare the proxy class only once, i.e. the first time
        # it's used.
lazified = lazy(lambda: 0, int)
__proxy__ = lazified().__class__
with mock.patch.object(__proxy__, "__prepare_class__") as mocked:
lazified()
mocked.assert_not_called()
def test_lazy_bytes_and_str_result_classes(self):
lazy_obj = lazy(lambda: "test", str, bytes)
msg = "Cannot call lazy() with both bytes and text return types."
with self.assertRaisesMessage(ValueError, msg):
lazy_obj()
def test_lazy_str_cast_mixed_result_types(self):
lazy_value = lazy(lambda: [1], str, list)()
self.assertEqual(str(lazy_value), "[1]")
def test_classproperty_getter(self):
class Foo:
foo_attr = 123
def __init__(self):
self.foo_attr = 456
@classproperty
def foo(cls):
return cls.foo_attr
class Bar:
bar = classproperty()
@bar.getter
def bar(cls):
return 123
self.assertEqual(Foo.foo, 123)
self.assertEqual(Foo().foo, 123)
self.assertEqual(Bar.bar, 123)
self.assertEqual(Bar().bar, 123)
def test_classproperty_override_getter(self):
class Foo:
@classproperty
def foo(cls):
return 123
@foo.getter
def foo(cls):
return 456
self.assertEqual(Foo.foo, 456)
self.assertEqual(Foo().foo, 456)
|
0bfe7fe7c7ad31c6d30969dd9cd3143692e6cc71f9cf668e1a8805ae3f1d3525 | from asgiref.sync import iscoroutinefunction
from django.http import HttpRequest, HttpResponse
from django.test import SimpleTestCase
from django.views.decorators.common import no_append_slash
class NoAppendSlashTests(SimpleTestCase):
def test_wrapped_sync_function_is_not_coroutine_function(self):
def sync_view(request):
return HttpResponse()
wrapped_view = no_append_slash(sync_view)
self.assertIs(iscoroutinefunction(wrapped_view), False)
def test_wrapped_async_function_is_coroutine_function(self):
async def async_view(request):
return HttpResponse()
wrapped_view = no_append_slash(async_view)
self.assertIs(iscoroutinefunction(wrapped_view), True)
def test_no_append_slash_decorator(self):
@no_append_slash
def sync_view(request):
return HttpResponse()
self.assertIs(sync_view.should_append_slash, False)
self.assertIsInstance(sync_view(HttpRequest()), HttpResponse)
async def test_no_append_slash_decorator_async_view(self):
@no_append_slash
async def async_view(request):
return HttpResponse()
self.assertIs(async_view.should_append_slash, False)
self.assertIsInstance(await async_view(HttpRequest()), HttpResponse)
|
388e3ac8978c8c6019f0473913bd8b6101124c81a2866fc782d702a45f72143e | import math
from decimal import Decimal
from django.core import validators
from django.core.exceptions import ValidationError
from django.db import models
from django.test import TestCase
from .models import BigD, Foo
class DecimalFieldTests(TestCase):
def test_to_python(self):
f = models.DecimalField(max_digits=4, decimal_places=2)
self.assertEqual(f.to_python(3), Decimal("3"))
self.assertEqual(f.to_python("3.14"), Decimal("3.14"))
# to_python() converts floats and honors max_digits.
self.assertEqual(f.to_python(3.1415926535897), Decimal("3.142"))
self.assertEqual(f.to_python(2.4), Decimal("2.400"))
# Uses default rounding of ROUND_HALF_EVEN.
self.assertEqual(f.to_python(2.0625), Decimal("2.062"))
self.assertEqual(f.to_python(2.1875), Decimal("2.188"))
def test_invalid_value(self):
field = models.DecimalField(max_digits=4, decimal_places=2)
msg = "“%s” value must be a decimal number."
tests = [
(),
[],
{},
set(),
object(),
complex(),
"non-numeric string",
b"non-numeric byte-string",
]
for value in tests:
with self.subTest(value):
with self.assertRaisesMessage(ValidationError, msg % (value,)):
field.clean(value, None)
def test_default(self):
f = models.DecimalField(default=Decimal("0.00"))
self.assertEqual(f.get_default(), Decimal("0.00"))
def test_get_prep_value(self):
f = models.DecimalField(max_digits=5, decimal_places=1)
self.assertIsNone(f.get_prep_value(None))
self.assertEqual(f.get_prep_value("2.4"), Decimal("2.4"))
def test_filter_with_strings(self):
"""
Should be able to filter decimal fields using strings (#8023).
"""
foo = Foo.objects.create(a="abc", d=Decimal("12.34"))
self.assertEqual(list(Foo.objects.filter(d="12.34")), [foo])
def test_save_without_float_conversion(self):
"""
Ensure decimals don't go through a corrupting float conversion during
save (#5079).
"""
bd = BigD(d="12.9")
bd.save()
bd = BigD.objects.get(pk=bd.pk)
self.assertEqual(bd.d, Decimal("12.9"))
def test_save_nan_invalid(self):
msg = "“nan” value must be a decimal number."
for value in [float("nan"), math.nan, "nan"]:
with self.subTest(value), self.assertRaisesMessage(ValidationError, msg):
BigD.objects.create(d=value)
def test_save_inf_invalid(self):
msg = "“inf” value must be a decimal number."
for value in [float("inf"), math.inf, "inf"]:
with self.subTest(value), self.assertRaisesMessage(ValidationError, msg):
BigD.objects.create(d=value)
msg = "“-inf” value must be a decimal number."
for value in [float("-inf"), -math.inf, "-inf"]:
with self.subTest(value), self.assertRaisesMessage(ValidationError, msg):
BigD.objects.create(d=value)
def test_fetch_from_db_without_float_rounding(self):
big_decimal = BigD.objects.create(d=Decimal(".100000000000000000000000000005"))
big_decimal.refresh_from_db()
self.assertEqual(big_decimal.d, Decimal(".100000000000000000000000000005"))
def test_lookup_really_big_value(self):
"""
Really big values can be used in a filter statement.
"""
# This should not crash.
self.assertSequenceEqual(Foo.objects.filter(d__gte=100000000000), [])
def test_lookup_decimal_larger_than_max_digits(self):
self.assertSequenceEqual(Foo.objects.filter(d__lte=Decimal("123456")), [])
def test_max_digits_validation(self):
field = models.DecimalField(max_digits=2)
expected_message = validators.DecimalValidator.messages["max_digits"] % {
"max": 2
}
with self.assertRaisesMessage(ValidationError, expected_message):
field.clean(100, None)
def test_max_decimal_places_validation(self):
field = models.DecimalField(decimal_places=1)
expected_message = validators.DecimalValidator.messages[
"max_decimal_places"
] % {"max": 1}
with self.assertRaisesMessage(ValidationError, expected_message):
field.clean(Decimal("0.99"), None)
def test_max_whole_digits_validation(self):
field = models.DecimalField(max_digits=3, decimal_places=1)
expected_message = validators.DecimalValidator.messages["max_whole_digits"] % {
"max": 2
}
with self.assertRaisesMessage(ValidationError, expected_message):
field.clean(Decimal("999"), None)
def test_roundtrip_with_trailing_zeros(self):
"""Trailing zeros in the fractional part aren't truncated."""
obj = Foo.objects.create(a="bar", d=Decimal("8.320"))
obj.refresh_from_db()
self.assertEqual(obj.d.compare_total(Decimal("8.320")), Decimal("0"))
|
8f9615c5622a90ccd8b5ee51fea7c39ba6d9b52790daebf7536f797581fee7dc | from django.test import SimpleTestCase
from django.utils.safestring import mark_safe
from ..utils import setup
class EscapeseqTests(SimpleTestCase):
"""
The "escapeseq" filter works the same whether autoescape is on or off,
and has no effect on strings already marked as safe.
"""
@setup(
{
"escapeseq_basic": (
'{{ a|escapeseq|join:", " }} -- {{ b|escapeseq|join:", " }}'
),
}
)
def test_basic(self):
output = self.engine.render_to_string(
"escapeseq_basic",
{"a": ["x&y", "<p>"], "b": [mark_safe("x&y"), mark_safe("<p>")]},
)
self.assertEqual(output, "x&y, <p> -- x&y, <p>")
@setup(
{
"escapeseq_autoescape_off": (
'{% autoescape off %}{{ a|escapeseq|join:", " }}'
" -- "
'{{ b|escapeseq|join:", "}}{% endautoescape %}'
)
}
)
def test_autoescape_off(self):
output = self.engine.render_to_string(
"escapeseq_autoescape_off",
{"a": ["x&y", "<p>"], "b": [mark_safe("x&y"), mark_safe("<p>")]},
)
self.assertEqual(output, "x&y, <p> -- x&y, <p>")
@setup({"escapeseq_join": '{{ a|escapeseq|join:"<br/>" }}'})
def test_chain_join(self):
output = self.engine.render_to_string("escapeseq_join", {"a": ["x&y", "<p>"]})
self.assertEqual(output, "x&y<br/><p>")
@setup(
{
"escapeseq_join_autoescape_off": (
'{% autoescape off %}{{ a|escapeseq|join:"<br/>" }}{% endautoescape %}'
),
}
)
def test_chain_join_autoescape_off(self):
output = self.engine.render_to_string(
"escapeseq_join_autoescape_off", {"a": ["x&y", "<p>"]}
)
self.assertEqual(output, "x&y<br/><p>")
|
c9e95ce26f4c6e4b40b753b2bb5399ae1499ca0697e8d7c8805f22cb84cbdb1e | """HTML utilities suitable for global use."""
import html
import json
import re
import warnings
from html.parser import HTMLParser
from urllib.parse import parse_qsl, quote, unquote, urlencode, urlsplit, urlunsplit
from django.utils.deprecation import RemovedInDjango60Warning
from django.utils.encoding import punycode
from django.utils.functional import Promise, keep_lazy, keep_lazy_text
from django.utils.http import RFC3986_GENDELIMS, RFC3986_SUBDELIMS
from django.utils.regex_helper import _lazy_re_compile
from django.utils.safestring import SafeData, SafeString, mark_safe
from django.utils.text import normalize_newlines
@keep_lazy(SafeString)
def escape(text):
"""
Return the given text with ampersands, quotes and angle brackets encoded
for use in HTML.
Always escape input, even if it's already escaped and marked as such.
This may result in double-escaping. If this is a concern, use
conditional_escape() instead.
"""
return SafeString(html.escape(str(text)))
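# Illustrative sketch (comments only, not part of the original module):
# escape() is unconditional, so already-escaped input is escaped again:
#
#     >>> escape("<b> & 'c'")
#     '&lt;b&gt; &amp; &#x27;c&#x27;'
#     >>> escape(escape("&"))  # double-escaping; see conditional_escape()
#     '&amp;amp;'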
_js_escapes = {
ord("\\"): "\\u005C",
ord("'"): "\\u0027",
ord('"'): "\\u0022",
ord(">"): "\\u003E",
ord("<"): "\\u003C",
ord("&"): "\\u0026",
ord("="): "\\u003D",
ord("-"): "\\u002D",
ord(";"): "\\u003B",
ord("`"): "\\u0060",
ord("\u2028"): "\\u2028",
ord("\u2029"): "\\u2029",
}
# Escape every ASCII character with a value less than 32.
_js_escapes.update((ord("%c" % z), "\\u%04X" % z) for z in range(32))
@keep_lazy(SafeString)
def escapejs(value):
"""Hex encode characters for use in JavaScript strings."""
return mark_safe(str(value).translate(_js_escapes))
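# Illustrative sketch (an assumed interactive session, not part of the
# original module): escapejs() makes a value safe to embed inside a quoted
# JavaScript string literal:
#
#     >>> escapejs("</script> & 'quotes'")
#     '\\u003C/script\\u003E \\u0026 \\u0027quotes\\u0027'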
_json_script_escapes = {
ord(">"): "\\u003E",
ord("<"): "\\u003C",
ord("&"): "\\u0026",
}
def json_script(value, element_id=None, encoder=None):
"""
Escape all the HTML/XML special characters with their unicode escapes, so
value is safe to be output anywhere except for inside a tag attribute. Wrap
the escaped JSON in a script tag.
"""
from django.core.serializers.json import DjangoJSONEncoder
json_str = json.dumps(value, cls=encoder or DjangoJSONEncoder).translate(
_json_script_escapes
)
if element_id:
template = '<script id="{}" type="application/json">{}</script>'
args = (element_id, mark_safe(json_str))
else:
template = '<script type="application/json">{}</script>'
args = (mark_safe(json_str),)
return format_html(template, *args)
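# Illustrative sketch (not part of the original module; "payload" is a
# hypothetical element id): json_script() keeps "</script>" and similar
# sequences inert inside the generated tag:
#
#     >>> json_script({"msg": "</script>"}, "payload")
#     '<script id="payload" type="application/json">{"msg": "\\u003C/script\\u003E"}</script>'
#
# Client-side, the value can be read back with
# JSON.parse(document.getElementById("payload").textContent).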
def conditional_escape(text):
"""
Similar to escape(), except that it doesn't operate on pre-escaped strings.
This function relies on the __html__ convention used both by Django's
SafeData class and by third-party libraries like markupsafe.
"""
if isinstance(text, Promise):
text = str(text)
if hasattr(text, "__html__"):
return text.__html__()
else:
return escape(text)
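# Illustrative sketch (not part of the original module): unlike escape(),
# conditional_escape() leaves anything implementing __html__ untouched:
#
#     >>> conditional_escape("<b>")
#     '&lt;b&gt;'
#     >>> conditional_escape(mark_safe("<b>"))
#     '<b>'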
def format_html(format_string, *args, **kwargs):
"""
Similar to str.format, but pass all arguments through conditional_escape(),
and call mark_safe() on the result. This function should be used instead
of str.format or % interpolation to build up small HTML fragments.
"""
if not (args or kwargs):
# RemovedInDjango60Warning: when the deprecation ends, replace with:
# raise ValueError("args or kwargs must be provided.")
warnings.warn(
"Calling format_html() without passing args or kwargs is deprecated.",
RemovedInDjango60Warning,
)
args_safe = map(conditional_escape, args)
kwargs_safe = {k: conditional_escape(v) for (k, v) in kwargs.items()}
return mark_safe(format_string.format(*args_safe, **kwargs_safe))
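# Illustrative sketch (not part of the original module): positional and
# keyword arguments are escaped, while the format string itself is trusted:
#
#     >>> format_html('<a href="{url}">{}</a>', "R&D", url="/r&d/")
#     '<a href="/r&amp;d/">R&amp;D</a>'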
def format_html_join(sep, format_string, args_generator):
"""
A wrapper of format_html, for the common case of a group of arguments that
need to be formatted using the same format string, and then joined using
'sep'. 'sep' is also passed through conditional_escape.
'args_generator' should be an iterator that returns the sequence of 'args'
that will be passed to format_html.
Example:
format_html_join('\n', "<li>{} {}</li>", ((u.first_name, u.last_name)
for u in users))
"""
return mark_safe(
conditional_escape(sep).join(
format_html(format_string, *args) for args in args_generator
)
)
@keep_lazy_text
def linebreaks(value, autoescape=False):
"""Convert newlines into <p> and <br>s."""
value = normalize_newlines(value)
paras = re.split("\n{2,}", str(value))
if autoescape:
paras = ["<p>%s</p>" % escape(p).replace("\n", "<br>") for p in paras]
else:
paras = ["<p>%s</p>" % p.replace("\n", "<br>") for p in paras]
return "\n\n".join(paras)
class MLStripper(HTMLParser):
def __init__(self):
super().__init__(convert_charrefs=False)
self.reset()
self.fed = []
def handle_data(self, d):
self.fed.append(d)
def handle_entityref(self, name):
self.fed.append("&%s;" % name)
def handle_charref(self, name):
self.fed.append("&#%s;" % name)
def get_data(self):
return "".join(self.fed)
def _strip_once(value):
"""
Internal tag stripping utility used by strip_tags.
"""
s = MLStripper()
s.feed(value)
s.close()
return s.get_data()
@keep_lazy_text
def strip_tags(value):
"""Return the given HTML with all tags stripped."""
    # Note: in the typical case this loop executes _strip_once() only once. The
    # loop condition is redundant, but it helps reduce the number of executions
    # of _strip_once().
value = str(value)
while "<" in value and ">" in value:
new_value = _strip_once(value)
if value.count("<") == new_value.count("<"):
# _strip_once wasn't able to detect more tags.
break
value = new_value
return value
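# Illustrative sketch (not part of the original module):
#
#     >>> strip_tags("<p>Hello <a href='/'>world</a>!</p>")
#     'Hello world!'
#
# The loop above guards against input where stripping once reveals new tags,
# e.g. "<<b>script>" becomes "<script>" after one pass and needs another.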
@keep_lazy_text
def strip_spaces_between_tags(value):
"""Return the given HTML with spaces between tags removed."""
return re.sub(r">\s+<", "><", str(value))
def smart_urlquote(url):
"""Quote a URL if it isn't already quoted."""
def unquote_quote(segment):
segment = unquote(segment)
# Tilde is part of RFC 3986 Section 2.3 Unreserved Characters,
# see also https://bugs.python.org/issue16285
return quote(segment, safe=RFC3986_SUBDELIMS + RFC3986_GENDELIMS + "~")
# Handle IDN before quoting.
try:
scheme, netloc, path, query, fragment = urlsplit(url)
except ValueError:
# invalid IPv6 URL (normally square brackets in hostname part).
return unquote_quote(url)
try:
netloc = punycode(netloc) # IDN -> ACE
except UnicodeError: # invalid domain part
return unquote_quote(url)
if query:
# Separately unquoting key/value, so as to not mix querystring separators
# included in query values. See #22267.
query_parts = [
(unquote(q[0]), unquote(q[1]))
for q in parse_qsl(query, keep_blank_values=True)
]
# urlencode will take care of quoting
query = urlencode(query_parts)
path = unquote_quote(path)
fragment = unquote_quote(fragment)
return urlunsplit((scheme, netloc, path, query, fragment))
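# Illustrative sketch (not part of the original module): the path is quoted,
# the query string is re-encoded pair by pair, and an IDN host would be
# converted to its ASCII (punycode) form:
#
#     >>> smart_urlquote("http://example.com/path with spaces/?q=a b")
#     'http://example.com/path%20with%20spaces/?q=a+b'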
class Urlizer:
"""
Convert any URLs in text into clickable links.
Work on http://, https://, www. links, and also on links ending in one of
the original seven gTLDs (.com, .edu, .gov, .int, .mil, .net, and .org).
Links can have trailing punctuation (periods, commas, close-parens) and
leading punctuation (opening parens) and it'll still do the right thing.
"""
trailing_punctuation_chars = ".,:;!"
wrapping_punctuation = [("(", ")"), ("[", "]")]
simple_url_re = _lazy_re_compile(r"^https?://\[?\w", re.IGNORECASE)
simple_url_2_re = _lazy_re_compile(
r"^www\.|^(?!http)\w[^@]+\.(com|edu|gov|int|mil|net|org)($|/.*)$", re.IGNORECASE
)
word_split_re = _lazy_re_compile(r"""([\s<>"']+)""")
mailto_template = "mailto:{local}@{domain}"
url_template = '<a href="{href}"{attrs}>{url}</a>'
def __call__(self, text, trim_url_limit=None, nofollow=False, autoescape=False):
"""
If trim_url_limit is not None, truncate the URLs in the link text
longer than this limit to trim_url_limit - 1 characters and append an
ellipsis.
If nofollow is True, give the links a rel="nofollow" attribute.
If autoescape is True, autoescape the link text and URLs.
"""
safe_input = isinstance(text, SafeData)
words = self.word_split_re.split(str(text))
return "".join(
[
self.handle_word(
word,
safe_input=safe_input,
trim_url_limit=trim_url_limit,
nofollow=nofollow,
autoescape=autoescape,
)
for word in words
]
)
def handle_word(
self,
word,
*,
safe_input,
trim_url_limit=None,
nofollow=False,
autoescape=False,
):
if "." in word or "@" in word or ":" in word:
# lead: Punctuation trimmed from the beginning of the word.
# middle: State of the word.
# trail: Punctuation trimmed from the end of the word.
lead, middle, trail = self.trim_punctuation(word)
# Make URL we want to point to.
url = None
nofollow_attr = ' rel="nofollow"' if nofollow else ""
if self.simple_url_re.match(middle):
url = smart_urlquote(html.unescape(middle))
elif self.simple_url_2_re.match(middle):
url = smart_urlquote("http://%s" % html.unescape(middle))
elif ":" not in middle and self.is_email_simple(middle):
local, domain = middle.rsplit("@", 1)
try:
domain = punycode(domain)
except UnicodeError:
return word
url = self.mailto_template.format(local=local, domain=domain)
nofollow_attr = ""
# Make link.
if url:
trimmed = self.trim_url(middle, limit=trim_url_limit)
if autoescape and not safe_input:
lead, trail = escape(lead), escape(trail)
trimmed = escape(trimmed)
middle = self.url_template.format(
href=escape(url),
attrs=nofollow_attr,
url=trimmed,
)
return mark_safe(f"{lead}{middle}{trail}")
else:
if safe_input:
return mark_safe(word)
elif autoescape:
return escape(word)
elif safe_input:
return mark_safe(word)
elif autoescape:
return escape(word)
return word
def trim_url(self, x, *, limit):
if limit is None or len(x) <= limit:
return x
return "%s…" % x[: max(0, limit - 1)]
def trim_punctuation(self, word):
"""
Trim trailing and wrapping punctuation from `word`. Return the items of
the new state.
"""
lead, middle, trail = "", word, ""
# Continue trimming until middle remains unchanged.
trimmed_something = True
while trimmed_something:
trimmed_something = False
# Trim wrapping punctuation.
for opening, closing in self.wrapping_punctuation:
if middle.startswith(opening):
middle = middle.removeprefix(opening)
lead += opening
trimmed_something = True
# Keep parentheses at the end only if they're balanced.
if (
middle.endswith(closing)
and middle.count(closing) == middle.count(opening) + 1
):
middle = middle.removesuffix(closing)
trail = closing + trail
trimmed_something = True
# Trim trailing punctuation (after trimming wrapping punctuation,
# as encoded entities contain ';'). Unescape entities to avoid
# breaking them by removing ';'.
middle_unescaped = html.unescape(middle)
stripped = middle_unescaped.rstrip(self.trailing_punctuation_chars)
if middle_unescaped != stripped:
punctuation_count = len(middle_unescaped) - len(stripped)
trail = middle[-punctuation_count:] + trail
middle = middle[:-punctuation_count]
trimmed_something = True
return lead, middle, trail
@staticmethod
def is_email_simple(value):
"""Return True if value looks like an email address."""
# An @ must be in the middle of the value.
if "@" not in value or value.startswith("@") or value.endswith("@"):
return False
try:
p1, p2 = value.split("@")
except ValueError:
# value contains more than one @.
return False
# Dot must be in p2 (e.g. example.com)
if "." not in p2 or p2.startswith("."):
return False
return True
urlizer = Urlizer()
@keep_lazy_text
def urlize(text, trim_url_limit=None, nofollow=False, autoescape=False):
return urlizer(
text, trim_url_limit=trim_url_limit, nofollow=nofollow, autoescape=autoescape
)
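# Illustrative sketch (not part of the original module): a bare www. domain is
# linkified with an http:// href, and trailing punctuation stays outside the
# generated anchor:
#
#     >>> urlize("Visit www.djangoproject.com.", nofollow=True)
#     'Visit <a href="http://www.djangoproject.com" rel="nofollow">www.djangoproject.com</a>.'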
def avoid_wrapping(value):
"""
Avoid text wrapping in the middle of a phrase by adding non-breaking
spaces where there previously were normal spaces.
"""
return value.replace(" ", "\xa0")
def html_safe(klass):
"""
A decorator that defines the __html__ method. This helps non-Django
templates to detect classes whose __str__ methods return SafeString.
"""
if "__html__" in klass.__dict__:
raise ValueError(
"can't apply @html_safe to %s because it defines "
"__html__()." % klass.__name__
)
if "__str__" not in klass.__dict__:
raise ValueError(
"can't apply @html_safe to %s because it doesn't "
"define __str__()." % klass.__name__
)
klass_str = klass.__str__
klass.__str__ = lambda self: mark_safe(klass_str(self))
klass.__html__ = lambda self: str(self)
return klass
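# Illustrative sketch (not part of the original module; Badge is a
# hypothetical class): after decoration, both str(instance) and
# instance.__html__() return the same safe markup, so conditional_escape()
# will not re-escape it:
#
#     >>> @html_safe
#     ... class Badge:
#     ...     def __str__(self):
#     ...         return "<b>badge</b>"
#     >>> conditional_escape(Badge())
#     '<b>badge</b>'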
|
7c2a28c220f7328d23e3dec60d45d492a50bababa4210fc671f4cf0e403cb5a1 | import copy
import itertools
import operator
from functools import wraps
class cached_property:
"""
Decorator that converts a method with a single self argument into a
property cached on the instance.
A cached property can be made out of an existing method:
(e.g. ``url = cached_property(get_absolute_url)``).
"""
name = None
@staticmethod
def func(instance):
raise TypeError(
"Cannot use cached_property instance without calling "
"__set_name__() on it."
)
def __init__(self, func):
self.real_func = func
self.__doc__ = getattr(func, "__doc__")
def __set_name__(self, owner, name):
if self.name is None:
self.name = name
self.func = self.real_func
elif name != self.name:
raise TypeError(
"Cannot assign the same cached_property to two different names "
"(%r and %r)." % (self.name, name)
)
def __get__(self, instance, cls=None):
"""
Call the function and put the return value in instance.__dict__ so that
subsequent attribute access on the instance returns the cached value
instead of calling cached_property.__get__().
"""
if instance is None:
return self
res = instance.__dict__[self.name] = self.func(instance)
return res
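# Illustrative sketch (not part of the original module; Square is a
# hypothetical class): the first access runs the method and stores the result
# in the instance's __dict__, which shadows the descriptor afterwards:
#
#     >>> class Square:
#     ...     def __init__(self, side):
#     ...         self.side = side
#     ...     @cached_property
#     ...     def area(self):
#     ...         print("computing")
#     ...         return self.side ** 2
#     >>> s = Square(3)
#     >>> s.area
#     computing
#     9
#     >>> s.area  # served from s.__dict__; the method is not called again
#     9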
class classproperty:
"""
Decorator that converts a method with a single cls argument into a property
that can be accessed directly from the class.
"""
def __init__(self, method=None):
self.fget = method
def __get__(self, instance, cls=None):
return self.fget(cls)
def getter(self, method):
self.fget = method
return self
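# Illustrative sketch (not part of the original module; Config is a
# hypothetical class): the getter receives the class, so the property is
# readable from both the class and its instances:
#
#     >>> class Config:
#     ...     _name = "default"
#     ...     @classproperty
#     ...     def name(cls):
#     ...         return cls._name
#     >>> Config.name
#     'default'
#     >>> Config().name
#     'default'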
class Promise:
"""
Base class for the proxy class created in the closure of the lazy function.
It's used to recognize promises in code.
"""
pass
def lazy(func, *resultclasses):
"""
    Turn any callable into a lazily evaluated callable. At least one result
    class or type is required so that the automatic forcing of the lazy
    evaluation code is triggered. Results are not memoized; the function is
    evaluated on every access.
"""
class __proxy__(Promise):
"""
Encapsulate a function call and act as a proxy for methods that are
called on the result of that function. The function is not evaluated
until one of the methods on the result is called.
"""
__prepared = False
def __init__(self, args, kw):
self.__args = args
self.__kw = kw
if not self.__prepared:
self.__prepare_class__()
self.__class__.__prepared = True
def __reduce__(self):
return (
_lazy_proxy_unpickle,
(func, self.__args, self.__kw) + resultclasses,
)
def __repr__(self):
return repr(self.__cast())
@classmethod
def __prepare_class__(cls):
for resultclass in resultclasses:
for type_ in resultclass.mro():
for method_name in type_.__dict__:
# All __promise__ return the same wrapper method, they
# look up the correct implementation when called.
if hasattr(cls, method_name):
continue
meth = cls.__promise__(method_name)
setattr(cls, method_name, meth)
@classmethod
def __promise__(cls, method_name):
# Builds a wrapper around some magic method
def __wrapper__(self, *args, **kw):
# Automatically triggers the evaluation of a lazy value and
# applies the given magic method of the result type.
res = func(*self.__args, **self.__kw)
return getattr(res, method_name)(*args, **kw)
return __wrapper__
def __cast(self):
return func(*self.__args, **self.__kw)
def __str__(self):
# object defines __str__(), so __prepare_class__() won't overload
# a __str__() method from the proxied class.
return str(self.__cast())
def __eq__(self, other):
if isinstance(other, Promise):
other = other.__cast()
return self.__cast() == other
def __ne__(self, other):
if isinstance(other, Promise):
other = other.__cast()
return self.__cast() != other
def __lt__(self, other):
if isinstance(other, Promise):
other = other.__cast()
return self.__cast() < other
def __le__(self, other):
if isinstance(other, Promise):
other = other.__cast()
return self.__cast() <= other
def __gt__(self, other):
if isinstance(other, Promise):
other = other.__cast()
return self.__cast() > other
def __ge__(self, other):
if isinstance(other, Promise):
other = other.__cast()
return self.__cast() >= other
def __hash__(self):
return hash(self.__cast())
def __mod__(self, rhs):
return self.__cast() % rhs
def __add__(self, other):
return self.__cast() + other
def __radd__(self, other):
return other + self.__cast()
def __deepcopy__(self, memo):
# Instances of this class are effectively immutable. It's just a
# collection of functions. So we don't need to do anything
# complicated for copying.
memo[id(self)] = self
return self
@wraps(func)
def __wrapper__(*args, **kw):
# Creates the proxy object, instead of the actual value.
return __proxy__(args, kw)
return __wrapper__
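# Illustrative sketch (not part of the original module; expensive() is a
# hypothetical function): the wrapped function runs only when the promise is
# forced, and it runs again on every access because results are not memoized:
#
#     >>> calls = []
#     >>> def expensive():
#     ...     calls.append(1)
#     ...     return "value"
#     >>> promise = lazy(expensive, str)()
#     >>> len(calls)  # nothing evaluated yet
#     0
#     >>> str(promise), len(calls)
#     ('value', 1)
#     >>> str(promise), len(calls)  # forced again on each access
#     ('value', 2)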
def _lazy_proxy_unpickle(func, args, kwargs, *resultclasses):
return lazy(func, *resultclasses)(*args, **kwargs)
def lazystr(text):
"""
Shortcut for the common case of a lazy callable that returns str.
"""
return lazy(str, str)(text)
def keep_lazy(*resultclasses):
"""
A decorator that allows a function to be called with one or more lazy
arguments. If none of the args are lazy, the function is evaluated
immediately, otherwise a __proxy__ is returned that will evaluate the
function when needed.
"""
if not resultclasses:
raise TypeError("You must pass at least one argument to keep_lazy().")
def decorator(func):
lazy_func = lazy(func, *resultclasses)
@wraps(func)
def wrapper(*args, **kwargs):
if any(
isinstance(arg, Promise)
for arg in itertools.chain(args, kwargs.values())
):
return lazy_func(*args, **kwargs)
return func(*args, **kwargs)
return wrapper
return decorator
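# Illustrative sketch (not part of the original module; shout() is a
# hypothetical function): with plain arguments the function runs immediately;
# with a lazy argument a new promise is returned and evaluation is deferred:
#
#     >>> @keep_lazy(str)
#     ... def shout(text):
#     ...     return text.upper()
#     >>> shout("abc")
#     'ABC'
#     >>> deferred = shout(lazystr("abc"))
#     >>> isinstance(deferred, Promise)
#     True
#     >>> str(deferred)
#     'ABC'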
def keep_lazy_text(func):
"""
A decorator for functions that accept lazy arguments and return text.
"""
return keep_lazy(str)(func)
empty = object()
def new_method_proxy(func):
def inner(self, *args):
if (_wrapped := self._wrapped) is empty:
self._setup()
_wrapped = self._wrapped
return func(_wrapped, *args)
inner._mask_wrapped = False
return inner
class LazyObject:
"""
A wrapper for another class that can be used to delay instantiation of the
wrapped class.
By subclassing, you have the opportunity to intercept and alter the
instantiation. If you don't need to do that, use SimpleLazyObject.
"""
# Avoid infinite recursion when tracing __init__ (#19456).
_wrapped = None
def __init__(self):
# Note: if a subclass overrides __init__(), it will likely need to
# override __copy__() and __deepcopy__() as well.
self._wrapped = empty
def __getattribute__(self, name):
if name == "_wrapped":
# Avoid recursion when getting wrapped object.
return super().__getattribute__(name)
value = super().__getattribute__(name)
# If attribute is a proxy method, raise an AttributeError to call
# __getattr__() and use the wrapped object method.
if not getattr(value, "_mask_wrapped", True):
raise AttributeError
return value
__getattr__ = new_method_proxy(getattr)
def __setattr__(self, name, value):
if name == "_wrapped":
# Assign to __dict__ to avoid infinite __setattr__ loops.
self.__dict__["_wrapped"] = value
else:
if self._wrapped is empty:
self._setup()
setattr(self._wrapped, name, value)
def __delattr__(self, name):
if name == "_wrapped":
raise TypeError("can't delete _wrapped.")
if self._wrapped is empty:
self._setup()
delattr(self._wrapped, name)
def _setup(self):
"""
Must be implemented by subclasses to initialize the wrapped object.
"""
raise NotImplementedError(
"subclasses of LazyObject must provide a _setup() method"
)
# Because we have messed with __class__ below, we confuse pickle as to what
# class we are pickling. We're going to have to initialize the wrapped
# object to successfully pickle it, so we might as well just pickle the
# wrapped object since they're supposed to act the same way.
#
# Unfortunately, if we try to simply act like the wrapped object, the ruse
# will break down when pickle gets our id(). Thus we end up with pickle
# thinking, in effect, that we are a distinct object from the wrapped
# object, but with the same __dict__. This can cause problems (see #25389).
#
# So instead, we define our own __reduce__ method and custom unpickler. We
# pickle the wrapped object as the unpickler's argument, so that pickle
# will pickle it normally, and then the unpickler simply returns its
# argument.
def __reduce__(self):
if self._wrapped is empty:
self._setup()
return (unpickle_lazyobject, (self._wrapped,))
def __copy__(self):
if self._wrapped is empty:
# If uninitialized, copy the wrapper. Use type(self), not
# self.__class__, because the latter is proxied.
return type(self)()
else:
# If initialized, return a copy of the wrapped object.
return copy.copy(self._wrapped)
def __deepcopy__(self, memo):
if self._wrapped is empty:
# We have to use type(self), not self.__class__, because the
# latter is proxied.
result = type(self)()
memo[id(self)] = result
return result
return copy.deepcopy(self._wrapped, memo)
__bytes__ = new_method_proxy(bytes)
__str__ = new_method_proxy(str)
__bool__ = new_method_proxy(bool)
# Introspection support
__dir__ = new_method_proxy(dir)
# Need to pretend to be the wrapped class, for the sake of objects that
# care about this (especially in equality tests)
__class__ = property(new_method_proxy(operator.attrgetter("__class__")))
__eq__ = new_method_proxy(operator.eq)
__lt__ = new_method_proxy(operator.lt)
__gt__ = new_method_proxy(operator.gt)
__ne__ = new_method_proxy(operator.ne)
__hash__ = new_method_proxy(hash)
# List/Tuple/Dictionary methods support
__getitem__ = new_method_proxy(operator.getitem)
__setitem__ = new_method_proxy(operator.setitem)
__delitem__ = new_method_proxy(operator.delitem)
__iter__ = new_method_proxy(iter)
__len__ = new_method_proxy(len)
__contains__ = new_method_proxy(operator.contains)
def unpickle_lazyobject(wrapped):
"""
Used to unpickle lazy objects. Just return its argument, which will be the
wrapped object.
"""
return wrapped
class SimpleLazyObject(LazyObject):
"""
A lazy object initialized from any function.
Designed for compound objects of unknown type. For builtins or objects of
known type, use django.utils.functional.lazy.
"""
def __init__(self, func):
"""
Pass in a callable that returns the object to be wrapped.
If copies are made of the resulting SimpleLazyObject, which can happen
in various circumstances within Django, then you must ensure that the
callable can be safely run more than once and will return the same
value.
"""
self.__dict__["_setupfunc"] = func
super().__init__()
def _setup(self):
self._wrapped = self._setupfunc()
# Return a meaningful representation of the lazy object for debugging
# without evaluating the wrapped object.
def __repr__(self):
if self._wrapped is empty:
repr_attr = self._setupfunc
else:
repr_attr = self._wrapped
return "<%s: %r>" % (type(self).__name__, repr_attr)
def __copy__(self):
if self._wrapped is empty:
# If uninitialized, copy the wrapper. Use SimpleLazyObject, not
# self.__class__, because the latter is proxied.
return SimpleLazyObject(self._setupfunc)
else:
# If initialized, return a copy of the wrapped object.
return copy.copy(self._wrapped)
def __deepcopy__(self, memo):
if self._wrapped is empty:
# We have to use SimpleLazyObject, not self.__class__, because the
# latter is proxied.
result = SimpleLazyObject(self._setupfunc)
memo[id(self)] = result
return result
return copy.deepcopy(self._wrapped, memo)
__add__ = new_method_proxy(operator.add)
@new_method_proxy
def __radd__(self, other):
return other + self
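# Illustrative sketch, not part of Django's source: SimpleLazyObject defers
# calling the factory until the first attribute or item access.
#
# >>> calls = []
# >>> obj = SimpleLazyObject(lambda: calls.append("built") or {"a": 1})
# >>> calls
# []
# >>> obj["a"]  # first access triggers _setup()
# 1
# >>> calls
# ['built']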
def partition(predicate, values):
"""
Split the values into two sets, based on the return value of the function
(True/False). e.g.:
>>> partition(lambda x: x > 3, range(5))
    ([0, 1, 2, 3], [4])
"""
results = ([], [])
for item in values:
results[predicate(item)].append(item)
return results
|
486df88ec6f1bdcf470c7d69c7f30340c2da6e1919be0f3a34bbc43434b4e2d2 | from django.db import NotSupportedError
from django.db.models.expressions import Func, Value
from django.db.models.fields import CharField, IntegerField, TextField
from django.db.models.functions import Cast, Coalesce
from django.db.models.lookups import Transform
class MySQLSHA2Mixin:
def as_mysql(self, compiler, connection, **extra_context):
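        # self.function is "SHA224", "SHA256", etc., so the [3:] slice drops
        # the "SHA" prefix and passes the remaining bit length as the second
        # argument that MySQL's SHA2() expects.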
return super().as_sql(
compiler,
connection,
template="SHA2(%%(expressions)s, %s)" % self.function[3:],
**extra_context,
)
class OracleHashMixin:
def as_oracle(self, compiler, connection, **extra_context):
return super().as_sql(
compiler,
connection,
template=(
"LOWER(RAWTOHEX(STANDARD_HASH(UTL_I18N.STRING_TO_RAW("
"%(expressions)s, 'AL32UTF8'), '%(function)s')))"
),
**extra_context,
)
class PostgreSQLSHAMixin:
def as_postgresql(self, compiler, connection, **extra_context):
return super().as_sql(
compiler,
connection,
template="ENCODE(DIGEST(%(expressions)s, '%(function)s'), 'hex')",
function=self.function.lower(),
**extra_context,
)
class Chr(Transform):
function = "CHR"
lookup_name = "chr"
output_field = CharField()
def as_mysql(self, compiler, connection, **extra_context):
return super().as_sql(
compiler,
connection,
function="CHAR",
template="%(function)s(%(expressions)s USING utf16)",
**extra_context,
)
def as_oracle(self, compiler, connection, **extra_context):
return super().as_sql(
compiler,
connection,
template="%(function)s(%(expressions)s USING NCHAR_CS)",
**extra_context,
)
def as_sqlite(self, compiler, connection, **extra_context):
return super().as_sql(compiler, connection, function="CHAR", **extra_context)
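# Illustrative sketch, not part of Django's source ("Author" is a
# hypothetical model): Chr maps an integer code point to its character,
# using the per-backend SQL spellings above.
#
# >>> Author.objects.filter(name__startswith=Chr(ord("M")))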
class ConcatPair(Func):
"""
Concatenate two arguments together. This is used by `Concat` because not
all backend databases support more than two arguments.
"""
function = "CONCAT"
def as_sqlite(self, compiler, connection, **extra_context):
coalesced = self.coalesce()
return super(ConcatPair, coalesced).as_sql(
compiler,
connection,
template="%(expressions)s",
arg_joiner=" || ",
**extra_context,
)
def as_postgresql(self, compiler, connection, **extra_context):
copy = self.copy()
copy.set_source_expressions(
[
Cast(expression, TextField())
for expression in copy.get_source_expressions()
]
)
return super(ConcatPair, copy).as_sql(
compiler,
connection,
**extra_context,
)
def as_mysql(self, compiler, connection, **extra_context):
# Use CONCAT_WS with an empty separator so that NULLs are ignored.
return super().as_sql(
compiler,
connection,
function="CONCAT_WS",
template="%(function)s('', %(expressions)s)",
**extra_context,
)
def coalesce(self):
        # NULL on either side makes the whole expression NULL; wrap each side
        # in COALESCE to avoid that.
c = self.copy()
c.set_source_expressions(
[
Coalesce(expression, Value(""))
for expression in c.get_source_expressions()
]
)
return c
class Concat(Func):
"""
Concatenate text fields together. Backends that result in an entire
null expression when any arguments are null will wrap each argument in
coalesce functions to ensure a non-null result.
"""
function = None
template = "%(expressions)s"
def __init__(self, *expressions, **extra):
if len(expressions) < 2:
raise ValueError("Concat must take at least two expressions")
paired = self._paired(expressions)
super().__init__(paired, **extra)
def _paired(self, expressions):
# wrap pairs of expressions in successive concat functions
# exp = [a, b, c, d]
        # -> ConcatPair(a, ConcatPair(b, ConcatPair(c, d)))
if len(expressions) == 2:
return ConcatPair(*expressions)
return ConcatPair(expressions[0], self._paired(expressions[1:]))
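# Illustrative sketch, not part of Django's source ("Author" is a
# hypothetical model): Concat("a", "b", "c") nests as
# ConcatPair(a, ConcatPair(b, c)), and each pair is coalesced or recast as
# the backend requires.
#
# >>> Author.objects.annotate(
# ...     screen_name=Concat(
# ...         "name", Value(" ("), "alias", Value(")"), output_field=CharField()
# ...     )
# ... )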
class Left(Func):
function = "LEFT"
arity = 2
output_field = CharField()
def __init__(self, expression, length, **extra):
"""
expression: the name of a field, or an expression returning a string
length: the number of characters to return from the start of the string
"""
if not hasattr(length, "resolve_expression"):
if length < 1:
raise ValueError("'length' must be greater than 0.")
super().__init__(expression, length, **extra)
def get_substr(self):
return Substr(self.source_expressions[0], Value(1), self.source_expressions[1])
def as_oracle(self, compiler, connection, **extra_context):
return self.get_substr().as_oracle(compiler, connection, **extra_context)
def as_sqlite(self, compiler, connection, **extra_context):
return self.get_substr().as_sqlite(compiler, connection, **extra_context)
class Length(Transform):
"""Return the number of characters in the expression."""
function = "LENGTH"
lookup_name = "length"
output_field = IntegerField()
def as_mysql(self, compiler, connection, **extra_context):
return super().as_sql(
compiler, connection, function="CHAR_LENGTH", **extra_context
)
class Lower(Transform):
function = "LOWER"
lookup_name = "lower"
class LPad(Func):
function = "LPAD"
output_field = CharField()
def __init__(self, expression, length, fill_text=Value(" "), **extra):
if (
not hasattr(length, "resolve_expression")
and length is not None
and length < 0
):
raise ValueError("'length' must be greater or equal to 0.")
super().__init__(expression, length, fill_text, **extra)
class LTrim(Transform):
function = "LTRIM"
lookup_name = "ltrim"
class MD5(OracleHashMixin, Transform):
function = "MD5"
lookup_name = "md5"
class Ord(Transform):
function = "ASCII"
lookup_name = "ord"
output_field = IntegerField()
def as_mysql(self, compiler, connection, **extra_context):
return super().as_sql(compiler, connection, function="ORD", **extra_context)
def as_sqlite(self, compiler, connection, **extra_context):
        return super().as_sql(
            compiler, connection, function="UNICODE", **extra_context
        )
class Repeat(Func):
function = "REPEAT"
output_field = CharField()
def __init__(self, expression, number, **extra):
if (
not hasattr(number, "resolve_expression")
and number is not None
and number < 0
):
raise ValueError("'number' must be greater or equal to 0.")
super().__init__(expression, number, **extra)
def as_oracle(self, compiler, connection, **extra_context):
expression, number = self.source_expressions
length = None if number is None else Length(expression) * number
rpad = RPad(expression, length, expression)
return rpad.as_sql(compiler, connection, **extra_context)
class Replace(Func):
function = "REPLACE"
def __init__(self, expression, text, replacement=Value(""), **extra):
super().__init__(expression, text, replacement, **extra)
class Reverse(Transform):
function = "REVERSE"
lookup_name = "reverse"
def as_oracle(self, compiler, connection, **extra_context):
# REVERSE in Oracle is undocumented and doesn't support multi-byte
# strings. Use a special subquery instead.
sql, params = super().as_sql(
compiler,
connection,
template=(
"(SELECT LISTAGG(s) WITHIN GROUP (ORDER BY n DESC) FROM "
"(SELECT LEVEL n, SUBSTR(%(expressions)s, LEVEL, 1) s "
"FROM DUAL CONNECT BY LEVEL <= LENGTH(%(expressions)s)) "
"GROUP BY %(expressions)s)"
),
**extra_context,
)
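        # The subquery template above interpolates %(expressions)s three
        # times, so the parameters must be repeated to match.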
return sql, params * 3
class Right(Left):
function = "RIGHT"
def get_substr(self):
return Substr(
self.source_expressions[0],
self.source_expressions[1] * Value(-1),
self.source_expressions[1],
)
class RPad(LPad):
function = "RPAD"
class RTrim(Transform):
function = "RTRIM"
lookup_name = "rtrim"
class SHA1(OracleHashMixin, PostgreSQLSHAMixin, Transform):
function = "SHA1"
lookup_name = "sha1"
class SHA224(MySQLSHA2Mixin, PostgreSQLSHAMixin, Transform):
function = "SHA224"
lookup_name = "sha224"
def as_oracle(self, compiler, connection, **extra_context):
raise NotSupportedError("SHA224 is not supported on Oracle.")
class SHA256(MySQLSHA2Mixin, OracleHashMixin, PostgreSQLSHAMixin, Transform):
function = "SHA256"
lookup_name = "sha256"
class SHA384(MySQLSHA2Mixin, OracleHashMixin, PostgreSQLSHAMixin, Transform):
function = "SHA384"
lookup_name = "sha384"
class SHA512(MySQLSHA2Mixin, OracleHashMixin, PostgreSQLSHAMixin, Transform):
function = "SHA512"
lookup_name = "sha512"
class StrIndex(Func):
"""
Return a positive integer corresponding to the 1-indexed position of the
first occurrence of a substring inside another string, or 0 if the
substring is not found.
"""
function = "INSTR"
arity = 2
output_field = IntegerField()
def as_postgresql(self, compiler, connection, **extra_context):
return super().as_sql(compiler, connection, function="STRPOS", **extra_context)
class Substr(Func):
function = "SUBSTRING"
output_field = CharField()
def __init__(self, expression, pos, length=None, **extra):
"""
expression: the name of a field, or an expression returning a string
pos: an integer > 0, or an expression returning an integer
length: an optional number of characters to return
"""
if not hasattr(pos, "resolve_expression"):
if pos < 1:
raise ValueError("'pos' must be greater than 0")
expressions = [expression, pos]
if length is not None:
expressions.append(length)
super().__init__(*expressions, **extra)
def as_sqlite(self, compiler, connection, **extra_context):
return super().as_sql(compiler, connection, function="SUBSTR", **extra_context)
def as_oracle(self, compiler, connection, **extra_context):
return super().as_sql(compiler, connection, function="SUBSTR", **extra_context)
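# Illustrative sketch, not part of Django's source ("Author" is a
# hypothetical model): Substr uses 1-based positions, matching SQL.
#
# >>> Author.objects.annotate(short_name=Substr("name", 1, 5))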
class Trim(Transform):
function = "TRIM"
lookup_name = "trim"
class Upper(Transform):
function = "UPPER"
lookup_name = "upper"
|
2bb43d4a2404c0506719f83a948c5382691ed66db20ca64b797298a944569d56 | """
Create SQL statements for QuerySets.
The code in here encapsulates all of the SQL construction so that QuerySets
themselves do not have to (and could be backed by things other than SQL
databases). The abstraction barrier only works one way: this module has to know
all about the internals of models in order to get the information it needs.
"""
import copy
import difflib
import functools
import sys
from collections import Counter, namedtuple
from collections.abc import Iterator, Mapping
from itertools import chain, count, product
from string import ascii_uppercase
from django.core.exceptions import FieldDoesNotExist, FieldError
from django.db import DEFAULT_DB_ALIAS, NotSupportedError, connections
from django.db.models.aggregates import Count
from django.db.models.constants import LOOKUP_SEP
from django.db.models.expressions import (
BaseExpression,
Col,
Exists,
F,
OuterRef,
Ref,
ResolvedOuterRef,
Value,
)
from django.db.models.fields import Field
from django.db.models.fields.related_lookups import MultiColSource
from django.db.models.lookups import Lookup
from django.db.models.query_utils import (
Q,
check_rel_lookup_compatibility,
refs_expression,
)
from django.db.models.sql.constants import INNER, LOUTER, ORDER_DIR, SINGLE
from django.db.models.sql.datastructures import BaseTable, Empty, Join, MultiJoin
from django.db.models.sql.where import AND, OR, ExtraWhere, NothingNode, WhereNode
from django.utils.functional import cached_property
from django.utils.regex_helper import _lazy_re_compile
from django.utils.tree import Node
__all__ = ["Query", "RawQuery"]
# Quotation marks ('"`[]), whitespace characters, semicolons, or inline
# SQL comments are forbidden in column aliases.
FORBIDDEN_ALIAS_PATTERN = _lazy_re_compile(r"['`\"\]\[;\s]|--|/\*|\*/")
# Inspired by
# https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-IDENTIFIERS
EXPLAIN_OPTIONS_PATTERN = _lazy_re_compile(r"[\w\-]+")
def get_field_names_from_opts(opts):
if opts is None:
return set()
return set(
chain.from_iterable(
(f.name, f.attname) if f.concrete else (f.name,) for f in opts.get_fields()
)
)
def get_children_from_q(q):
for child in q.children:
if isinstance(child, Node):
yield from get_children_from_q(child)
else:
yield child
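# Illustrative sketch, not part of Django's source: get_children_from_q()
# flattens a Q tree into its leaf (lookup, value) tuples.
#
# >>> q = Q(name="a") | (Q(age=1) & Q(active=True))
# >>> list(get_children_from_q(q))
# [('name', 'a'), ('age', 1), ('active', True)]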
def rename_prefix_from_q(prefix, replacement, q):
return Q.create(
[
rename_prefix_from_q(prefix, replacement, c)
if isinstance(c, Node)
else (c[0].replace(prefix, replacement, 1), c[1])
for c in q.children
],
q.connector,
q.negated,
)
JoinInfo = namedtuple(
"JoinInfo",
("final_field", "targets", "opts", "joins", "path", "transform_function"),
)
class RawQuery:
"""A single raw SQL query."""
def __init__(self, sql, using, params=()):
self.params = params
self.sql = sql
self.using = using
self.cursor = None
# Mirror some properties of a normal query so that
# the compiler can be used to process results.
self.low_mark, self.high_mark = 0, None # Used for offset/limit
self.extra_select = {}
self.annotation_select = {}
def chain(self, using):
return self.clone(using)
def clone(self, using):
return RawQuery(self.sql, using, params=self.params)
def get_columns(self):
if self.cursor is None:
self._execute_query()
converter = connections[self.using].introspection.identifier_converter
return [converter(column_meta[0]) for column_meta in self.cursor.description]
def __iter__(self):
# Always execute a new query for a new iterator.
# This could be optimized with a cache at the expense of RAM.
self._execute_query()
if not connections[self.using].features.can_use_chunked_reads:
# If the database can't use chunked reads we need to make sure we
# evaluate the entire query up front.
result = list(self.cursor)
else:
result = self.cursor
return iter(result)
def __repr__(self):
return "<%s: %s>" % (self.__class__.__name__, self)
@property
def params_type(self):
if self.params is None:
return None
return dict if isinstance(self.params, Mapping) else tuple
def __str__(self):
if self.params_type is None:
return self.sql
return self.sql % self.params_type(self.params)
def _execute_query(self):
connection = connections[self.using]
# Adapt parameters to the database, as much as possible considering
# that the target type isn't known. See #17755.
params_type = self.params_type
adapter = connection.ops.adapt_unknown_value
if params_type is tuple:
params = tuple(adapter(val) for val in self.params)
elif params_type is dict:
params = {key: adapter(val) for key, val in self.params.items()}
elif params_type is None:
params = None
else:
raise RuntimeError("Unexpected params type: %s" % params_type)
self.cursor = connection.cursor()
self.cursor.execute(self.sql, params)
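# Illustrative sketch, not part of Django's source ("app_author" is a
# hypothetical table): __str__() interpolates params for display only;
# execution passes them to the database driver separately.
#
# >>> rq = RawQuery(
# ...     "SELECT * FROM app_author WHERE id = %s", "default", params=(1,)
# ... )
# >>> str(rq)
# 'SELECT * FROM app_author WHERE id = 1'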
ExplainInfo = namedtuple("ExplainInfo", ("format", "options"))
class Query(BaseExpression):
"""A single SQL query."""
alias_prefix = "T"
empty_result_set_value = None
subq_aliases = frozenset([alias_prefix])
compiler = "SQLCompiler"
base_table_class = BaseTable
join_class = Join
default_cols = True
default_ordering = True
standard_ordering = True
filter_is_sticky = False
subquery = False
# SQL-related attributes.
# Select and related select clauses are expressions to use in the SELECT
# clause of the query. The select is used for cases where we want to set up
# the select clause to contain other than default fields (values(),
# subqueries...). Note that annotations go to annotations dictionary.
select = ()
# The group_by attribute can have one of the following forms:
# - None: no group by at all in the query
# - A tuple of expressions: group by (at least) those expressions.
# String refs are also allowed for now.
# - True: group by all select fields of the model
# See compiler.get_group_by() for details.
group_by = None
order_by = ()
low_mark = 0 # Used for offset/limit.
high_mark = None # Used for offset/limit.
distinct = False
distinct_fields = ()
select_for_update = False
select_for_update_nowait = False
select_for_update_skip_locked = False
select_for_update_of = ()
select_for_no_key_update = False
select_related = False
has_select_fields = False
    # Arbitrary limit for select_related to prevent infinite recursion.
max_depth = 5
# Holds the selects defined by a call to values() or values_list()
# excluding annotation_select and extra_select.
values_select = ()
# SQL annotation-related attributes.
annotation_select_mask = None
_annotation_select_cache = None
# Set combination attributes.
combinator = None
combinator_all = False
combined_queries = ()
# These are for extensions. The contents are more or less appended verbatim
# to the appropriate clause.
extra_select_mask = None
_extra_select_cache = None
extra_tables = ()
extra_order_by = ()
    # A tuple holding a set of model field names and a flag: True if these
    # are the fields to defer, or False if these are the only fields to load.
deferred_loading = (frozenset(), True)
explain_info = None
def __init__(self, model, alias_cols=True):
self.model = model
self.alias_refcount = {}
# alias_map is the most important data structure regarding joins.
# It's used for recording which joins exist in the query and what
# types they are. The key is the alias of the joined table (possibly
# the table name) and the value is a Join-like object (see
# sql.datastructures.Join for more information).
self.alias_map = {}
# Whether to provide alias to columns during reference resolving.
self.alias_cols = alias_cols
# Sometimes the query contains references to aliases in outer queries (as
# a result of split_exclude). Correct alias quoting needs to know these
# aliases too.
# Map external tables to whether they are aliased.
self.external_aliases = {}
self.table_map = {} # Maps table names to list of aliases.
self.used_aliases = set()
self.where = WhereNode()
# Maps alias -> Annotation Expression.
self.annotations = {}
# These are for extensions. The contents are more or less appended
# verbatim to the appropriate clause.
self.extra = {} # Maps col_alias -> (col_sql, params).
self._filtered_relations = {}
@property
def output_field(self):
if len(self.select) == 1:
select = self.select[0]
return getattr(select, "target", None) or select.field
elif len(self.annotation_select) == 1:
return next(iter(self.annotation_select.values())).output_field
@cached_property
def base_table(self):
for alias in self.alias_map:
return alias
def __str__(self):
"""
Return the query as a string of SQL with the parameter values
substituted in (use sql_with_params() to see the unsubstituted string).
Parameter values won't necessarily be quoted correctly, since that is
done by the database interface at execution time.
"""
sql, params = self.sql_with_params()
return sql % params
def sql_with_params(self):
"""
Return the query as an SQL string and the parameters that will be
substituted into the query.
"""
return self.get_compiler(DEFAULT_DB_ALIAS).as_sql()
def __deepcopy__(self, memo):
"""Limit the amount of work when a Query is deepcopied."""
result = self.clone()
memo[id(self)] = result
return result
def get_compiler(self, using=None, connection=None, elide_empty=True):
if using is None and connection is None:
raise ValueError("Need either using or connection")
if using:
connection = connections[using]
return connection.ops.compiler(self.compiler)(
self, connection, using, elide_empty
)
def get_meta(self):
"""
Return the Options instance (the model._meta) from which to start
processing. Normally, this is self.model._meta, but it can be changed
by subclasses.
"""
if self.model:
return self.model._meta
def clone(self):
"""
Return a copy of the current Query. A lightweight alternative to
deepcopy().
"""
obj = Empty()
obj.__class__ = self.__class__
# Copy references to everything.
obj.__dict__ = self.__dict__.copy()
# Clone attributes that can't use shallow copy.
obj.alias_refcount = self.alias_refcount.copy()
obj.alias_map = self.alias_map.copy()
obj.external_aliases = self.external_aliases.copy()
obj.table_map = self.table_map.copy()
obj.where = self.where.clone()
obj.annotations = self.annotations.copy()
if self.annotation_select_mask is not None:
obj.annotation_select_mask = self.annotation_select_mask.copy()
if self.combined_queries:
obj.combined_queries = tuple(
[query.clone() for query in self.combined_queries]
)
# _annotation_select_cache cannot be copied, as doing so breaks the
# (necessary) state in which both annotations and
# _annotation_select_cache point to the same underlying objects.
# It will get re-populated in the cloned queryset the next time it's
# used.
obj._annotation_select_cache = None
obj.extra = self.extra.copy()
if self.extra_select_mask is not None:
obj.extra_select_mask = self.extra_select_mask.copy()
if self._extra_select_cache is not None:
obj._extra_select_cache = self._extra_select_cache.copy()
if self.select_related is not False:
# Use deepcopy because select_related stores fields in nested
# dicts.
obj.select_related = copy.deepcopy(obj.select_related)
if "subq_aliases" in self.__dict__:
obj.subq_aliases = self.subq_aliases.copy()
obj.used_aliases = self.used_aliases.copy()
obj._filtered_relations = self._filtered_relations.copy()
# Clear the cached_property, if it exists.
obj.__dict__.pop("base_table", None)
return obj
def chain(self, klass=None):
"""
Return a copy of the current Query that's ready for another operation.
The klass argument changes the type of the Query, e.g. UpdateQuery.
"""
obj = self.clone()
if klass and obj.__class__ != klass:
obj.__class__ = klass
if not obj.filter_is_sticky:
obj.used_aliases = set()
obj.filter_is_sticky = False
if hasattr(obj, "_setup_query"):
obj._setup_query()
return obj
def relabeled_clone(self, change_map):
clone = self.clone()
clone.change_aliases(change_map)
return clone
def _get_col(self, target, field, alias):
if not self.alias_cols:
alias = None
return target.get_col(alias, field)
def get_aggregation(self, using, aggregate_exprs):
"""
Return the dictionary with the values of the existing aggregations.
"""
if not aggregate_exprs:
return {}
        # Store annotation mask prior to temporarily adding aggregations for
        # resolving purposes to facilitate their subsequent removal.
refs_subquery = False
replacements = {}
annotation_select_mask = self.annotation_select_mask
for alias, aggregate_expr in aggregate_exprs.items():
self.check_alias(alias)
aggregate = aggregate_expr.resolve_expression(
self, allow_joins=True, reuse=None, summarize=True
)
if not aggregate.contains_aggregate:
raise TypeError("%s is not an aggregate expression" % alias)
            # Temporarily add aggregate to annotations to allow remaining
            # members of `aggregates` to resolve against each other.
self.append_annotation_mask([alias])
refs_subquery |= any(
getattr(self.annotations[ref], "subquery", False)
for ref in aggregate.get_refs()
)
aggregate = aggregate.replace_expressions(replacements)
self.annotations[alias] = aggregate
replacements[Ref(alias, aggregate)] = aggregate
# Stash resolved aggregates now that they have been allowed to resolve
# against each other.
aggregates = {alias: self.annotations.pop(alias) for alias in aggregate_exprs}
self.set_annotation_mask(annotation_select_mask)
# Existing usage of aggregation can be determined by the presence of
# selected aggregates but also by filters against aliased aggregates.
_, having, qualify = self.where.split_having_qualify()
has_existing_aggregation = (
any(
getattr(annotation, "contains_aggregate", True)
for annotation in self.annotations.values()
)
or having
)
# Decide if we need to use a subquery.
#
# Existing aggregations would cause incorrect results as
# get_aggregation() must produce just one result and thus must not use
# GROUP BY.
#
# If the query has limit or distinct, or uses set operations, then
# those operations must be done in a subquery so that the query
# aggregates on the limit and/or distinct results instead of applying
# the distinct and limit after the aggregation.
if (
isinstance(self.group_by, tuple)
or self.is_sliced
or has_existing_aggregation
or refs_subquery
or qualify
or self.distinct
or self.combinator
):
from django.db.models.sql.subqueries import AggregateQuery
inner_query = self.clone()
inner_query.subquery = True
outer_query = AggregateQuery(self.model, inner_query)
inner_query.select_for_update = False
inner_query.select_related = False
inner_query.set_annotation_mask(self.annotation_select)
# Queries with distinct_fields need ordering and when a limit is
# applied we must take the slice from the ordered query. Otherwise
# no need for ordering.
inner_query.clear_ordering(force=False)
if not inner_query.distinct:
# If the inner query uses default select and it has some
# aggregate annotations, then we must make sure the inner
# query is grouped by the main model's primary key. However,
# clearing the select clause can alter results if distinct is
# used.
if inner_query.default_cols and has_existing_aggregation:
inner_query.group_by = (
self.model._meta.pk.get_col(inner_query.get_initial_alias()),
)
inner_query.default_cols = False
if not qualify:
# Mask existing annotations that are not referenced by
# aggregates to be pushed to the outer query unless
# filtering against window functions is involved as it
                # requires complex realiasing.
annotation_mask = set()
if isinstance(self.group_by, tuple):
for expr in self.group_by:
annotation_mask |= expr.get_refs()
for aggregate in aggregates.values():
annotation_mask |= aggregate.get_refs()
inner_query.set_annotation_mask(annotation_mask)
# Add aggregates to the outer AggregateQuery. This requires making
# sure all columns referenced by the aggregates are selected in the
# inner query. It is achieved by retrieving all column references
# by the aggregates, explicitly selecting them in the inner query,
# and making sure the aggregates are repointed to them.
col_refs = {}
for alias, aggregate in aggregates.items():
replacements = {}
for col in self._gen_cols([aggregate], resolve_refs=False):
if not (col_ref := col_refs.get(col)):
index = len(col_refs) + 1
col_alias = f"__col{index}"
col_ref = Ref(col_alias, col)
col_refs[col] = col_ref
inner_query.annotations[col_alias] = col
inner_query.append_annotation_mask([col_alias])
replacements[col] = col_ref
outer_query.annotations[alias] = aggregate.replace_expressions(
replacements
)
if (
inner_query.select == ()
and not inner_query.default_cols
and not inner_query.annotation_select_mask
):
# In case of Model.objects[0:3].count(), there would be no
# field selected in the inner query, yet we must use a subquery.
# So, make sure at least one field is selected.
inner_query.select = (
self.model._meta.pk.get_col(inner_query.get_initial_alias()),
)
else:
outer_query = self
self.select = ()
self.default_cols = False
self.extra = {}
if self.annotations:
# Inline reference to existing annotations and mask them as
# they are unnecessary given only the summarized aggregations
# are requested.
replacements = {
Ref(alias, annotation): annotation
for alias, annotation in self.annotations.items()
}
self.annotations = {
alias: aggregate.replace_expressions(replacements)
for alias, aggregate in aggregates.items()
}
else:
self.annotations = aggregates
self.set_annotation_mask(aggregates)
empty_set_result = [
expression.empty_result_set_value
for expression in outer_query.annotation_select.values()
]
elide_empty = not any(result is NotImplemented for result in empty_set_result)
outer_query.clear_ordering(force=True)
outer_query.clear_limits()
outer_query.select_for_update = False
outer_query.select_related = False
compiler = outer_query.get_compiler(using, elide_empty=elide_empty)
result = compiler.execute_sql(SINGLE)
if result is None:
result = empty_set_result
else:
converters = compiler.get_converters(outer_query.annotation_select.values())
result = next(compiler.apply_converters((result,), converters))
return dict(zip(outer_query.annotation_select, result))
def get_count(self, using):
"""
Perform a COUNT() query using the current filter constraints.
"""
obj = self.clone()
return obj.get_aggregation(using, {"__count": Count("*")})["__count"]
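    # Illustrative note, not part of Django's source: get_count() is the
    # machinery behind QuerySet.count(); for an unsliced queryset it is
    # equivalent to qs.aggregate(__count=Count("*"))["__count"], where qs is
    # a hypothetical queryset.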
def has_filters(self):
return self.where
def exists(self, limit=True):
q = self.clone()
if not (q.distinct and q.is_sliced):
if q.group_by is True:
q.add_fields(
(f.attname for f in self.model._meta.concrete_fields), False
)
# Disable GROUP BY aliases to avoid orphaning references to the
# SELECT clause which is about to be cleared.
q.set_group_by(allow_aliases=False)
q.clear_select_clause()
if q.combined_queries and q.combinator == "union":
q.combined_queries = tuple(
combined_query.exists(limit=False)
for combined_query in q.combined_queries
)
q.clear_ordering(force=True)
if limit:
q.set_limits(high=1)
q.add_annotation(Value(1), "a")
return q
def has_results(self, using):
q = self.exists(using)
compiler = q.get_compiler(using=using)
return compiler.has_results()
def explain(self, using, format=None, **options):
q = self.clone()
for option_name in options:
if (
not EXPLAIN_OPTIONS_PATTERN.fullmatch(option_name)
or "--" in option_name
):
raise ValueError(f"Invalid option name: {option_name!r}.")
q.explain_info = ExplainInfo(format, options)
compiler = q.get_compiler(using=using)
return "\n".join(compiler.explain_query())
def combine(self, rhs, connector):
"""
        Merge the 'rhs' query into the current one, with any 'rhs' effects
being applied *after* (that is, "to the right of") anything in the
current query. 'rhs' is not modified during a call to this function.
The 'connector' parameter describes how to connect filters from the
'rhs' query.
"""
if self.model != rhs.model:
raise TypeError("Cannot combine queries on two different base models.")
if self.is_sliced:
raise TypeError("Cannot combine queries once a slice has been taken.")
if self.distinct != rhs.distinct:
raise TypeError("Cannot combine a unique query with a non-unique query.")
if self.distinct_fields != rhs.distinct_fields:
raise TypeError("Cannot combine queries with different distinct fields.")
        # If lhs and rhs share the same alias prefix, it is possible to have
# conflicting alias changes like T4 -> T5, T5 -> T6, which might end up
# as T4 -> T6 while combining two querysets. To prevent this, change an
# alias prefix of the rhs and update current aliases accordingly,
# except if the alias is the base table since it must be present in the
# query on both sides.
initial_alias = self.get_initial_alias()
rhs.bump_prefix(self, exclude={initial_alias})
# Work out how to relabel the rhs aliases, if necessary.
change_map = {}
conjunction = connector == AND
# Determine which existing joins can be reused. When combining the
# query with AND we must recreate all joins for m2m filters. When
        # combining with OR we can reuse joins. The reason is that in the AND
        # case a single row can't fulfill a condition like:
        #     revrel__col=1 & revrel__col=2
        # but there might be two different related rows matching this
        # condition. In the OR case a single True is enough, so a single row
        # is enough, too.
#
# Note that we will be creating duplicate joins for non-m2m joins in
# the AND case. The results will be correct but this creates too many
# joins. This is something that could be fixed later on.
reuse = set() if conjunction else set(self.alias_map)
joinpromoter = JoinPromoter(connector, 2, False)
joinpromoter.add_votes(
j for j in self.alias_map if self.alias_map[j].join_type == INNER
)
rhs_votes = set()
# Now, add the joins from rhs query into the new query (skipping base
# table).
rhs_tables = list(rhs.alias_map)[1:]
for alias in rhs_tables:
join = rhs.alias_map[alias]
# If the left side of the join was already relabeled, use the
# updated alias.
join = join.relabeled_clone(change_map)
new_alias = self.join(join, reuse=reuse)
if join.join_type == INNER:
rhs_votes.add(new_alias)
# We can't reuse the same join again in the query. If we have two
# distinct joins for the same connection in rhs query, then the
# combined query must have two joins, too.
reuse.discard(new_alias)
if alias != new_alias:
change_map[alias] = new_alias
if not rhs.alias_refcount[alias]:
# The alias was unused in the rhs query. Unref it so that it
# will be unused in the new query, too. We have to add and
# unref the alias so that join promotion has information of
# the join type for the unused alias.
self.unref_alias(new_alias)
joinpromoter.add_votes(rhs_votes)
joinpromoter.update_join_types(self)
        # Combine subquery aliases to ensure alias relabelling properly
        # handles subqueries when combining where and select clauses.
self.subq_aliases |= rhs.subq_aliases
# Now relabel a copy of the rhs where-clause and add it to the current
# one.
w = rhs.where.clone()
w.relabel_aliases(change_map)
self.where.add(w, connector)
# Selection columns and extra extensions are those provided by 'rhs'.
if rhs.select:
self.set_select([col.relabeled_clone(change_map) for col in rhs.select])
else:
self.select = ()
if connector == OR:
# It would be nice to be able to handle this, but the queries don't
# really make sense (or return consistent value sets). Not worth
# the extra complexity when you can write a real query instead.
if self.extra and rhs.extra:
raise ValueError(
"When merging querysets using 'or', you cannot have "
"extra(select=...) on both sides."
)
self.extra.update(rhs.extra)
extra_select_mask = set()
if self.extra_select_mask is not None:
extra_select_mask.update(self.extra_select_mask)
if rhs.extra_select_mask is not None:
extra_select_mask.update(rhs.extra_select_mask)
if extra_select_mask:
self.set_extra_mask(extra_select_mask)
self.extra_tables += rhs.extra_tables
# Ordering uses the 'rhs' ordering, unless it has none, in which case
# the current ordering is used.
self.order_by = rhs.order_by or self.order_by
self.extra_order_by = rhs.extra_order_by or self.extra_order_by
def _get_defer_select_mask(self, opts, mask, select_mask=None):
if select_mask is None:
select_mask = {}
select_mask[opts.pk] = {}
# All concrete fields that are not part of the defer mask must be
        # loaded. If a relational field is encountered, it is added to the
        # mask so it can be considered by `select_related`, and the cycle
        # continues by recursively calling this function.
for field in opts.concrete_fields:
field_mask = mask.pop(field.name, None)
field_att_mask = mask.pop(field.attname, None)
if field_mask is None and field_att_mask is None:
select_mask.setdefault(field, {})
elif field_mask:
if not field.is_relation:
raise FieldError(next(iter(field_mask)))
field_select_mask = select_mask.setdefault(field, {})
related_model = field.remote_field.model._meta.concrete_model
self._get_defer_select_mask(
related_model._meta, field_mask, field_select_mask
)
# Remaining defer entries must be references to reverse relationships.
# The following code is expected to raise FieldError if it encounters
# a malformed defer entry.
for field_name, field_mask in mask.items():
if filtered_relation := self._filtered_relations.get(field_name):
relation = opts.get_field(filtered_relation.relation_name)
field_select_mask = select_mask.setdefault((field_name, relation), {})
field = relation.field
else:
reverse_rel = opts.get_field(field_name)
# While virtual fields such as many-to-many and generic foreign
                # keys cannot be effectively deferred, we've historically
                # allowed them to be passed to QuerySet.defer(). Ignore such
                # field references until a layer of validation at mask
                # alteration time is eventually implemented.
if not hasattr(reverse_rel, "field"):
continue
field = reverse_rel.field
field_select_mask = select_mask.setdefault(field, {})
related_model = field.model._meta.concrete_model
self._get_defer_select_mask(
related_model._meta, field_mask, field_select_mask
)
return select_mask
def _get_only_select_mask(self, opts, mask, select_mask=None):
if select_mask is None:
select_mask = {}
select_mask[opts.pk] = {}
# Only include fields mentioned in the mask.
for field_name, field_mask in mask.items():
field = opts.get_field(field_name)
# Retrieve the actual field associated with reverse relationships
# as that's what is expected in the select mask.
if field in opts.related_objects:
field_key = field.field
else:
field_key = field
field_select_mask = select_mask.setdefault(field_key, {})
if field_mask:
if not field.is_relation:
raise FieldError(next(iter(field_mask)))
related_model = field.remote_field.model._meta.concrete_model
self._get_only_select_mask(
related_model._meta, field_mask, field_select_mask
)
return select_mask
def get_select_mask(self):
"""
Convert the self.deferred_loading data structure to an alternate data
structure, describing the field that *will* be loaded. This is used to
compute the columns to select from the database and also by the
QuerySet class to work out which fields are being initialized on each
model. Models that have all their fields included aren't mentioned in
the result, only those that have field restrictions in place.
"""
field_names, defer = self.deferred_loading
if not field_names:
return {}
mask = {}
for field_name in field_names:
part_mask = mask
for part in field_name.split(LOOKUP_SEP):
part_mask = part_mask.setdefault(part, {})
opts = self.get_meta()
if defer:
return self._get_defer_select_mask(opts, mask)
return self._get_only_select_mask(opts, mask)
def table_alias(self, table_name, create=False, filtered_relation=None):
"""
Return a table alias for the given table_name and whether this is a
new alias or not.
If 'create' is true, a new alias is always created. Otherwise, the
most recently created alias for the table (if one exists) is reused.
"""
alias_list = self.table_map.get(table_name)
if not create and alias_list:
alias = alias_list[0]
self.alias_refcount[alias] += 1
return alias, False
# Create a new alias for this table.
if alias_list:
alias = "%s%d" % (self.alias_prefix, len(self.alias_map) + 1)
alias_list.append(alias)
else:
# The first occurrence of a table uses the table name directly.
alias = (
filtered_relation.alias if filtered_relation is not None else table_name
)
self.table_map[table_name] = [alias]
self.alias_refcount[alias] = 1
return alias, True
def ref_alias(self, alias):
"""Increases the reference count for this alias."""
self.alias_refcount[alias] += 1
def unref_alias(self, alias, amount=1):
"""Decreases the reference count for this alias."""
self.alias_refcount[alias] -= amount
def promote_joins(self, aliases):
"""
        Promote recursively the join type of given aliases and its children to
        an outer join. Only promote a join if it is nullable or the parent
        join is an outer join.
The children promotion is done to avoid join chains that contain a LOUTER
b INNER c. So, if we have currently a INNER b INNER c and a->b is promoted,
then we must also promote b->c automatically, or otherwise the promotion
of a->b doesn't actually change anything in the query results.
"""
aliases = list(aliases)
while aliases:
alias = aliases.pop(0)
if self.alias_map[alias].join_type is None:
# This is the base table (first FROM entry) - this table
# isn't really joined at all in the query, so we should not
# alter its join type.
continue
# Only the first alias (skipped above) should have None join_type
assert self.alias_map[alias].join_type is not None
parent_alias = self.alias_map[alias].parent_alias
parent_louter = (
parent_alias and self.alias_map[parent_alias].join_type == LOUTER
)
already_louter = self.alias_map[alias].join_type == LOUTER
if (self.alias_map[alias].nullable or parent_louter) and not already_louter:
self.alias_map[alias] = self.alias_map[alias].promote()
# Join type of 'alias' changed, so re-examine all aliases that
# refer to this one.
aliases.extend(
join
for join in self.alias_map
if self.alias_map[join].parent_alias == alias
and join not in aliases
)
def demote_joins(self, aliases):
"""
Change join type from LOUTER to INNER for all joins in aliases.
Similarly to promote_joins(), this method must ensure no join chains
containing first an outer, then an inner join are generated. If we
are demoting b->c join in chain a LOUTER b LOUTER c then we must
demote a->b automatically, or otherwise the demotion of b->c doesn't
        actually change anything in the query results.
"""
aliases = list(aliases)
while aliases:
alias = aliases.pop(0)
if self.alias_map[alias].join_type == LOUTER:
self.alias_map[alias] = self.alias_map[alias].demote()
parent_alias = self.alias_map[alias].parent_alias
if self.alias_map[parent_alias].join_type == INNER:
aliases.append(parent_alias)
def reset_refcounts(self, to_counts):
"""
Reset reference counts for aliases so that they match the value passed
in `to_counts`.
"""
for alias, cur_refcount in self.alias_refcount.copy().items():
unref_amount = cur_refcount - to_counts.get(alias, 0)
self.unref_alias(alias, unref_amount)
def change_aliases(self, change_map):
"""
Change the aliases in change_map (which maps old-alias -> new-alias),
relabelling any references to them in select columns and the where
clause.
"""
# If keys and values of change_map were to intersect, an alias might be
# updated twice (e.g. T4 -> T5, T5 -> T6, so also T4 -> T6) depending
# on their order in change_map.
assert set(change_map).isdisjoint(change_map.values())
# 1. Update references in "select" (normal columns plus aliases),
# "group by" and "where".
self.where.relabel_aliases(change_map)
if isinstance(self.group_by, tuple):
self.group_by = tuple(
[col.relabeled_clone(change_map) for col in self.group_by]
)
self.select = tuple([col.relabeled_clone(change_map) for col in self.select])
self.annotations = self.annotations and {
key: col.relabeled_clone(change_map)
for key, col in self.annotations.items()
}
# 2. Rename the alias in the internal table/alias datastructures.
for old_alias, new_alias in change_map.items():
if old_alias not in self.alias_map:
continue
alias_data = self.alias_map[old_alias].relabeled_clone(change_map)
self.alias_map[new_alias] = alias_data
self.alias_refcount[new_alias] = self.alias_refcount[old_alias]
del self.alias_refcount[old_alias]
del self.alias_map[old_alias]
table_aliases = self.table_map[alias_data.table_name]
for pos, alias in enumerate(table_aliases):
if alias == old_alias:
table_aliases[pos] = new_alias
break
self.external_aliases = {
# Table is aliased or it's being changed and thus is aliased.
change_map.get(alias, alias): (aliased or alias in change_map)
for alias, aliased in self.external_aliases.items()
}
def bump_prefix(self, other_query, exclude=None):
"""
Change the alias prefix to the next letter in the alphabet in a way
that the other query's aliases and this query's aliases will not
conflict. Even tables that previously had no alias will get an alias
after this call. To prevent changing aliases use the exclude parameter.
"""
def prefix_gen():
"""
Generate a sequence of characters in alphabetical order:
-> 'A', 'B', 'C', ...
When the alphabet is finished, the sequence will continue with the
Cartesian product:
-> 'AA', 'AB', 'AC', ...
"""
alphabet = ascii_uppercase
prefix = chr(ord(self.alias_prefix) + 1)
yield prefix
for n in count(1):
seq = alphabet[alphabet.index(prefix) :] if prefix else alphabet
for s in product(seq, repeat=n):
yield "".join(s)
prefix = None
if self.alias_prefix != other_query.alias_prefix:
# No clashes between self and outer query should be possible.
return
# Explicitly avoid infinite loop. The constant divider is based on how
# much depth recursive subquery references add to the stack. This value
# might need to be adjusted when adding or removing function calls from
# the code path in charge of performing these operations.
local_recursion_limit = sys.getrecursionlimit() // 16
for pos, prefix in enumerate(prefix_gen()):
if prefix not in self.subq_aliases:
self.alias_prefix = prefix
break
if pos > local_recursion_limit:
raise RecursionError(
"Maximum recursion depth exceeded: too many subqueries."
)
self.subq_aliases = self.subq_aliases.union([self.alias_prefix])
other_query.subq_aliases = other_query.subq_aliases.union(self.subq_aliases)
if exclude is None:
exclude = {}
self.change_aliases(
{
alias: "%s%d" % (self.alias_prefix, pos)
for pos, alias in enumerate(self.alias_map)
if alias not in exclude
}
)
def get_initial_alias(self):
"""
Return the first alias for this query, after increasing its reference
count.
"""
if self.alias_map:
alias = self.base_table
self.ref_alias(alias)
elif self.model:
alias = self.join(self.base_table_class(self.get_meta().db_table, None))
else:
alias = None
return alias
def count_active_tables(self):
"""
Return the number of tables in this query with a non-zero reference
count. After execution, the reference counts are zeroed, so tables
added in compiler will not be seen by this method.
"""
return len([1 for count in self.alias_refcount.values() if count])
def join(self, join, reuse=None):
"""
Return an alias for the 'join', either reusing an existing alias for
that join or creating a new one. 'join' is either a base_table_class or
join_class.
        The 'reuse' parameter can be either None, which means all joins are
reusable, or it can be a set containing the aliases that can be reused.
A join is always created as LOUTER if the lhs alias is LOUTER to make
sure chains like t1 LOUTER t2 INNER t3 aren't generated. All new
joins are created as LOUTER if the join is nullable.
"""
reuse_aliases = [
a
for a, j in self.alias_map.items()
if (reuse is None or a in reuse) and j == join
]
if reuse_aliases:
if join.table_alias in reuse_aliases:
reuse_alias = join.table_alias
else:
# Reuse the most recent alias of the joined table
# (a many-to-many relation may be joined multiple times).
reuse_alias = reuse_aliases[-1]
self.ref_alias(reuse_alias)
return reuse_alias
# No reuse is possible, so we need a new alias.
alias, _ = self.table_alias(
join.table_name, create=True, filtered_relation=join.filtered_relation
)
if join.join_type:
if self.alias_map[join.parent_alias].join_type == LOUTER or join.nullable:
join_type = LOUTER
else:
join_type = INNER
join.join_type = join_type
join.table_alias = alias
self.alias_map[alias] = join
if filtered_relation := join.filtered_relation:
resolve_reuse = reuse
if resolve_reuse is not None:
resolve_reuse = set(reuse) | {alias}
joins_len = len(self.alias_map)
join.filtered_relation = filtered_relation.resolve_expression(
self, reuse=resolve_reuse
)
            # Some joins were added during expression resolving; they must be
            # present before the one we just added.
if joins_len < len(self.alias_map):
self.alias_map[alias] = self.alias_map.pop(alias)
return alias
def join_parent_model(self, opts, model, alias, seen):
"""
Make sure the given 'model' is joined in the query. If 'model' isn't
a parent of 'opts' or if it is None this method is a no-op.
        The 'alias' is the root alias for starting the join; 'seen' is a dict
of model -> alias of existing joins. It must also contain a mapping
of None -> some alias. This will be returned in the no-op case.
"""
if model in seen:
return seen[model]
chain = opts.get_base_chain(model)
if not chain:
return alias
curr_opts = opts
for int_model in chain:
if int_model in seen:
curr_opts = int_model._meta
alias = seen[int_model]
continue
            # Proxy models have elements in the base chain with no parents;
            # assign the new options object and skip to the next base in
            # that case.
if not curr_opts.parents[int_model]:
curr_opts = int_model._meta
continue
link_field = curr_opts.get_ancestor_link(int_model)
join_info = self.setup_joins([link_field.name], curr_opts, alias)
curr_opts = int_model._meta
alias = seen[int_model] = join_info.joins[-1]
return alias or seen[None]
def check_alias(self, alias):
if FORBIDDEN_ALIAS_PATTERN.search(alias):
raise ValueError(
"Column aliases cannot contain whitespace characters, quotation marks, "
"semicolons, or SQL comments."
)
def add_annotation(self, annotation, alias, select=True):
"""Add a single annotation expression to the Query."""
self.check_alias(alias)
annotation = annotation.resolve_expression(self, allow_joins=True, reuse=None)
if select:
self.append_annotation_mask([alias])
else:
annotation_mask = (
value
for value in dict.fromkeys(self.annotation_select)
if value != alias
)
self.set_annotation_mask(annotation_mask)
self.annotations[alias] = annotation
def resolve_expression(self, query, *args, **kwargs):
clone = self.clone()
# Subqueries need to use a different set of aliases than the outer query.
clone.bump_prefix(query)
clone.subquery = True
clone.where.resolve_expression(query, *args, **kwargs)
# Resolve combined queries.
if clone.combinator:
clone.combined_queries = tuple(
[
combined_query.resolve_expression(query, *args, **kwargs)
for combined_query in clone.combined_queries
]
)
for key, value in clone.annotations.items():
resolved = value.resolve_expression(query, *args, **kwargs)
if hasattr(resolved, "external_aliases"):
resolved.external_aliases.update(clone.external_aliases)
clone.annotations[key] = resolved
# Outer query's aliases are considered external.
for alias, table in query.alias_map.items():
clone.external_aliases[alias] = (
isinstance(table, Join)
and table.join_field.related_model._meta.db_table != alias
) or (
isinstance(table, BaseTable) and table.table_name != table.table_alias
)
return clone
def get_external_cols(self):
exprs = chain(self.annotations.values(), self.where.children)
return [
col
for col in self._gen_cols(exprs, include_external=True)
if col.alias in self.external_aliases
]
def get_group_by_cols(self, wrapper=None):
        # If wrapper is referenced by an alias for an explicit GROUP BY
        # through values(), a reference to this expression (and not self)
        # must be returned so that external column references are not also
        # grouped against.
external_cols = self.get_external_cols()
if any(col.possibly_multivalued for col in external_cols):
return [wrapper or self]
return external_cols
def as_sql(self, compiler, connection):
# Some backends (e.g. Oracle) raise an error when a subquery contains
# unnecessary ORDER BY clause.
if (
self.subquery
and not connection.features.ignores_unnecessary_order_by_in_subqueries
):
self.clear_ordering(force=False)
for query in self.combined_queries:
query.clear_ordering(force=False)
sql, params = self.get_compiler(connection=connection).as_sql()
if self.subquery:
sql = "(%s)" % sql
return sql, params
def resolve_lookup_value(self, value, can_reuse, allow_joins):
if hasattr(value, "resolve_expression"):
value = value.resolve_expression(
self,
reuse=can_reuse,
allow_joins=allow_joins,
)
elif isinstance(value, (list, tuple)):
# The items of the iterable may be expressions and therefore need
# to be resolved independently.
values = (
self.resolve_lookup_value(sub_value, can_reuse, allow_joins)
for sub_value in value
)
type_ = type(value)
if hasattr(type_, "_make"): # namedtuple
return type_(*values)
return type_(values)
return value
def solve_lookup_type(self, lookup, summarize=False):
"""
Solve the lookup type from the lookup (e.g.: 'foobar__id__icontains').
"""
lookup_splitted = lookup.split(LOOKUP_SEP)
if self.annotations:
annotation, expression_lookups = refs_expression(
lookup_splitted, self.annotations
)
if annotation:
expression = self.annotations[annotation]
if summarize:
expression = Ref(annotation, expression)
return expression_lookups, (), expression
_, field, _, lookup_parts = self.names_to_path(lookup_splitted, self.get_meta())
field_parts = lookup_splitted[0 : len(lookup_splitted) - len(lookup_parts)]
if len(lookup_parts) > 1 and not field_parts:
raise FieldError(
                'Invalid lookup "%s" for model "%s".'
% (lookup, self.get_meta().model.__name__)
)
return lookup_parts, field_parts, False
def check_query_object_type(self, value, opts, field):
"""
Check whether the object passed while querying is of the correct type.
If not, raise a ValueError specifying the wrong object.
"""
if hasattr(value, "_meta"):
if not check_rel_lookup_compatibility(value._meta.model, opts, field):
raise ValueError(
'Cannot query "%s": Must be "%s" instance.'
% (value, opts.object_name)
)
def check_related_objects(self, field, value, opts):
"""Check the type of object passed to query relations."""
if field.is_relation:
# Check that the field and the queryset use the same model in a
# query like .filter(author=Author.objects.all()). For example, the
# opts would be Author's (from the author field) and value.model
# would be Author.objects.all() queryset's .model (Author also).
# The field is the related field on the lhs side.
if (
isinstance(value, Query)
and not value.has_select_fields
and not check_rel_lookup_compatibility(value.model, opts, field)
):
raise ValueError(
'Cannot use QuerySet for "%s": Use a QuerySet for "%s".'
% (value.model._meta.object_name, opts.object_name)
)
elif hasattr(value, "_meta"):
self.check_query_object_type(value, opts, field)
elif hasattr(value, "__iter__"):
for v in value:
self.check_query_object_type(v, opts, field)
def check_filterable(self, expression):
"""Raise an error if expression cannot be used in a WHERE clause."""
if hasattr(expression, "resolve_expression") and not getattr(
expression, "filterable", True
):
raise NotSupportedError(
expression.__class__.__name__ + " is disallowed in the filter "
"clause."
)
if hasattr(expression, "get_source_expressions"):
for expr in expression.get_source_expressions():
self.check_filterable(expr)
def build_lookup(self, lookups, lhs, rhs):
"""
Try to extract transforms and lookup from given lhs.
The lhs value is something that works like SQLExpression.
The rhs value is what the lookup is going to compare against.
The lookups is a list of names to extract using get_lookup()
and get_transform().
"""
# __exact is the default lookup if one isn't given.
*transforms, lookup_name = lookups or ["exact"]
for name in transforms:
lhs = self.try_transform(lhs, name)
# First try get_lookup() so that the lookup takes precedence if the lhs
# supports both transform and lookup for the name.
lookup_class = lhs.get_lookup(lookup_name)
if not lookup_class:
# A lookup wasn't found. Try to interpret the name as a transform
# and do an Exact lookup against it.
lhs = self.try_transform(lhs, lookup_name)
lookup_name = "exact"
lookup_class = lhs.get_lookup(lookup_name)
if not lookup_class:
return
lookup = lookup_class(lhs, rhs)
# Interpret '__exact=None' as the sql 'is NULL'; otherwise, reject all
# uses of None as a query value unless the lookup supports it.
if lookup.rhs is None and not lookup.can_use_none_as_rhs:
if lookup_name not in ("exact", "iexact"):
raise ValueError("Cannot use None as a query value")
return lhs.get_lookup("isnull")(lhs, True)
# For Oracle '' is equivalent to null. The check must be done at this
# stage because join promotion can't be done in the compiler. Using
# DEFAULT_DB_ALIAS isn't nice but it's the best that can be done here.
# A similar thing is done in is_nullable(), too.
if (
lookup_name == "exact"
and lookup.rhs == ""
and connections[DEFAULT_DB_ALIAS].features.interprets_empty_strings_as_nulls
):
return lhs.get_lookup("isnull")(lhs, True)
return lookup
def try_transform(self, lhs, name):
"""
Helper method for build_lookup(). Try to fetch and initialize
a transform for name parameter from lhs.
"""
transform_class = lhs.get_transform(name)
if transform_class:
return transform_class(lhs)
else:
output_field = lhs.output_field.__class__
suggested_lookups = difflib.get_close_matches(
name, lhs.output_field.get_lookups()
)
if suggested_lookups:
suggestion = ", perhaps you meant %s?" % " or ".join(suggested_lookups)
else:
suggestion = "."
raise FieldError(
"Unsupported lookup '%s' for %s or join on the field not "
"permitted%s" % (name, output_field.__name__, suggestion)
)
def build_filter(
self,
filter_expr,
branch_negated=False,
current_negated=False,
can_reuse=None,
allow_joins=True,
split_subq=True,
check_filterable=True,
summarize=False,
update_join_types=True,
):
"""
Build a WhereNode for a single filter clause but don't add it
to this Query. Query.add_q() will then add this filter to the where
Node.
The 'branch_negated' tells us if the current branch contains any
negations. This will be used to determine if subqueries are needed.
The 'current_negated' is used to determine if the current filter is
negated or not and this will be used to determine if IS NULL filtering
is needed.
The difference between current_negated and branch_negated is that
branch_negated is set on first negation, but current_negated is
flipped for each negation.
Note that add_filter will not do any negating itself, that is done
        higher up in the code by add_q().
The 'can_reuse' is a set of reusable joins for multijoins.
The method will create a filter clause that can be added to the current
query. However, if the filter isn't added to the query then the caller
is responsible for unreffing the joins used.
"""
if isinstance(filter_expr, dict):
raise FieldError("Cannot parse keyword query as dict")
if isinstance(filter_expr, Q):
return self._add_q(
filter_expr,
branch_negated=branch_negated,
current_negated=current_negated,
used_aliases=can_reuse,
allow_joins=allow_joins,
split_subq=split_subq,
check_filterable=check_filterable,
summarize=summarize,
update_join_types=update_join_types,
)
if hasattr(filter_expr, "resolve_expression"):
if not getattr(filter_expr, "conditional", False):
raise TypeError("Cannot filter against a non-conditional expression.")
condition = filter_expr.resolve_expression(
self, allow_joins=allow_joins, reuse=can_reuse, summarize=summarize
)
if not isinstance(condition, Lookup):
condition = self.build_lookup(["exact"], condition, True)
return WhereNode([condition], connector=AND), []
arg, value = filter_expr
if not arg:
raise FieldError("Cannot parse keyword query %r" % arg)
lookups, parts, reffed_expression = self.solve_lookup_type(arg, summarize)
if check_filterable:
self.check_filterable(reffed_expression)
if not allow_joins and len(parts) > 1:
raise FieldError("Joined field references are not permitted in this query")
pre_joins = self.alias_refcount.copy()
value = self.resolve_lookup_value(value, can_reuse, allow_joins)
used_joins = {
k for k, v in self.alias_refcount.items() if v > pre_joins.get(k, 0)
}
if check_filterable:
self.check_filterable(value)
if reffed_expression:
condition = self.build_lookup(lookups, reffed_expression, value)
return WhereNode([condition], connector=AND), []
opts = self.get_meta()
alias = self.get_initial_alias()
allow_many = not branch_negated or not split_subq
try:
join_info = self.setup_joins(
parts,
opts,
alias,
can_reuse=can_reuse,
allow_many=allow_many,
)
# Prevent iterator from being consumed by check_related_objects()
if isinstance(value, Iterator):
value = list(value)
self.check_related_objects(join_info.final_field, value, join_info.opts)
# split_exclude() needs to know which joins were generated for the
# lookup parts
self._lookup_joins = join_info.joins
except MultiJoin as e:
return self.split_exclude(filter_expr, can_reuse, e.names_with_path)
# Update used_joins before trimming since they are reused to determine
# which joins could be later promoted to INNER.
used_joins.update(join_info.joins)
targets, alias, join_list = self.trim_joins(
join_info.targets, join_info.joins, join_info.path
)
if can_reuse is not None:
can_reuse.update(join_list)
if join_info.final_field.is_relation:
if len(targets) == 1:
col = self._get_col(targets[0], join_info.final_field, alias)
else:
col = MultiColSource(
alias, targets, join_info.targets, join_info.final_field
)
else:
col = self._get_col(targets[0], join_info.final_field, alias)
condition = self.build_lookup(lookups, col, value)
lookup_type = condition.lookup_name
clause = WhereNode([condition], connector=AND)
require_outer = (
lookup_type == "isnull" and condition.rhs is True and not current_negated
)
if (
current_negated
and (lookup_type != "isnull" or condition.rhs is False)
and condition.rhs is not None
):
require_outer = True
if lookup_type != "isnull":
# The condition added here will be SQL like this:
# NOT (col IS NOT NULL), where the first NOT is added in
# upper layers of code. The reason for addition is that if col
# is null, then col != someval will result in SQL "unknown"
# which isn't the same as in Python. The Python None handling
# is wanted, and it can be gotten by
# (col IS NULL OR col != someval)
# <=>
# NOT (col IS NOT NULL AND col = someval).
if (
self.is_nullable(targets[0])
or self.alias_map[join_list[-1]].join_type == LOUTER
):
lookup_class = targets[0].get_lookup("isnull")
col = self._get_col(targets[0], join_info.targets[0], alias)
clause.add(lookup_class(col, False), AND)
# If someval is a nullable column, someval IS NOT NULL is
# added.
if isinstance(value, Col) and self.is_nullable(value.target):
lookup_class = value.target.get_lookup("isnull")
clause.add(lookup_class(value, False), AND)
return clause, used_joins if not require_outer else ()
def add_filter(self, filter_lhs, filter_rhs):
self.add_q(Q((filter_lhs, filter_rhs)))
def add_q(self, q_object):
"""
A preprocessor for the internal _add_q(). Responsible for doing final
join promotion.
"""
# For join promotion this case is doing an AND for the added q_object
# and existing conditions. So, any existing inner join forces the join
# type to remain inner. Existing outer joins can however be demoted.
# (Consider case where rel_a is LOUTER and rel_a__col=1 is added - if
# rel_a doesn't produce any rows, then the whole condition must fail.
        # So, demotion is OK.)
existing_inner = {
a for a in self.alias_map if self.alias_map[a].join_type == INNER
}
clause, _ = self._add_q(q_object, self.used_aliases)
if clause:
self.where.add(clause, AND)
self.demote_joins(existing_inner)
def build_where(self, filter_expr):
return self.build_filter(filter_expr, allow_joins=False)[0]
def clear_where(self):
self.where = WhereNode()
def _add_q(
self,
q_object,
used_aliases,
branch_negated=False,
current_negated=False,
allow_joins=True,
split_subq=True,
check_filterable=True,
summarize=False,
update_join_types=True,
):
"""Add a Q-object to the current filter."""
connector = q_object.connector
current_negated ^= q_object.negated
branch_negated = branch_negated or q_object.negated
target_clause = WhereNode(connector=connector, negated=q_object.negated)
joinpromoter = JoinPromoter(
q_object.connector, len(q_object.children), current_negated
)
for child in q_object.children:
child_clause, needed_inner = self.build_filter(
child,
can_reuse=used_aliases,
branch_negated=branch_negated,
current_negated=current_negated,
allow_joins=allow_joins,
split_subq=split_subq,
check_filterable=check_filterable,
summarize=summarize,
update_join_types=update_join_types,
)
joinpromoter.add_votes(needed_inner)
if child_clause:
target_clause.add(child_clause, connector)
if update_join_types:
needed_inner = joinpromoter.update_join_types(self)
else:
needed_inner = []
return target_clause, needed_inner
def add_filtered_relation(self, filtered_relation, alias):
filtered_relation.alias = alias
lookups = dict(get_children_from_q(filtered_relation.condition))
relation_lookup_parts, relation_field_parts, _ = self.solve_lookup_type(
filtered_relation.relation_name
)
if relation_lookup_parts:
raise ValueError(
"FilteredRelation's relation_name cannot contain lookups "
"(got %r)." % filtered_relation.relation_name
)
for lookup in chain(lookups):
lookup_parts, lookup_field_parts, _ = self.solve_lookup_type(lookup)
shift = 2 if not lookup_parts else 1
lookup_field_path = lookup_field_parts[:-shift]
for idx, lookup_field_part in enumerate(lookup_field_path):
if len(relation_field_parts) > idx:
if relation_field_parts[idx] != lookup_field_part:
raise ValueError(
"FilteredRelation's condition doesn't support "
"relations outside the %r (got %r)."
% (filtered_relation.relation_name, lookup)
)
else:
raise ValueError(
"FilteredRelation's condition doesn't support nested "
"relations deeper than the relation_name (got %r for "
"%r)." % (lookup, filtered_relation.relation_name)
)
filtered_relation.condition = rename_prefix_from_q(
filtered_relation.relation_name,
alias,
filtered_relation.condition,
)
self._filtered_relations[filtered_relation.alias] = filtered_relation
def names_to_path(self, names, opts, allow_many=True, fail_on_missing=False):
"""
        Walk the list of names and turn them into PathInfo tuples. A single
name in 'names' can generate multiple PathInfos (m2m, for example).
'names' is the path of names to travel, 'opts' is the model Options we
start the name resolving from, 'allow_many' is as for setup_joins().
If fail_on_missing is set to True, then a name that can't be resolved
will generate a FieldError.
Return a list of PathInfo tuples. In addition return the final field
(the last used join field) and target (which is a field guaranteed to
contain the same value as the final field). Finally, return those names
that weren't found (which are likely transforms and the final lookup).
"""
path, names_with_path = [], []
for pos, name in enumerate(names):
cur_names_with_path = (name, [])
if name == "pk":
name = opts.pk.name
field = None
filtered_relation = None
try:
if opts is None:
raise FieldDoesNotExist
field = opts.get_field(name)
except FieldDoesNotExist:
if name in self.annotation_select:
field = self.annotation_select[name].output_field
elif name in self._filtered_relations and pos == 0:
filtered_relation = self._filtered_relations[name]
if LOOKUP_SEP in filtered_relation.relation_name:
parts = filtered_relation.relation_name.split(LOOKUP_SEP)
filtered_relation_path, field, _, _ = self.names_to_path(
parts,
opts,
allow_many,
fail_on_missing,
)
path.extend(filtered_relation_path[:-1])
else:
field = opts.get_field(filtered_relation.relation_name)
if field is not None:
# Fields that contain one-to-many relations with a generic
# model (like a GenericForeignKey) cannot generate reverse
# relations and therefore cannot be used for reverse querying.
if field.is_relation and not field.related_model:
raise FieldError(
"Field %r does not generate an automatic reverse "
"relation and therefore cannot be used for reverse "
"querying. If it is a GenericForeignKey, consider "
"adding a GenericRelation." % name
)
try:
model = field.model._meta.concrete_model
except AttributeError:
# QuerySet.annotate() may introduce fields that aren't
# attached to a model.
model = None
else:
# We didn't find the current field, so move position back
# one step.
pos -= 1
if pos == -1 or fail_on_missing:
available = sorted(
[
*get_field_names_from_opts(opts),
*self.annotation_select,
*self._filtered_relations,
]
)
raise FieldError(
"Cannot resolve keyword '%s' into field. "
"Choices are: %s" % (name, ", ".join(available))
)
break
# Check if we need any joins for concrete inheritance cases (the
# field lives in parent, but we are currently in one of its
# children)
if opts is not None and model is not opts.model:
path_to_parent = opts.get_path_to_parent(model)
if path_to_parent:
path.extend(path_to_parent)
cur_names_with_path[1].extend(path_to_parent)
opts = path_to_parent[-1].to_opts
if hasattr(field, "path_infos"):
if filtered_relation:
pathinfos = field.get_path_info(filtered_relation)
else:
pathinfos = field.path_infos
if not allow_many:
for inner_pos, p in enumerate(pathinfos):
if p.m2m:
cur_names_with_path[1].extend(pathinfos[0 : inner_pos + 1])
names_with_path.append(cur_names_with_path)
raise MultiJoin(pos + 1, names_with_path)
last = pathinfos[-1]
path.extend(pathinfos)
final_field = last.join_field
opts = last.to_opts
targets = last.target_fields
cur_names_with_path[1].extend(pathinfos)
names_with_path.append(cur_names_with_path)
else:
# Local non-relational field.
final_field = field
targets = (field,)
if fail_on_missing and pos + 1 != len(names):
raise FieldError(
"Cannot resolve keyword %r into field. Join on '%s'"
" not permitted." % (names[pos + 1], name)
)
break
return path, final_field, targets, names[pos + 1 :]
def setup_joins(
self,
names,
opts,
alias,
can_reuse=None,
allow_many=True,
):
"""
Compute the necessary table joins for the passage through the fields
given in 'names'. 'opts' is the Options class for the current model
(which gives the table we are starting from), 'alias' is the alias for
the table to start the joining from.
The 'can_reuse' defines the reverse foreign key joins we can reuse. It
        can be None, in which case all joins are reusable, or a set of aliases
that can be reused. Note that non-reverse foreign keys are always
reusable when using setup_joins().
If 'allow_many' is False, then any reverse foreign key seen will
generate a MultiJoin exception.
Return the final field involved in the joins, the target field (used
for any 'where' constraint), the final 'opts' value, the joins, the
field path traveled to generate the joins, and a transform function
that takes a field and alias and is equivalent to `field.get_col(alias)`
in the simple case but wraps field transforms if they were included in
names.
The target field is the field containing the concrete value. Final
field can be something different, for example foreign key pointing to
that value. Final field is needed for example in some value
conversions (convert 'obj' in fk__id=obj to pk val using the foreign
key field for example).
"""
joins = [alias]
# The transform can't be applied yet, as joins must be trimmed later.
# To avoid making every caller of this method look up transforms
# directly, compute transforms here and create a partial that converts
# fields to the appropriate wrapped version.
def final_transformer(field, alias):
if not self.alias_cols:
alias = None
return field.get_col(alias)
# Try resolving all the names as fields first. If there's an error,
# treat trailing names as lookups until a field can be resolved.
last_field_exception = None
for pivot in range(len(names), 0, -1):
try:
path, final_field, targets, rest = self.names_to_path(
names[:pivot],
opts,
allow_many,
fail_on_missing=True,
)
except FieldError as exc:
if pivot == 1:
# The first item cannot be a lookup, so it's safe
# to raise the field error here.
raise
else:
last_field_exception = exc
else:
# The transforms are the remaining items that couldn't be
# resolved into fields.
transforms = names[pivot:]
break
for name in transforms:
def transform(field, alias, *, name, previous):
try:
wrapped = previous(field, alias)
return self.try_transform(wrapped, name)
except FieldError:
# FieldError is raised if the transform doesn't exist.
if isinstance(final_field, Field) and last_field_exception:
raise last_field_exception
else:
raise
final_transformer = functools.partial(
transform, name=name, previous=final_transformer
)
final_transformer.has_transforms = True
# Then, add the path to the query's joins. Note that we can't trim
# joins at this stage - we will need the information about join type
# of the trimmed joins.
for join in path:
if join.filtered_relation:
filtered_relation = join.filtered_relation.clone()
table_alias = filtered_relation.alias
else:
filtered_relation = None
table_alias = None
opts = join.to_opts
if join.direct:
nullable = self.is_nullable(join.join_field)
else:
nullable = True
connection = self.join_class(
opts.db_table,
alias,
table_alias,
INNER,
join.join_field,
nullable,
filtered_relation=filtered_relation,
)
reuse = can_reuse if join.m2m else None
alias = self.join(connection, reuse=reuse)
joins.append(alias)
return JoinInfo(final_field, targets, opts, joins, path, final_transformer)
def trim_joins(self, targets, joins, path):
"""
        The 'targets' parameter is the final field being joined to, 'joins'
        is the full list of join aliases. The 'path' contains the PathInfos
used to create the joins.
Return the final target field and table alias and the new active
joins.
Always trim any direct join if the target column is already in the
previous table. Can't trim reverse joins as it's unknown if there's
anything on the other side of the join.
"""
joins = joins[:]
for pos, info in enumerate(reversed(path)):
if len(joins) == 1 or not info.direct:
break
if info.filtered_relation:
break
join_targets = {t.column for t in info.join_field.foreign_related_fields}
cur_targets = {t.column for t in targets}
if not cur_targets.issubset(join_targets):
break
targets_dict = {
r[1].column: r[0]
for r in info.join_field.related_fields
if r[1].column in cur_targets
}
targets = tuple(targets_dict[t.column] for t in targets)
self.unref_alias(joins.pop())
return targets, joins[-1], joins
@classmethod
def _gen_cols(cls, exprs, include_external=False, resolve_refs=True):
for expr in exprs:
if isinstance(expr, Col):
yield expr
elif include_external and callable(
getattr(expr, "get_external_cols", None)
):
yield from expr.get_external_cols()
elif hasattr(expr, "get_source_expressions"):
if not resolve_refs and isinstance(expr, Ref):
continue
yield from cls._gen_cols(
expr.get_source_expressions(),
include_external=include_external,
resolve_refs=resolve_refs,
)
@classmethod
def _gen_col_aliases(cls, exprs):
yield from (expr.alias for expr in cls._gen_cols(exprs))
def resolve_ref(self, name, allow_joins=True, reuse=None, summarize=False):
annotation = self.annotations.get(name)
if annotation is not None:
if not allow_joins:
for alias in self._gen_col_aliases([annotation]):
if isinstance(self.alias_map[alias], Join):
raise FieldError(
"Joined field references are not permitted in this query"
)
if summarize:
# Summarize currently means we are doing an aggregate() query
# which is executed as a wrapped subquery if any of the
# aggregate() elements reference an existing annotation. In
# that case we need to return a Ref to the subquery's annotation.
if name not in self.annotation_select:
raise FieldError(
"Cannot aggregate over the '%s' alias. Use annotate() "
"to promote it." % name
)
return Ref(name, self.annotation_select[name])
else:
return annotation
else:
field_list = name.split(LOOKUP_SEP)
annotation = self.annotations.get(field_list[0])
if annotation is not None:
for transform in field_list[1:]:
annotation = self.try_transform(annotation, transform)
return annotation
join_info = self.setup_joins(
field_list, self.get_meta(), self.get_initial_alias(), can_reuse=reuse
)
targets, final_alias, join_list = self.trim_joins(
join_info.targets, join_info.joins, join_info.path
)
if not allow_joins and len(join_list) > 1:
raise FieldError(
"Joined field references are not permitted in this query"
)
if len(targets) > 1:
raise FieldError(
"Referencing multicolumn fields with F() objects isn't supported"
)
# Verify that the last lookup in name is a field or a transform:
# transform_function() raises FieldError if not.
transform = join_info.transform_function(targets[0], final_alias)
if reuse is not None:
reuse.update(join_list)
return transform
def split_exclude(self, filter_expr, can_reuse, names_with_path):
"""
When doing an exclude against any kind of N-to-many relation, we need
to use a subquery. This method constructs the nested query, given the
original exclude filter (filter_expr) and the portion up to the first
N-to-many relation field.
For example, if the origin filter is ~Q(child__name='foo'), filter_expr
is ('child__name', 'foo') and can_reuse is a set of joins usable for
filters in the original query.
We will turn this into equivalent of:
WHERE NOT EXISTS(
SELECT 1
FROM child
WHERE name = 'foo' AND child.parent_id = parent.id
LIMIT 1
)
"""
# Generate the inner query.
query = self.__class__(self.model)
query._filtered_relations = self._filtered_relations
filter_lhs, filter_rhs = filter_expr
if isinstance(filter_rhs, OuterRef):
filter_rhs = OuterRef(filter_rhs)
elif isinstance(filter_rhs, F):
filter_rhs = OuterRef(filter_rhs.name)
query.add_filter(filter_lhs, filter_rhs)
query.clear_ordering(force=True)
# Try to have as simple as possible subquery -> trim leading joins from
# the subquery.
trimmed_prefix, contains_louter = query.trim_start(names_with_path)
col = query.select[0]
select_field = col.target
alias = col.alias
if alias in can_reuse:
pk = select_field.model._meta.pk
# Need to add a restriction so that outer query's filters are in effect for
# the subquery, too.
query.bump_prefix(self)
lookup_class = select_field.get_lookup("exact")
# Note that the query.select[0].alias is different from alias
# due to bump_prefix above.
lookup = lookup_class(pk.get_col(query.select[0].alias), pk.get_col(alias))
query.where.add(lookup, AND)
query.external_aliases[alias] = True
lookup_class = select_field.get_lookup("exact")
lookup = lookup_class(col, ResolvedOuterRef(trimmed_prefix))
query.where.add(lookup, AND)
condition, needed_inner = self.build_filter(Exists(query))
if contains_louter:
or_null_condition, _ = self.build_filter(
("%s__isnull" % trimmed_prefix, True),
current_negated=True,
branch_negated=True,
can_reuse=can_reuse,
)
condition.add(or_null_condition, OR)
# Note that the end result will be:
# (outercol NOT IN innerq AND outercol IS NOT NULL) OR outercol IS NULL.
# This might look crazy but due to how IN works, this seems to be
# correct. If the IS NOT NULL check is removed then outercol NOT
# IN will return UNKNOWN. If the IS NULL check is removed, then if
# outercol IS NULL we will not match the row.
return condition, needed_inner
def set_empty(self):
self.where.add(NothingNode(), AND)
for query in self.combined_queries:
query.set_empty()
def is_empty(self):
return any(isinstance(c, NothingNode) for c in self.where.children)
def set_limits(self, low=None, high=None):
"""
Adjust the limits on the rows retrieved. Use low/high to set these,
as it makes it more Pythonic to read and write. When the SQL query is
created, convert them to the appropriate offset and limit values.
Apply any limits passed in here to the existing constraints. Add low
to the current low value and clamp both to any existing high value.
"""
if high is not None:
if self.high_mark is not None:
self.high_mark = min(self.high_mark, self.low_mark + high)
else:
self.high_mark = self.low_mark + high
if low is not None:
if self.high_mark is not None:
self.low_mark = min(self.high_mark, self.low_mark + low)
else:
self.low_mark = self.low_mark + low
if self.low_mark == self.high_mark:
self.set_empty()
def clear_limits(self):
"""Clear any existing limits."""
self.low_mark, self.high_mark = 0, None
@property
def is_sliced(self):
return self.low_mark != 0 or self.high_mark is not None
def has_limit_one(self):
return self.high_mark is not None and (self.high_mark - self.low_mark) == 1
def can_filter(self):
"""
Return True if adding filters to this instance is still possible.
Typically, this means no limits or offsets have been put on the results.
"""
return not self.is_sliced
def clear_select_clause(self):
"""Remove all fields from SELECT clause."""
self.select = ()
self.default_cols = False
self.select_related = False
self.set_extra_mask(())
self.set_annotation_mask(())
def clear_select_fields(self):
"""
Clear the list of fields to select (but not extra_select columns).
Some queryset types completely replace any existing list of select
columns.
"""
self.select = ()
self.values_select = ()
def add_select_col(self, col, name):
self.select += (col,)
self.values_select += (name,)
def set_select(self, cols):
self.default_cols = False
self.select = tuple(cols)
def add_distinct_fields(self, *field_names):
"""
Add and resolve the given fields to the query's "distinct on" clause.
"""
self.distinct_fields = field_names
self.distinct = True
def add_fields(self, field_names, allow_m2m=True):
"""
Add the given (model) fields to the select set. Add the field names in
the order specified.
"""
alias = self.get_initial_alias()
opts = self.get_meta()
try:
cols = []
for name in field_names:
# Join promotion note - we must not remove any rows here, so
# if there is no existing joins, use outer join.
join_info = self.setup_joins(
name.split(LOOKUP_SEP), opts, alias, allow_many=allow_m2m
)
targets, final_alias, joins = self.trim_joins(
join_info.targets,
join_info.joins,
join_info.path,
)
for target in targets:
cols.append(join_info.transform_function(target, final_alias))
if cols:
self.set_select(cols)
except MultiJoin:
raise FieldError("Invalid field name: '%s'" % name)
except FieldError:
if LOOKUP_SEP in name:
# For lookups spanning over relationships, show the error
# from the model on which the lookup failed.
raise
else:
names = sorted(
[
*get_field_names_from_opts(opts),
*self.extra,
*self.annotation_select,
*self._filtered_relations,
]
)
raise FieldError(
"Cannot resolve keyword %r into field. "
"Choices are: %s" % (name, ", ".join(names))
)
def add_ordering(self, *ordering):
"""
Add items from the 'ordering' sequence to the query's "order by"
clause. These items are either field names (not column names) --
possibly with a direction prefix ('-' or '?') -- or OrderBy
expressions.
If 'ordering' is empty, clear all ordering from the query.
"""
errors = []
for item in ordering:
if isinstance(item, str):
if item == "?":
continue
item = item.removeprefix("-")
if item in self.annotations:
continue
if self.extra and item in self.extra:
continue
# names_to_path() validates the lookup. A descriptive
                # FieldError will be raised if it's not.
self.names_to_path(item.split(LOOKUP_SEP), self.model._meta)
elif not hasattr(item, "resolve_expression"):
errors.append(item)
if getattr(item, "contains_aggregate", False):
raise FieldError(
"Using an aggregate in order_by() without also including "
"it in annotate() is not allowed: %s" % item
)
if errors:
raise FieldError("Invalid order_by arguments: %s" % errors)
if ordering:
self.order_by += ordering
else:
self.default_ordering = False
def clear_ordering(self, force=False, clear_default=True):
"""
Remove any ordering settings if the current query allows it without
side effects, set 'force' to True to clear the ordering regardless.
If 'clear_default' is True, there will be no ordering in the resulting
query (not even the model's default).
"""
if not force and (
self.is_sliced or self.distinct_fields or self.select_for_update
):
return
self.order_by = ()
self.extra_order_by = ()
if clear_default:
self.default_ordering = False
def set_group_by(self, allow_aliases=True):
"""
Expand the GROUP BY clause required by the query.
This will usually be the set of all non-aggregate fields in the
return data. If the database backend supports grouping by the
primary key, and the query would be equivalent, the optimization
will be made automatically.
"""
if allow_aliases and self.values_select:
            # If grouping by aliases is allowed, assign selected value aliases
# by moving them to annotations.
group_by_annotations = {}
values_select = {}
for alias, expr in zip(self.values_select, self.select):
if isinstance(expr, Col):
values_select[alias] = expr
else:
group_by_annotations[alias] = expr
self.annotations = {**group_by_annotations, **self.annotations}
self.append_annotation_mask(group_by_annotations)
self.select = tuple(values_select.values())
self.values_select = tuple(values_select)
group_by = list(self.select)
for alias, annotation in self.annotation_select.items():
if not (group_by_cols := annotation.get_group_by_cols()):
continue
if allow_aliases and not annotation.contains_aggregate:
group_by.append(Ref(alias, annotation))
else:
group_by.extend(group_by_cols)
self.group_by = tuple(group_by)
def add_select_related(self, fields):
"""
Set up the select_related data structure so that we only select
certain related models (as opposed to all models, when
self.select_related=True).
"""
if isinstance(self.select_related, bool):
field_dict = {}
else:
field_dict = self.select_related
for field in fields:
d = field_dict
for part in field.split(LOOKUP_SEP):
d = d.setdefault(part, {})
self.select_related = field_dict
def add_extra(self, select, select_params, where, params, tables, order_by):
"""
Add data to the various extra_* attributes for user-created additions
to the query.
"""
if select:
# We need to pair any placeholder markers in the 'select'
# dictionary with their parameters in 'select_params' so that
# subsequent updates to the select dictionary also adjust the
# parameters appropriately.
select_pairs = {}
if select_params:
param_iter = iter(select_params)
else:
param_iter = iter([])
for name, entry in select.items():
self.check_alias(name)
entry = str(entry)
entry_params = []
pos = entry.find("%s")
while pos != -1:
if pos == 0 or entry[pos - 1] != "%":
entry_params.append(next(param_iter))
pos = entry.find("%s", pos + 2)
select_pairs[name] = (entry, entry_params)
self.extra.update(select_pairs)
if where or params:
self.where.add(ExtraWhere(where, params), AND)
if tables:
self.extra_tables += tuple(tables)
if order_by:
self.extra_order_by = order_by
def clear_deferred_loading(self):
"""Remove any fields from the deferred loading set."""
self.deferred_loading = (frozenset(), True)
def add_deferred_loading(self, field_names):
"""
Add the given list of model field names to the set of fields to
exclude from loading from the database when automatic column selection
is done. Add the new field names to any existing field names that
are deferred (or removed from any existing field names that are marked
as the only ones for immediate loading).
"""
# Fields on related models are stored in the literal double-underscore
        # format, so that we can use a set data structure. We do the foo__bar
# splitting and handling when computing the SQL column names (as part of
# get_columns()).
existing, defer = self.deferred_loading
if defer:
# Add to existing deferred names.
self.deferred_loading = existing.union(field_names), True
else:
# Remove names from the set of any existing "immediate load" names.
if new_existing := existing.difference(field_names):
self.deferred_loading = new_existing, False
else:
self.clear_deferred_loading()
if new_only := set(field_names).difference(existing):
self.deferred_loading = new_only, True
def add_immediate_loading(self, field_names):
"""
Add the given list of model field names to the set of fields to
retrieve when the SQL is executed ("immediate loading" fields). The
field names replace any existing immediate loading field names. If
there are field names already specified for deferred loading, remove
those names from the new field_names before storing the new names
for immediate loading. (That is, immediate loading overrides any
existing immediate values, but respects existing deferrals.)
"""
existing, defer = self.deferred_loading
field_names = set(field_names)
if "pk" in field_names:
field_names.remove("pk")
field_names.add(self.get_meta().pk.name)
if defer:
# Remove any existing deferred names from the current set before
# setting the new names.
self.deferred_loading = field_names.difference(existing), False
else:
# Replace any existing "immediate load" field names.
self.deferred_loading = frozenset(field_names), False
def set_annotation_mask(self, names):
"""Set the mask of annotations that will be returned by the SELECT."""
if names is None:
self.annotation_select_mask = None
else:
self.annotation_select_mask = list(dict.fromkeys(names))
self._annotation_select_cache = None
def append_annotation_mask(self, names):
if self.annotation_select_mask is not None:
self.set_annotation_mask((*self.annotation_select_mask, *names))
def set_extra_mask(self, names):
"""
Set the mask of extra select items that will be returned by SELECT.
Don't remove them from the Query since they might be used later.
"""
if names is None:
self.extra_select_mask = None
else:
self.extra_select_mask = set(names)
self._extra_select_cache = None
def set_values(self, fields):
self.select_related = False
self.clear_deferred_loading()
self.clear_select_fields()
self.has_select_fields = True
if fields:
field_names = []
extra_names = []
annotation_names = []
if not self.extra and not self.annotations:
# Shortcut - if there are no extra or annotations, then
# the values() clause must be just field names.
field_names = list(fields)
else:
self.default_cols = False
for f in fields:
if f in self.extra_select:
extra_names.append(f)
elif f in self.annotation_select:
annotation_names.append(f)
elif f in self.annotations:
raise FieldError(
f"Cannot select the '{f}' alias. Use annotate() to "
"promote it."
)
else:
                        # Call `names_to_path` so that, if `f` is not
                        # resolvable, the raised FieldError lists annotations
                        # about to be masked among the valid choices.
if self.annotation_select:
self.names_to_path(f.split(LOOKUP_SEP), self.model._meta)
field_names.append(f)
self.set_extra_mask(extra_names)
self.set_annotation_mask(annotation_names)
selected = frozenset(field_names + extra_names + annotation_names)
else:
field_names = [f.attname for f in self.model._meta.concrete_fields]
selected = frozenset(field_names)
# Selected annotations must be known before setting the GROUP BY
# clause.
if self.group_by is True:
self.add_fields(
(f.attname for f in self.model._meta.concrete_fields), False
)
# Disable GROUP BY aliases to avoid orphaning references to the
# SELECT clause which is about to be cleared.
self.set_group_by(allow_aliases=False)
self.clear_select_fields()
elif self.group_by:
# Resolve GROUP BY annotation references if they are not part of
# the selected fields anymore.
group_by = []
for expr in self.group_by:
if isinstance(expr, Ref) and expr.refs not in selected:
expr = self.annotations[expr.refs]
group_by.append(expr)
self.group_by = tuple(group_by)
self.values_select = tuple(field_names)
self.add_fields(field_names, True)
@property
def annotation_select(self):
"""
Return the dictionary of aggregate columns that are not masked and
should be used in the SELECT clause. Cache this result for performance.
"""
if self._annotation_select_cache is not None:
return self._annotation_select_cache
elif not self.annotations:
return {}
elif self.annotation_select_mask is not None:
self._annotation_select_cache = {
k: self.annotations[k]
for k in self.annotation_select_mask
if k in self.annotations
}
return self._annotation_select_cache
else:
return self.annotations
@property
def extra_select(self):
if self._extra_select_cache is not None:
return self._extra_select_cache
if not self.extra:
return {}
elif self.extra_select_mask is not None:
self._extra_select_cache = {
k: v for k, v in self.extra.items() if k in self.extra_select_mask
}
return self._extra_select_cache
else:
return self.extra
def trim_start(self, names_with_path):
"""
Trim joins from the start of the join path. The candidates for trim
are the PathInfos in names_with_path structure that are m2m joins.
Also set the select column so the start matches the join.
This method is meant to be used for generating the subquery joins &
cols in split_exclude().
Return a lookup usable for doing outerq.filter(lookup=self) and a
boolean indicating if the joins in the prefix contain a LEFT OUTER join.
_"""
all_paths = []
for _, paths in names_with_path:
all_paths.extend(paths)
contains_louter = False
# Trim and operate only on tables that were generated for
# the lookup part of the query. That is, avoid trimming
# joins generated for F() expressions.
lookup_tables = [
t for t in self.alias_map if t in self._lookup_joins or t == self.base_table
]
for trimmed_paths, path in enumerate(all_paths):
if path.m2m:
break
if self.alias_map[lookup_tables[trimmed_paths + 1]].join_type == LOUTER:
contains_louter = True
alias = lookup_tables[trimmed_paths]
self.unref_alias(alias)
        # The path.join_field is a Rel; let's get the other side's field.
join_field = path.join_field.field
# Build the filter prefix.
paths_in_prefix = trimmed_paths
trimmed_prefix = []
for name, path in names_with_path:
if paths_in_prefix - len(path) < 0:
break
trimmed_prefix.append(name)
paths_in_prefix -= len(path)
trimmed_prefix.append(join_field.foreign_related_fields[0].name)
trimmed_prefix = LOOKUP_SEP.join(trimmed_prefix)
        # Let's still see if we can trim the first join from the inner query
# (that is, self). We can't do this for:
# - LEFT JOINs because we would miss those rows that have nothing on
# the outer side,
# - INNER JOINs from filtered relations because we would miss their
# filters.
first_join = self.alias_map[lookup_tables[trimmed_paths + 1]]
if first_join.join_type != LOUTER and not first_join.filtered_relation:
select_fields = [r[0] for r in join_field.related_fields]
select_alias = lookup_tables[trimmed_paths + 1]
self.unref_alias(lookup_tables[trimmed_paths])
extra_restriction = join_field.get_extra_restriction(
None, lookup_tables[trimmed_paths + 1]
)
if extra_restriction:
self.where.add(extra_restriction, AND)
else:
# TODO: It might be possible to trim more joins from the start of the
# inner query if it happens to have a longer join chain containing the
            # values in select_fields. Let's punt this one for now.
select_fields = [r[1] for r in join_field.related_fields]
select_alias = lookup_tables[trimmed_paths]
# The found starting point is likely a join_class instead of a
# base_table_class reference. But the first entry in the query's FROM
# clause must not be a JOIN.
for table in self.alias_map:
if self.alias_refcount[table] > 0:
self.alias_map[table] = self.base_table_class(
self.alias_map[table].table_name,
table,
)
break
self.set_select([f.get_col(select_alias) for f in select_fields])
return trimmed_prefix, contains_louter
def is_nullable(self, field):
"""
Check if the given field should be treated as nullable.
Some backends treat '' as null and Django treats such fields as
nullable for those backends. In such situations field.null can be
False even if we should treat the field as nullable.
"""
# We need to use DEFAULT_DB_ALIAS here, as QuerySet does not have
# (nor should it have) knowledge of which connection is going to be
# used. The proper fix would be to defer all decisions where
# is_nullable() is needed to the compiler stage, but that is not easy
# to do currently.
return field.null or (
field.empty_strings_allowed
and connections[DEFAULT_DB_ALIAS].features.interprets_empty_strings_as_nulls
)
def get_order_dir(field, default="ASC"):
"""
Return the field name and direction for an order specification. For
example, '-foo' is returned as ('foo', 'DESC').
The 'default' param is used to indicate which way no prefix (or a '+'
prefix) should sort. The '-' prefix always sorts the opposite way.
"""
dirn = ORDER_DIR[default]
if field[0] == "-":
return field[1:], dirn[1]
return field, dirn[0]
class JoinPromoter:
"""
A class to abstract away join promotion problems for complex filter
conditions.
"""
def __init__(self, connector, num_children, negated):
self.connector = connector
self.negated = negated
if self.negated:
if connector == AND:
self.effective_connector = OR
else:
self.effective_connector = AND
else:
self.effective_connector = self.connector
self.num_children = num_children
# Maps of table alias to how many times it is seen as required for
# inner and/or outer joins.
self.votes = Counter()
def __repr__(self):
return (
f"{self.__class__.__qualname__}(connector={self.connector!r}, "
f"num_children={self.num_children!r}, negated={self.negated!r})"
)
def add_votes(self, votes):
"""
        Add a single vote per item to self.votes. The parameter can be any
        iterable.
"""
self.votes.update(votes)
def update_join_types(self, query):
"""
Change join types so that the generated query is as efficient as
possible, but still correct. So, change as many joins as possible
to INNER, but don't make OUTER joins INNER if that could remove
results from the query.
"""
to_promote = set()
to_demote = set()
# The effective_connector is used so that NOT (a AND b) is treated
# similarly to (a OR b) for join promotion.
for table, votes in self.votes.items():
# We must use outer joins in OR case when the join isn't contained
# in all of the joins. Otherwise the INNER JOIN itself could remove
# valid results. Consider the case where a model with rel_a and
# rel_b relations is queried with rel_a__col=1 | rel_b__col=2. Now,
            # if the rel_a join doesn't produce any results (for example, a
            # reverse foreign key with no matching rows, or a null value in a
            # direct foreign key), and
# there is a matching row in rel_b with col=2, then an INNER join
# to rel_a would remove a valid match from the query. So, we need
# to promote any existing INNER to LOUTER (it is possible this
# promotion in turn will be demoted later on).
if self.effective_connector == OR and votes < self.num_children:
to_promote.add(table)
# If connector is AND and there is a filter that can match only
# when there is a joinable row, then use INNER. For example, in
# rel_a__col=1 & rel_b__col=2, if either of the rels produce NULL
# as join output, then the col=1 or col=2 can't match (as
# NULL=anything is always false).
# For the OR case, if all children voted for a join to be inner,
# then we can use INNER for the join. For example:
# (rel_a__col__icontains=Alex | rel_a__col__icontains=Russell)
# then if rel_a doesn't produce any rows, the whole condition
# can't match. Hence we can safely use INNER join.
if self.effective_connector == AND or (
self.effective_connector == OR and votes == self.num_children
):
to_demote.add(table)
# Finally, what happens in cases where we have:
# (rel_a__col=1|rel_b__col=2) & rel_a__col__gte=0
# Now, we first generate the OR clause, and promote joins for it
# in the first if branch above. Both rel_a and rel_b are promoted
# to LOUTER joins. After that we do the AND case. The OR case
# voted no inner joins but the rel_a__col__gte=0 votes inner join
# for rel_a. We demote it back to INNER join (in AND case a single
# vote is enough). The demotion is OK, if rel_a doesn't produce
# rows, then the rel_a__col__gte=0 clause can't be true, and thus
# the whole clause must be false. So, it is safe to use INNER
# join.
# Note that in this example we could just as well have the __gte
# clause and the OR clause swapped. Or we could replace the __gte
# clause with an OR clause containing rel_a__col=1|rel_a__col=2,
# and again we could safely demote to INNER.
query.promote_joins(to_promote)
query.demote_joins(to_demote)
return to_demote
"""
A Python "serializer". Doesn't do much serializing per se -- just converts to
and from basic Python data types (lists, dicts, strings, etc.). Useful as a basis for
other serializers.
"""
from django.apps import apps
from django.core.serializers import base
from django.db import DEFAULT_DB_ALIAS, models
from django.utils.encoding import is_protected_type
class Serializer(base.Serializer):
"""
Serialize a QuerySet to basic Python objects.
"""
internal_use_only = True
def start_serialization(self):
self._current = None
self.objects = []
def end_serialization(self):
pass
def start_object(self, obj):
self._current = {}
def end_object(self, obj):
self.objects.append(self.get_dump_object(obj))
self._current = None
def get_dump_object(self, obj):
data = {"model": str(obj._meta)}
if not self.use_natural_primary_keys or not hasattr(obj, "natural_key"):
data["pk"] = self._value_from_field(obj, obj._meta.pk)
data["fields"] = self._current
return data
def _value_from_field(self, obj, field):
value = field.value_from_object(obj)
# Protected types (i.e., primitives like None, numbers, dates,
# and Decimals) are passed through as is. All other values are
# converted to string first.
return value if is_protected_type(value) else field.value_to_string(obj)
def handle_field(self, obj, field):
self._current[field.name] = self._value_from_field(obj, field)
def handle_fk_field(self, obj, field):
if self.use_natural_foreign_keys and hasattr(
field.remote_field.model, "natural_key"
):
related = getattr(obj, field.name)
if related:
value = related.natural_key()
else:
value = None
else:
value = self._value_from_field(obj, field)
self._current[field.name] = value
def handle_m2m_field(self, obj, field):
if field.remote_field.through._meta.auto_created:
if self.use_natural_foreign_keys and hasattr(
field.remote_field.model, "natural_key"
):
def m2m_value(value):
return value.natural_key()
def queryset_iterator(obj, field):
return getattr(obj, field.name).iterator()
else:
def m2m_value(value):
return self._value_from_field(value, value._meta.pk)
def queryset_iterator(obj, field):
return (
getattr(obj, field.name).select_related().only("pk").iterator()
)
m2m_iter = getattr(obj, "_prefetched_objects_cache", {}).get(
field.name,
queryset_iterator(obj, field),
)
self._current[field.name] = [m2m_value(related) for related in m2m_iter]
def getvalue(self):
return self.objects
def Deserializer(
object_list, *, using=DEFAULT_DB_ALIAS, ignorenonexistent=False, **options
):
"""
Deserialize simple Python objects back into Django ORM instances.
It's expected that you pass the Python objects themselves (instead of a
    stream or a string) to the constructor.
"""
handle_forward_references = options.pop("handle_forward_references", False)
field_names_cache = {} # Model: <list of field_names>
for d in object_list:
        # Look up the model and start building a dict of data for it.
try:
Model = _get_model(d["model"])
except base.DeserializationError:
if ignorenonexistent:
continue
else:
raise
data = {}
if "pk" in d:
try:
data[Model._meta.pk.attname] = Model._meta.pk.to_python(d.get("pk"))
except Exception as e:
raise base.DeserializationError.WithData(
e, d["model"], d.get("pk"), None
)
m2m_data = {}
deferred_fields = {}
if Model not in field_names_cache:
field_names_cache[Model] = {f.name for f in Model._meta.get_fields()}
field_names = field_names_cache[Model]
# Handle each field
for field_name, field_value in d["fields"].items():
if ignorenonexistent and field_name not in field_names:
# skip fields no longer on model
continue
field = Model._meta.get_field(field_name)
# Handle M2M relations
if field.remote_field and isinstance(
field.remote_field, models.ManyToManyRel
):
try:
values = base.deserialize_m2m_values(
field, field_value, using, handle_forward_references
)
except base.M2MDeserializationError as e:
raise base.DeserializationError.WithData(
e.original_exc, d["model"], d.get("pk"), e.pk
)
if values == base.DEFER_FIELD:
deferred_fields[field] = field_value
else:
m2m_data[field.name] = values
# Handle FK fields
elif field.remote_field and isinstance(
field.remote_field, models.ManyToOneRel
):
try:
value = base.deserialize_fk_value(
field, field_value, using, handle_forward_references
)
except Exception as e:
raise base.DeserializationError.WithData(
e, d["model"], d.get("pk"), field_value
)
if value == base.DEFER_FIELD:
deferred_fields[field] = field_value
else:
data[field.attname] = value
# Handle all other fields
else:
try:
data[field.name] = field.to_python(field_value)
except Exception as e:
raise base.DeserializationError.WithData(
e, d["model"], d.get("pk"), field_value
)
obj = base.build_instance(Model, data, using)
yield base.DeserializedObject(obj, m2m_data, deferred_fields)
def _get_model(model_identifier):
"""Look up a model from an "app_label.model_name" string."""
try:
return apps.get_model(model_identifier)
except (LookupError, TypeError):
raise base.DeserializationError(
"Invalid model identifier: '%s'" % model_identifier
)
"""
XML serializer.
"""
import json
from xml.dom import pulldom
from xml.sax import handler
from xml.sax.expatreader import ExpatParser as _ExpatParser
from django.apps import apps
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.core.serializers import base
from django.db import DEFAULT_DB_ALIAS, models
from django.utils.xmlutils import SimplerXMLGenerator, UnserializableContentError
class Serializer(base.Serializer):
"""Serialize a QuerySet to XML."""
def indent(self, level):
if self.options.get("indent") is not None:
self.xml.ignorableWhitespace(
"\n" + " " * self.options.get("indent") * level
)
def start_serialization(self):
"""
Start serialization -- open the XML document and the root element.
"""
self.xml = SimplerXMLGenerator(
self.stream, self.options.get("encoding", settings.DEFAULT_CHARSET)
)
self.xml.startDocument()
self.xml.startElement("django-objects", {"version": "1.0"})
def end_serialization(self):
"""
End serialization -- end the document.
"""
self.indent(0)
self.xml.endElement("django-objects")
self.xml.endDocument()
def start_object(self, obj):
"""
Called as each object is handled.
"""
if not hasattr(obj, "_meta"):
raise base.SerializationError(
"Non-model object (%s) encountered during serialization" % type(obj)
)
self.indent(1)
attrs = {"model": str(obj._meta)}
if not self.use_natural_primary_keys or not hasattr(obj, "natural_key"):
obj_pk = obj.pk
if obj_pk is not None:
attrs["pk"] = str(obj_pk)
self.xml.startElement("object", attrs)
def end_object(self, obj):
"""
Called after handling all fields for an object.
"""
self.indent(1)
self.xml.endElement("object")
def handle_field(self, obj, field):
"""
Handle each field on an object (except for ForeignKeys and
ManyToManyFields).
"""
self.indent(2)
self.xml.startElement(
"field",
{
"name": field.name,
"type": field.get_internal_type(),
},
)
# Get a "string version" of the object's data.
if getattr(obj, field.name) is not None:
value = field.value_to_string(obj)
if field.get_internal_type() == "JSONField":
# Dump value since JSONField.value_to_string() doesn't output
# strings.
value = json.dumps(value, cls=field.encoder)
try:
self.xml.characters(value)
except UnserializableContentError:
raise ValueError(
"%s.%s (pk:%s) contains unserializable characters"
% (obj.__class__.__name__, field.name, obj.pk)
)
else:
self.xml.addQuickElement("None")
self.xml.endElement("field")
def handle_fk_field(self, obj, field):
"""
Handle a ForeignKey (they need to be treated slightly
differently from regular fields).
"""
self._start_relational_field(field)
related_att = getattr(obj, field.get_attname())
if related_att is not None:
if self.use_natural_foreign_keys and hasattr(
field.remote_field.model, "natural_key"
):
related = getattr(obj, field.name)
# If related object has a natural key, use it
related = related.natural_key()
# Iterable natural keys are rolled out as subelements
for key_value in related:
self.xml.startElement("natural", {})
self.xml.characters(str(key_value))
self.xml.endElement("natural")
else:
self.xml.characters(str(related_att))
else:
self.xml.addQuickElement("None")
self.xml.endElement("field")
def handle_m2m_field(self, obj, field):
"""
Handle a ManyToManyField. Related objects are only serialized as
references to the object's PK (i.e. the related *data* is not dumped,
just the relation).
"""
if field.remote_field.through._meta.auto_created:
self._start_relational_field(field)
if self.use_natural_foreign_keys and hasattr(
field.remote_field.model, "natural_key"
):
# If the objects in the m2m have a natural key, use it
def handle_m2m(value):
natural = value.natural_key()
# Iterable natural keys are rolled out as subelements
self.xml.startElement("object", {})
for key_value in natural:
self.xml.startElement("natural", {})
self.xml.characters(str(key_value))
self.xml.endElement("natural")
self.xml.endElement("object")
def queryset_iterator(obj, field):
return getattr(obj, field.name).iterator()
else:
def handle_m2m(value):
self.xml.addQuickElement("object", attrs={"pk": str(value.pk)})
def queryset_iterator(obj, field):
return (
getattr(obj, field.name).select_related().only("pk").iterator()
)
m2m_iter = getattr(obj, "_prefetched_objects_cache", {}).get(
field.name,
queryset_iterator(obj, field),
)
for relobj in m2m_iter:
handle_m2m(relobj)
self.xml.endElement("field")
def _start_relational_field(self, field):
"""Output the <field> element for relational fields."""
self.indent(2)
self.xml.startElement(
"field",
{
"name": field.name,
"rel": field.remote_field.__class__.__name__,
"to": str(field.remote_field.model._meta),
},
)
class Deserializer(base.Deserializer):
"""Deserialize XML."""
def __init__(
self,
stream_or_string,
*,
using=DEFAULT_DB_ALIAS,
ignorenonexistent=False,
**options,
):
super().__init__(stream_or_string, **options)
self.handle_forward_references = options.pop("handle_forward_references", False)
self.event_stream = pulldom.parse(self.stream, self._make_parser())
self.db = using
self.ignore = ignorenonexistent
def _make_parser(self):
"""Create a hardened XML parser (no custom/external entities)."""
return DefusedExpatParser()
def __next__(self):
for event, node in self.event_stream:
if event == "START_ELEMENT" and node.nodeName == "object":
self.event_stream.expandNode(node)
return self._handle_object(node)
raise StopIteration
def _handle_object(self, node):
"""Convert an <object> node to a DeserializedObject."""
# Look up the model using the model loading mechanism. If this fails,
# bail.
Model = self._get_model_from_node(node, "model")
# Start building a data dictionary from the object.
data = {}
if node.hasAttribute("pk"):
data[Model._meta.pk.attname] = Model._meta.pk.to_python(
node.getAttribute("pk")
)
# Also start building a dict of m2m data (this is saved as
# {m2m_accessor_attribute : [list_of_related_objects]})
m2m_data = {}
deferred_fields = {}
field_names = {f.name for f in Model._meta.get_fields()}
# Deserialize each field.
for field_node in node.getElementsByTagName("field"):
# If the field is missing the name attribute, bail (are you
# sensing a pattern here?)
field_name = field_node.getAttribute("name")
if not field_name:
raise base.DeserializationError(
"<field> node is missing the 'name' attribute"
)
# Get the field from the Model. This will raise a
# FieldDoesNotExist if, well, the field doesn't exist, which will
# be propagated correctly unless ignorenonexistent=True is used.
if self.ignore and field_name not in field_names:
continue
field = Model._meta.get_field(field_name)
# As is usually the case, relation fields get the special treatment.
if field.remote_field and isinstance(
field.remote_field, models.ManyToManyRel
):
value = self._handle_m2m_field_node(field_node, field)
if value == base.DEFER_FIELD:
deferred_fields[field] = [
[
getInnerText(nat_node).strip()
for nat_node in obj_node.getElementsByTagName("natural")
]
for obj_node in field_node.getElementsByTagName("object")
]
else:
m2m_data[field.name] = value
elif field.remote_field and isinstance(
field.remote_field, models.ManyToOneRel
):
value = self._handle_fk_field_node(field_node, field)
if value == base.DEFER_FIELD:
deferred_fields[field] = [
getInnerText(k).strip()
for k in field_node.getElementsByTagName("natural")
]
else:
data[field.attname] = value
else:
if field_node.getElementsByTagName("None"):
value = None
else:
value = field.to_python(getInnerText(field_node).strip())
# Load value since JSONField.to_python() outputs strings.
if field.get_internal_type() == "JSONField":
value = json.loads(value, cls=field.decoder)
data[field.name] = value
obj = base.build_instance(Model, data, self.db)
# Return a DeserializedObject so that the m2m data has a place to live.
return base.DeserializedObject(obj, m2m_data, deferred_fields)
def _handle_fk_field_node(self, node, field):
"""
Handle a <field> node for a ForeignKey
"""
# Check if there is a child node named 'None', returning None if so.
if node.getElementsByTagName("None"):
return None
else:
model = field.remote_field.model
if hasattr(model._default_manager, "get_by_natural_key"):
keys = node.getElementsByTagName("natural")
if keys:
# If there are 'natural' subelements, it must be a natural key
field_value = [getInnerText(k).strip() for k in keys]
try:
obj = model._default_manager.db_manager(
self.db
).get_by_natural_key(*field_value)
except ObjectDoesNotExist:
if self.handle_forward_references:
return base.DEFER_FIELD
else:
raise
obj_pk = getattr(obj, field.remote_field.field_name)
# If this is a natural foreign key to an object that
# has a FK/O2O as the foreign key, use the FK value
if field.remote_field.model._meta.pk.remote_field:
obj_pk = obj_pk.pk
else:
# Otherwise, treat like a normal PK
field_value = getInnerText(node).strip()
obj_pk = model._meta.get_field(
field.remote_field.field_name
).to_python(field_value)
return obj_pk
else:
field_value = getInnerText(node).strip()
return model._meta.get_field(field.remote_field.field_name).to_python(
field_value
)
def _handle_m2m_field_node(self, node, field):
"""
Handle a <field> node for a ManyToManyField.
"""
model = field.remote_field.model
default_manager = model._default_manager
if hasattr(default_manager, "get_by_natural_key"):
def m2m_convert(n):
keys = n.getElementsByTagName("natural")
if keys:
# If there are 'natural' subelements, it must be a natural key
field_value = [getInnerText(k).strip() for k in keys]
obj_pk = (
default_manager.db_manager(self.db)
.get_by_natural_key(*field_value)
.pk
)
else:
# Otherwise, treat like a normal PK value.
obj_pk = model._meta.pk.to_python(n.getAttribute("pk"))
return obj_pk
else:
def m2m_convert(n):
return model._meta.pk.to_python(n.getAttribute("pk"))
values = []
try:
for c in node.getElementsByTagName("object"):
values.append(m2m_convert(c))
except Exception as e:
if isinstance(e, ObjectDoesNotExist) and self.handle_forward_references:
return base.DEFER_FIELD
else:
raise base.M2MDeserializationError(e, c)
else:
return values
def _get_model_from_node(self, node, attr):
"""
Look up a model from a <object model=...> or a <field rel=... to=...>
node.
"""
model_identifier = node.getAttribute(attr)
if not model_identifier:
raise base.DeserializationError(
"<%s> node is missing the required '%s' attribute"
% (node.nodeName, attr)
)
try:
return apps.get_model(model_identifier)
except (LookupError, TypeError):
raise base.DeserializationError(
"<%s> node has invalid model identifier: '%s'"
% (node.nodeName, model_identifier)
)
def getInnerText(node):
"""Get all the inner text of a DOM node (recursively)."""
# inspired by https://mail.python.org/pipermail/xml-sig/2005-March/011022.html
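    # For example, a node parsed from "<natural>John<x>Doe</x></natural>"
    # yields "JohnDoe": text and CDATA sections are collected recursively,
    # while other node types (comments, processing instructions) are skipped.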
inner_text = []
for child in node.childNodes:
if (
child.nodeType == child.TEXT_NODE
or child.nodeType == child.CDATA_SECTION_NODE
):
inner_text.append(child.data)
elif child.nodeType == child.ELEMENT_NODE:
inner_text.extend(getInnerText(child))
else:
pass
return "".join(inner_text)
# Below code based on Christian Heimes' defusedxml
class DefusedExpatParser(_ExpatParser):
"""
An expat parser hardened against XML bomb attacks.
Forbid DTDs, external entity references
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
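        # Refuse to fetch external general/parameter entities up front; DTD
        # and entity declarations are rejected by the handlers installed in
        # reset() below.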
self.setFeature(handler.feature_external_ges, False)
self.setFeature(handler.feature_external_pes, False)
def start_doctype_decl(self, name, sysid, pubid, has_internal_subset):
raise DTDForbidden(name, sysid, pubid)
def entity_decl(
self, name, is_parameter_entity, value, base, sysid, pubid, notation_name
):
raise EntitiesForbidden(name, value, base, sysid, pubid, notation_name)
def unparsed_entity_decl(self, name, base, sysid, pubid, notation_name):
# expat 1.2
raise EntitiesForbidden(name, None, base, sysid, pubid, notation_name)
def external_entity_ref_handler(self, context, base, sysid, pubid):
raise ExternalReferenceForbidden(context, base, sysid, pubid)
def reset(self):
_ExpatParser.reset(self)
parser = self._parser
parser.StartDoctypeDeclHandler = self.start_doctype_decl
parser.EntityDeclHandler = self.entity_decl
parser.UnparsedEntityDeclHandler = self.unparsed_entity_decl
parser.ExternalEntityRefHandler = self.external_entity_ref_handler
class DefusedXmlException(ValueError):
"""Base exception."""
def __repr__(self):
return str(self)
class DTDForbidden(DefusedXmlException):
"""Document type definition is forbidden."""
def __init__(self, name, sysid, pubid):
super().__init__()
self.name = name
self.sysid = sysid
self.pubid = pubid
def __str__(self):
tpl = "DTDForbidden(name='{}', system_id={!r}, public_id={!r})"
return tpl.format(self.name, self.sysid, self.pubid)
class EntitiesForbidden(DefusedXmlException):
"""Entity definition is forbidden."""
def __init__(self, name, value, base, sysid, pubid, notation_name):
super().__init__()
self.name = name
self.value = value
self.base = base
self.sysid = sysid
self.pubid = pubid
self.notation_name = notation_name
def __str__(self):
tpl = "EntitiesForbidden(name='{}', system_id={!r}, public_id={!r})"
return tpl.format(self.name, self.sysid, self.pubid)
class ExternalReferenceForbidden(DefusedXmlException):
"""Resolving an external reference is forbidden."""
def __init__(self, context, base, sysid, pubid):
super().__init__()
self.context = context
self.base = base
self.sysid = sysid
self.pubid = pubid
def __str__(self):
tpl = "ExternalReferenceForbidden(system_id='{}', public_id={})"
return tpl.format(self.sysid, self.pubid)
|
0b322c42eb3a9a3c55af3f724c1a86e0e6ac9b788afc14e65c73e3b37a8d35a3 | import collections
from itertools import chain
from django.apps import apps
from django.conf import settings
from django.contrib.admin.utils import NotRelationField, flatten, get_fields_from_path
from django.core import checks
from django.core.exceptions import FieldDoesNotExist
from django.db import models
from django.db.models.constants import LOOKUP_SEP
from django.db.models.expressions import Combinable
from django.forms.models import BaseModelForm, BaseModelFormSet, _get_foreign_key
from django.template import engines
from django.template.backends.django import DjangoTemplates
from django.utils.module_loading import import_string
def _issubclass(cls, classinfo):
"""
issubclass() variant that doesn't raise an exception if cls isn't a
class.
"""
try:
return issubclass(cls, classinfo)
except TypeError:
return False
def _contains_subclass(class_path, candidate_paths):
"""
Return whether or not a dotted class path (or a subclass of that class) is
found in a list of candidate paths.
"""
cls = import_string(class_path)
for path in candidate_paths:
try:
candidate_cls = import_string(path)
except ImportError:
# ImportErrors are raised elsewhere.
continue
if _issubclass(candidate_cls, cls):
return True
return False
def check_admin_app(app_configs, **kwargs):
from django.contrib.admin.sites import all_sites
errors = []
for site in all_sites:
errors.extend(site.check(app_configs))
return errors
def check_dependencies(**kwargs):
"""
Check that the admin's dependencies are correctly installed.
"""
from django.contrib.admin.sites import all_sites
if not apps.is_installed("django.contrib.admin"):
return []
errors = []
app_dependencies = (
("django.contrib.contenttypes", 401),
("django.contrib.auth", 405),
("django.contrib.messages", 406),
)
for app_name, error_code in app_dependencies:
if not apps.is_installed(app_name):
errors.append(
checks.Error(
"'%s' must be in INSTALLED_APPS in order to use the admin "
"application." % app_name,
id="admin.E%d" % error_code,
)
)
for engine in engines.all():
if isinstance(engine, DjangoTemplates):
django_templates_instance = engine.engine
break
else:
django_templates_instance = None
if not django_templates_instance:
errors.append(
checks.Error(
"A 'django.template.backends.django.DjangoTemplates' instance "
"must be configured in TEMPLATES in order to use the admin "
"application.",
id="admin.E403",
)
)
else:
if (
"django.contrib.auth.context_processors.auth"
not in django_templates_instance.context_processors
and _contains_subclass(
"django.contrib.auth.backends.ModelBackend",
settings.AUTHENTICATION_BACKENDS,
)
):
errors.append(
checks.Error(
"'django.contrib.auth.context_processors.auth' must be "
"enabled in DjangoTemplates (TEMPLATES) if using the default "
"auth backend in order to use the admin application.",
id="admin.E402",
)
)
if (
"django.contrib.messages.context_processors.messages"
not in django_templates_instance.context_processors
):
errors.append(
checks.Error(
"'django.contrib.messages.context_processors.messages' must "
"be enabled in DjangoTemplates (TEMPLATES) in order to use "
"the admin application.",
id="admin.E404",
)
)
sidebar_enabled = any(site.enable_nav_sidebar for site in all_sites)
if (
sidebar_enabled
and "django.template.context_processors.request"
not in django_templates_instance.context_processors
):
errors.append(
checks.Warning(
"'django.template.context_processors.request' must be enabled "
"in DjangoTemplates (TEMPLATES) in order to use the admin "
"navigation sidebar.",
id="admin.W411",
)
)
if not _contains_subclass(
"django.contrib.auth.middleware.AuthenticationMiddleware", settings.MIDDLEWARE
):
errors.append(
checks.Error(
"'django.contrib.auth.middleware.AuthenticationMiddleware' must "
"be in MIDDLEWARE in order to use the admin application.",
id="admin.E408",
)
)
if not _contains_subclass(
"django.contrib.messages.middleware.MessageMiddleware", settings.MIDDLEWARE
):
errors.append(
checks.Error(
"'django.contrib.messages.middleware.MessageMiddleware' must "
"be in MIDDLEWARE in order to use the admin application.",
id="admin.E409",
)
)
if not _contains_subclass(
"django.contrib.sessions.middleware.SessionMiddleware", settings.MIDDLEWARE
):
errors.append(
checks.Error(
"'django.contrib.sessions.middleware.SessionMiddleware' must "
"be in MIDDLEWARE in order to use the admin application.",
hint=(
"Insert "
"'django.contrib.sessions.middleware.SessionMiddleware' "
"before "
"'django.contrib.auth.middleware.AuthenticationMiddleware'."
),
id="admin.E410",
)
)
return errors
class BaseModelAdminChecks:
def check(self, admin_obj, **kwargs):
return [
*self._check_autocomplete_fields(admin_obj),
*self._check_raw_id_fields(admin_obj),
*self._check_fields(admin_obj),
*self._check_fieldsets(admin_obj),
*self._check_exclude(admin_obj),
*self._check_form(admin_obj),
*self._check_filter_vertical(admin_obj),
*self._check_filter_horizontal(admin_obj),
*self._check_radio_fields(admin_obj),
*self._check_prepopulated_fields(admin_obj),
*self._check_view_on_site_url(admin_obj),
*self._check_ordering(admin_obj),
*self._check_readonly_fields(admin_obj),
]
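    # Each _check_* helper below returns a (possibly empty) list of
    # checks.Error instances; check() simply concatenates them into one
    # flat list.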
def _check_autocomplete_fields(self, obj):
"""
Check that `autocomplete_fields` is a list or tuple of model fields.
"""
if not isinstance(obj.autocomplete_fields, (list, tuple)):
return must_be(
"a list or tuple",
option="autocomplete_fields",
obj=obj,
id="admin.E036",
)
else:
return list(
chain.from_iterable(
[
self._check_autocomplete_fields_item(
obj, field_name, "autocomplete_fields[%d]" % index
)
for index, field_name in enumerate(obj.autocomplete_fields)
]
)
)
def _check_autocomplete_fields_item(self, obj, field_name, label):
"""
Check that an item in `autocomplete_fields` is a ForeignKey or a
ManyToManyField and that the item has a related ModelAdmin with
search_fields defined.
"""
try:
field = obj.model._meta.get_field(field_name)
except FieldDoesNotExist:
return refer_to_missing_field(
field=field_name, option=label, obj=obj, id="admin.E037"
)
else:
if not field.many_to_many and not isinstance(field, models.ForeignKey):
return must_be(
"a foreign key or a many-to-many field",
option=label,
obj=obj,
id="admin.E038",
)
related_admin = obj.admin_site._registry.get(field.remote_field.model)
if related_admin is None:
return [
checks.Error(
'An admin for model "%s" has to be registered '
"to be referenced by %s.autocomplete_fields."
% (
field.remote_field.model.__name__,
type(obj).__name__,
),
obj=obj.__class__,
id="admin.E039",
)
]
elif not related_admin.search_fields:
return [
checks.Error(
'%s must define "search_fields", because it\'s '
"referenced by %s.autocomplete_fields."
% (
related_admin.__class__.__name__,
type(obj).__name__,
),
obj=obj.__class__,
id="admin.E040",
)
]
return []
def _check_raw_id_fields(self, obj):
"""Check that `raw_id_fields` only contains field names that are listed
on the model."""
if not isinstance(obj.raw_id_fields, (list, tuple)):
return must_be(
"a list or tuple", option="raw_id_fields", obj=obj, id="admin.E001"
)
else:
return list(
chain.from_iterable(
self._check_raw_id_fields_item(
obj, field_name, "raw_id_fields[%d]" % index
)
for index, field_name in enumerate(obj.raw_id_fields)
)
)
def _check_raw_id_fields_item(self, obj, field_name, label):
"""Check an item of `raw_id_fields`, i.e. check that field named
`field_name` exists in model `model` and is a ForeignKey or a
ManyToManyField."""
try:
field = obj.model._meta.get_field(field_name)
except FieldDoesNotExist:
return refer_to_missing_field(
field=field_name, option=label, obj=obj, id="admin.E002"
)
else:
# Using attname is not supported.
if field.name != field_name:
return refer_to_missing_field(
field=field_name,
option=label,
obj=obj,
id="admin.E002",
)
if not field.many_to_many and not isinstance(field, models.ForeignKey):
return must_be(
"a foreign key or a many-to-many field",
option=label,
obj=obj,
id="admin.E003",
)
else:
return []
def _check_fields(self, obj):
"""Check that `fields` only refer to existing fields, doesn't contain
duplicates. Check if at most one of `fields` and `fieldsets` is defined.
"""
if obj.fields is None:
return []
elif not isinstance(obj.fields, (list, tuple)):
return must_be("a list or tuple", option="fields", obj=obj, id="admin.E004")
elif obj.fieldsets:
return [
checks.Error(
"Both 'fieldsets' and 'fields' are specified.",
obj=obj.__class__,
id="admin.E005",
)
]
fields = flatten(obj.fields)
if len(fields) != len(set(fields)):
return [
checks.Error(
"The value of 'fields' contains duplicate field(s).",
obj=obj.__class__,
id="admin.E006",
)
]
return list(
chain.from_iterable(
self._check_field_spec(obj, field_name, "fields")
for field_name in obj.fields
)
)
def _check_fieldsets(self, obj):
"""Check that fieldsets is properly formatted and doesn't contain
duplicates."""
if obj.fieldsets is None:
return []
elif not isinstance(obj.fieldsets, (list, tuple)):
return must_be(
"a list or tuple", option="fieldsets", obj=obj, id="admin.E007"
)
else:
seen_fields = []
return list(
chain.from_iterable(
self._check_fieldsets_item(
obj, fieldset, "fieldsets[%d]" % index, seen_fields
)
for index, fieldset in enumerate(obj.fieldsets)
)
)
def _check_fieldsets_item(self, obj, fieldset, label, seen_fields):
"""Check an item of `fieldsets`, i.e. check that this is a pair of a
set name and a dictionary containing "fields" key."""
if not isinstance(fieldset, (list, tuple)):
return must_be("a list or tuple", option=label, obj=obj, id="admin.E008")
elif len(fieldset) != 2:
return must_be("of length 2", option=label, obj=obj, id="admin.E009")
elif not isinstance(fieldset[1], dict):
return must_be(
"a dictionary", option="%s[1]" % label, obj=obj, id="admin.E010"
)
elif "fields" not in fieldset[1]:
return [
checks.Error(
"The value of '%s[1]' must contain the key 'fields'." % label,
obj=obj.__class__,
id="admin.E011",
)
]
elif not isinstance(fieldset[1]["fields"], (list, tuple)):
return must_be(
"a list or tuple",
option="%s[1]['fields']" % label,
obj=obj,
id="admin.E008",
)
seen_fields.extend(flatten(fieldset[1]["fields"]))
if len(seen_fields) != len(set(seen_fields)):
return [
checks.Error(
"There are duplicate field(s) in '%s[1]'." % label,
obj=obj.__class__,
id="admin.E012",
)
]
return list(
chain.from_iterable(
self._check_field_spec(obj, fieldset_fields, '%s[1]["fields"]' % label)
for fieldset_fields in fieldset[1]["fields"]
)
)
def _check_field_spec(self, obj, fields, label):
"""`fields` should be an item of `fields` or an item of
fieldset[1]['fields'] for any `fieldset` in `fieldsets`. It should be a
field name or a tuple of field names."""
if isinstance(fields, tuple):
return list(
chain.from_iterable(
self._check_field_spec_item(
obj, field_name, "%s[%d]" % (label, index)
)
for index, field_name in enumerate(fields)
)
)
else:
return self._check_field_spec_item(obj, fields, label)
def _check_field_spec_item(self, obj, field_name, label):
if field_name in obj.readonly_fields:
            # Items that aren't actual model fields may appear in `fields` as
            # long as they're listed in readonly_fields; the readonly_fields
            # check handles the validation of such entries.
return []
else:
try:
field = obj.model._meta.get_field(field_name)
except FieldDoesNotExist:
# If we can't find a field on the model that matches, it could
# be an extra field on the form.
return []
else:
if (
isinstance(field, models.ManyToManyField)
and not field.remote_field.through._meta.auto_created
):
return [
checks.Error(
"The value of '%s' cannot include the ManyToManyField "
"'%s', because that field manually specifies a "
"relationship model." % (label, field_name),
obj=obj.__class__,
id="admin.E013",
)
]
else:
return []
def _check_exclude(self, obj):
"""Check that exclude is a sequence without duplicates."""
if obj.exclude is None: # default value is None
return []
elif not isinstance(obj.exclude, (list, tuple)):
return must_be(
"a list or tuple", option="exclude", obj=obj, id="admin.E014"
)
elif len(obj.exclude) > len(set(obj.exclude)):
return [
checks.Error(
"The value of 'exclude' contains duplicate field(s).",
obj=obj.__class__,
id="admin.E015",
)
]
else:
return []
def _check_form(self, obj):
"""Check that form subclasses BaseModelForm."""
if not _issubclass(obj.form, BaseModelForm):
return must_inherit_from(
parent="BaseModelForm", option="form", obj=obj, id="admin.E016"
)
else:
return []
def _check_filter_vertical(self, obj):
"""Check that filter_vertical is a sequence of field names."""
if not isinstance(obj.filter_vertical, (list, tuple)):
return must_be(
"a list or tuple", option="filter_vertical", obj=obj, id="admin.E017"
)
else:
return list(
chain.from_iterable(
self._check_filter_item(
obj, field_name, "filter_vertical[%d]" % index
)
for index, field_name in enumerate(obj.filter_vertical)
)
)
def _check_filter_horizontal(self, obj):
"""Check that filter_horizontal is a sequence of field names."""
if not isinstance(obj.filter_horizontal, (list, tuple)):
return must_be(
"a list or tuple", option="filter_horizontal", obj=obj, id="admin.E018"
)
else:
return list(
chain.from_iterable(
self._check_filter_item(
obj, field_name, "filter_horizontal[%d]" % index
)
for index, field_name in enumerate(obj.filter_horizontal)
)
)
def _check_filter_item(self, obj, field_name, label):
"""Check one item of `filter_vertical` or `filter_horizontal`, i.e.
check that given field exists and is a ManyToManyField."""
try:
field = obj.model._meta.get_field(field_name)
except FieldDoesNotExist:
return refer_to_missing_field(
field=field_name, option=label, obj=obj, id="admin.E019"
)
else:
if not field.many_to_many:
return must_be(
"a many-to-many field", option=label, obj=obj, id="admin.E020"
)
else:
return []
def _check_radio_fields(self, obj):
"""Check that `radio_fields` is a dictionary."""
if not isinstance(obj.radio_fields, dict):
return must_be(
"a dictionary", option="radio_fields", obj=obj, id="admin.E021"
)
else:
return list(
chain.from_iterable(
self._check_radio_fields_key(obj, field_name, "radio_fields")
+ self._check_radio_fields_value(
obj, val, 'radio_fields["%s"]' % field_name
)
for field_name, val in obj.radio_fields.items()
)
)
def _check_radio_fields_key(self, obj, field_name, label):
"""Check that a key of `radio_fields` dictionary is name of existing
field and that the field is a ForeignKey or has `choices` defined."""
try:
field = obj.model._meta.get_field(field_name)
except FieldDoesNotExist:
return refer_to_missing_field(
field=field_name, option=label, obj=obj, id="admin.E022"
)
else:
if not (isinstance(field, models.ForeignKey) or field.choices):
return [
checks.Error(
"The value of '%s' refers to '%s', which is not an "
"instance of ForeignKey, and does not have a 'choices' "
"definition." % (label, field_name),
obj=obj.__class__,
id="admin.E023",
)
]
else:
return []
def _check_radio_fields_value(self, obj, val, label):
"""Check type of a value of `radio_fields` dictionary."""
from django.contrib.admin.options import HORIZONTAL, VERTICAL
if val not in (HORIZONTAL, VERTICAL):
return [
checks.Error(
"The value of '%s' must be either admin.HORIZONTAL or "
"admin.VERTICAL." % label,
obj=obj.__class__,
id="admin.E024",
)
]
else:
return []
def _check_view_on_site_url(self, obj):
if not callable(obj.view_on_site) and not isinstance(obj.view_on_site, bool):
return [
checks.Error(
"The value of 'view_on_site' must be a callable or a boolean "
"value.",
obj=obj.__class__,
id="admin.E025",
)
]
else:
return []
def _check_prepopulated_fields(self, obj):
"""Check that `prepopulated_fields` is a dictionary containing allowed
field types."""
if not isinstance(obj.prepopulated_fields, dict):
return must_be(
"a dictionary", option="prepopulated_fields", obj=obj, id="admin.E026"
)
else:
return list(
chain.from_iterable(
self._check_prepopulated_fields_key(
obj, field_name, "prepopulated_fields"
)
+ self._check_prepopulated_fields_value(
obj, val, 'prepopulated_fields["%s"]' % field_name
)
for field_name, val in obj.prepopulated_fields.items()
)
)
def _check_prepopulated_fields_key(self, obj, field_name, label):
"""Check a key of `prepopulated_fields` dictionary, i.e. check that it
is a name of existing field and the field is one of the allowed types.
"""
try:
field = obj.model._meta.get_field(field_name)
except FieldDoesNotExist:
return refer_to_missing_field(
field=field_name, option=label, obj=obj, id="admin.E027"
)
else:
if isinstance(
field, (models.DateTimeField, models.ForeignKey, models.ManyToManyField)
):
return [
checks.Error(
"The value of '%s' refers to '%s', which must not be a "
"DateTimeField, a ForeignKey, a OneToOneField, or a "
"ManyToManyField." % (label, field_name),
obj=obj.__class__,
id="admin.E028",
)
]
else:
return []
def _check_prepopulated_fields_value(self, obj, val, label):
"""Check a value of `prepopulated_fields` dictionary, i.e. it's an
iterable of existing fields."""
if not isinstance(val, (list, tuple)):
return must_be("a list or tuple", option=label, obj=obj, id="admin.E029")
else:
return list(
chain.from_iterable(
self._check_prepopulated_fields_value_item(
obj, subfield_name, "%s[%r]" % (label, index)
)
for index, subfield_name in enumerate(val)
)
)
def _check_prepopulated_fields_value_item(self, obj, field_name, label):
"""For `prepopulated_fields` equal to {"slug": ("title",)},
`field_name` is "title"."""
try:
obj.model._meta.get_field(field_name)
except FieldDoesNotExist:
return refer_to_missing_field(
field=field_name, option=label, obj=obj, id="admin.E030"
)
else:
return []
def _check_ordering(self, obj):
"""Check that ordering refers to existing fields or is random."""
# ordering = None
if obj.ordering is None: # The default value is None
return []
elif not isinstance(obj.ordering, (list, tuple)):
return must_be(
"a list or tuple", option="ordering", obj=obj, id="admin.E031"
)
else:
return list(
chain.from_iterable(
self._check_ordering_item(obj, field_name, "ordering[%d]" % index)
for index, field_name in enumerate(obj.ordering)
)
)
def _check_ordering_item(self, obj, field_name, label):
"""Check that `ordering` refers to existing fields."""
if isinstance(field_name, (Combinable, models.OrderBy)):
if not isinstance(field_name, models.OrderBy):
field_name = field_name.asc()
if isinstance(field_name.expression, models.F):
field_name = field_name.expression.name
else:
return []
if field_name == "?" and len(obj.ordering) != 1:
return [
checks.Error(
"The value of 'ordering' has the random ordering marker '?', "
"but contains other fields as well.",
hint='Either remove the "?", or remove the other fields.',
obj=obj.__class__,
id="admin.E032",
)
]
elif field_name == "?":
return []
elif LOOKUP_SEP in field_name:
# Skip ordering in the format field1__field2 (FIXME: checking
# this format would be nice, but it's a little fiddly).
return []
else:
field_name = field_name.removeprefix("-")
if field_name == "pk":
return []
try:
obj.model._meta.get_field(field_name)
except FieldDoesNotExist:
return refer_to_missing_field(
field=field_name, option=label, obj=obj, id="admin.E033"
)
else:
return []
def _check_readonly_fields(self, obj):
"""Check that readonly_fields refers to proper attribute or field."""
if obj.readonly_fields == ():
return []
elif not isinstance(obj.readonly_fields, (list, tuple)):
return must_be(
"a list or tuple", option="readonly_fields", obj=obj, id="admin.E034"
)
else:
return list(
chain.from_iterable(
self._check_readonly_fields_item(
obj, field_name, "readonly_fields[%d]" % index
)
for index, field_name in enumerate(obj.readonly_fields)
)
)
def _check_readonly_fields_item(self, obj, field_name, label):
if callable(field_name):
return []
elif hasattr(obj, field_name):
return []
elif hasattr(obj.model, field_name):
return []
else:
try:
obj.model._meta.get_field(field_name)
except FieldDoesNotExist:
return [
checks.Error(
"The value of '%s' refers to '%s', which is not a callable, "
"an attribute of '%s', or an attribute of '%s'."
% (
label,
field_name,
obj.__class__.__name__,
obj.model._meta.label,
),
obj=obj.__class__,
id="admin.E035",
)
]
else:
return []
class ModelAdminChecks(BaseModelAdminChecks):
def check(self, admin_obj, **kwargs):
return [
*super().check(admin_obj),
*self._check_save_as(admin_obj),
*self._check_save_on_top(admin_obj),
*self._check_inlines(admin_obj),
*self._check_list_display(admin_obj),
*self._check_list_display_links(admin_obj),
*self._check_list_filter(admin_obj),
*self._check_list_select_related(admin_obj),
*self._check_list_per_page(admin_obj),
*self._check_list_max_show_all(admin_obj),
*self._check_list_editable(admin_obj),
*self._check_search_fields(admin_obj),
*self._check_date_hierarchy(admin_obj),
*self._check_action_permission_methods(admin_obj),
*self._check_actions_uniqueness(admin_obj),
]
def _check_save_as(self, obj):
"""Check save_as is a boolean."""
if not isinstance(obj.save_as, bool):
return must_be("a boolean", option="save_as", obj=obj, id="admin.E101")
else:
return []
def _check_save_on_top(self, obj):
"""Check save_on_top is a boolean."""
if not isinstance(obj.save_on_top, bool):
return must_be("a boolean", option="save_on_top", obj=obj, id="admin.E102")
else:
return []
def _check_inlines(self, obj):
"""Check all inline model admin classes."""
if not isinstance(obj.inlines, (list, tuple)):
return must_be(
"a list or tuple", option="inlines", obj=obj, id="admin.E103"
)
else:
return list(
chain.from_iterable(
self._check_inlines_item(obj, item, "inlines[%d]" % index)
for index, item in enumerate(obj.inlines)
)
)
def _check_inlines_item(self, obj, inline, label):
"""Check one inline model admin."""
try:
inline_label = inline.__module__ + "." + inline.__name__
except AttributeError:
return [
checks.Error(
"'%s' must inherit from 'InlineModelAdmin'." % obj,
obj=obj.__class__,
id="admin.E104",
)
]
from django.contrib.admin.options import InlineModelAdmin
if not _issubclass(inline, InlineModelAdmin):
return [
checks.Error(
"'%s' must inherit from 'InlineModelAdmin'." % inline_label,
obj=obj.__class__,
id="admin.E104",
)
]
elif not inline.model:
return [
checks.Error(
"'%s' must have a 'model' attribute." % inline_label,
obj=obj.__class__,
id="admin.E105",
)
]
elif not _issubclass(inline.model, models.Model):
return must_be(
"a Model", option="%s.model" % inline_label, obj=obj, id="admin.E106"
)
else:
return inline(obj.model, obj.admin_site).check()
def _check_list_display(self, obj):
"""Check that list_display only contains fields or usable attributes."""
if not isinstance(obj.list_display, (list, tuple)):
return must_be(
"a list or tuple", option="list_display", obj=obj, id="admin.E107"
)
else:
return list(
chain.from_iterable(
self._check_list_display_item(obj, item, "list_display[%d]" % index)
for index, item in enumerate(obj.list_display)
)
)
def _check_list_display_item(self, obj, item, label):
if callable(item):
return []
elif hasattr(obj, item):
return []
try:
field = obj.model._meta.get_field(item)
except FieldDoesNotExist:
try:
field = getattr(obj.model, item)
except AttributeError:
return [
checks.Error(
"The value of '%s' refers to '%s', which is not a "
"callable, an attribute of '%s', or an attribute or "
"method on '%s'."
% (
label,
item,
obj.__class__.__name__,
obj.model._meta.label,
),
obj=obj.__class__,
id="admin.E108",
)
]
if (
getattr(field, "is_relation", False)
and (field.many_to_many or field.one_to_many)
) or (getattr(field, "rel", None) and field.rel.field.many_to_one):
return [
checks.Error(
f"The value of '{label}' must not be a many-to-many field or a "
f"reverse foreign key.",
obj=obj.__class__,
id="admin.E109",
)
]
return []
def _check_list_display_links(self, obj):
"""Check that list_display_links is a unique subset of list_display."""
from django.contrib.admin.options import ModelAdmin
if obj.list_display_links is None:
return []
elif not isinstance(obj.list_display_links, (list, tuple)):
return must_be(
"a list, a tuple, or None",
option="list_display_links",
obj=obj,
id="admin.E110",
)
# Check only if ModelAdmin.get_list_display() isn't overridden.
elif obj.get_list_display.__func__ is ModelAdmin.get_list_display:
return list(
chain.from_iterable(
self._check_list_display_links_item(
obj, field_name, "list_display_links[%d]" % index
)
for index, field_name in enumerate(obj.list_display_links)
)
)
return []
def _check_list_display_links_item(self, obj, field_name, label):
if field_name not in obj.list_display:
return [
checks.Error(
"The value of '%s' refers to '%s', which is not defined in "
"'list_display'." % (label, field_name),
obj=obj.__class__,
id="admin.E111",
)
]
else:
return []
def _check_list_filter(self, obj):
if not isinstance(obj.list_filter, (list, tuple)):
return must_be(
"a list or tuple", option="list_filter", obj=obj, id="admin.E112"
)
else:
return list(
chain.from_iterable(
self._check_list_filter_item(obj, item, "list_filter[%d]" % index)
for index, item in enumerate(obj.list_filter)
)
)
def _check_list_filter_item(self, obj, item, label):
"""
Check one item of `list_filter`, i.e. check if it is one of three options:
1. 'field' -- a basic field filter, possibly w/ relationships (e.g.
'field__rel')
2. ('field', SomeFieldListFilter) - a field-based list filter class
3. SomeListFilter - a non-field list filter class
"""
from django.contrib.admin import FieldListFilter, ListFilter
if callable(item) and not isinstance(item, models.Field):
# If item is option 3, it should be a ListFilter...
if not _issubclass(item, ListFilter):
return must_inherit_from(
parent="ListFilter", option=label, obj=obj, id="admin.E113"
)
# ... but not a FieldListFilter.
elif issubclass(item, FieldListFilter):
return [
checks.Error(
"The value of '%s' must not inherit from 'FieldListFilter'."
% label,
obj=obj.__class__,
id="admin.E114",
)
]
else:
return []
elif isinstance(item, (tuple, list)):
# item is option #2
field, list_filter_class = item
if not _issubclass(list_filter_class, FieldListFilter):
return must_inherit_from(
parent="FieldListFilter",
option="%s[1]" % label,
obj=obj,
id="admin.E115",
)
else:
return []
else:
# item is option #1
field = item
# Validate the field string
try:
get_fields_from_path(obj.model, field)
except (NotRelationField, FieldDoesNotExist):
return [
checks.Error(
"The value of '%s' refers to '%s', which does not refer to a "
"Field." % (label, field),
obj=obj.__class__,
id="admin.E116",
)
]
else:
return []
def _check_list_select_related(self, obj):
"""Check that list_select_related is a boolean, a list or a tuple."""
if not isinstance(obj.list_select_related, (bool, list, tuple)):
return must_be(
"a boolean, tuple or list",
option="list_select_related",
obj=obj,
id="admin.E117",
)
else:
return []
def _check_list_per_page(self, obj):
"""Check that list_per_page is an integer."""
if not isinstance(obj.list_per_page, int):
return must_be(
"an integer", option="list_per_page", obj=obj, id="admin.E118"
)
else:
return []
def _check_list_max_show_all(self, obj):
"""Check that list_max_show_all is an integer."""
if not isinstance(obj.list_max_show_all, int):
return must_be(
"an integer", option="list_max_show_all", obj=obj, id="admin.E119"
)
else:
return []
def _check_list_editable(self, obj):
"""Check that list_editable is a sequence of editable fields from
list_display without first element."""
if not isinstance(obj.list_editable, (list, tuple)):
return must_be(
"a list or tuple", option="list_editable", obj=obj, id="admin.E120"
)
else:
return list(
chain.from_iterable(
self._check_list_editable_item(
obj, item, "list_editable[%d]" % index
)
for index, item in enumerate(obj.list_editable)
)
)
def _check_list_editable_item(self, obj, field_name, label):
try:
field = obj.model._meta.get_field(field_name)
except FieldDoesNotExist:
return refer_to_missing_field(
field=field_name, option=label, obj=obj, id="admin.E121"
)
else:
if field_name not in obj.list_display:
return [
checks.Error(
"The value of '%s' refers to '%s', which is not "
"contained in 'list_display'." % (label, field_name),
obj=obj.__class__,
id="admin.E122",
)
]
elif obj.list_display_links and field_name in obj.list_display_links:
return [
checks.Error(
"The value of '%s' cannot be in both 'list_editable' and "
"'list_display_links'." % field_name,
obj=obj.__class__,
id="admin.E123",
)
]
# If list_display[0] is in list_editable, check that
# list_display_links is set. See #22792 and #26229 for use cases.
elif (
obj.list_display[0] == field_name
and not obj.list_display_links
and obj.list_display_links is not None
):
return [
checks.Error(
"The value of '%s' refers to the first field in 'list_display' "
"('%s'), which cannot be used unless 'list_display_links' is "
"set." % (label, obj.list_display[0]),
obj=obj.__class__,
id="admin.E124",
)
]
elif not field.editable or field.primary_key:
return [
checks.Error(
"The value of '%s' refers to '%s', which is not editable "
"through the admin." % (label, field_name),
obj=obj.__class__,
id="admin.E125",
)
]
else:
return []
def _check_search_fields(self, obj):
"""Check search_fields is a sequence."""
if not isinstance(obj.search_fields, (list, tuple)):
return must_be(
"a list or tuple", option="search_fields", obj=obj, id="admin.E126"
)
else:
return []
def _check_date_hierarchy(self, obj):
"""Check that date_hierarchy refers to DateField or DateTimeField."""
if obj.date_hierarchy is None:
return []
else:
try:
field = get_fields_from_path(obj.model, obj.date_hierarchy)[-1]
except (NotRelationField, FieldDoesNotExist):
return [
checks.Error(
"The value of 'date_hierarchy' refers to '%s', which "
"does not refer to a Field." % obj.date_hierarchy,
obj=obj.__class__,
id="admin.E127",
)
]
else:
if not isinstance(field, (models.DateField, models.DateTimeField)):
return must_be(
"a DateField or DateTimeField",
option="date_hierarchy",
obj=obj,
id="admin.E128",
)
else:
return []
def _check_action_permission_methods(self, obj):
"""
        Actions with an allowed_permissions attribute require the ModelAdmin to
implement a has_<perm>_permission() method for each permission.
"""
actions = obj._get_base_actions()
errors = []
for func, name, _ in actions:
if not hasattr(func, "allowed_permissions"):
continue
for permission in func.allowed_permissions:
method_name = "has_%s_permission" % permission
if not hasattr(obj, method_name):
errors.append(
checks.Error(
"%s must define a %s() method for the %s action."
% (
obj.__class__.__name__,
method_name,
func.__name__,
),
obj=obj.__class__,
id="admin.E129",
)
)
return errors
def _check_actions_uniqueness(self, obj):
"""Check that every action has a unique __name__."""
errors = []
names = collections.Counter(name for _, name, _ in obj._get_base_actions())
for name, count in names.items():
if count > 1:
errors.append(
checks.Error(
"__name__ attributes of actions defined in %s must be "
"unique. Name %r is not unique."
% (
obj.__class__.__name__,
name,
),
obj=obj.__class__,
id="admin.E130",
)
)
return errors
class InlineModelAdminChecks(BaseModelAdminChecks):
def check(self, inline_obj, **kwargs):
parent_model = inline_obj.parent_model
return [
*super().check(inline_obj),
*self._check_relation(inline_obj, parent_model),
*self._check_exclude_of_parent_model(inline_obj, parent_model),
*self._check_extra(inline_obj),
*self._check_max_num(inline_obj),
*self._check_min_num(inline_obj),
*self._check_formset(inline_obj),
]
def _check_exclude_of_parent_model(self, obj, parent_model):
# Do not perform more specific checks if the base checks result in an
# error.
errors = super()._check_exclude(obj)
if errors:
return []
# Skip if `fk_name` is invalid.
if self._check_relation(obj, parent_model):
return []
if obj.exclude is None:
return []
fk = _get_foreign_key(parent_model, obj.model, fk_name=obj.fk_name)
if fk.name in obj.exclude:
return [
checks.Error(
"Cannot exclude the field '%s', because it is the foreign key "
"to the parent model '%s'."
% (
fk.name,
parent_model._meta.label,
),
obj=obj.__class__,
id="admin.E201",
)
]
else:
return []
def _check_relation(self, obj, parent_model):
try:
_get_foreign_key(parent_model, obj.model, fk_name=obj.fk_name)
except ValueError as e:
return [checks.Error(e.args[0], obj=obj.__class__, id="admin.E202")]
else:
return []
def _check_extra(self, obj):
"""Check that extra is an integer."""
if not isinstance(obj.extra, int):
return must_be("an integer", option="extra", obj=obj, id="admin.E203")
else:
return []
def _check_max_num(self, obj):
"""Check that max_num is an integer."""
if obj.max_num is None:
return []
elif not isinstance(obj.max_num, int):
return must_be("an integer", option="max_num", obj=obj, id="admin.E204")
else:
return []
def _check_min_num(self, obj):
"""Check that min_num is an integer."""
if obj.min_num is None:
return []
elif not isinstance(obj.min_num, int):
return must_be("an integer", option="min_num", obj=obj, id="admin.E205")
else:
return []
def _check_formset(self, obj):
"""Check formset is a subclass of BaseModelFormSet."""
if not _issubclass(obj.formset, BaseModelFormSet):
return must_inherit_from(
parent="BaseModelFormSet", option="formset", obj=obj, id="admin.E206"
)
else:
return []
def must_be(type, option, obj, id):
return [
checks.Error(
"The value of '%s' must be %s." % (option, type),
obj=obj.__class__,
id=id,
),
]
def must_inherit_from(parent, option, obj, id):
return [
checks.Error(
"The value of '%s' must inherit from '%s'." % (option, parent),
obj=obj.__class__,
id=id,
),
]
def refer_to_missing_field(field, option, obj, id):
return [
checks.Error(
"The value of '%s' refers to '%s', which is not a field of '%s'."
% (option, field, obj.model._meta.label),
obj=obj.__class__,
id=id,
),
]
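# Illustrative only: for a hypothetical BookAdmin whose model is library.Book,
#   refer_to_missing_field(field="foo", option="fields[0]", obj=book_admin,
#                          id="admin.E004")
# returns a one-element list whose checks.Error reads "The value of
# 'fields[0]' refers to 'foo', which is not a field of 'library.Book'."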
|
c1ba3b3664e9ff11226bcf315520ee7c2ac9980af59dccd92b9f9bb2f384ddad | from functools import update_wrapper
from weakref import WeakSet
from django.apps import apps
from django.conf import settings
from django.contrib.admin import ModelAdmin, actions
from django.contrib.admin.views.autocomplete import AutocompleteJsonView
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.core.exceptions import ImproperlyConfigured
from django.db.models.base import ModelBase
from django.http import Http404, HttpResponsePermanentRedirect, HttpResponseRedirect
from django.template.response import TemplateResponse
from django.urls import NoReverseMatch, Resolver404, resolve, reverse
from django.utils.decorators import method_decorator
from django.utils.functional import LazyObject
from django.utils.module_loading import import_string
from django.utils.text import capfirst
from django.utils.translation import gettext as _
from django.utils.translation import gettext_lazy
from django.views.decorators.cache import never_cache
from django.views.decorators.common import no_append_slash
from django.views.decorators.csrf import csrf_protect
from django.views.i18n import JavaScriptCatalog
all_sites = WeakSet()
class AlreadyRegistered(Exception):
pass
class NotRegistered(Exception):
pass
class AdminSite:
"""
An AdminSite object encapsulates an instance of the Django admin application, ready
to be hooked in to your URLconf. Models are registered with the AdminSite using the
register() method, and the get_urls() method can then be used to access Django view
functions that present a full admin interface for the collection of registered
models.
"""
# Text to put at the end of each page's <title>.
site_title = gettext_lazy("Django site admin")
# Text to put in each page's <div id="site-name">.
site_header = gettext_lazy("Django administration")
# Text to put at the top of the admin index page.
index_title = gettext_lazy("Site administration")
# URL for the "View site" link at the top of each admin page.
site_url = "/"
enable_nav_sidebar = True
empty_value_display = "-"
login_form = None
index_template = None
app_index_template = None
login_template = None
logout_template = None
password_change_template = None
password_change_done_template = None
final_catch_all_view = True
def __init__(self, name="admin"):
        self._registry = {}  # model_class -> admin_class instance
self.name = name
self._actions = {"delete_selected": actions.delete_selected}
self._global_actions = self._actions.copy()
all_sites.add(self)
def __repr__(self):
return f"{self.__class__.__name__}(name={self.name!r})"
def check(self, app_configs):
"""
Run the system checks on all ModelAdmins, except if they aren't
customized at all.
"""
if app_configs is None:
app_configs = apps.get_app_configs()
app_configs = set(app_configs) # Speed up lookups below
errors = []
modeladmins = (
o for o in self._registry.values() if o.__class__ is not ModelAdmin
)
for modeladmin in modeladmins:
if modeladmin.model._meta.app_config in app_configs:
errors.extend(modeladmin.check())
return errors
def register(self, model_or_iterable, admin_class=None, **options):
"""
Register the given model(s) with the given admin class.
The model(s) should be Model classes, not instances.
If an admin class isn't given, use ModelAdmin (the default admin
options). If keyword arguments are given -- e.g., list_display --
apply them as options to the admin class.
If a model is already registered, raise AlreadyRegistered.
If a model is abstract, raise ImproperlyConfigured.
"""
admin_class = admin_class or ModelAdmin
if isinstance(model_or_iterable, ModelBase):
model_or_iterable = [model_or_iterable]
for model in model_or_iterable:
if model._meta.abstract:
raise ImproperlyConfigured(
"The model %s is abstract, so it cannot be registered with admin."
% model.__name__
)
if model in self._registry:
registered_admin = str(self._registry[model])
msg = "The model %s is already registered " % model.__name__
if registered_admin.endswith(".ModelAdmin"):
# Most likely registered without a ModelAdmin subclass.
msg += "in app %r." % registered_admin.removesuffix(".ModelAdmin")
else:
msg += "with %r." % registered_admin
raise AlreadyRegistered(msg)
# Ignore the registration if the model has been
# swapped out.
if not model._meta.swapped:
# If we got **options then dynamically construct a subclass of
# admin_class with those **options.
if options:
# For reasons I don't quite understand, without a __module__
# the created class appears to "live" in the wrong place,
# which causes issues later on.
options["__module__"] = __name__
admin_class = type(
"%sAdmin" % model.__name__, (admin_class,), options
)
# Instantiate the admin class to save in the registry
self._registry[model] = admin_class(model, self)
def unregister(self, model_or_iterable):
"""
Unregister the given model(s).
If a model isn't already registered, raise NotRegistered.
"""
if isinstance(model_or_iterable, ModelBase):
model_or_iterable = [model_or_iterable]
for model in model_or_iterable:
if model not in self._registry:
raise NotRegistered("The model %s is not registered" % model.__name__)
del self._registry[model]
def is_registered(self, model):
"""
Check if a model class is registered with this `AdminSite`.
"""
return model in self._registry
def add_action(self, action, name=None):
"""
Register an action to be available globally.
"""
name = name or action.__name__
self._actions[name] = action
self._global_actions[name] = action
def disable_action(self, name):
"""
Disable a globally-registered action. Raise KeyError for invalid names.
"""
del self._actions[name]
def get_action(self, name):
"""
Explicitly get a registered global action whether it's enabled or
not. Raise KeyError for invalid names.
"""
return self._global_actions[name]
@property
def actions(self):
"""
Get all the enabled actions as an iterable of (name, func).
"""
return self._actions.items()
def has_permission(self, request):
"""
Return True if the given HttpRequest has permission to view
*at least one* page in the admin site.
"""
return request.user.is_active and request.user.is_staff
def admin_view(self, view, cacheable=False):
"""
Decorator to create an admin view attached to this ``AdminSite``. This
wraps the view and provides permission checking by calling
``self.has_permission``.
You'll want to use this from within ``AdminSite.get_urls()``:
class MyAdminSite(AdminSite):
def get_urls(self):
from django.urls import path
urls = super().get_urls()
urls += [
path('my_view/', self.admin_view(some_view))
]
return urls
By default, admin_views are marked non-cacheable using the
``never_cache`` decorator. If the view can be safely cached, set
cacheable=True.
"""
def inner(request, *args, **kwargs):
if not self.has_permission(request):
if request.path == reverse("admin:logout", current_app=self.name):
index_path = reverse("admin:index", current_app=self.name)
return HttpResponseRedirect(index_path)
# Inner import to prevent django.contrib.admin (app) from
# importing django.contrib.auth.models.User (unrelated model).
from django.contrib.auth.views import redirect_to_login
return redirect_to_login(
request.get_full_path(),
reverse("admin:login", current_app=self.name),
)
return view(request, *args, **kwargs)
if not cacheable:
inner = never_cache(inner)
# We add csrf_protect here so this function can be used as a utility
# function for any view, without having to repeat 'csrf_protect'.
if not getattr(view, "csrf_exempt", False):
inner = csrf_protect(inner)
return update_wrapper(inner, view)
def get_urls(self):
# Since this module gets imported in the application's root package,
# it cannot import models from other applications at the module level,
# and django.contrib.contenttypes.views imports ContentType.
from django.contrib.contenttypes import views as contenttype_views
from django.urls import include, path, re_path
def wrap(view, cacheable=False):
def wrapper(*args, **kwargs):
return self.admin_view(view, cacheable)(*args, **kwargs)
wrapper.admin_site = self
return update_wrapper(wrapper, view)
# Admin-site-wide views.
urlpatterns = [
path("", wrap(self.index), name="index"),
path("login/", self.login, name="login"),
path("logout/", wrap(self.logout), name="logout"),
path(
"password_change/",
wrap(self.password_change, cacheable=True),
name="password_change",
),
path(
"password_change/done/",
wrap(self.password_change_done, cacheable=True),
name="password_change_done",
),
path("autocomplete/", wrap(self.autocomplete_view), name="autocomplete"),
path("jsi18n/", wrap(self.i18n_javascript, cacheable=True), name="jsi18n"),
path(
"r/<int:content_type_id>/<path:object_id>/",
wrap(contenttype_views.shortcut),
name="view_on_site",
),
]
# Add in each model's views, and create a list of valid URLS for the
# app_index
valid_app_labels = []
for model, model_admin in self._registry.items():
urlpatterns += [
path(
"%s/%s/" % (model._meta.app_label, model._meta.model_name),
include(model_admin.urls),
),
]
if model._meta.app_label not in valid_app_labels:
valid_app_labels.append(model._meta.app_label)
        # If there were ModelAdmins registered, we should have a list of app
        # labels for which we need to allow access to the app_index view.
if valid_app_labels:
regex = r"^(?P<app_label>" + "|".join(valid_app_labels) + ")/$"
urlpatterns += [
re_path(regex, wrap(self.app_index), name="app_list"),
]
if self.final_catch_all_view:
urlpatterns.append(re_path(r"(?P<url>.*)$", wrap(self.catch_all_view)))
return urlpatterns
@property
def urls(self):
return self.get_urls(), "admin", self.name
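    # Typical URLconf hookup (sketch): the (urlpatterns, app_namespace,
    # instance_namespace) 3-tuple returned above is what path()/include()
    # expect for a namespaced include, e.g.
    #   urlpatterns = [path("admin/", site.urls)]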
def each_context(self, request):
"""
Return a dictionary of variables to put in the template context for
*every* page in the admin site.
For sites running on a subpath, use the SCRIPT_NAME value if site_url
hasn't been customized.
"""
script_name = request.META["SCRIPT_NAME"]
site_url = (
script_name if self.site_url == "/" and script_name else self.site_url
)
return {
"site_title": self.site_title,
"site_header": self.site_header,
"site_url": site_url,
"has_permission": self.has_permission(request),
"available_apps": self.get_app_list(request),
"is_popup": False,
"is_nav_sidebar_enabled": self.enable_nav_sidebar,
"log_entries": self.get_log_entries(request),
}
def password_change(self, request, extra_context=None):
"""
Handle the "change password" task -- both form display and validation.
"""
from django.contrib.admin.forms import AdminPasswordChangeForm
from django.contrib.auth.views import PasswordChangeView
url = reverse("admin:password_change_done", current_app=self.name)
defaults = {
"form_class": AdminPasswordChangeForm,
"success_url": url,
"extra_context": {**self.each_context(request), **(extra_context or {})},
}
if self.password_change_template is not None:
defaults["template_name"] = self.password_change_template
request.current_app = self.name
return PasswordChangeView.as_view(**defaults)(request)
def password_change_done(self, request, extra_context=None):
"""
Display the "success" page after a password change.
"""
from django.contrib.auth.views import PasswordChangeDoneView
defaults = {
"extra_context": {**self.each_context(request), **(extra_context or {})},
}
if self.password_change_done_template is not None:
defaults["template_name"] = self.password_change_done_template
request.current_app = self.name
return PasswordChangeDoneView.as_view(**defaults)(request)
def i18n_javascript(self, request, extra_context=None):
"""
Display the i18n JavaScript that the Django admin requires.
`extra_context` is unused but present for consistency with the other
admin views.
"""
return JavaScriptCatalog.as_view(packages=["django.contrib.admin"])(request)
def logout(self, request, extra_context=None):
"""
Log out the user for the given HttpRequest.
This should *not* assume the user is already logged in.
"""
from django.contrib.auth.views import LogoutView
defaults = {
"extra_context": {
**self.each_context(request),
# Since the user isn't logged out at this point, the value of
# has_permission must be overridden.
"has_permission": False,
**(extra_context or {}),
},
}
if self.logout_template is not None:
defaults["template_name"] = self.logout_template
request.current_app = self.name
return LogoutView.as_view(**defaults)(request)
@method_decorator(never_cache)
def login(self, request, extra_context=None):
"""
Display the login form for the given HttpRequest.
"""
if request.method == "GET" and self.has_permission(request):
# Already logged-in, redirect to admin index
index_path = reverse("admin:index", current_app=self.name)
return HttpResponseRedirect(index_path)
# Since this module gets imported in the application's root package,
# it cannot import models from other applications at the module level,
# and django.contrib.admin.forms eventually imports User.
from django.contrib.admin.forms import AdminAuthenticationForm
from django.contrib.auth.views import LoginView
context = {
**self.each_context(request),
"title": _("Log in"),
"subtitle": None,
"app_path": request.get_full_path(),
"username": request.user.get_username(),
}
if (
REDIRECT_FIELD_NAME not in request.GET
and REDIRECT_FIELD_NAME not in request.POST
):
context[REDIRECT_FIELD_NAME] = reverse("admin:index", current_app=self.name)
context.update(extra_context or {})
defaults = {
"extra_context": context,
"authentication_form": self.login_form or AdminAuthenticationForm,
"template_name": self.login_template or "admin/login.html",
}
request.current_app = self.name
return LoginView.as_view(**defaults)(request)
def autocomplete_view(self, request):
return AutocompleteJsonView.as_view(admin_site=self)(request)
@no_append_slash
def catch_all_view(self, request, url):
if settings.APPEND_SLASH and not url.endswith("/"):
urlconf = getattr(request, "urlconf", None)
try:
match = resolve("%s/" % request.path_info, urlconf)
except Resolver404:
pass
else:
if getattr(match.func, "should_append_slash", True):
return HttpResponsePermanentRedirect(
request.get_full_path(force_append_slash=True)
)
raise Http404
def _build_app_dict(self, request, label=None):
"""
Build the app dictionary. The optional `label` parameter filters models
of a specific app.
"""
app_dict = {}
if label:
models = {
m: m_a
for m, m_a in self._registry.items()
if m._meta.app_label == label
}
else:
models = self._registry
for model, model_admin in models.items():
app_label = model._meta.app_label
has_module_perms = model_admin.has_module_permission(request)
if not has_module_perms:
continue
perms = model_admin.get_model_perms(request)
# Check whether user has any perm for this module.
# If so, add the module to the model_list.
if True not in perms.values():
continue
info = (app_label, model._meta.model_name)
model_dict = {
"model": model,
"name": capfirst(model._meta.verbose_name_plural),
"object_name": model._meta.object_name,
"perms": perms,
"admin_url": None,
"add_url": None,
}
if perms.get("change") or perms.get("view"):
model_dict["view_only"] = not perms.get("change")
try:
model_dict["admin_url"] = reverse(
"admin:%s_%s_changelist" % info, current_app=self.name
)
except NoReverseMatch:
pass
if perms.get("add"):
try:
model_dict["add_url"] = reverse(
"admin:%s_%s_add" % info, current_app=self.name
)
except NoReverseMatch:
pass
if app_label in app_dict:
app_dict[app_label]["models"].append(model_dict)
else:
app_dict[app_label] = {
"name": apps.get_app_config(app_label).verbose_name,
"app_label": app_label,
"app_url": reverse(
"admin:app_list",
kwargs={"app_label": app_label},
current_app=self.name,
),
"has_module_perms": has_module_perms,
"models": [model_dict],
}
return app_dict
def get_app_list(self, request, app_label=None):
"""
Return a sorted list of all the installed apps that have been
registered in this site.
"""
app_dict = self._build_app_dict(request, app_label)
# Sort the apps alphabetically.
app_list = sorted(app_dict.values(), key=lambda x: x["name"].lower())
# Sort the models alphabetically within each app.
for app in app_list:
app["models"].sort(key=lambda x: x["name"])
return app_list
def index(self, request, extra_context=None):
"""
Display the main admin index page, which lists all of the installed
apps that have been registered in this site.
"""
app_list = self.get_app_list(request)
context = {
**self.each_context(request),
"title": self.index_title,
"subtitle": None,
"app_list": app_list,
**(extra_context or {}),
}
request.current_app = self.name
return TemplateResponse(
request, self.index_template or "admin/index.html", context
)
def app_index(self, request, app_label, extra_context=None):
app_list = self.get_app_list(request, app_label)
if not app_list:
raise Http404("The requested admin page does not exist.")
context = {
**self.each_context(request),
"title": _("%(app)s administration") % {"app": app_list[0]["name"]},
"subtitle": None,
"app_list": app_list,
"app_label": app_label,
**(extra_context or {}),
}
request.current_app = self.name
return TemplateResponse(
request,
self.app_index_template
or ["admin/%s/app_index.html" % app_label, "admin/app_index.html"],
context,
)
def get_log_entries(self, request):
from django.contrib.admin.models import LogEntry
return LogEntry.objects.select_related("content_type", "user")
class DefaultAdminSite(LazyObject):
def _setup(self):
AdminSiteClass = import_string(apps.get_app_config("admin").default_site)
self._wrapped = AdminSiteClass()
def __repr__(self):
return repr(self._wrapped)
# This global object represents the default admin site, for the common case.
# You can provide your own AdminSite using the (Simple)AdminConfig.default_site
# attribute. You can also instantiate AdminSite in your own code to create a
# custom admin site.
site = DefaultAdminSite()
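# A minimal usage sketch (the model and attribute values are illustrative):
#
#     from django.contrib import admin
#
#     admin.site.register(MyModel)  # register against the default site
#
#     class MyAdminSite(admin.AdminSite):
#         site_header = "My project admin"
#
#     my_site = MyAdminSite(name="myadmin")  # or instantiate a custom site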
|
510a30afd8c5d65db26ff0ff21b637dcc51d5792042bb9f1f9425038c7c25140 | from django.core.exceptions import FieldError
from django.db.models import FilteredRelation
from django.test import SimpleTestCase, TestCase
from .models import (
AdvancedUserStat,
Child1,
Child2,
Child3,
Child4,
Image,
LinkedList,
Parent1,
Parent2,
Product,
StatDetails,
User,
UserProfile,
UserStat,
UserStatResult,
)
class ReverseSelectRelatedTestCase(TestCase):
@classmethod
def setUpTestData(cls):
user = User.objects.create(username="test")
UserProfile.objects.create(user=user, state="KS", city="Lawrence")
results = UserStatResult.objects.create(results="first results")
userstat = UserStat.objects.create(user=user, posts=150, results=results)
StatDetails.objects.create(base_stats=userstat, comments=259)
user2 = User.objects.create(username="bob")
results2 = UserStatResult.objects.create(results="moar results")
advstat = AdvancedUserStat.objects.create(
user=user2, posts=200, karma=5, results=results2
)
StatDetails.objects.create(base_stats=advstat, comments=250)
p1 = Parent1(name1="Only Parent1")
p1.save()
c1 = Child1(name1="Child1 Parent1", name2="Child1 Parent2", value=1)
c1.save()
p2 = Parent2(name2="Child2 Parent2")
p2.save()
c2 = Child2(name1="Child2 Parent1", parent2=p2, value=2)
c2.save()
def test_basic(self):
with self.assertNumQueries(1):
u = User.objects.select_related("userprofile").get(username="test")
self.assertEqual(u.userprofile.state, "KS")
def test_follow_next_level(self):
with self.assertNumQueries(1):
u = User.objects.select_related("userstat__results").get(username="test")
self.assertEqual(u.userstat.posts, 150)
self.assertEqual(u.userstat.results.results, "first results")
def test_follow_two(self):
with self.assertNumQueries(1):
u = User.objects.select_related("userprofile", "userstat").get(
username="test"
)
self.assertEqual(u.userprofile.state, "KS")
self.assertEqual(u.userstat.posts, 150)
def test_follow_two_next_level(self):
with self.assertNumQueries(1):
u = User.objects.select_related(
"userstat__results", "userstat__statdetails"
).get(username="test")
self.assertEqual(u.userstat.results.results, "first results")
self.assertEqual(u.userstat.statdetails.comments, 259)
def test_forward_and_back(self):
with self.assertNumQueries(1):
stat = UserStat.objects.select_related("user__userprofile").get(
user__username="test"
)
self.assertEqual(stat.user.userprofile.state, "KS")
self.assertEqual(stat.user.userstat.posts, 150)
def test_back_and_forward(self):
with self.assertNumQueries(1):
u = User.objects.select_related("userstat").get(username="test")
self.assertEqual(u.userstat.user.username, "test")
def test_not_followed_by_default(self):
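# A bare select_related() only follows non-null forward relations, so the
# reverse one-to-one userstat still costs a second query here.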
with self.assertNumQueries(2):
u = User.objects.select_related().get(username="test")
self.assertEqual(u.userstat.posts, 150)
def test_follow_from_child_class(self):
with self.assertNumQueries(1):
stat = AdvancedUserStat.objects.select_related("user", "statdetails").get(
posts=200
)
self.assertEqual(stat.statdetails.comments, 250)
self.assertEqual(stat.user.username, "bob")
def test_follow_inheritance(self):
with self.assertNumQueries(1):
stat = UserStat.objects.select_related("user", "advanceduserstat").get(
posts=200
)
self.assertEqual(stat.advanceduserstat.posts, 200)
self.assertEqual(stat.user.username, "bob")
with self.assertNumQueries(0):
self.assertEqual(stat.advanceduserstat.user.username, "bob")
def test_nullable_relation(self):
im = Image.objects.create(name="imag1")
p1 = Product.objects.create(name="Django Plushie", image=im)
p2 = Product.objects.create(name="Talking Django Plushie")
with self.assertNumQueries(1):
result = sorted(
Product.objects.select_related("image"), key=lambda x: x.name
)
self.assertEqual(
[p.name for p in result], ["Django Plushie", "Talking Django Plushie"]
)
self.assertEqual(p1.image, im)
# Check for ticket #13839
self.assertIsNone(p2.image)
def test_missing_reverse(self):
"""
Ticket #13839: select_related() should NOT cache None
for missing objects on a reverse 1-1 relation.
"""
with self.assertNumQueries(1):
user = User.objects.select_related("userprofile").get(username="bob")
with self.assertRaises(UserProfile.DoesNotExist):
user.userprofile
def test_nullable_missing_reverse(self):
"""
Ticket #13839: select_related() should NOT cache None
for missing objects on a reverse 0-1 relation.
"""
Image.objects.create(name="imag1")
with self.assertNumQueries(1):
image = Image.objects.select_related("product").get()
with self.assertRaises(Product.DoesNotExist):
image.product
def test_parent_only(self):
with self.assertNumQueries(1):
p = Parent1.objects.select_related("child1").get(name1="Only Parent1")
with self.assertNumQueries(0):
with self.assertRaises(Child1.DoesNotExist):
p.child1
def test_multiple_subclass(self):
with self.assertNumQueries(1):
p = Parent1.objects.select_related("child1").get(name1="Child1 Parent1")
self.assertEqual(p.child1.name2, "Child1 Parent2")
def test_onetoone_with_subclass(self):
with self.assertNumQueries(1):
p = Parent2.objects.select_related("child2").get(name2="Child2 Parent2")
self.assertEqual(p.child2.name1, "Child2 Parent1")
def test_onetoone_with_two_subclasses(self):
with self.assertNumQueries(1):
p = Parent2.objects.select_related("child2", "child2__child3").get(
name2="Child2 Parent2"
)
self.assertEqual(p.child2.name1, "Child2 Parent1")
with self.assertRaises(Child3.DoesNotExist):
p.child2.child3
p3 = Parent2(name2="Child3 Parent2")
p3.save()
c2 = Child3(name1="Child3 Parent1", parent2=p3, value=2, value3=3)
c2.save()
with self.assertNumQueries(1):
p = Parent2.objects.select_related("child2", "child2__child3").get(
name2="Child3 Parent2"
)
self.assertEqual(p.child2.name1, "Child3 Parent1")
self.assertEqual(p.child2.child3.value3, 3)
self.assertEqual(p.child2.child3.value, p.child2.value)
self.assertEqual(p.child2.name1, p.child2.child3.name1)
def test_multiinheritance_two_subclasses(self):
with self.assertNumQueries(1):
p = Parent1.objects.select_related("child1", "child1__child4").get(
name1="Child1 Parent1"
)
self.assertEqual(p.child1.name2, "Child1 Parent2")
self.assertEqual(p.child1.name1, p.name1)
with self.assertRaises(Child4.DoesNotExist):
p.child1.child4
Child4(name1="n1", name2="n2", value=1, value4=4).save()
with self.assertNumQueries(1):
p = Parent2.objects.select_related("child1", "child1__child4").get(
name2="n2"
)
self.assertEqual(p.name2, "n2")
self.assertEqual(p.child1.name1, "n1")
self.assertEqual(p.child1.name2, p.name2)
self.assertEqual(p.child1.value, 1)
self.assertEqual(p.child1.child4.name1, p.child1.name1)
self.assertEqual(p.child1.child4.name2, p.child1.name2)
self.assertEqual(p.child1.child4.value, p.child1.value)
self.assertEqual(p.child1.child4.value4, 4)
def test_inheritance_deferred(self):
c = Child4.objects.create(name1="n1", name2="n2", value=1, value4=4)
with self.assertNumQueries(1):
p = (
Parent2.objects.select_related("child1")
.only("id2", "child1__value")
.get(name2="n2")
)
self.assertEqual(p.id2, c.id2)
self.assertEqual(p.child1.value, 1)
p = (
Parent2.objects.select_related("child1")
.only("id2", "child1__value")
.get(name2="n2")
)
with self.assertNumQueries(1):
self.assertEqual(p.name2, "n2")
p = (
Parent2.objects.select_related("child1")
.only("id2", "child1__value")
.get(name2="n2")
)
with self.assertNumQueries(1):
self.assertEqual(p.child1.name2, "n2")
def test_inheritance_deferred2(self):
c = Child4.objects.create(name1="n1", name2="n2", value=1, value4=4)
qs = Parent2.objects.select_related("child1", "child1__child4").only(
"id2", "child1__value", "child1__child4__value4"
)
with self.assertNumQueries(1):
p = qs.get(name2="n2")
self.assertEqual(p.id2, c.id2)
self.assertEqual(p.child1.value, 1)
self.assertEqual(p.child1.child4.value4, 4)
self.assertEqual(p.child1.child4.id2, c.id2)
p = qs.get(name2="n2")
with self.assertNumQueries(1):
self.assertEqual(p.child1.name2, "n2")
p = qs.get(name2="n2")
with self.assertNumQueries(0):
self.assertEqual(p.child1.value, 1)
self.assertEqual(p.child1.child4.value4, 4)
with self.assertNumQueries(2):
self.assertEqual(p.child1.name1, "n1")
self.assertEqual(p.child1.child4.name1, "n1")
def test_self_relation(self):
item1 = LinkedList.objects.create(name="item1")
LinkedList.objects.create(name="item2", previous_item=item1)
with self.assertNumQueries(1):
item1_db = LinkedList.objects.select_related("next_item").get(name="item1")
self.assertEqual(item1_db.next_item.name, "item2")
class ReverseSelectRelatedValidationTests(SimpleTestCase):
"""
Reverse related fields should be listed in the validation message when an
invalid field is given in select_related().
"""
non_relational_error = (
"Non-relational field given in select_related: '%s'. Choices are: %s"
)
invalid_error = (
"Invalid field name(s) given in select_related: '%s'. Choices are: %s"
)
def test_reverse_related_validation(self):
fields = "userprofile, userstat"
with self.assertRaisesMessage(
FieldError, self.invalid_error % ("foobar", fields)
):
list(User.objects.select_related("foobar"))
with self.assertRaisesMessage(
FieldError, self.non_relational_error % ("username", fields)
):
list(User.objects.select_related("username"))
def test_reverse_related_validation_with_filtered_relation(self):
fields = "userprofile, userstat, relation"
with self.assertRaisesMessage(
FieldError, self.invalid_error % ("foobar", fields)
):
list(
User.objects.annotate(
relation=FilteredRelation("userprofile")
).select_related("foobar")
)
|
53b17aeebb38efa3380e41ebc86a7bcefe07b6d0ca03a28968d8503452702c71 | import datetime
import os
import re
import unittest
import zoneinfo
from unittest import mock
from urllib.parse import parse_qsl, urljoin, urlparse
from django.contrib import admin
from django.contrib.admin import AdminSite, ModelAdmin
from django.contrib.admin.helpers import ACTION_CHECKBOX_NAME
from django.contrib.admin.models import ADDITION, DELETION, LogEntry
from django.contrib.admin.options import TO_FIELD_VAR
from django.contrib.admin.templatetags.admin_urls import add_preserved_filters
from django.contrib.admin.tests import AdminSeleniumTestCase
from django.contrib.admin.utils import quote
from django.contrib.admin.views.main import IS_POPUP_VAR
from django.contrib.auth import REDIRECT_FIELD_NAME, get_permission_codename
from django.contrib.auth.models import Group, Permission, User
from django.contrib.contenttypes.models import ContentType
from django.core import mail
from django.core.checks import Error
from django.core.files import temp as tempfile
from django.db import connection
from django.forms.utils import ErrorList
from django.template.response import TemplateResponse
from django.test import (
TestCase,
ignore_warnings,
modify_settings,
override_settings,
skipUnlessDBFeature,
)
from django.test.utils import override_script_prefix
from django.urls import NoReverseMatch, resolve, reverse
from django.utils import formats, translation
from django.utils.cache import get_max_age
from django.utils.deprecation import RemovedInDjango60Warning
from django.utils.encoding import iri_to_uri
from django.utils.html import escape
from django.utils.http import urlencode
from . import customadmin
from .admin import CityAdmin, site, site2
from .models import (
Actor,
AdminOrderedAdminMethod,
AdminOrderedCallable,
AdminOrderedField,
AdminOrderedModelMethod,
Album,
Answer,
Answer2,
Article,
BarAccount,
Book,
Bookmark,
Box,
Category,
Chapter,
ChapterXtra1,
ChapterXtra2,
Character,
Child,
Choice,
City,
Collector,
Color,
ComplexSortedPerson,
CoverLetter,
CustomArticle,
CyclicOne,
CyclicTwo,
DooHickey,
Employee,
EmptyModel,
Fabric,
FancyDoodad,
FieldOverridePost,
FilteredManager,
FooAccount,
FoodDelivery,
FunkyTag,
Gallery,
Grommet,
Inquisition,
Language,
Link,
MainPrepopulated,
Media,
ModelWithStringPrimaryKey,
OtherStory,
Paper,
Parent,
ParentWithDependentChildren,
ParentWithUUIDPK,
Person,
Persona,
Picture,
Pizza,
Plot,
PlotDetails,
PluggableSearchPerson,
Podcast,
Post,
PrePopulatedPost,
Promo,
Question,
ReadablePizza,
ReadOnlyPizza,
ReadOnlyRelatedField,
Recommendation,
Recommender,
RelatedPrepopulated,
RelatedWithUUIDPKModel,
Report,
Restaurant,
RowLevelChangePermissionModel,
SecretHideout,
Section,
ShortMessage,
Simple,
Song,
State,
Story,
SuperSecretHideout,
SuperVillain,
Telegram,
TitleTranslation,
Topping,
Traveler,
UnchangeableObject,
UndeletableObject,
UnorderedObject,
UserProxy,
Villain,
Vodcast,
Whatsit,
Widget,
Worker,
WorkHour,
)
ERROR_MESSAGE = "Please enter the correct username and password \
for a staff account. Note that both fields may be case-sensitive."
MULTIPART_ENCTYPE = 'enctype="multipart/form-data"'
def make_aware_datetimes(dt, iana_key):
"""Makes one aware datetime for each supported time zone provider."""
yield dt.replace(tzinfo=zoneinfo.ZoneInfo(iana_key))
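# Usage sketch: each yielded datetime carries a zoneinfo tzinfo, e.g.
#
#     next(make_aware_datetimes(datetime.datetime(2016, 10, 16, 15), "America/Sao_Paulo"))
#     # -> datetime.datetime(2016, 10, 16, 15, 0,
#     #                      tzinfo=zoneinfo.ZoneInfo("America/Sao_Paulo"))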
class AdminFieldExtractionMixin:
"""
Helper methods for extracting data from AdminForm.
"""
def get_admin_form_fields(self, response):
"""
Return a list of AdminFields for the AdminForm in the response.
"""
fields = []
for fieldset in response.context["adminform"]:
for field_line in fieldset:
fields.extend(field_line)
return fields
def get_admin_readonly_fields(self, response):
"""
Return the readonly fields for the response's AdminForm.
"""
return [f for f in self.get_admin_form_fields(response) if f.is_readonly]
def get_admin_readonly_field(self, response, field_name):
"""
Return the readonly field for the given field_name.
"""
admin_readonly_fields = self.get_admin_readonly_fields(response)
for field in admin_readonly_fields:
if field.field["name"] == field_name:
return field
@override_settings(ROOT_URLCONF="admin_views.urls", USE_I18N=True, LANGUAGE_CODE="en")
class AdminViewBasicTestCase(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
title="Article 1",
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
title="Article 2",
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
cls.color1 = Color.objects.create(value="Red", warm=True)
cls.color2 = Color.objects.create(value="Orange", warm=True)
cls.color3 = Color.objects.create(value="Blue", warm=False)
cls.color4 = Color.objects.create(value="Green", warm=False)
cls.fab1 = Fabric.objects.create(surface="x")
cls.fab2 = Fabric.objects.create(surface="y")
cls.fab3 = Fabric.objects.create(surface="plain")
cls.b1 = Book.objects.create(name="Book 1")
cls.b2 = Book.objects.create(name="Book 2")
cls.pro1 = Promo.objects.create(name="Promo 1", book=cls.b1)
cls.pro2 = Promo.objects.create(name="Promo 2", book=cls.b2)
cls.chap1 = Chapter.objects.create(
title="Chapter 1", content="[ insert contents here ]", book=cls.b1
)
cls.chap2 = Chapter.objects.create(
title="Chapter 2", content="[ insert contents here ]", book=cls.b1
)
cls.chap3 = Chapter.objects.create(
title="Chapter 1", content="[ insert contents here ]", book=cls.b2
)
cls.chap4 = Chapter.objects.create(
title="Chapter 2", content="[ insert contents here ]", book=cls.b2
)
cls.cx1 = ChapterXtra1.objects.create(chap=cls.chap1, xtra="ChapterXtra1 1")
cls.cx2 = ChapterXtra1.objects.create(chap=cls.chap3, xtra="ChapterXtra1 2")
Actor.objects.create(name="Palin", age=27)
# Post data for edit inline
cls.inline_post_data = {
"name": "Test section",
# inline data
"article_set-TOTAL_FORMS": "6",
"article_set-INITIAL_FORMS": "3",
"article_set-MAX_NUM_FORMS": "0",
"article_set-0-id": cls.a1.pk,
# There is no title in the database; give one here or the formset will fail.
"article_set-0-title": "Norske bostaver æøå skaper problemer",
"article_set-0-content": "<p>Middle content</p>",
"article_set-0-date_0": "2008-03-18",
"article_set-0-date_1": "11:54:58",
"article_set-0-section": cls.s1.pk,
"article_set-1-id": cls.a2.pk,
"article_set-1-title": "Need a title.",
"article_set-1-content": "<p>Oldest content</p>",
"article_set-1-date_0": "2000-03-18",
"article_set-1-date_1": "11:54:58",
"article_set-2-id": cls.a3.pk,
"article_set-2-title": "Need a title.",
"article_set-2-content": "<p>Newest content</p>",
"article_set-2-date_0": "2009-03-18",
"article_set-2-date_1": "11:54:58",
"article_set-3-id": "",
"article_set-3-title": "",
"article_set-3-content": "",
"article_set-3-date_0": "",
"article_set-3-date_1": "",
"article_set-4-id": "",
"article_set-4-title": "",
"article_set-4-content": "",
"article_set-4-date_0": "",
"article_set-4-date_1": "",
"article_set-5-id": "",
"article_set-5-title": "",
"article_set-5-content": "",
"article_set-5-date_0": "",
"article_set-5-date_1": "",
}
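# The *-TOTAL_FORMS, *-INITIAL_FORMS, and *-MAX_NUM_FORMS keys above are the
# formset management form fields; inline POSTs fail validation without them.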
def setUp(self):
self.client.force_login(self.superuser)
def assertContentBefore(self, response, text1, text2, failing_msg=None):
"""
Testing utility asserting that text1 appears before text2 in response
content.
"""
self.assertEqual(response.status_code, 200)
self.assertLess(
response.content.index(text1.encode()),
response.content.index(text2.encode()),
(failing_msg or "")
+ "\nResponse:\n"
+ response.content.decode(response.charset),
)
class AdminViewBasicTest(AdminViewBasicTestCase):
def test_trailing_slash_required(self):
"""
If you leave off the trailing slash, the app should redirect and add it.
"""
add_url = reverse("admin:admin_views_article_add")
response = self.client.get(add_url[:-1])
self.assertRedirects(response, add_url, status_code=301)
def test_basic_add_GET(self):
"""
A smoke test to ensure GET on the add_view works.
"""
response = self.client.get(reverse("admin:admin_views_section_add"))
self.assertIsInstance(response, TemplateResponse)
self.assertEqual(response.status_code, 200)
def test_add_with_GET_args(self):
response = self.client.get(
reverse("admin:admin_views_section_add"), {"name": "My Section"}
)
self.assertContains(
response,
'value="My Section"',
msg_prefix="Couldn't find an input with the right value in the response",
)
def test_basic_edit_GET(self):
"""
A smoke test to ensure GET on the change_view works.
"""
response = self.client.get(
reverse("admin:admin_views_section_change", args=(self.s1.pk,))
)
self.assertIsInstance(response, TemplateResponse)
self.assertEqual(response.status_code, 200)
def test_basic_edit_GET_string_PK(self):
"""
GET on the change_view (when passing a string as the PK argument for a
model with an integer PK field) redirects to the index page with a
message saying the object doesn't exist.
"""
response = self.client.get(
reverse("admin:admin_views_section_change", args=(quote("abc/<b>"),)),
follow=True,
)
self.assertRedirects(response, reverse("admin:index"))
self.assertEqual(
[m.message for m in response.context["messages"]],
["section with ID “abc/<b>” doesn’t exist. Perhaps it was deleted?"],
)
def test_basic_edit_GET_old_url_redirect(self):
"""
The change URL changed in Django 1.9, but the old one still redirects.
"""
response = self.client.get(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)).replace(
"change/", ""
)
)
self.assertRedirects(
response, reverse("admin:admin_views_section_change", args=(self.s1.pk,))
)
def test_basic_inheritance_GET_string_PK(self):
"""
GET on the change_view (for inherited models) redirects to the index
page with a message saying the object doesn't exist.
"""
response = self.client.get(
reverse("admin:admin_views_supervillain_change", args=("abc",)), follow=True
)
self.assertRedirects(response, reverse("admin:index"))
self.assertEqual(
[m.message for m in response.context["messages"]],
["super villain with ID “abc” doesn’t exist. Perhaps it was deleted?"],
)
def test_basic_add_POST(self):
"""
A smoke test to ensure POST on add_view works.
"""
post_data = {
"name": "Another Section",
# inline data
"article_set-TOTAL_FORMS": "3",
"article_set-INITIAL_FORMS": "0",
"article_set-MAX_NUM_FORMS": "0",
}
response = self.client.post(reverse("admin:admin_views_section_add"), post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_popup_add_POST(self):
"""HTTP response from a popup is properly escaped."""
post_data = {
IS_POPUP_VAR: "1",
"title": "title with a new\nline",
"content": "some content",
"date_0": "2010-09-10",
"date_1": "14:55:39",
}
response = self.client.post(reverse("admin:admin_views_article_add"), post_data)
self.assertContains(response, "title with a new\\nline")
def test_basic_edit_POST(self):
"""
A smoke test to ensure POST on edit_view works.
"""
url = reverse("admin:admin_views_section_change", args=(self.s1.pk,))
response = self.client.post(url, self.inline_post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_edit_save_as(self):
"""
Test "save as".
"""
post_data = self.inline_post_data.copy()
post_data.update(
{
"_saveasnew": "Save+as+new",
"article_set-1-section": "1",
"article_set-2-section": "1",
"article_set-3-section": "1",
"article_set-4-section": "1",
"article_set-5-section": "1",
}
)
response = self.client.post(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)), post_data
)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_edit_save_as_delete_inline(self):
"""
Should be able to "Save as new" while also deleting an inline.
"""
post_data = self.inline_post_data.copy()
post_data.update(
{
"_saveasnew": "Save+as+new",
"article_set-1-section": "1",
"article_set-2-section": "1",
"article_set-2-DELETE": "1",
"article_set-3-section": "1",
}
)
response = self.client.post(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)), post_data
)
self.assertEqual(response.status_code, 302)
# Started with 3 articles; one was deleted.
self.assertEqual(Section.objects.latest("id").article_set.count(), 2)
def test_change_list_column_field_classes(self):
response = self.client.get(reverse("admin:admin_views_article_changelist"))
# Callables display the callable name.
self.assertContains(response, "column-callable_year")
self.assertContains(response, "field-callable_year")
# Lambdas display as "lambda" + the index at which they appear in list_display.
self.assertContains(response, "column-lambda8")
self.assertContains(response, "field-lambda8")
def test_change_list_sorting_callable(self):
"""
Ensure we can sort on a list_display field that is a callable
(column 2 is callable_year in ArticleAdmin)
"""
response = self.client.get(
reverse("admin:admin_views_article_changelist"), {"o": 2}
)
self.assertContentBefore(
response,
"Oldest content",
"Middle content",
"Results of sorting on callable are out of order.",
)
self.assertContentBefore(
response,
"Middle content",
"Newest content",
"Results of sorting on callable are out of order.",
)
def test_change_list_sorting_property(self):
"""
Sort on a list_display field that is a property (column 10 is
a property in Article model).
"""
response = self.client.get(
reverse("admin:admin_views_article_changelist"), {"o": 10}
)
self.assertContentBefore(
response,
"Oldest content",
"Middle content",
"Results of sorting on property are out of order.",
)
self.assertContentBefore(
response,
"Middle content",
"Newest content",
"Results of sorting on property are out of order.",
)
def test_change_list_sorting_callable_query_expression(self):
"""Query expressions may be used for admin_order_field."""
tests = [
("order_by_expression", 9),
("order_by_f_expression", 12),
("order_by_orderby_expression", 13),
]
for admin_order_field, index in tests:
with self.subTest(admin_order_field):
response = self.client.get(
reverse("admin:admin_views_article_changelist"),
{"o": index},
)
self.assertContentBefore(
response,
"Oldest content",
"Middle content",
"Results of sorting on callable are out of order.",
)
self.assertContentBefore(
response,
"Middle content",
"Newest content",
"Results of sorting on callable are out of order.",
)
def test_change_list_sorting_callable_query_expression_reverse(self):
tests = [
("order_by_expression", -9),
("order_by_f_expression", -12),
("order_by_orderby_expression", -13),
]
for admin_order_field, index in tests:
with self.subTest(admin_order_field):
response = self.client.get(
reverse("admin:admin_views_article_changelist"),
{"o": index},
)
self.assertContentBefore(
response,
"Middle content",
"Oldest content",
"Results of sorting on callable are out of order.",
)
self.assertContentBefore(
response,
"Newest content",
"Middle content",
"Results of sorting on callable are out of order.",
)
def test_change_list_sorting_model(self):
"""
Ensure we can sort on a list_display field that is a Model method
(column 3 is 'model_year' in ArticleAdmin)
"""
response = self.client.get(
reverse("admin:admin_views_article_changelist"), {"o": "-3"}
)
self.assertContentBefore(
response,
"Newest content",
"Middle content",
"Results of sorting on Model method are out of order.",
)
self.assertContentBefore(
response,
"Middle content",
"Oldest content",
"Results of sorting on Model method are out of order.",
)
def test_change_list_sorting_model_admin(self):
"""
Ensure we can sort on a list_display field that is a ModelAdmin method
(column 4 is 'modeladmin_year' in ArticleAdmin)
"""
response = self.client.get(
reverse("admin:admin_views_article_changelist"), {"o": "4"}
)
self.assertContentBefore(
response,
"Oldest content",
"Middle content",
"Results of sorting on ModelAdmin method are out of order.",
)
self.assertContentBefore(
response,
"Middle content",
"Newest content",
"Results of sorting on ModelAdmin method are out of order.",
)
def test_change_list_sorting_model_admin_reverse(self):
"""
Ensure we can sort on a list_display field that is a ModelAdmin
method in reverse order (i.e. admin_order_field uses the '-' prefix)
(column 6 is 'model_year_reverse' in ArticleAdmin)
"""
td = '<td class="field-model_property_year">%s</td>'
td_2000, td_2008, td_2009 = td % 2000, td % 2008, td % 2009
response = self.client.get(
reverse("admin:admin_views_article_changelist"), {"o": "6"}
)
self.assertContentBefore(
response,
td_2009,
td_2008,
"Results of sorting on ModelAdmin method are out of order.",
)
self.assertContentBefore(
response,
td_2008,
td_2000,
"Results of sorting on ModelAdmin method are out of order.",
)
# Let's make sure the ordering is right and that we don't get a
# FieldError when we change to descending order
response = self.client.get(
reverse("admin:admin_views_article_changelist"), {"o": "-6"}
)
self.assertContentBefore(
response,
td_2000,
td_2008,
"Results of sorting on ModelAdmin method are out of order.",
)
self.assertContentBefore(
response,
td_2008,
td_2009,
"Results of sorting on ModelAdmin method are out of order.",
)
def test_change_list_sorting_multiple(self):
p1 = Person.objects.create(name="Chris", gender=1, alive=True)
p2 = Person.objects.create(name="Chris", gender=2, alive=True)
p3 = Person.objects.create(name="Bob", gender=1, alive=True)
link1 = reverse("admin:admin_views_person_change", args=(p1.pk,))
link2 = reverse("admin:admin_views_person_change", args=(p2.pk,))
link3 = reverse("admin:admin_views_person_change", args=(p3.pk,))
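# The "o" parameter is a dot-separated list of 1-based list_display column
# indexes; a "-" prefix on an index requests descending order for that column.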
# Sort by name, gender
response = self.client.get(
reverse("admin:admin_views_person_changelist"), {"o": "1.2"}
)
self.assertContentBefore(response, link3, link1)
self.assertContentBefore(response, link1, link2)
# Sort by gender descending, name
response = self.client.get(
reverse("admin:admin_views_person_changelist"), {"o": "-2.1"}
)
self.assertContentBefore(response, link2, link3)
self.assertContentBefore(response, link3, link1)
def test_change_list_sorting_preserve_queryset_ordering(self):
"""
If no ordering is defined in `ModelAdmin.ordering` or in the query
string, then the underlying order of the queryset should not be
changed, even if it is defined in `ModelAdmin.get_queryset()`.
Refs #11868, #7309.
"""
p1 = Person.objects.create(name="Amy", gender=1, alive=True, age=80)
p2 = Person.objects.create(name="Bob", gender=1, alive=True, age=70)
p3 = Person.objects.create(name="Chris", gender=2, alive=False, age=60)
link1 = reverse("admin:admin_views_person_change", args=(p1.pk,))
link2 = reverse("admin:admin_views_person_change", args=(p2.pk,))
link3 = reverse("admin:admin_views_person_change", args=(p3.pk,))
response = self.client.get(reverse("admin:admin_views_person_changelist"), {})
self.assertContentBefore(response, link3, link2)
self.assertContentBefore(response, link2, link1)
def test_change_list_sorting_model_meta(self):
# Test ordering on Model Meta is respected
l1 = Language.objects.create(iso="ur", name="Urdu")
l2 = Language.objects.create(iso="ar", name="Arabic")
link1 = reverse("admin:admin_views_language_change", args=(quote(l1.pk),))
link2 = reverse("admin:admin_views_language_change", args=(quote(l2.pk),))
response = self.client.get(reverse("admin:admin_views_language_changelist"), {})
self.assertContentBefore(response, link2, link1)
# Test we can override with query string
response = self.client.get(
reverse("admin:admin_views_language_changelist"), {"o": "-1"}
)
self.assertContentBefore(response, link1, link2)
def test_change_list_sorting_override_model_admin(self):
# Test ordering on Model Admin is respected, and overrides Model Meta
dt = datetime.datetime.now()
p1 = Podcast.objects.create(name="A", release_date=dt)
p2 = Podcast.objects.create(name="B", release_date=dt - datetime.timedelta(10))
link1 = reverse("admin:admin_views_podcast_change", args=(p1.pk,))
link2 = reverse("admin:admin_views_podcast_change", args=(p2.pk,))
response = self.client.get(reverse("admin:admin_views_podcast_changelist"), {})
self.assertContentBefore(response, link1, link2)
def test_multiple_sort_same_field(self):
# The changelist displays the correct columns if two columns correspond
# to the same ordering field.
dt = datetime.datetime.now()
p1 = Podcast.objects.create(name="A", release_date=dt)
p2 = Podcast.objects.create(name="B", release_date=dt - datetime.timedelta(10))
link1 = reverse("admin:admin_views_podcast_change", args=(quote(p1.pk),))
link2 = reverse("admin:admin_views_podcast_change", args=(quote(p2.pk),))
response = self.client.get(reverse("admin:admin_views_podcast_changelist"), {})
self.assertContentBefore(response, link1, link2)
p1 = ComplexSortedPerson.objects.create(name="Bob", age=10)
p2 = ComplexSortedPerson.objects.create(name="Amy", age=20)
link1 = reverse("admin:admin_views_complexsortedperson_change", args=(p1.pk,))
link2 = reverse("admin:admin_views_complexsortedperson_change", args=(p2.pk,))
response = self.client.get(
reverse("admin:admin_views_complexsortedperson_changelist"), {}
)
# Should have 5 columns (including action checkbox col)
self.assertContains(response, '<th scope="col"', count=5)
self.assertContains(response, "Name")
self.assertContains(response, "Colored name")
# Check order
self.assertContentBefore(response, "Name", "Colored name")
# Check sorting - should be by name
self.assertContentBefore(response, link2, link1)
def test_sort_indicators_admin_order(self):
"""
The admin shows default sort indicators for all kinds of 'ordering'
fields: field names, method on the model admin and model itself, and
other callables. See #17252.
"""
models = [
(AdminOrderedField, "adminorderedfield"),
(AdminOrderedModelMethod, "adminorderedmodelmethod"),
(AdminOrderedAdminMethod, "adminorderedadminmethod"),
(AdminOrderedCallable, "adminorderedcallable"),
]
for model, url in models:
model.objects.create(stuff="The Last Item", order=3)
model.objects.create(stuff="The First Item", order=1)
model.objects.create(stuff="The Middle Item", order=2)
response = self.client.get(
reverse("admin:admin_views_%s_changelist" % url), {}
)
# Should have 3 columns including action checkbox col.
self.assertContains(response, '<th scope="col"', count=3, msg_prefix=url)
# Check if the correct column was selected. 2 is the index of the
# 'order' column in the model admin's 'list_display' with 0 being
# the implicit 'action_checkbox' and 1 being the column 'stuff'.
self.assertEqual(
response.context["cl"].get_ordering_field_columns(), {2: "asc"}
)
# Check order of records.
self.assertContentBefore(response, "The First Item", "The Middle Item")
self.assertContentBefore(response, "The Middle Item", "The Last Item")
def test_has_related_field_in_list_display_fk(self):
"""Joins shouldn't be performed for <FK>_id fields in list display."""
state = State.objects.create(name="Karnataka")
City.objects.create(state=state, name="Bangalore")
response = self.client.get(reverse("admin:admin_views_city_changelist"), {})
response.context["cl"].list_display = ["id", "name", "state"]
self.assertIs(response.context["cl"].has_related_field_in_list_display(), True)
response.context["cl"].list_display = ["id", "name", "state_id"]
self.assertIs(response.context["cl"].has_related_field_in_list_display(), False)
def test_has_related_field_in_list_display_o2o(self):
"""Joins shouldn't be performed for <O2O>_id fields in list display."""
media = Media.objects.create(name="Foo")
Vodcast.objects.create(media=media)
response = self.client.get(reverse("admin:admin_views_vodcast_changelist"), {})
response.context["cl"].list_display = ["media"]
self.assertIs(response.context["cl"].has_related_field_in_list_display(), True)
response.context["cl"].list_display = ["media_id"]
self.assertIs(response.context["cl"].has_related_field_in_list_display(), False)
def test_limited_filter(self):
"""
Admin changelist filters do not contain objects excluded via
limit_choices_to.
"""
response = self.client.get(reverse("admin:admin_views_thing_changelist"))
self.assertContains(
response,
'<div id="changelist-filter">',
msg_prefix="Expected filter not found in changelist view",
)
self.assertNotContains(
response,
'<a href="?color__id__exact=3">Blue</a>',
msg_prefix="Changelist filter not correctly limited by limit_choices_to",
)
def test_change_list_facet_toggle(self):
# Toggle is visible when show_facet is the default of
# admin.ShowFacets.ALLOW.
admin_url = reverse("admin:admin_views_album_changelist")
response = self.client.get(admin_url)
self.assertContains(
response,
'<a href="?_facets=True" class="viewlink">Show counts</a>',
msg_prefix="Expected facet filter toggle not found in changelist view",
)
response = self.client.get(f"{admin_url}?_facets=True")
self.assertContains(
response,
'<a href="?" class="hidelink">Hide counts</a>',
msg_prefix="Expected facet filter toggle not found in changelist view",
)
# Toggle is not visible when show_facet is admin.ShowFacets.ALWAYS.
response = self.client.get(reverse("admin:admin_views_workhour_changelist"))
self.assertNotContains(
response,
"Show counts",
msg_prefix="Expected not to find facet filter toggle in changelist view",
)
self.assertNotContains(
response,
"Hide counts",
msg_prefix="Expected not to find facet filter toggle in changelist view",
)
# Toggle is not visible when show_facet is admin.ShowFacets.NEVER.
response = self.client.get(reverse("admin:admin_views_fooddelivery_changelist"))
self.assertNotContains(
response,
"Show counts",
msg_prefix="Expected not to find facet filter toggle in changelist view",
)
self.assertNotContains(
response,
"Hide counts",
msg_prefix="Expected not to find facet filter toggle in changelist view",
)
def test_relation_spanning_filters(self):
changelist_url = reverse("admin:admin_views_chapterxtra1_changelist")
response = self.client.get(changelist_url)
self.assertContains(response, '<div id="changelist-filter">')
filters = {
"chap__id__exact": {
"values": [c.id for c in Chapter.objects.all()],
"test": lambda obj, value: obj.chap.id == value,
},
"chap__title": {
"values": [c.title for c in Chapter.objects.all()],
"test": lambda obj, value: obj.chap.title == value,
},
"chap__book__id__exact": {
"values": [b.id for b in Book.objects.all()],
"test": lambda obj, value: obj.chap.book.id == value,
},
"chap__book__name": {
"values": [b.name for b in Book.objects.all()],
"test": lambda obj, value: obj.chap.book.name == value,
},
"chap__book__promo__id__exact": {
"values": [p.id for p in Promo.objects.all()],
"test": lambda obj, value: obj.chap.book.promo_set.filter(
id=value
).exists(),
},
"chap__book__promo__name": {
"values": [p.name for p in Promo.objects.all()],
"test": lambda obj, value: obj.chap.book.promo_set.filter(
name=value
).exists(),
},
# A forward relation (book) after a reverse relation (promo).
"guest_author__promo__book__id__exact": {
"values": [p.id for p in Book.objects.all()],
"test": lambda obj, value: obj.guest_author.promo_set.filter(
book=value
).exists(),
},
}
for filter_path, params in filters.items():
for value in params["values"]:
query_string = urlencode({filter_path: value})
# ensure filter link exists
self.assertContains(response, '<a href="?%s"' % query_string)
# ensure link works
filtered_response = self.client.get(
"%s?%s" % (changelist_url, query_string)
)
self.assertEqual(filtered_response.status_code, 200)
# ensure changelist contains only valid objects
for obj in filtered_response.context["cl"].queryset.all():
self.assertTrue(params["test"](obj, value))
def test_incorrect_lookup_parameters(self):
"""Ensure incorrect lookup parameters are handled gracefully."""
changelist_url = reverse("admin:admin_views_thing_changelist")
response = self.client.get(changelist_url, {"notarealfield": "5"})
self.assertRedirects(response, "%s?e=1" % changelist_url)
# Spanning relationships through a nonexistent related object (Refs #16716)
response = self.client.get(changelist_url, {"notarealfield__whatever": "5"})
self.assertRedirects(response, "%s?e=1" % changelist_url)
response = self.client.get(
changelist_url, {"color__id__exact": "StringNotInteger!"}
)
self.assertRedirects(response, "%s?e=1" % changelist_url)
# Regression test for #18530
response = self.client.get(changelist_url, {"pub_date__gte": "foo"})
self.assertRedirects(response, "%s?e=1" % changelist_url)
def test_isnull_lookups(self):
"""Ensure is_null is handled correctly."""
Article.objects.create(
title="I Could Go Anywhere",
content="Versatile",
date=datetime.datetime.now(),
)
changelist_url = reverse("admin:admin_views_article_changelist")
response = self.client.get(changelist_url)
self.assertContains(response, "4 articles")
response = self.client.get(changelist_url, {"section__isnull": "false"})
self.assertContains(response, "3 articles")
response = self.client.get(changelist_url, {"section__isnull": "0"})
self.assertContains(response, "3 articles")
response = self.client.get(changelist_url, {"section__isnull": "true"})
self.assertContains(response, "1 article")
response = self.client.get(changelist_url, {"section__isnull": "1"})
self.assertContains(response, "1 article")
def test_logout_and_password_change_URLs(self):
response = self.client.get(reverse("admin:admin_views_article_changelist"))
self.assertContains(
response,
'<form id="logout-form" method="post" action="%s">'
% reverse("admin:logout"),
)
self.assertContains(
response, '<a href="%s">' % reverse("admin:password_change")
)
def test_named_group_field_choices_change_list(self):
"""
Ensures the admin changelist shows correct values in the relevant column
for rows corresponding to instances of a model in which a named group
has been used in the choices option of a field.
"""
link1 = reverse("admin:admin_views_fabric_change", args=(self.fab1.pk,))
link2 = reverse("admin:admin_views_fabric_change", args=(self.fab2.pk,))
response = self.client.get(reverse("admin:admin_views_fabric_changelist"))
fail_msg = (
"Changelist table isn't showing the right human-readable values "
"set by a model field 'choices' option named group."
)
self.assertContains(
response,
'<a href="%s">Horizontal</a>' % link1,
msg_prefix=fail_msg,
html=True,
)
self.assertContains(
response,
'<a href="%s">Vertical</a>' % link2,
msg_prefix=fail_msg,
html=True,
)
def test_named_group_field_choices_filter(self):
"""
Ensures the filter UI shows correctly when at least one named group has
been used in the choices option of a model field.
"""
response = self.client.get(reverse("admin:admin_views_fabric_changelist"))
fail_msg = (
"Changelist filter isn't showing options contained inside a model "
"field 'choices' option named group."
)
self.assertContains(response, '<div id="changelist-filter">')
self.assertContains(
response,
'<a href="?surface__exact=x">Horizontal</a>',
msg_prefix=fail_msg,
html=True,
)
self.assertContains(
response,
'<a href="?surface__exact=y">Vertical</a>',
msg_prefix=fail_msg,
html=True,
)
def test_change_list_null_boolean_display(self):
Post.objects.create(public=None)
response = self.client.get(reverse("admin:admin_views_post_changelist"))
self.assertContains(response, "icon-unknown.svg")
def test_display_decorator_with_boolean_and_empty_value(self):
msg = (
"The boolean and empty_value arguments to the @display decorator "
"are mutually exclusive."
)
with self.assertRaisesMessage(ValueError, msg):
class BookAdmin(admin.ModelAdmin):
@admin.display(boolean=True, empty_value="(Missing)")
def is_published(self, obj):
return obj.publish_date is not None
def test_i18n_language_non_english_default(self):
"""
Check if the JavaScript i18n view returns an empty language catalog
if the default language is non-English but the selected language
is English. See #13388 and #3594 for more details.
"""
with self.settings(LANGUAGE_CODE="fr"), translation.override("en-us"):
response = self.client.get(reverse("admin:jsi18n"))
self.assertNotContains(response, "Choisir une heure")
def test_i18n_language_non_english_fallback(self):
"""
Makes sure that the fallback language is still working properly
in cases where the selected language cannot be found.
"""
with self.settings(LANGUAGE_CODE="fr"), translation.override("none"):
response = self.client.get(reverse("admin:jsi18n"))
self.assertContains(response, "Choisir une heure")
def test_jsi18n_with_context(self):
response = self.client.get(reverse("admin-extra-context:jsi18n"))
self.assertEqual(response.status_code, 200)
def test_jsi18n_format_fallback(self):
"""
The JavaScript i18n view doesn't return localized date/time formats
when the selected language cannot be found.
"""
with self.settings(LANGUAGE_CODE="ru"), translation.override("none"):
response = self.client.get(reverse("admin:jsi18n"))
self.assertNotContains(response, "%d.%m.%Y %H:%M:%S")
self.assertContains(response, "%Y-%m-%d %H:%M:%S")
def test_disallowed_filtering(self):
with self.assertLogs("django.security.DisallowedModelAdminLookup", "ERROR"):
response = self.client.get(
"%s?owner__email__startswith=fuzzy"
% reverse("admin:admin_views_album_changelist")
)
self.assertEqual(response.status_code, 400)
# Filters are allowed if explicitly included in list_filter
response = self.client.get(
"%s?color__value__startswith=red"
% reverse("admin:admin_views_thing_changelist")
)
self.assertEqual(response.status_code, 200)
response = self.client.get(
"%s?color__value=red" % reverse("admin:admin_views_thing_changelist")
)
self.assertEqual(response.status_code, 200)
# Filters should be allowed if they involve a local field without the
# need to allow them in list_filter or date_hierarchy.
response = self.client.get(
"%s?age__gt=30" % reverse("admin:admin_views_person_changelist")
)
self.assertEqual(response.status_code, 200)
e1 = Employee.objects.create(
name="Anonymous", gender=1, age=22, alive=True, code="123"
)
e2 = Employee.objects.create(
name="Visitor", gender=2, age=19, alive=True, code="124"
)
WorkHour.objects.create(datum=datetime.datetime.now(), employee=e1)
WorkHour.objects.create(datum=datetime.datetime.now(), employee=e2)
response = self.client.get(reverse("admin:admin_views_workhour_changelist"))
self.assertContains(response, "employee__person_ptr__exact")
response = self.client.get(
"%s?employee__person_ptr__exact=%d"
% (reverse("admin:admin_views_workhour_changelist"), e1.pk)
)
self.assertEqual(response.status_code, 200)
def test_disallowed_to_field(self):
url = reverse("admin:admin_views_section_changelist")
with self.assertLogs("django.security.DisallowedModelAdminToField", "ERROR"):
response = self.client.get(url, {TO_FIELD_VAR: "missing_field"})
self.assertEqual(response.status_code, 400)
# Specifying a field that is not referred to by any other model registered
# to this admin site should raise an exception.
with self.assertLogs("django.security.DisallowedModelAdminToField", "ERROR"):
response = self.client.get(
reverse("admin:admin_views_section_changelist"), {TO_FIELD_VAR: "name"}
)
self.assertEqual(response.status_code, 400)
# Primary key should always be allowed, even if the referenced model
# isn't registered.
response = self.client.get(
reverse("admin:admin_views_notreferenced_changelist"), {TO_FIELD_VAR: "id"}
)
self.assertEqual(response.status_code, 200)
# Specifying a field referenced by another model through an m2m should be
# allowed.
response = self.client.get(
reverse("admin:admin_views_recipe_changelist"), {TO_FIELD_VAR: "rname"}
)
self.assertEqual(response.status_code, 200)
# Specifying a field referenced through a reverse m2m relationship
# should be allowed.
response = self.client.get(
reverse("admin:admin_views_ingredient_changelist"), {TO_FIELD_VAR: "iname"}
)
self.assertEqual(response.status_code, 200)
# Specifying a field that is not referred to by any other model directly
# registered to this admin site but registered through inheritance
# should be allowed.
response = self.client.get(
reverse("admin:admin_views_referencedbyparent_changelist"),
{TO_FIELD_VAR: "name"},
)
self.assertEqual(response.status_code, 200)
# Specifying a field that is only referred to by an inline of a
# registered model should be allowed.
response = self.client.get(
reverse("admin:admin_views_referencedbyinline_changelist"),
{TO_FIELD_VAR: "name"},
)
self.assertEqual(response.status_code, 200)
# #25622 - Specifying a field of a model only referred to by a generic
# relation should raise DisallowedModelAdminToField.
url = reverse("admin:admin_views_referencedbygenrel_changelist")
with self.assertLogs("django.security.DisallowedModelAdminToField", "ERROR"):
response = self.client.get(url, {TO_FIELD_VAR: "object_id"})
self.assertEqual(response.status_code, 400)
# We also want to prevent the add, change, and delete views from
# leaking a disallowed field value.
with self.assertLogs("django.security.DisallowedModelAdminToField", "ERROR"):
response = self.client.post(
reverse("admin:admin_views_section_add"), {TO_FIELD_VAR: "name"}
)
self.assertEqual(response.status_code, 400)
section = Section.objects.create()
url = reverse("admin:admin_views_section_change", args=(section.pk,))
with self.assertLogs("django.security.DisallowedModelAdminToField", "ERROR"):
response = self.client.post(url, {TO_FIELD_VAR: "name"})
self.assertEqual(response.status_code, 400)
url = reverse("admin:admin_views_section_delete", args=(section.pk,))
with self.assertLogs("django.security.DisallowedModelAdminToField", "ERROR"):
response = self.client.post(url, {TO_FIELD_VAR: "name"})
self.assertEqual(response.status_code, 400)
def test_allowed_filtering_15103(self):
"""
Regression test for ticket 15103 - filtering on fields defined in a
ForeignKey 'limit_choices_to' should be allowed, otherwise raw_id_fields
can break.
"""
# Filters should be allowed if they are defined on a ForeignKey
# pointing to this model.
url = "%s?leader__name=Palin&leader__age=27" % reverse(
"admin:admin_views_inquisition_changelist"
)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_popup_dismiss_related(self):
"""
Regression test for ticket 20664 - ensure the pk is properly quoted.
"""
actor = Actor.objects.create(name="Palin", age=27)
response = self.client.get(
"%s?%s" % (reverse("admin:admin_views_actor_changelist"), IS_POPUP_VAR)
)
self.assertContains(response, 'data-popup-opener="%s"' % actor.pk)
def test_hide_change_password(self):
"""
Tests if the "change password" link in the admin is hidden if the User
does not have a usable password set.
(against 9bea85795705d015cdadc82c68b99196a8554f5c)
"""
user = User.objects.get(username="super")
user.set_unusable_password()
user.save()
self.client.force_login(user)
response = self.client.get(reverse("admin:index"))
self.assertNotContains(
response,
reverse("admin:password_change"),
msg_prefix=(
'The "change password" link should not be displayed if a user does not '
"have a usable password."
),
)
def test_change_view_with_show_delete_extra_context(self):
"""
The 'show_delete' context variable in the admin's change view controls
the display of the delete button.
"""
instance = UndeletableObject.objects.create(name="foo")
response = self.client.get(
reverse("admin:admin_views_undeletableobject_change", args=(instance.pk,))
)
self.assertNotContains(response, "deletelink")
def test_change_view_logs_m2m_field_changes(self):
"""Changes to ManyToManyFields are included in the object's history."""
pizza = ReadablePizza.objects.create(name="Cheese")
cheese = Topping.objects.create(name="cheese")
post_data = {"name": pizza.name, "toppings": [cheese.pk]}
response = self.client.post(
reverse("admin:admin_views_readablepizza_change", args=(pizza.pk,)),
post_data,
)
self.assertRedirects(
response, reverse("admin:admin_views_readablepizza_changelist")
)
pizza_ctype = ContentType.objects.get_for_model(
ReadablePizza, for_concrete_model=False
)
log = LogEntry.objects.filter(
content_type=pizza_ctype, object_id=pizza.pk
).first()
self.assertEqual(log.get_change_message(), "Changed Toppings.")
def test_allows_attributeerror_to_bubble_up(self):
"""
AttributeErrors are allowed to bubble when raised inside a change list
view. Requires a model to be created so there's something to display.
Refs: #16655, #18593, and #18747
"""
Simple.objects.create()
with self.assertRaises(AttributeError):
self.client.get(reverse("admin:admin_views_simple_changelist"))
def test_changelist_with_no_change_url(self):
"""
ModelAdmin.changelist_view shouldn't result in a NoReverseMatch if url
for change_view is removed from get_urls (#20934).
"""
o = UnchangeableObject.objects.create()
response = self.client.get(
reverse("admin:admin_views_unchangeableobject_changelist")
)
# Check the format of the shown object -- shouldn't contain a change link
self.assertContains(
response, '<th class="field-__str__">%s</th>' % o, html=True
)
def test_invalid_appindex_url(self):
"""
#21056 -- URL reversing shouldn't work for nonexistent apps.
"""
good_url = "/test_admin/admin/admin_views/"
confirm_good_url = reverse(
"admin:app_list", kwargs={"app_label": "admin_views"}
)
self.assertEqual(good_url, confirm_good_url)
with self.assertRaises(NoReverseMatch):
reverse("admin:app_list", kwargs={"app_label": "this_should_fail"})
with self.assertRaises(NoReverseMatch):
reverse("admin:app_list", args=("admin_views2",))
def test_resolve_admin_views(self):
index_match = resolve("/test_admin/admin4/")
list_match = resolve("/test_admin/admin4/auth/user/")
self.assertIs(index_match.func.admin_site, customadmin.simple_site)
self.assertIsInstance(
list_match.func.model_admin, customadmin.CustomPwdTemplateUserAdmin
)
def test_adminsite_display_site_url(self):
"""
#13749 - Admin should display link to front-end site 'View site'
"""
url = reverse("admin:index")
response = self.client.get(url)
self.assertEqual(response.context["site_url"], "/my-site-url/")
self.assertContains(response, '<a href="/my-site-url/">View site</a>')
def test_date_hierarchy_empty_queryset(self):
self.assertIs(Question.objects.exists(), False)
response = self.client.get(reverse("admin:admin_views_answer2_changelist"))
self.assertEqual(response.status_code, 200)
@override_settings(TIME_ZONE="America/Sao_Paulo", USE_TZ=True)
def test_date_hierarchy_timezone_dst(self):
# This datetime doesn't exist in this timezone due to DST.
for date in make_aware_datetimes(
datetime.datetime(2016, 10, 16, 15), "America/Sao_Paulo"
):
with self.subTest(repr(date.tzinfo)):
q = Question.objects.create(question="Why?", expires=date)
Answer2.objects.create(question=q, answer="Because.")
response = self.client.get(
reverse("admin:admin_views_answer2_changelist")
)
self.assertContains(response, "question__expires__day=16")
self.assertContains(response, "question__expires__month=10")
self.assertContains(response, "question__expires__year=2016")
@override_settings(TIME_ZONE="America/Los_Angeles", USE_TZ=True)
def test_date_hierarchy_local_date_differ_from_utc(self):
# This datetime is 2017-01-01 in UTC.
for date in make_aware_datetimes(
datetime.datetime(2016, 12, 31, 16), "America/Los_Angeles"
):
with self.subTest(repr(date.tzinfo)):
q = Question.objects.create(question="Why?", expires=date)
Answer2.objects.create(question=q, answer="Because.")
response = self.client.get(
reverse("admin:admin_views_answer2_changelist")
)
self.assertContains(response, "question__expires__day=31")
self.assertContains(response, "question__expires__month=12")
self.assertContains(response, "question__expires__year=2016")
def test_sortable_by_columns_subset(self):
expected_sortable_fields = ("date", "callable_year")
expected_not_sortable_fields = (
"content",
"model_year",
"modeladmin_year",
"model_year_reversed",
"section",
)
response = self.client.get(reverse("admin6:admin_views_article_changelist"))
for field_name in expected_sortable_fields:
self.assertContains(
response, '<th scope="col" class="sortable column-%s">' % field_name
)
for field_name in expected_not_sortable_fields:
self.assertContains(
response, '<th scope="col" class="column-%s">' % field_name
)
def test_get_sortable_by_columns_subset(self):
response = self.client.get(reverse("admin6:admin_views_actor_changelist"))
self.assertContains(response, '<th scope="col" class="sortable column-age">')
self.assertContains(response, '<th scope="col" class="column-name">')
def test_sortable_by_no_column(self):
expected_not_sortable_fields = ("title", "book")
response = self.client.get(reverse("admin6:admin_views_chapter_changelist"))
for field_name in expected_not_sortable_fields:
self.assertContains(
response, '<th scope="col" class="column-%s">' % field_name
)
self.assertNotContains(response, '<th scope="col" class="sortable column')
def test_get_sortable_by_no_column(self):
response = self.client.get(reverse("admin6:admin_views_color_changelist"))
self.assertContains(response, '<th scope="col" class="column-value">')
self.assertNotContains(response, '<th scope="col" class="sortable column')
def test_app_index_context(self):
response = self.client.get(reverse("admin:app_list", args=("admin_views",)))
self.assertContains(
response,
"<title>Admin_Views administration | Django site admin</title>",
)
self.assertEqual(response.context["title"], "Admin_Views administration")
self.assertEqual(response.context["app_label"], "admin_views")
# Models are sorted alphabetically by default.
models = [model["name"] for model in response.context["app_list"][0]["models"]]
self.assertSequenceEqual(models, sorted(models))
def test_app_index_context_reordered(self):
self.client.force_login(self.superuser)
response = self.client.get(reverse("admin2:app_list", args=("admin_views",)))
self.assertContains(
response,
"<title>Admin_Views administration | Django site admin</title>",
)
# Models are in reverse order.
models = [model["name"] for model in response.context["app_list"][0]["models"]]
self.assertSequenceEqual(models, sorted(models, reverse=True))
def test_change_view_subtitle_per_object(self):
response = self.client.get(
reverse("admin:admin_views_article_change", args=(self.a1.pk,)),
)
self.assertContains(
response,
"<title>Article 1 | Change article | Django site admin</title>",
)
self.assertContains(response, "<h1>Change article</h1>")
self.assertContains(response, "<h2>Article 1</h2>")
response = self.client.get(
reverse("admin:admin_views_article_change", args=(self.a2.pk,)),
)
self.assertContains(
response,
"<title>Article 2 | Change article | Django site admin</title>",
)
self.assertContains(response, "<h1>Change article</h1>")
self.assertContains(response, "<h2>Article 2</h2>")
def test_view_subtitle_per_object(self):
viewuser = User.objects.create_user(
username="viewuser",
password="secret",
is_staff=True,
)
viewuser.user_permissions.add(
get_perm(Article, get_permission_codename("view", Article._meta)),
)
self.client.force_login(viewuser)
response = self.client.get(
reverse("admin:admin_views_article_change", args=(self.a1.pk,)),
)
self.assertContains(
response,
"<title>Article 1 | View article | Django site admin</title>",
)
self.assertContains(response, "<h1>View article</h1>")
self.assertContains(response, "<h2>Article 1</h2>")
response = self.client.get(
reverse("admin:admin_views_article_change", args=(self.a2.pk,)),
)
self.assertContains(
response,
"<title>Article 2 | View article | Django site admin</title>",
)
self.assertContains(response, "<h1>View article</h1>")
self.assertContains(response, "<h2>Article 2</h2>")
def test_formset_kwargs_can_be_overridden(self):
response = self.client.get(reverse("admin:admin_views_city_add"))
self.assertContains(response, "overridden_name")
def test_render_views_no_subtitle(self):
tests = [
reverse("admin:index"),
reverse("admin:password_change"),
reverse("admin:app_list", args=("admin_views",)),
reverse("admin:admin_views_article_delete", args=(self.a1.pk,)),
reverse("admin:admin_views_article_history", args=(self.a1.pk,)),
]
for url in tests:
with self.subTest(url=url):
with self.assertNoLogs("django.template", "DEBUG"):
self.client.get(url)
# Login must be after logout.
with self.assertNoLogs("django.template", "DEBUG"):
self.client.post(reverse("admin:logout"))
self.client.get(reverse("admin:login"))
def test_render_delete_selected_confirmation_no_subtitle(self):
post_data = {
"action": "delete_selected",
"selected_across": "0",
"index": "0",
"_selected_action": self.a1.pk,
}
with self.assertNoLogs("django.template", "DEBUG"):
self.client.post(reverse("admin:admin_views_article_changelist"), post_data)
@override_settings(
AUTH_PASSWORD_VALIDATORS=[
{
"NAME": (
"django.contrib.auth.password_validation."
"UserAttributeSimilarityValidator"
)
},
{
"NAME": (
"django.contrib.auth.password_validation."
"NumericPasswordValidator"
)
},
]
)
def test_password_change_helptext(self):
response = self.client.get(reverse("admin:password_change"))
self.assertContains(
response, '<div class="help" id="id_new_password1_helptext">'
)
def test_enable_zooming_on_mobile(self):
response = self.client.get(reverse("admin:index"))
self.assertContains(
response,
'<meta name="viewport" content="width=device-width, initial-scale=1.0">',
)
@override_settings(
AUTH_PASSWORD_VALIDATORS=[
{
"NAME": (
"django.contrib.auth.password_validation."
"UserAttributeSimilarityValidator"
)
},
{
"NAME": (
"django.contrib.auth.password_validation." "NumericPasswordValidator"
)
},
],
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
# Put this app's and the shared tests' template dirs in DIRS so they
# take precedence over the admin's template dir.
"DIRS": [
os.path.join(os.path.dirname(__file__), "templates"),
os.path.join(os.path.dirname(os.path.dirname(__file__)), "templates"),
],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
],
},
}
],
)
class AdminCustomTemplateTests(AdminViewBasicTestCase):
def test_custom_model_admin_templates(self):
# Test custom change list template with custom extra context
response = self.client.get(
reverse("admin:admin_views_customarticle_changelist")
)
self.assertContains(response, "var hello = 'Hello!';")
self.assertTemplateUsed(response, "custom_admin/change_list.html")
# Test custom add form template
response = self.client.get(reverse("admin:admin_views_customarticle_add"))
self.assertTemplateUsed(response, "custom_admin/add_form.html")
# Add an article so we can test delete, change, and history views
post = self.client.post(
reverse("admin:admin_views_customarticle_add"),
{
"content": "<p>great article</p>",
"date_0": "2008-03-18",
"date_1": "10:54:39",
},
)
self.assertRedirects(
post, reverse("admin:admin_views_customarticle_changelist")
)
self.assertEqual(CustomArticle.objects.count(), 1)
article_pk = CustomArticle.objects.all()[0].pk
# Test custom change, delete, and object history templates
response = self.client.get(
reverse("admin:admin_views_customarticle_change", args=(article_pk,))
)
self.assertTemplateUsed(response, "custom_admin/change_form.html")
response = self.client.get(
reverse("admin:admin_views_customarticle_delete", args=(article_pk,))
)
self.assertTemplateUsed(response, "custom_admin/delete_confirmation.html")
response = self.client.post(
reverse("admin:admin_views_customarticle_changelist"),
data={
"index": 0,
"action": ["delete_selected"],
"_selected_action": ["1"],
},
)
self.assertTemplateUsed(
response, "custom_admin/delete_selected_confirmation.html"
)
response = self.client.get(
reverse("admin:admin_views_customarticle_history", args=(article_pk,))
)
self.assertTemplateUsed(response, "custom_admin/object_history.html")
# A custom popup response template may be specified by
# ModelAdmin.popup_response_template.
response = self.client.post(
reverse("admin:admin_views_customarticle_add") + "?%s=1" % IS_POPUP_VAR,
{
"content": "<p>great article</p>",
"date_0": "2008-03-18",
"date_1": "10:54:39",
IS_POPUP_VAR: "1",
},
)
self.assertEqual(response.template_name, "custom_admin/popup_response.html")
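    # The hook exercised above is a documented ModelAdmin attribute; a
    # minimal sketch (class name illustrative):
    #
    #     class CustomArticleAdmin(admin.ModelAdmin):
    #         popup_response_template = "custom_admin/popup_response.html"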
def test_extended_bodyclass_template_change_form(self):
"""
The admin/change_form.html template uses block.super in the
bodyclass block.
"""
response = self.client.get(reverse("admin:admin_views_section_add"))
self.assertContains(response, "bodyclass_consistency_check ")
def test_change_password_template(self):
user = User.objects.get(username="super")
response = self.client.get(
reverse("admin:auth_user_password_change", args=(user.id,))
)
# The auth/user/change_password.html template uses super in the
# bodyclass block.
self.assertContains(response, "bodyclass_consistency_check ")
# When a site has multiple passwords in the browser's password manager,
# a browser pop-up asks which user the new password is for. To prevent
# this, the username is added to the change password form.
self.assertContains(
response, '<input type="text" name="username" value="super" class="hidden">'
)
# Help text for passwords has an id.
self.assertContains(
response,
'<div class="help" id="id_password1_helptext"><ul><li>'
"Your password can’t be too similar to your other personal information."
"</li><li>Your password can’t be entirely numeric.</li></ul></div>",
)
self.assertContains(
response,
'<div class="help" id="id_password2_helptext">'
"Enter the same password as before, for verification.</div>",
)
def test_extended_bodyclass_template_index(self):
"""
The admin/index.html template uses block.super in the bodyclass block.
"""
response = self.client.get(reverse("admin:index"))
self.assertContains(response, "bodyclass_consistency_check ")
def test_extended_bodyclass_change_list(self):
"""
The admin/change_list.html template uses block.super
in the bodyclass block.
"""
response = self.client.get(reverse("admin:admin_views_article_changelist"))
self.assertContains(response, "bodyclass_consistency_check ")
def test_extended_bodyclass_template_login(self):
"""
The admin/login.html template uses block.super in the
bodyclass block.
"""
self.client.logout()
response = self.client.get(reverse("admin:login"))
self.assertContains(response, "bodyclass_consistency_check ")
def test_extended_bodyclass_template_delete_confirmation(self):
"""
The admin/delete_confirmation.html template uses
block.super in the bodyclass block.
"""
group = Group.objects.create(name="foogroup")
response = self.client.get(reverse("admin:auth_group_delete", args=(group.id,)))
self.assertContains(response, "bodyclass_consistency_check ")
def test_extended_bodyclass_template_delete_selected_confirmation(self):
"""
The admin/delete_selected_confirmation.html template uses
block.super in bodyclass block.
"""
group = Group.objects.create(name="foogroup")
post_data = {
"action": "delete_selected",
"selected_across": "0",
"index": "0",
"_selected_action": group.id,
}
response = self.client.post(reverse("admin:auth_group_changelist"), post_data)
self.assertEqual(response.context["site_header"], "Django administration")
self.assertContains(response, "bodyclass_consistency_check ")
def test_filter_with_custom_template(self):
"""
A custom template can be used to render an admin filter.
"""
response = self.client.get(reverse("admin:admin_views_color2_changelist"))
self.assertTemplateUsed(response, "custom_filter_template.html")
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminViewFormUrlTest(TestCase):
current_app = "admin3"
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_change_form_URL_has_correct_value(self):
"""
change_view has form_url in response.context
"""
response = self.client.get(
reverse(
"admin:admin_views_section_change",
args=(self.s1.pk,),
current_app=self.current_app,
)
)
self.assertIn(
"form_url", response.context, msg="form_url not present in response.context"
)
self.assertEqual(response.context["form_url"], "pony")
def test_initial_data_can_be_overridden(self):
"""
The behavior for setting initial form data can be overridden in the
ModelAdmin class. Usually, the initial value is set via the GET params.
"""
response = self.client.get(
reverse("admin:admin_views_restaurant_add", current_app=self.current_app),
{"name": "test_value"},
)
# This would be the usual behavior.
self.assertNotContains(response, 'value="test_value"')
# This is the overridden behavior.
self.assertContains(response, 'value="overridden_value"')
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminJavaScriptTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_js_minified_only_if_debug_is_false(self):
"""
The minified versions of the JS files are only used when DEBUG is False.
"""
with override_settings(DEBUG=False):
response = self.client.get(reverse("admin:admin_views_section_add"))
self.assertNotContains(response, "vendor/jquery/jquery.js")
self.assertContains(response, "vendor/jquery/jquery.min.js")
self.assertContains(response, "prepopulate.js")
self.assertContains(response, "actions.js")
self.assertContains(response, "collapse.js")
self.assertContains(response, "inlines.js")
with override_settings(DEBUG=True):
response = self.client.get(reverse("admin:admin_views_section_add"))
self.assertContains(response, "vendor/jquery/jquery.js")
self.assertNotContains(response, "vendor/jquery/jquery.min.js")
self.assertContains(response, "prepopulate.js")
self.assertContains(response, "actions.js")
self.assertContains(response, "collapse.js")
self.assertContains(response, "inlines.js")
@override_settings(ROOT_URLCONF="admin_views.urls")
class SaveAsTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.per1 = Person.objects.create(name="John Mauchly", gender=1, alive=True)
def setUp(self):
self.client.force_login(self.superuser)
def test_save_as_duplication(self):
"""'save as' creates a new person"""
post_data = {"_saveasnew": "", "name": "John M", "gender": 1, "age": 42}
response = self.client.post(
reverse("admin:admin_views_person_change", args=(self.per1.pk,)), post_data
)
self.assertEqual(len(Person.objects.filter(name="John M")), 1)
self.assertEqual(len(Person.objects.filter(id=self.per1.pk)), 1)
new_person = Person.objects.latest("id")
self.assertRedirects(
response, reverse("admin:admin_views_person_change", args=(new_person.pk,))
)
def test_save_as_continue_false(self):
"""
Saving a new object using "Save as new" redirects to the changelist
instead of the change view when ModelAdmin.save_as_continue=False.
"""
post_data = {"_saveasnew": "", "name": "John M", "gender": 1, "age": 42}
url = reverse(
"admin:admin_views_person_change",
args=(self.per1.pk,),
current_app=site2.name,
)
response = self.client.post(url, post_data)
self.assertEqual(len(Person.objects.filter(name="John M")), 1)
self.assertEqual(len(Person.objects.filter(id=self.per1.pk)), 1)
self.assertRedirects(
response,
reverse("admin:admin_views_person_changelist", current_app=site2.name),
)
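    # The site2 registration assumed above would look roughly like this;
    # both attributes are documented ModelAdmin options (a sketch, not the
    # actual test app code):
    #
    #     class PersonAdmin(admin.ModelAdmin):
    #         save_as = True            # adds the "Save as new" button
    #         save_as_continue = False  # redirect to changelist, not change view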
def test_save_as_new_with_validation_errors(self):
"""
When you click "Save as new" and have a validation error,
you only see the "Save as new" button and not the other save buttons,
and that only the "Save as" button is visible.
"""
response = self.client.post(
reverse("admin:admin_views_person_change", args=(self.per1.pk,)),
{
"_saveasnew": "",
"gender": "invalid",
"_addanother": "fail",
},
)
self.assertContains(response, "Please correct the errors below.")
self.assertFalse(response.context["show_save_and_add_another"])
self.assertFalse(response.context["show_save_and_continue"])
self.assertTrue(response.context["show_save_as_new"])
def test_save_as_new_with_validation_errors_with_inlines(self):
parent = Parent.objects.create(name="Father")
child = Child.objects.create(parent=parent, name="Child")
response = self.client.post(
reverse("admin:admin_views_parent_change", args=(parent.pk,)),
{
"_saveasnew": "Save as new",
"child_set-0-parent": parent.pk,
"child_set-0-id": child.pk,
"child_set-0-name": "Child",
"child_set-INITIAL_FORMS": 1,
"child_set-MAX_NUM_FORMS": 1000,
"child_set-MIN_NUM_FORMS": 0,
"child_set-TOTAL_FORMS": 4,
"name": "_invalid",
},
)
self.assertContains(response, "Please correct the error below.")
self.assertFalse(response.context["show_save_and_add_another"])
self.assertFalse(response.context["show_save_and_continue"])
self.assertTrue(response.context["show_save_as_new"])
def test_save_as_new_with_inlines_with_validation_errors(self):
parent = Parent.objects.create(name="Father")
child = Child.objects.create(parent=parent, name="Child")
response = self.client.post(
reverse("admin:admin_views_parent_change", args=(parent.pk,)),
{
"_saveasnew": "Save as new",
"child_set-0-parent": parent.pk,
"child_set-0-id": child.pk,
"child_set-0-name": "_invalid",
"child_set-INITIAL_FORMS": 1,
"child_set-MAX_NUM_FORMS": 1000,
"child_set-MIN_NUM_FORMS": 0,
"child_set-TOTAL_FORMS": 4,
"name": "Father",
},
)
self.assertContains(response, "Please correct the error below.")
self.assertFalse(response.context["show_save_and_add_another"])
self.assertFalse(response.context["show_save_and_continue"])
self.assertTrue(response.context["show_save_as_new"])
@override_settings(ROOT_URLCONF="admin_views.urls")
class CustomModelAdminTest(AdminViewBasicTestCase):
def test_custom_admin_site_login_form(self):
self.client.logout()
response = self.client.get(reverse("admin2:index"), follow=True)
self.assertIsInstance(response, TemplateResponse)
self.assertEqual(response.status_code, 200)
login = self.client.post(
reverse("admin2:login"),
{
REDIRECT_FIELD_NAME: reverse("admin2:index"),
"username": "customform",
"password": "secret",
},
follow=True,
)
self.assertIsInstance(login, TemplateResponse)
self.assertContains(login, "custom form error")
self.assertContains(login, "path/to/media.css")
def test_custom_admin_site_login_template(self):
self.client.logout()
response = self.client.get(reverse("admin2:index"), follow=True)
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, "custom_admin/login.html")
self.assertContains(response, "Hello from a custom login template")
def test_custom_admin_site_logout_template(self):
response = self.client.post(reverse("admin2:logout"))
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, "custom_admin/logout.html")
self.assertContains(response, "Hello from a custom logout template")
def test_custom_admin_site_index_view_and_template(self):
response = self.client.get(reverse("admin2:index"))
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, "custom_admin/index.html")
self.assertContains(response, "Hello from a custom index template *bar*")
def test_custom_admin_site_app_index_view_and_template(self):
response = self.client.get(reverse("admin2:app_list", args=("admin_views",)))
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, "custom_admin/app_index.html")
self.assertContains(response, "Hello from a custom app_index template")
def test_custom_admin_site_password_change_template(self):
response = self.client.get(reverse("admin2:password_change"))
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, "custom_admin/password_change_form.html")
self.assertContains(
response, "Hello from a custom password change form template"
)
def test_custom_admin_site_password_change_with_extra_context(self):
response = self.client.get(reverse("admin2:password_change"))
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, "custom_admin/password_change_form.html")
self.assertContains(response, "eggs")
def test_custom_admin_site_password_change_done_template(self):
response = self.client.get(reverse("admin2:password_change_done"))
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, "custom_admin/password_change_done.html")
self.assertContains(
response, "Hello from a custom password change done template"
)
def test_custom_admin_site_view(self):
self.client.force_login(self.superuser)
response = self.client.get(reverse("admin2:my_view"))
self.assertEqual(response.content, b"Django is a magical pony!")
def test_pwd_change_custom_template(self):
self.client.force_login(self.superuser)
su = User.objects.get(username="super")
response = self.client.get(
reverse("admin4:auth_user_password_change", args=(su.pk,))
)
self.assertEqual(response.status_code, 200)
def get_perm(Model, codename):
"""Return the permission object, for the Model"""
ct = ContentType.objects.get_for_model(Model, for_concrete_model=False)
return Permission.objects.get(content_type=ct, codename=codename)
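# Typical usage, e.g. to grant a user view permission on Article:
#
#     perm = get_perm(Article, get_permission_codename("view", Article._meta))
#     user.user_permissions.add(perm)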
@override_settings(
ROOT_URLCONF="admin_views.urls",
# Test with the admin's documented list of required context processors.
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
],
},
}
],
)
class AdminViewPermissionsTest(TestCase):
"""Tests for Admin Views Permissions."""
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.viewuser = User.objects.create_user(
username="viewuser", password="secret", is_staff=True
)
cls.adduser = User.objects.create_user(
username="adduser", password="secret", is_staff=True
)
cls.changeuser = User.objects.create_user(
username="changeuser", password="secret", is_staff=True
)
cls.deleteuser = User.objects.create_user(
username="deleteuser", password="secret", is_staff=True
)
cls.joepublicuser = User.objects.create_user(
username="joepublic", password="secret"
)
cls.nostaffuser = User.objects.create_user(
username="nostaff", password="secret"
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
another_section=cls.s1,
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
# Set up permissions for the users who can view, add, change, and delete.
opts = Article._meta
# User who can view Articles
cls.viewuser.user_permissions.add(
get_perm(Article, get_permission_codename("view", opts))
)
# User who can add Articles
cls.adduser.user_permissions.add(
get_perm(Article, get_permission_codename("add", opts))
)
# User who can change Articles
cls.changeuser.user_permissions.add(
get_perm(Article, get_permission_codename("change", opts))
)
cls.nostaffuser.user_permissions.add(
get_perm(Article, get_permission_codename("change", opts))
)
# User who can delete Articles
cls.deleteuser.user_permissions.add(
get_perm(Article, get_permission_codename("delete", opts))
)
cls.deleteuser.user_permissions.add(
get_perm(Section, get_permission_codename("delete", Section._meta))
)
# login POST dicts
cls.index_url = reverse("admin:index")
cls.super_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"username": "super",
"password": "secret",
}
cls.super_email_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"username": "[email protected]",
"password": "secret",
}
cls.super_email_bad_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"username": "[email protected]",
"password": "notsecret",
}
cls.adduser_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"username": "adduser",
"password": "secret",
}
cls.changeuser_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"username": "changeuser",
"password": "secret",
}
cls.deleteuser_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"username": "deleteuser",
"password": "secret",
}
cls.nostaff_login = {
REDIRECT_FIELD_NAME: reverse("has_permission_admin:index"),
"username": "nostaff",
"password": "secret",
}
cls.joepublic_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"username": "joepublic",
"password": "secret",
}
cls.viewuser_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"username": "viewuser",
"password": "secret",
}
cls.no_username_login = {
REDIRECT_FIELD_NAME: cls.index_url,
"password": "secret",
}
def test_login(self):
"""
Make sure only staff members can log in.
Successful posts to the login page will redirect to the original url.
Unsuccessful attempts will continue to render the login page with
a 200 status code.
"""
login_url = "%s?next=%s" % (reverse("admin:login"), reverse("admin:index"))
# Super User
response = self.client.get(self.index_url)
self.assertRedirects(response, login_url)
login = self.client.post(login_url, self.super_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
self.client.post(reverse("admin:logout"))
# Test login when the user enters an email address.
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.super_email_login)
self.assertContains(login, ERROR_MESSAGE)
# Only correct passwords get a username hint.
login = self.client.post(login_url, self.super_email_bad_login)
self.assertContains(login, ERROR_MESSAGE)
new_user = User(username="jondoe", password="secret", email="[email protected]")
new_user.save()
# Ensure the user doesn't get a 500 when multiple users share an email address.
login = self.client.post(login_url, self.super_email_login)
self.assertContains(login, ERROR_MESSAGE)
# View User
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.viewuser_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
self.client.post(reverse("admin:logout"))
# Add User
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.adduser_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
self.client.post(reverse("admin:logout"))
# Change User
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.changeuser_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
self.client.post(reverse("admin:logout"))
# Delete User
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.deleteuser_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
self.client.post(reverse("admin:logout"))
# Regular User should not be able to log in.
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.joepublic_login)
self.assertContains(login, ERROR_MESSAGE)
# Requests without username should not return 500 errors.
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.no_username_login)
self.assertEqual(login.status_code, 200)
self.assertFormError(
login.context["form"], "username", ["This field is required."]
)
def test_login_redirect_for_direct_get(self):
"""
Login redirect should be to the admin index page when going directly to
/admin/login/.
"""
response = self.client.get(reverse("admin:login"))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context[REDIRECT_FIELD_NAME], reverse("admin:index"))
def test_login_has_permission(self):
# Regular User should not be able to log in.
response = self.client.get(reverse("has_permission_admin:index"))
self.assertEqual(response.status_code, 302)
login = self.client.post(
reverse("has_permission_admin:login"), self.joepublic_login
)
self.assertContains(login, "permission denied")
# User with permissions should be able to login.
response = self.client.get(reverse("has_permission_admin:index"))
self.assertEqual(response.status_code, 302)
login = self.client.post(
reverse("has_permission_admin:login"), self.nostaff_login
)
self.assertRedirects(login, reverse("has_permission_admin:index"))
self.assertFalse(login.context)
self.client.post(reverse("has_permission_admin:logout"))
# Staff should be able to login.
response = self.client.get(reverse("has_permission_admin:index"))
self.assertEqual(response.status_code, 302)
login = self.client.post(
reverse("has_permission_admin:login"),
{
REDIRECT_FIELD_NAME: reverse("has_permission_admin:index"),
"username": "deleteuser",
"password": "secret",
},
)
self.assertRedirects(login, reverse("has_permission_admin:index"))
self.assertFalse(login.context)
self.client.post(reverse("has_permission_admin:logout"))
def test_login_successfully_redirects_to_original_URL(self):
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
query_string = "the-answer=42"
redirect_url = "%s?%s" % (self.index_url, query_string)
new_next = {REDIRECT_FIELD_NAME: redirect_url}
post_data = self.super_login.copy()
post_data.pop(REDIRECT_FIELD_NAME)
login = self.client.post(
"%s?%s" % (reverse("admin:login"), urlencode(new_next)), post_data
)
self.assertRedirects(login, redirect_url)
def test_double_login_is_not_allowed(self):
"""Regression test for #19327"""
login_url = "%s?next=%s" % (reverse("admin:login"), reverse("admin:index"))
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
# Establish a valid admin session
login = self.client.post(login_url, self.super_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
# Logging in with non-admin user fails
login = self.client.post(login_url, self.joepublic_login)
self.assertContains(login, ERROR_MESSAGE)
# Establish a valid admin session
login = self.client.post(login_url, self.super_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
# Logging in with admin user while already logged in
login = self.client.post(login_url, self.super_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
self.client.post(reverse("admin:logout"))
def test_login_page_notice_for_non_staff_users(self):
"""
A logged-in non-staff user trying to access the admin index should be
presented with the login page and a hint indicating that the current
user doesn't have access to it.
"""
hint_template = "You are authenticated as {}"
# Anonymous user should not be shown the hint
response = self.client.get(self.index_url, follow=True)
self.assertContains(response, "login-form")
self.assertNotContains(response, hint_template.format(""), status_code=200)
# Non-staff user should be shown the hint
self.client.force_login(self.nostaffuser)
response = self.client.get(self.index_url, follow=True)
self.assertContains(response, "login-form")
self.assertContains(
response, hint_template.format(self.nostaffuser.username), status_code=200
)
def test_add_view(self):
"""Test add view restricts access and actually adds items."""
add_dict = {
"title": "Døm ikke",
"content": "<p>great article</p>",
"date_0": "2008-03-18",
"date_1": "10:54:39",
"section": self.s1.pk,
}
# Change User should not have access to add articles
self.client.force_login(self.changeuser)
# Make sure the view removes the test cookie.
self.assertIs(self.client.session.test_cookie_worked(), False)
response = self.client.get(reverse("admin:admin_views_article_add"))
self.assertEqual(response.status_code, 403)
# Try POST just to make sure
post = self.client.post(reverse("admin:admin_views_article_add"), add_dict)
self.assertEqual(post.status_code, 403)
self.assertEqual(Article.objects.count(), 3)
self.client.post(reverse("admin:logout"))
# View User should not have access to add articles
self.client.force_login(self.viewuser)
response = self.client.get(reverse("admin:admin_views_article_add"))
self.assertEqual(response.status_code, 403)
# Try POST just to make sure
post = self.client.post(reverse("admin:admin_views_article_add"), add_dict)
self.assertEqual(post.status_code, 403)
self.assertEqual(Article.objects.count(), 3)
# Now give the user permission to add but not change.
self.viewuser.user_permissions.add(
get_perm(Article, get_permission_codename("add", Article._meta))
)
response = self.client.get(reverse("admin:admin_views_article_add"))
self.assertEqual(response.context["title"], "Add article")
self.assertContains(response, "<title>Add article | Django site admin</title>")
self.assertContains(
response, '<input type="submit" value="Save and view" name="_continue">'
)
post = self.client.post(
reverse("admin:admin_views_article_add"), add_dict, follow=False
)
self.assertEqual(post.status_code, 302)
self.assertEqual(Article.objects.count(), 4)
article = Article.objects.latest("pk")
response = self.client.get(
reverse("admin:admin_views_article_change", args=(article.pk,))
)
self.assertContains(
response,
'<li class="success">The article “Døm ikke” was added successfully.</li>',
)
article.delete()
self.client.post(reverse("admin:logout"))
# The add user may log in and POST to the add view, then is redirected to the admin root.
self.client.force_login(self.adduser)
addpage = self.client.get(reverse("admin:admin_views_article_add"))
change_list_link = '› <a href="%s">Articles</a>' % reverse(
"admin:admin_views_article_changelist"
)
self.assertNotContains(
addpage,
change_list_link,
msg_prefix=(
"User restricted to add permission is given link to change list view "
"in breadcrumbs."
),
)
post = self.client.post(reverse("admin:admin_views_article_add"), add_dict)
self.assertRedirects(post, self.index_url)
self.assertEqual(Article.objects.count(), 4)
self.assertEqual(len(mail.outbox), 2)
self.assertEqual(mail.outbox[0].subject, "Greetings from a created object")
self.client.post(reverse("admin:logout"))
# The addition was logged correctly
addition_log = LogEntry.objects.all()[0]
new_article = Article.objects.last()
article_ct = ContentType.objects.get_for_model(Article)
self.assertEqual(addition_log.user_id, self.adduser.pk)
self.assertEqual(addition_log.content_type_id, article_ct.pk)
self.assertEqual(addition_log.object_id, str(new_article.pk))
self.assertEqual(addition_log.object_repr, "Døm ikke")
self.assertEqual(addition_log.action_flag, ADDITION)
self.assertEqual(addition_log.get_change_message(), "Added.")
# Super can add too, but is redirected to the change list view
self.client.force_login(self.superuser)
addpage = self.client.get(reverse("admin:admin_views_article_add"))
self.assertContains(
addpage,
change_list_link,
msg_prefix=(
"Unrestricted user is not given link to change list view in "
"breadcrumbs."
),
)
post = self.client.post(reverse("admin:admin_views_article_add"), add_dict)
self.assertRedirects(post, reverse("admin:admin_views_article_changelist"))
self.assertEqual(Article.objects.count(), 5)
self.client.post(reverse("admin:logout"))
# Refs #8509 - if a normal user is already logged in, it is possible
# to switch to the superuser without error.
self.client.force_login(self.joepublicuser)
# Make sure that if the user's session expires, data still persists.
self.client.force_login(self.superuser)
# Make sure the view removes the test cookie.
self.assertIs(self.client.session.test_cookie_worked(), False)
@mock.patch("django.contrib.admin.options.InlineModelAdmin.has_change_permission")
def test_add_view_with_view_only_inlines(self, has_change_permission):
"""User with add permission to a section but view-only for inlines."""
self.viewuser.user_permissions.add(
get_perm(Section, get_permission_codename("add", Section._meta))
)
self.client.force_login(self.viewuser)
# Valid POST creates a new section.
data = {
"name": "New obj",
"article_set-TOTAL_FORMS": 0,
"article_set-INITIAL_FORMS": 0,
}
response = self.client.post(reverse("admin:admin_views_section_add"), data)
self.assertRedirects(response, reverse("admin:index"))
self.assertEqual(Section.objects.latest("id").name, data["name"])
# InlineModelAdmin.has_change_permission()'s obj argument is always
# None during object add.
self.assertEqual(
[obj for (request, obj), _ in has_change_permission.call_args_list],
[None, None],
)
def test_change_view(self):
"""Change view should restrict access and allow users to edit items."""
change_dict = {
"title": "Ikke fordømt",
"content": "<p>edited article</p>",
"date_0": "2008-03-18",
"date_1": "10:54:39",
"section": self.s1.pk,
}
article_change_url = reverse(
"admin:admin_views_article_change", args=(self.a1.pk,)
)
article_changelist_url = reverse("admin:admin_views_article_changelist")
# The add user should not be able to view the list of articles or change any of them.
self.client.force_login(self.adduser)
response = self.client.get(article_changelist_url)
self.assertEqual(response.status_code, 403)
response = self.client.get(article_change_url)
self.assertEqual(response.status_code, 403)
post = self.client.post(article_change_url, change_dict)
self.assertEqual(post.status_code, 403)
self.client.post(reverse("admin:logout"))
# view user can view articles but not make changes.
self.client.force_login(self.viewuser)
response = self.client.get(article_changelist_url)
self.assertContains(
response,
"<title>Select article to view | Django site admin</title>",
)
self.assertContains(response, "<h1>Select article to view</h1>")
self.assertEqual(response.context["title"], "Select article to view")
response = self.client.get(article_change_url)
self.assertContains(response, "<title>View article | Django site admin</title>")
self.assertContains(response, "<h1>View article</h1>")
self.assertContains(response, "<label>Extra form field:</label>")
self.assertContains(
response,
'<a href="/test_admin/admin/admin_views/article/" class="closelink">Close'
"</a>",
)
self.assertEqual(response.context["title"], "View article")
post = self.client.post(article_change_url, change_dict)
self.assertEqual(post.status_code, 403)
self.assertEqual(
Article.objects.get(pk=self.a1.pk).content, "<p>Middle content</p>"
)
self.client.post(reverse("admin:logout"))
# change user can view all items and edit them
self.client.force_login(self.changeuser)
response = self.client.get(article_changelist_url)
self.assertEqual(response.context["title"], "Select article to change")
self.assertContains(
response,
"<title>Select article to change | Django site admin</title>",
)
self.assertContains(response, "<h1>Select article to change</h1>")
response = self.client.get(article_change_url)
self.assertEqual(response.context["title"], "Change article")
self.assertContains(
response,
"<title>Change article | Django site admin</title>",
)
self.assertContains(response, "<h1>Change article</h1>")
post = self.client.post(article_change_url, change_dict)
self.assertRedirects(post, article_changelist_url)
self.assertEqual(
Article.objects.get(pk=self.a1.pk).content, "<p>edited article</p>"
)
# One error in the form should produce a singular error message;
# multiple errors should produce a plural one.
change_dict["title"] = ""
post = self.client.post(article_change_url, change_dict)
self.assertContains(
post,
"Please correct the error below.",
msg_prefix=(
"Singular error message not found in response to post with one error"
),
)
change_dict["content"] = ""
post = self.client.post(article_change_url, change_dict)
self.assertContains(
post,
"Please correct the errors below.",
msg_prefix=(
"Plural error message not found in response to post with multiple "
"errors"
),
)
self.client.post(reverse("admin:logout"))
# Test redirection when using row-level change permissions. Refs #11513.
r1 = RowLevelChangePermissionModel.objects.create(id=1, name="odd id")
r2 = RowLevelChangePermissionModel.objects.create(id=2, name="even id")
r3 = RowLevelChangePermissionModel.objects.create(id=3, name="odd id mult 3")
r6 = RowLevelChangePermissionModel.objects.create(id=6, name="even id mult 3")
change_url_1 = reverse(
"admin:admin_views_rowlevelchangepermissionmodel_change", args=(r1.pk,)
)
change_url_2 = reverse(
"admin:admin_views_rowlevelchangepermissionmodel_change", args=(r2.pk,)
)
change_url_3 = reverse(
"admin:admin_views_rowlevelchangepermissionmodel_change", args=(r3.pk,)
)
change_url_6 = reverse(
"admin:admin_views_rowlevelchangepermissionmodel_change", args=(r6.pk,)
)
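        # The row-level behavior asserted below comes from permission hooks
        # on the registered ModelAdmin; a hedged sketch of the assumed logic
        # (the real implementation lives in the test app's admin module):
        #
        #     class RowLevelChangePermissionModelAdmin(admin.ModelAdmin):
        #         def has_change_permission(self, request, obj=None):
        #             # Only objects with an even id may be changed.
        #             return obj is not None and obj.id % 2 == 0
        #
        #         def has_view_permission(self, request, obj=None):
        #             # Only objects with an id divisible by 3 may be viewed.
        #             return obj is not None and obj.id % 3 == 0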
logins = [
self.superuser,
self.viewuser,
self.adduser,
self.changeuser,
self.deleteuser,
]
for login_user in logins:
with self.subTest(login_user.username):
self.client.force_login(login_user)
response = self.client.get(change_url_1)
self.assertEqual(response.status_code, 403)
response = self.client.post(change_url_1, {"name": "changed"})
self.assertEqual(
RowLevelChangePermissionModel.objects.get(id=1).name, "odd id"
)
self.assertEqual(response.status_code, 403)
response = self.client.get(change_url_2)
self.assertEqual(response.status_code, 200)
response = self.client.post(change_url_2, {"name": "changed"})
self.assertEqual(
RowLevelChangePermissionModel.objects.get(id=2).name, "changed"
)
self.assertRedirects(response, self.index_url)
response = self.client.get(change_url_3)
self.assertEqual(response.status_code, 200)
response = self.client.post(change_url_3, {"name": "changed"})
self.assertEqual(response.status_code, 403)
self.assertEqual(
RowLevelChangePermissionModel.objects.get(id=3).name,
"odd id mult 3",
)
response = self.client.get(change_url_6)
self.assertEqual(response.status_code, 200)
response = self.client.post(change_url_6, {"name": "changed"})
self.assertEqual(
RowLevelChangePermissionModel.objects.get(id=6).name, "changed"
)
self.assertRedirects(response, self.index_url)
self.client.post(reverse("admin:logout"))
for login_user in [self.joepublicuser, self.nostaffuser]:
with self.subTest(login_user.username):
self.client.force_login(login_user)
response = self.client.get(change_url_1, follow=True)
self.assertContains(response, "login-form")
response = self.client.post(
change_url_1, {"name": "changed"}, follow=True
)
self.assertEqual(
RowLevelChangePermissionModel.objects.get(id=1).name, "odd id"
)
self.assertContains(response, "login-form")
response = self.client.get(change_url_2, follow=True)
self.assertContains(response, "login-form")
response = self.client.post(
change_url_2, {"name": "changed again"}, follow=True
)
self.assertEqual(
RowLevelChangePermissionModel.objects.get(id=2).name, "changed"
)
self.assertContains(response, "login-form")
self.client.post(reverse("admin:logout"))
def test_change_view_without_object_change_permission(self):
"""
The object should be read-only if the user has permission to view it
and change objects of that type but not to change the current object.
"""
change_url = reverse("admin9:admin_views_article_change", args=(self.a1.pk,))
self.client.force_login(self.viewuser)
response = self.client.get(change_url)
self.assertEqual(response.context["title"], "View article")
self.assertContains(response, "<title>View article | Django site admin</title>")
self.assertContains(response, "<h1>View article</h1>")
self.assertContains(
response,
'<a href="/test_admin/admin9/admin_views/article/" class="closelink">Close'
"</a>",
)
def test_change_view_save_as_new(self):
"""
'Save as new' should raise PermissionDenied for users without the 'add'
permission.
"""
change_dict_save_as_new = {
"_saveasnew": "Save as new",
"title": "Ikke fordømt",
"content": "<p>edited article</p>",
"date_0": "2008-03-18",
"date_1": "10:54:39",
"section": self.s1.pk,
}
article_change_url = reverse(
"admin:admin_views_article_change", args=(self.a1.pk,)
)
# Add user can perform "Save as new".
article_count = Article.objects.count()
self.client.force_login(self.adduser)
post = self.client.post(article_change_url, change_dict_save_as_new)
self.assertRedirects(post, self.index_url)
self.assertEqual(Article.objects.count(), article_count + 1)
self.client.logout()
# Change user cannot perform "Save as new" (no 'add' permission).
article_count = Article.objects.count()
self.client.force_login(self.changeuser)
post = self.client.post(article_change_url, change_dict_save_as_new)
self.assertEqual(post.status_code, 403)
self.assertEqual(Article.objects.count(), article_count)
# User with both add and change permissions should be redirected to the
# change page for the newly created object.
article_count = Article.objects.count()
self.client.force_login(self.superuser)
post = self.client.post(article_change_url, change_dict_save_as_new)
self.assertEqual(Article.objects.count(), article_count + 1)
new_article = Article.objects.latest("id")
self.assertRedirects(
post, reverse("admin:admin_views_article_change", args=(new_article.pk,))
)
def test_change_view_with_view_only_inlines(self):
"""
User with change permission to a section but view-only for inlines.
"""
self.viewuser.user_permissions.add(
get_perm(Section, get_permission_codename("change", Section._meta))
)
self.client.force_login(self.viewuser)
# GET shows inlines.
response = self.client.get(
reverse("admin:admin_views_section_change", args=(self.s1.pk,))
)
self.assertEqual(len(response.context["inline_admin_formsets"]), 1)
formset = response.context["inline_admin_formsets"][0]
self.assertEqual(len(formset.forms), 3)
# Valid POST changes the name.
data = {
"name": "Can edit name with view-only inlines",
"article_set-TOTAL_FORMS": 3,
"article_set-INITIAL_FORMS": 3,
}
response = self.client.post(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)), data
)
self.assertRedirects(response, reverse("admin:admin_views_section_changelist"))
self.assertEqual(Section.objects.get(pk=self.s1.pk).name, data["name"])
# Invalid POST reshows inlines.
del data["name"]
response = self.client.post(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)), data
)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.context["inline_admin_formsets"]), 1)
formset = response.context["inline_admin_formsets"][0]
self.assertEqual(len(formset.forms), 3)
def test_change_view_with_view_only_last_inline(self):
self.viewuser.user_permissions.add(
get_perm(Section, get_permission_codename("view", Section._meta))
)
self.client.force_login(self.viewuser)
response = self.client.get(
reverse("admin:admin_views_section_change", args=(self.s1.pk,))
)
self.assertEqual(len(response.context["inline_admin_formsets"]), 1)
formset = response.context["inline_admin_formsets"][0]
self.assertEqual(len(formset.forms), 3)
# The last inline is not marked as empty.
self.assertContains(response, 'id="article_set-2"')
def test_change_view_with_view_and_add_inlines(self):
"""User has view and add permissions on the inline model."""
self.viewuser.user_permissions.add(
get_perm(Section, get_permission_codename("change", Section._meta))
)
self.viewuser.user_permissions.add(
get_perm(Article, get_permission_codename("add", Article._meta))
)
self.client.force_login(self.viewuser)
# GET shows inlines.
response = self.client.get(
reverse("admin:admin_views_section_change", args=(self.s1.pk,))
)
self.assertEqual(len(response.context["inline_admin_formsets"]), 1)
formset = response.context["inline_admin_formsets"][0]
self.assertEqual(len(formset.forms), 6)
# Valid POST creates a new article.
data = {
"name": "Can edit name with view-only inlines",
"article_set-TOTAL_FORMS": 6,
"article_set-INITIAL_FORMS": 3,
"article_set-3-id": [""],
"article_set-3-title": ["A title"],
"article_set-3-content": ["Added content"],
"article_set-3-date_0": ["2008-3-18"],
"article_set-3-date_1": ["11:54:58"],
"article_set-3-section": [str(self.s1.pk)],
}
response = self.client.post(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)), data
)
self.assertRedirects(response, reverse("admin:admin_views_section_changelist"))
self.assertEqual(Section.objects.get(pk=self.s1.pk).name, data["name"])
self.assertEqual(Article.objects.count(), 4)
# Invalid POST reshows inlines.
del data["name"]
response = self.client.post(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)), data
)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.context["inline_admin_formsets"]), 1)
formset = response.context["inline_admin_formsets"][0]
self.assertEqual(len(formset.forms), 6)
def test_change_view_with_view_and_delete_inlines(self):
"""User has view and delete permissions on the inline model."""
self.viewuser.user_permissions.add(
get_perm(Section, get_permission_codename("change", Section._meta))
)
self.client.force_login(self.viewuser)
data = {
"name": "Name is required.",
"article_set-TOTAL_FORMS": 6,
"article_set-INITIAL_FORMS": 3,
"article_set-0-id": [str(self.a1.pk)],
"article_set-0-DELETE": ["on"],
}
# Inline POST details are ignored without delete permission.
response = self.client.post(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)), data
)
self.assertRedirects(response, reverse("admin:admin_views_section_changelist"))
self.assertEqual(Article.objects.count(), 3)
# Deletion successful when delete permission is added.
self.viewuser.user_permissions.add(
get_perm(Article, get_permission_codename("delete", Article._meta))
)
data = {
"name": "Name is required.",
"article_set-TOTAL_FORMS": 6,
"article_set-INITIAL_FORMS": 3,
"article_set-0-id": [str(self.a1.pk)],
"article_set-0-DELETE": ["on"],
}
response = self.client.post(
reverse("admin:admin_views_section_change", args=(self.s1.pk,)), data
)
self.assertRedirects(response, reverse("admin:admin_views_section_changelist"))
self.assertEqual(Article.objects.count(), 2)
def test_delete_view(self):
"""Delete view should restrict access and actually delete items."""
delete_dict = {"post": "yes"}
delete_url = reverse("admin:admin_views_article_delete", args=(self.a1.pk,))
# add user should not be able to delete articles
self.client.force_login(self.adduser)
response = self.client.get(delete_url)
self.assertEqual(response.status_code, 403)
post = self.client.post(delete_url, delete_dict)
self.assertEqual(post.status_code, 403)
self.assertEqual(Article.objects.count(), 3)
self.client.logout()
# view user should not be able to delete articles
self.client.force_login(self.viewuser)
response = self.client.get(delete_url)
self.assertEqual(response.status_code, 403)
post = self.client.post(delete_url, delete_dict)
self.assertEqual(post.status_code, 403)
self.assertEqual(Article.objects.count(), 3)
self.client.logout()
# Delete user can delete
self.client.force_login(self.deleteuser)
response = self.client.get(
reverse("admin:admin_views_section_delete", args=(self.s1.pk,))
)
self.assertContains(response, "<h2>Summary</h2>")
self.assertContains(response, "<li>Articles: 3</li>")
# The response contains a link to the related Article.
self.assertContains(response, "admin_views/article/%s/" % self.a1.pk)
response = self.client.get(delete_url)
self.assertContains(response, "admin_views/article/%s/" % self.a1.pk)
self.assertContains(response, "<h2>Summary</h2>")
self.assertContains(response, "<li>Articles: 1</li>")
post = self.client.post(delete_url, delete_dict)
self.assertRedirects(post, self.index_url)
self.assertEqual(Article.objects.count(), 2)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, "Greetings from a deleted object")
article_ct = ContentType.objects.get_for_model(Article)
logged = LogEntry.objects.get(content_type=article_ct, action_flag=DELETION)
self.assertEqual(logged.object_id, str(self.a1.pk))
def test_delete_view_with_no_default_permissions(self):
"""
The delete view allows users to delete collected objects without a
'delete' permission (ReadOnlyPizza.Meta.default_permissions is empty).
"""
pizza = ReadOnlyPizza.objects.create(name="Double Cheese")
delete_url = reverse("admin:admin_views_readonlypizza_delete", args=(pizza.pk,))
self.client.force_login(self.adduser)
response = self.client.get(delete_url)
self.assertContains(response, "admin_views/readonlypizza/%s/" % pizza.pk)
self.assertContains(response, "<h2>Summary</h2>")
self.assertContains(response, "<li>Read only pizzas: 1</li>")
post = self.client.post(delete_url, {"post": "yes"})
self.assertRedirects(
post, reverse("admin:admin_views_readonlypizza_changelist")
)
self.assertEqual(ReadOnlyPizza.objects.count(), 0)
def test_delete_view_nonexistent_obj(self):
self.client.force_login(self.deleteuser)
url = reverse("admin:admin_views_article_delete", args=("nonexistent",))
response = self.client.get(url, follow=True)
self.assertRedirects(response, reverse("admin:index"))
self.assertEqual(
[m.message for m in response.context["messages"]],
["article with ID “nonexistent” doesn’t exist. Perhaps it was deleted?"],
)
def test_history_view(self):
"""History view should restrict access."""
# The add user should not be able to view the list of articles or change any of them.
self.client.force_login(self.adduser)
response = self.client.get(
reverse("admin:admin_views_article_history", args=(self.a1.pk,))
)
self.assertEqual(response.status_code, 403)
self.client.post(reverse("admin:logout"))
# view user can view all items
self.client.force_login(self.viewuser)
response = self.client.get(
reverse("admin:admin_views_article_history", args=(self.a1.pk,))
)
self.assertEqual(response.status_code, 200)
self.client.post(reverse("admin:logout"))
# change user can view all items and edit them
self.client.force_login(self.changeuser)
response = self.client.get(
reverse("admin:admin_views_article_history", args=(self.a1.pk,))
)
self.assertEqual(response.status_code, 200)
# Test redirection when using row-level change permissions. Refs #11513.
rl1 = RowLevelChangePermissionModel.objects.create(id=1, name="odd id")
rl2 = RowLevelChangePermissionModel.objects.create(id=2, name="even id")
logins = [
self.superuser,
self.viewuser,
self.adduser,
self.changeuser,
self.deleteuser,
]
for login_user in logins:
with self.subTest(login_user.username):
self.client.force_login(login_user)
url = reverse(
"admin:admin_views_rowlevelchangepermissionmodel_history",
args=(rl1.pk,),
)
response = self.client.get(url)
self.assertEqual(response.status_code, 403)
url = reverse(
"admin:admin_views_rowlevelchangepermissionmodel_history",
args=(rl2.pk,),
)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
self.client.post(reverse("admin:logout"))
for login_user in [self.joepublicuser, self.nostaffuser]:
with self.subTest(login_user.username):
self.client.force_login(login_user)
url = reverse(
"admin:admin_views_rowlevelchangepermissionmodel_history",
args=(rl1.pk,),
)
response = self.client.get(url, follow=True)
self.assertContains(response, "login-form")
url = reverse(
"admin:admin_views_rowlevelchangepermissionmodel_history",
args=(rl2.pk,),
)
response = self.client.get(url, follow=True)
self.assertContains(response, "login-form")
self.client.post(reverse("admin:logout"))
def test_history_view_bad_url(self):
self.client.force_login(self.changeuser)
response = self.client.get(
reverse("admin:admin_views_article_history", args=("foo",)), follow=True
)
self.assertRedirects(response, reverse("admin:index"))
self.assertEqual(
[m.message for m in response.context["messages"]],
["article with ID “foo” doesn’t exist. Perhaps it was deleted?"],
)
def test_conditionally_show_add_section_link(self):
"""
The foreign key widget should only show the "add related" button if the
user has permission to add that related item.
"""
self.client.force_login(self.adduser)
# The user can't add sections yet, so they shouldn't see the "add section" link.
url = reverse("admin:admin_views_article_add")
add_link_text = "add_id_section"
response = self.client.get(url)
self.assertNotContains(response, add_link_text)
# Allow the user to add sections too. Now they can see the "add section" link.
user = User.objects.get(username="adduser")
perm = get_perm(Section, get_permission_codename("add", Section._meta))
user.user_permissions.add(perm)
response = self.client.get(url)
self.assertContains(response, add_link_text)
def test_conditionally_show_change_section_link(self):
"""
The foreign key widget should only show the "change related" button if
the user has permission to change that related item.
"""
def get_change_related(response):
return (
response.context["adminform"]
.form.fields["section"]
.widget.can_change_related
)
self.client.force_login(self.adduser)
# The user can't change sections yet, so they shouldn't see the
# "change section" link.
url = reverse("admin:admin_views_article_add")
change_link_text = "change_id_section"
response = self.client.get(url)
self.assertFalse(get_change_related(response))
self.assertNotContains(response, change_link_text)
# Allow the user to change sections too. Now they can see the
# "change section" link.
user = User.objects.get(username="adduser")
perm = get_perm(Section, get_permission_codename("change", Section._meta))
user.user_permissions.add(perm)
response = self.client.get(url)
self.assertTrue(get_change_related(response))
self.assertContains(response, change_link_text)
def test_conditionally_show_delete_section_link(self):
"""
The foreign key widget should only show the "delete related" button if
the user has permission to delete that related item.
"""
def get_delete_related(response):
return (
response.context["adminform"]
.form.fields["sub_section"]
.widget.can_delete_related
)
self.client.force_login(self.adduser)
# The user can't delete sections yet, so they shouldn't see the
# "delete section" link.
url = reverse("admin:admin_views_article_add")
delete_link_text = "delete_id_sub_section"
response = self.client.get(url)
self.assertFalse(get_delete_related(response))
self.assertNotContains(response, delete_link_text)
# Allow the user to delete sections too. Now they can see the
# "delete section" link.
user = User.objects.get(username="adduser")
perm = get_perm(Section, get_permission_codename("delete", Section._meta))
user.user_permissions.add(perm)
response = self.client.get(url)
self.assertTrue(get_delete_related(response))
self.assertContains(response, delete_link_text)
def test_disabled_permissions_when_logged_in(self):
self.client.force_login(self.superuser)
superuser = User.objects.get(username="super")
superuser.is_active = False
superuser.save()
response = self.client.get(self.index_url, follow=True)
self.assertContains(response, 'id="login-form"')
self.assertNotContains(response, "Log out")
response = self.client.get(reverse("secure_view"), follow=True)
self.assertContains(response, 'id="login-form"')
def test_disabled_staff_permissions_when_logged_in(self):
self.client.force_login(self.superuser)
superuser = User.objects.get(username="super")
superuser.is_staff = False
superuser.save()
response = self.client.get(self.index_url, follow=True)
self.assertContains(response, 'id="login-form"')
self.assertNotContains(response, "Log out")
response = self.client.get(reverse("secure_view"), follow=True)
self.assertContains(response, 'id="login-form"')
def test_app_list_permissions(self):
"""
If a user has no module perms, the app list returns a 404.
"""
opts = Article._meta
change_user = User.objects.get(username="changeuser")
permission = get_perm(Article, get_permission_codename("change", opts))
self.client.force_login(self.changeuser)
# the user has no module permissions
change_user.user_permissions.remove(permission)
response = self.client.get(reverse("admin:app_list", args=("admin_views",)))
self.assertEqual(response.status_code, 404)
# the user now has module permissions
change_user.user_permissions.add(permission)
response = self.client.get(reverse("admin:app_list", args=("admin_views",)))
self.assertEqual(response.status_code, 200)
def test_shortcut_view_only_available_to_staff(self):
"""
Only admin users should be able to use the admin shortcut view.
"""
model_ctype = ContentType.objects.get_for_model(ModelWithStringPrimaryKey)
obj = ModelWithStringPrimaryKey.objects.create(string_pk="foo")
shortcut_url = reverse("admin:view_on_site", args=(model_ctype.pk, obj.pk))
# Not logged in: we should see the login page.
response = self.client.get(shortcut_url, follow=True)
self.assertTemplateUsed(response, "admin/login.html")
# Logged in? Redirect.
self.client.force_login(self.superuser)
response = self.client.get(shortcut_url, follow=False)
# Can't use self.assertRedirects() because User.get_absolute_url() is silly.
self.assertEqual(response.status_code, 302)
        # The domain may vary depending on whether the contrib.sites tests
        # have also run.
self.assertRegex(response.url, "http://(testserver|example.com)/dummy/foo/")
def test_has_module_permission(self):
"""
has_module_permission() returns True for all users who
        have any permission for that module (view, add, change, or delete), so that
the module is displayed on the admin index page.
"""
self.client.force_login(self.superuser)
response = self.client.get(self.index_url)
self.assertContains(response, "admin_views")
self.assertContains(response, "Articles")
self.client.logout()
self.client.force_login(self.viewuser)
response = self.client.get(self.index_url)
self.assertContains(response, "admin_views")
self.assertContains(response, "Articles")
self.client.logout()
self.client.force_login(self.adduser)
response = self.client.get(self.index_url)
self.assertContains(response, "admin_views")
self.assertContains(response, "Articles")
self.client.logout()
self.client.force_login(self.changeuser)
response = self.client.get(self.index_url)
self.assertContains(response, "admin_views")
self.assertContains(response, "Articles")
self.client.logout()
self.client.force_login(self.deleteuser)
response = self.client.get(self.index_url)
self.assertContains(response, "admin_views")
self.assertContains(response, "Articles")
def test_overriding_has_module_permission(self):
"""
If has_module_permission() always returns False, the module shouldn't
be displayed on the admin index page for any users.
"""
articles = Article._meta.verbose_name_plural.title()
sections = Section._meta.verbose_name_plural.title()
index_url = reverse("admin7:index")
self.client.force_login(self.superuser)
response = self.client.get(index_url)
self.assertContains(response, sections)
self.assertNotContains(response, articles)
self.client.logout()
self.client.force_login(self.viewuser)
response = self.client.get(index_url)
self.assertNotContains(response, "admin_views")
self.assertNotContains(response, articles)
self.client.logout()
self.client.force_login(self.adduser)
response = self.client.get(index_url)
self.assertNotContains(response, "admin_views")
self.assertNotContains(response, articles)
self.client.logout()
self.client.force_login(self.changeuser)
response = self.client.get(index_url)
self.assertNotContains(response, "admin_views")
self.assertNotContains(response, articles)
self.client.logout()
self.client.force_login(self.deleteuser)
response = self.client.get(index_url)
        self.assertNotContains(response, "admin_views")
        self.assertNotContains(response, articles)
        # The app list displays Sections but not Articles, because the
        # latter's ModelAdmin.has_module_permission() returns False.
self.client.force_login(self.superuser)
response = self.client.get(reverse("admin7:app_list", args=("admin_views",)))
self.assertContains(response, sections)
self.assertNotContains(response, articles)
def test_post_save_message_no_forbidden_links_visible(self):
"""
Post-save message shouldn't contain a link to the change form if the
user doesn't have the change permission.
"""
self.client.force_login(self.adduser)
# Emulate Article creation for user with add-only permission.
post_data = {
"title": "Fun & games",
"content": "Some content",
"date_0": "2015-10-31",
"date_1": "16:35:00",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_article_add"), post_data, follow=True
)
self.assertContains(
response,
'<li class="success">The article “Fun & games” was added successfully.'
"</li>",
html=True,
)
@override_settings(
ROOT_URLCONF="admin_views.urls",
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
],
},
}
],
)
class AdminViewProxyModelPermissionsTests(TestCase):
"""Tests for proxy models permissions in the admin."""
@classmethod
def setUpTestData(cls):
cls.viewuser = User.objects.create_user(
username="viewuser", password="secret", is_staff=True
)
cls.adduser = User.objects.create_user(
username="adduser", password="secret", is_staff=True
)
cls.changeuser = User.objects.create_user(
username="changeuser", password="secret", is_staff=True
)
cls.deleteuser = User.objects.create_user(
username="deleteuser", password="secret", is_staff=True
)
# Setup permissions.
opts = UserProxy._meta
cls.viewuser.user_permissions.add(
get_perm(UserProxy, get_permission_codename("view", opts))
)
cls.adduser.user_permissions.add(
get_perm(UserProxy, get_permission_codename("add", opts))
)
cls.changeuser.user_permissions.add(
get_perm(UserProxy, get_permission_codename("change", opts))
)
cls.deleteuser.user_permissions.add(
get_perm(UserProxy, get_permission_codename("delete", opts))
)
# UserProxy instances.
cls.user_proxy = UserProxy.objects.create(
username="user_proxy", password="secret"
)
def test_add(self):
self.client.force_login(self.adduser)
url = reverse("admin:admin_views_userproxy_add")
data = {
"username": "can_add",
"password": "secret",
"date_joined_0": "2019-01-15",
"date_joined_1": "16:59:10",
}
response = self.client.post(url, data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(UserProxy.objects.filter(username="can_add").exists())
def test_view(self):
self.client.force_login(self.viewuser)
response = self.client.get(reverse("admin:admin_views_userproxy_changelist"))
self.assertContains(response, "<h1>Select user proxy to view</h1>")
response = self.client.get(
reverse("admin:admin_views_userproxy_change", args=(self.user_proxy.pk,))
)
self.assertContains(response, "<h1>View user proxy</h1>")
self.assertContains(response, '<div class="readonly">user_proxy</div>')
def test_change(self):
self.client.force_login(self.changeuser)
data = {
"password": self.user_proxy.password,
"username": self.user_proxy.username,
"date_joined_0": self.user_proxy.date_joined.strftime("%Y-%m-%d"),
"date_joined_1": self.user_proxy.date_joined.strftime("%H:%M:%S"),
"first_name": "first_name",
}
url = reverse("admin:admin_views_userproxy_change", args=(self.user_proxy.pk,))
response = self.client.post(url, data)
self.assertRedirects(
response, reverse("admin:admin_views_userproxy_changelist")
)
self.assertEqual(
UserProxy.objects.get(pk=self.user_proxy.pk).first_name, "first_name"
)
def test_delete(self):
self.client.force_login(self.deleteuser)
url = reverse("admin:admin_views_userproxy_delete", args=(self.user_proxy.pk,))
response = self.client.post(url, {"post": "yes"}, follow=True)
self.assertEqual(response.status_code, 200)
self.assertFalse(UserProxy.objects.filter(pk=self.user_proxy.pk).exists())
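
# The tests above rely on proxy models carrying their own permissions. A
# minimal sketch of the lookup involved; the function name is hypothetical,
# while UserProxy and get_permission_codename come from this module's imports.
def _sketch_proxy_model_perm(action):
    from django.contrib.auth.models import Permission
    from django.contrib.contenttypes.models import ContentType

    # With for_concrete_model=False the proxy gets its own content type row,
    # so its permissions are distinct from the concrete User model's.
    ct = ContentType.objects.get_for_model(UserProxy, for_concrete_model=False)
    codename = get_permission_codename(action, UserProxy._meta)
    return Permission.objects.get(content_type=ct, codename=codename)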
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminViewsNoUrlTest(TestCase):
"""Regression test for #17333"""
@classmethod
def setUpTestData(cls):
# User who can change Reports
cls.changeuser = User.objects.create_user(
username="changeuser", password="secret", is_staff=True
)
cls.changeuser.user_permissions.add(
get_perm(Report, get_permission_codename("change", Report._meta))
)
def test_no_standard_modeladmin_urls(self):
"""Admin index views don't break when user's ModelAdmin removes standard urls"""
self.client.force_login(self.changeuser)
r = self.client.get(reverse("admin:index"))
# we shouldn't get a 500 error caused by a NoReverseMatch
self.assertEqual(r.status_code, 200)
self.client.post(reverse("admin:logout"))
@skipUnlessDBFeature("can_defer_constraint_checks")
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminViewDeletedObjectsTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.deleteuser = User.objects.create_user(
username="deleteuser", password="secret", is_staff=True
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
cls.v1 = Villain.objects.create(name="Adam")
cls.v2 = Villain.objects.create(name="Sue")
cls.sv1 = SuperVillain.objects.create(name="Bob")
cls.pl1 = Plot.objects.create(
name="World Domination", team_leader=cls.v1, contact=cls.v2
)
cls.pl2 = Plot.objects.create(
name="World Peace", team_leader=cls.v2, contact=cls.v2
)
cls.pl3 = Plot.objects.create(
name="Corn Conspiracy", team_leader=cls.v1, contact=cls.v1
)
cls.pd1 = PlotDetails.objects.create(details="almost finished", plot=cls.pl1)
cls.sh1 = SecretHideout.objects.create(
location="underground bunker", villain=cls.v1
)
cls.sh2 = SecretHideout.objects.create(
location="floating castle", villain=cls.sv1
)
cls.ssh1 = SuperSecretHideout.objects.create(
location="super floating castle!", supervillain=cls.sv1
)
cls.cy1 = CyclicOne.objects.create(pk=1, name="I am recursive", two_id=1)
cls.cy2 = CyclicTwo.objects.create(pk=1, name="I am recursive too", one_id=1)
def setUp(self):
self.client.force_login(self.superuser)
def test_nesting(self):
"""
Objects should be nested to display the relationships that
cause them to be scheduled for deletion.
"""
pattern = re.compile(
r'<li>Plot: <a href="%s">World Domination</a>\s*<ul>\s*'
r'<li>Plot details: <a href="%s">almost finished</a>'
% (
reverse("admin:admin_views_plot_change", args=(self.pl1.pk,)),
reverse("admin:admin_views_plotdetails_change", args=(self.pd1.pk,)),
)
)
response = self.client.get(
reverse("admin:admin_views_villain_delete", args=(self.v1.pk,))
)
self.assertRegex(response.content.decode(), pattern)
def test_cyclic(self):
"""
Cyclic relationships should still cause each object to only be
listed once.
"""
one = '<li>Cyclic one: <a href="%s">I am recursive</a>' % (
reverse("admin:admin_views_cyclicone_change", args=(self.cy1.pk,)),
)
two = '<li>Cyclic two: <a href="%s">I am recursive too</a>' % (
reverse("admin:admin_views_cyclictwo_change", args=(self.cy2.pk,)),
)
response = self.client.get(
reverse("admin:admin_views_cyclicone_delete", args=(self.cy1.pk,))
)
self.assertContains(response, one, 1)
self.assertContains(response, two, 1)
def test_perms_needed(self):
self.client.logout()
delete_user = User.objects.get(username="deleteuser")
delete_user.user_permissions.add(
get_perm(Plot, get_permission_codename("delete", Plot._meta))
)
self.client.force_login(self.deleteuser)
response = self.client.get(
reverse("admin:admin_views_plot_delete", args=(self.pl1.pk,))
)
self.assertContains(
response,
"your account doesn't have permission to delete the following types of "
"objects",
)
self.assertContains(response, "<li>plot details</li>")
def test_protected(self):
q = Question.objects.create(question="Why?")
a1 = Answer.objects.create(question=q, answer="Because.")
a2 = Answer.objects.create(question=q, answer="Yes.")
response = self.client.get(
reverse("admin:admin_views_question_delete", args=(q.pk,))
)
self.assertContains(
response, "would require deleting the following protected related objects"
)
self.assertContains(
response,
'<li>Answer: <a href="%s">Because.</a></li>'
% reverse("admin:admin_views_answer_change", args=(a1.pk,)),
)
self.assertContains(
response,
'<li>Answer: <a href="%s">Yes.</a></li>'
% reverse("admin:admin_views_answer_change", args=(a2.pk,)),
)
def test_post_delete_protected(self):
"""
A POST request to delete protected objects should display the page
which says the deletion is prohibited.
"""
q = Question.objects.create(question="Why?")
Answer.objects.create(question=q, answer="Because.")
response = self.client.post(
reverse("admin:admin_views_question_delete", args=(q.pk,)), {"post": "yes"}
)
self.assertEqual(Question.objects.count(), 1)
self.assertContains(
response, "would require deleting the following protected related objects"
)
def test_restricted(self):
album = Album.objects.create(title="Amaryllis")
song = Song.objects.create(album=album, name="Unity")
response = self.client.get(
reverse("admin:admin_views_album_delete", args=(album.pk,))
)
self.assertContains(
response,
"would require deleting the following protected related objects",
)
self.assertContains(
response,
'<li>Song: <a href="%s">Unity</a></li>'
% reverse("admin:admin_views_song_change", args=(song.pk,)),
)
def test_post_delete_restricted(self):
album = Album.objects.create(title="Amaryllis")
Song.objects.create(album=album, name="Unity")
response = self.client.post(
reverse("admin:admin_views_album_delete", args=(album.pk,)),
{"post": "yes"},
)
self.assertEqual(Album.objects.count(), 1)
self.assertContains(
response,
"would require deleting the following protected related objects",
)
def test_not_registered(self):
should_contain = """<li>Secret hideout: underground bunker"""
response = self.client.get(
reverse("admin:admin_views_villain_delete", args=(self.v1.pk,))
)
self.assertContains(response, should_contain, 1)
def test_multiple_fkeys_to_same_model(self):
"""
If a deleted object has two relationships from another model,
both of those should be followed in looking for related
objects to delete.
"""
should_contain = '<li>Plot: <a href="%s">World Domination</a>' % reverse(
"admin:admin_views_plot_change", args=(self.pl1.pk,)
)
response = self.client.get(
reverse("admin:admin_views_villain_delete", args=(self.v1.pk,))
)
self.assertContains(response, should_contain)
response = self.client.get(
reverse("admin:admin_views_villain_delete", args=(self.v2.pk,))
)
self.assertContains(response, should_contain)
def test_multiple_fkeys_to_same_instance(self):
"""
If a deleted object has two relationships pointing to it from
another object, the other object should still only be listed
once.
"""
should_contain = '<li>Plot: <a href="%s">World Peace</a></li>' % reverse(
"admin:admin_views_plot_change", args=(self.pl2.pk,)
)
response = self.client.get(
reverse("admin:admin_views_villain_delete", args=(self.v2.pk,))
)
self.assertContains(response, should_contain, 1)
def test_inheritance(self):
"""
In the case of an inherited model, if either the child or
parent-model instance is deleted, both instances are listed
for deletion, as well as any relationships they have.
"""
should_contain = [
'<li>Villain: <a href="%s">Bob</a>'
% reverse("admin:admin_views_villain_change", args=(self.sv1.pk,)),
'<li>Super villain: <a href="%s">Bob</a>'
% reverse("admin:admin_views_supervillain_change", args=(self.sv1.pk,)),
"<li>Secret hideout: floating castle",
"<li>Super secret hideout: super floating castle!",
]
response = self.client.get(
reverse("admin:admin_views_villain_delete", args=(self.sv1.pk,))
)
for should in should_contain:
self.assertContains(response, should, 1)
response = self.client.get(
reverse("admin:admin_views_supervillain_delete", args=(self.sv1.pk,))
)
for should in should_contain:
self.assertContains(response, should, 1)
def test_generic_relations(self):
"""
If a deleted object has GenericForeignKeys pointing to it,
those objects should be listed for deletion.
"""
plot = self.pl3
tag = FunkyTag.objects.create(content_object=plot, name="hott")
should_contain = '<li>Funky tag: <a href="%s">hott' % reverse(
"admin:admin_views_funkytag_change", args=(tag.id,)
)
response = self.client.get(
reverse("admin:admin_views_plot_delete", args=(plot.pk,))
)
self.assertContains(response, should_contain)
def test_generic_relations_with_related_query_name(self):
"""
If a deleted object has GenericForeignKey with
GenericRelation(related_query_name='...') pointing to it, those objects
should be listed for deletion.
"""
bookmark = Bookmark.objects.create(name="djangoproject")
tag = FunkyTag.objects.create(content_object=bookmark, name="django")
tag_url = reverse("admin:admin_views_funkytag_change", args=(tag.id,))
should_contain = '<li>Funky tag: <a href="%s">django' % tag_url
response = self.client.get(
reverse("admin:admin_views_bookmark_delete", args=(bookmark.pk,))
)
self.assertContains(response, should_contain)
def test_delete_view_uses_get_deleted_objects(self):
"""The delete view uses ModelAdmin.get_deleted_objects()."""
book = Book.objects.create(name="Test Book")
response = self.client.get(
reverse("admin2:admin_views_book_delete", args=(book.pk,))
)
# BookAdmin.get_deleted_objects() returns custom text.
self.assertContains(response, "a deletable object")
@override_settings(ROOT_URLCONF="admin_views.urls")
class TestGenericRelations(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.v1 = Villain.objects.create(name="Adam")
cls.pl3 = Plot.objects.create(
name="Corn Conspiracy", team_leader=cls.v1, contact=cls.v1
)
def setUp(self):
self.client.force_login(self.superuser)
def test_generic_content_object_in_list_display(self):
FunkyTag.objects.create(content_object=self.pl3, name="hott")
response = self.client.get(reverse("admin:admin_views_funkytag_changelist"))
self.assertContains(response, "%s</td>" % self.pl3)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminViewStringPrimaryKeyTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
cls.pk = (
"abcdefghijklmnopqrstuvwxyz ABCDEFGHIJKLMNOPQRSTUVWXYZ 1234567890 "
r"""-_.!~*'() ;/?:@&=+$, <>#%" {}|\^[]`"""
)
cls.m1 = ModelWithStringPrimaryKey.objects.create(string_pk=cls.pk)
content_type_pk = ContentType.objects.get_for_model(
ModelWithStringPrimaryKey
).pk
user_pk = cls.superuser.pk
LogEntry.objects.log_action(
user_pk,
content_type_pk,
cls.pk,
cls.pk,
            2,  # action_flag: CHANGE
change_message="Changed something",
)
LogEntry.objects.log_action(
user_pk,
content_type_pk,
cls.pk,
cls.pk,
            1,  # action_flag: ADDITION
change_message="Added something",
)
LogEntry.objects.log_action(
user_pk,
content_type_pk,
cls.pk,
cls.pk,
            3,  # action_flag: DELETION
change_message="Deleted something",
)
def setUp(self):
self.client.force_login(self.superuser)
def test_get_history_view(self):
"""
Retrieving the history for an object using urlencoded form of primary
key should work.
Refs #12349, #18550.
"""
response = self.client.get(
reverse(
"admin:admin_views_modelwithstringprimarykey_history", args=(self.pk,)
)
)
self.assertContains(response, escape(self.pk))
self.assertContains(response, "Changed something")
def test_get_change_view(self):
"Retrieving the object using urlencoded form of primary key should work"
response = self.client.get(
reverse(
"admin:admin_views_modelwithstringprimarykey_change", args=(self.pk,)
)
)
self.assertContains(response, escape(self.pk))
def test_changelist_to_changeform_link(self):
"""
Link to the changeform of the object in changelist should use reverse()
and be quoted.
"""
response = self.client.get(
reverse("admin:admin_views_modelwithstringprimarykey_changelist")
)
        # This URL comes through reverse(), so URL quoting and iri_to_uri()
        # encoding apply.
pk_final_url = escape(iri_to_uri(quote(self.pk)))
change_url = reverse(
"admin:admin_views_modelwithstringprimarykey_change", args=("__fk__",)
).replace("__fk__", pk_final_url)
should_contain = '<th class="field-__str__"><a href="%s">%s</a></th>' % (
change_url,
escape(self.pk),
)
self.assertContains(response, should_contain)
def test_recentactions_link(self):
"""
The link from the recent actions list referring to the changeform of
the object should be quoted.
"""
response = self.client.get(reverse("admin:index"))
link = reverse(
"admin:admin_views_modelwithstringprimarykey_change", args=(quote(self.pk),)
)
should_contain = """<a href="%s">%s</a>""" % (escape(link), escape(self.pk))
self.assertContains(response, should_contain)
def test_recentactions_description(self):
response = self.client.get(reverse("admin:index"))
for operation in ["Added", "Changed", "Deleted"]:
with self.subTest(operation):
self.assertContains(
response, f'<span class="visually-hidden">{operation}:'
)
def test_deleteconfirmation_link(self):
""" "
The link from the delete confirmation page referring back to the
changeform of the object should be quoted.
"""
url = reverse(
"admin:admin_views_modelwithstringprimarykey_delete", args=(quote(self.pk),)
)
response = self.client.get(url)
        # This URL comes through reverse(), so URL quoting and iri_to_uri()
        # encoding apply.
change_url = reverse(
"admin:admin_views_modelwithstringprimarykey_change", args=("__fk__",)
).replace("__fk__", escape(iri_to_uri(quote(self.pk))))
should_contain = '<a href="%s">%s</a>' % (change_url, escape(self.pk))
self.assertContains(response, should_contain)
def test_url_conflicts_with_add(self):
"A model with a primary key that ends with add or is `add` should be visible"
add_model = ModelWithStringPrimaryKey.objects.create(
pk="i have something to add"
)
response = self.client.get(
reverse(
"admin:admin_views_modelwithstringprimarykey_change",
args=(quote(add_model.pk),),
)
)
should_contain = """<h1>Change model with string primary key</h1>"""
self.assertContains(response, should_contain)
add_model2 = ModelWithStringPrimaryKey.objects.create(pk="add")
add_url = reverse("admin:admin_views_modelwithstringprimarykey_add")
change_url = reverse(
"admin:admin_views_modelwithstringprimarykey_change",
args=(quote(add_model2.pk),),
)
self.assertNotEqual(add_url, change_url)
def test_url_conflicts_with_delete(self):
"A model with a primary key that ends with delete should be visible"
delete_model = ModelWithStringPrimaryKey(pk="delete")
delete_model.save()
response = self.client.get(
reverse(
"admin:admin_views_modelwithstringprimarykey_change",
args=(quote(delete_model.pk),),
)
)
should_contain = """<h1>Change model with string primary key</h1>"""
self.assertContains(response, should_contain)
def test_url_conflicts_with_history(self):
"A model with a primary key that ends with history should be visible"
history_model = ModelWithStringPrimaryKey(pk="history")
history_model.save()
response = self.client.get(
reverse(
"admin:admin_views_modelwithstringprimarykey_change",
args=(quote(history_model.pk),),
)
)
should_contain = """<h1>Change model with string primary key</h1>"""
self.assertContains(response, should_contain)
def test_shortcut_view_with_escaping(self):
"'View on site should' work properly with char fields"
model = ModelWithStringPrimaryKey(pk="abc_123")
model.save()
response = self.client.get(
reverse(
"admin:admin_views_modelwithstringprimarykey_change",
args=(quote(model.pk),),
)
)
should_contain = '/%s/" class="viewsitelink">' % model.pk
self.assertContains(response, should_contain)
def test_change_view_history_link(self):
"""Object history button link should work and contain the pk value quoted."""
url = reverse(
"admin:%s_modelwithstringprimarykey_change"
% ModelWithStringPrimaryKey._meta.app_label,
args=(quote(self.pk),),
)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
expected_link = reverse(
"admin:%s_modelwithstringprimarykey_history"
% ModelWithStringPrimaryKey._meta.app_label,
args=(quote(self.pk),),
)
self.assertContains(
response, '<a href="%s" class="historylink"' % escape(expected_link)
)
def test_redirect_on_add_view_continue_button(self):
"""As soon as an object is added using "Save and continue editing"
button, the user should be redirected to the object's change_view.
        If the primary key is a string containing special characters such as
        a slash or an underscore, those characters must be escaped (see #22266)
"""
response = self.client.post(
reverse("admin:admin_views_modelwithstringprimarykey_add"),
{
"string_pk": "123/history",
"_continue": "1", # Save and continue editing
},
)
self.assertEqual(response.status_code, 302) # temporary redirect
self.assertIn("/123_2Fhistory/", response.headers["location"]) # PK is quoted
@override_settings(ROOT_URLCONF="admin_views.urls")
class SecureViewTests(TestCase):
"""
Test behavior of a view protected by the staff_member_required decorator.
"""
def test_secure_view_shows_login_if_not_logged_in(self):
secure_url = reverse("secure_view")
response = self.client.get(secure_url)
self.assertRedirects(
response, "%s?next=%s" % (reverse("admin:login"), secure_url)
)
response = self.client.get(secure_url, follow=True)
self.assertTemplateUsed(response, "admin/login.html")
self.assertEqual(response.context[REDIRECT_FIELD_NAME], secure_url)
def test_staff_member_required_decorator_works_with_argument(self):
"""
        The staff_member_required decorator works with an argument
(redirect_field_name).
"""
secure_url = "/test_admin/admin/secure-view2/"
response = self.client.get(secure_url)
self.assertRedirects(
response, "%s?myfield=%s" % (reverse("admin:login"), secure_url)
)
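
# A sketch of the view wiring the second test assumes: a view decorated with a
# custom redirect_field_name, so unauthenticated requests are redirected to
# the admin login with "?myfield=<path>" instead of "?next=<path>". The
# underscore-prefixed names are hypothetical.
from django.contrib.admin.views.decorators import staff_member_required
from django.http import HttpResponse


@staff_member_required(redirect_field_name="myfield")
def _sketch_secure_view2(request):
    return HttpResponse("secure")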
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminViewUnicodeTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.b1 = Book.objects.create(name="Lærdommer")
cls.p1 = Promo.objects.create(name="<Promo for Lærdommer>", book=cls.b1)
cls.chap1 = Chapter.objects.create(
title="Norske bostaver æøå skaper problemer",
content="<p>Svært frustrerende med UnicodeDecodeErro</p>",
book=cls.b1,
)
cls.chap2 = Chapter.objects.create(
title="Kjærlighet",
content="<p>La kjærligheten til de lidende seire.</p>",
book=cls.b1,
)
cls.chap3 = Chapter.objects.create(
title="Kjærlighet", content="<p>Noe innhold</p>", book=cls.b1
)
cls.chap4 = ChapterXtra1.objects.create(
chap=cls.chap1, xtra="<Xtra(1) Norske bostaver æøå skaper problemer>"
)
cls.chap5 = ChapterXtra1.objects.create(
chap=cls.chap2, xtra="<Xtra(1) Kjærlighet>"
)
cls.chap6 = ChapterXtra1.objects.create(
chap=cls.chap3, xtra="<Xtra(1) Kjærlighet>"
)
cls.chap7 = ChapterXtra2.objects.create(
chap=cls.chap1, xtra="<Xtra(2) Norske bostaver æøå skaper problemer>"
)
cls.chap8 = ChapterXtra2.objects.create(
chap=cls.chap2, xtra="<Xtra(2) Kjærlighet>"
)
cls.chap9 = ChapterXtra2.objects.create(
chap=cls.chap3, xtra="<Xtra(2) Kjærlighet>"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_unicode_edit(self):
"""
A test to ensure that POST on edit_view handles non-ASCII characters.
"""
post_data = {
"name": "Test lærdommer",
# inline data
"chapter_set-TOTAL_FORMS": "6",
"chapter_set-INITIAL_FORMS": "3",
"chapter_set-MAX_NUM_FORMS": "0",
"chapter_set-0-id": self.chap1.pk,
"chapter_set-0-title": "Norske bostaver æøå skaper problemer",
"chapter_set-0-content": (
"<p>Svært frustrerende med UnicodeDecodeError</p>"
),
"chapter_set-1-id": self.chap2.id,
"chapter_set-1-title": "Kjærlighet.",
"chapter_set-1-content": (
"<p>La kjærligheten til de lidende seire.</p>"
),
"chapter_set-2-id": self.chap3.id,
"chapter_set-2-title": "Need a title.",
"chapter_set-2-content": "<p>Newest content</p>",
"chapter_set-3-id": "",
"chapter_set-3-title": "",
"chapter_set-3-content": "",
"chapter_set-4-id": "",
"chapter_set-4-title": "",
"chapter_set-4-content": "",
"chapter_set-5-id": "",
"chapter_set-5-title": "",
"chapter_set-5-content": "",
}
response = self.client.post(
reverse("admin:admin_views_book_change", args=(self.b1.pk,)), post_data
)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_unicode_delete(self):
"""
The delete_view handles non-ASCII characters
"""
delete_dict = {"post": "yes"}
delete_url = reverse("admin:admin_views_book_delete", args=(self.b1.pk,))
response = self.client.get(delete_url)
self.assertEqual(response.status_code, 200)
response = self.client.post(delete_url, delete_dict)
self.assertRedirects(response, reverse("admin:admin_views_book_changelist"))
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminViewListEditable(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
cls.per1 = Person.objects.create(name="John Mauchly", gender=1, alive=True)
cls.per2 = Person.objects.create(name="Grace Hopper", gender=1, alive=False)
cls.per3 = Person.objects.create(name="Guido van Rossum", gender=1, alive=True)
def setUp(self):
self.client.force_login(self.superuser)
def test_inheritance(self):
Podcast.objects.create(
name="This Week in Django", release_date=datetime.date.today()
)
response = self.client.get(reverse("admin:admin_views_podcast_changelist"))
self.assertEqual(response.status_code, 200)
def test_inheritance_2(self):
Vodcast.objects.create(name="This Week in Django", released=True)
response = self.client.get(reverse("admin:admin_views_vodcast_changelist"))
self.assertEqual(response.status_code, 200)
def test_custom_pk(self):
Language.objects.create(iso="en", name="English", english_name="English")
response = self.client.get(reverse("admin:admin_views_language_changelist"))
self.assertEqual(response.status_code, 200)
def test_changelist_input_html(self):
response = self.client.get(reverse("admin:admin_views_person_changelist"))
        # 2 inputs per object (the field and the hidden id field) = 6
        # 4 management hidden fields = 4
        # 4 action inputs (3 regular checkboxes, 1 checkbox to select all)
        # main form submit button = 1
        # search field and search submit button = 2
        # CSRF fields = 2
        # field to track 'select all' across paginated views = 1
        # nav sidebar quick filter field = 1
        # 6 + 4 + 4 + 1 + 2 + 2 + 1 + 1 = 21 inputs
self.assertContains(response, "<input", count=21)
# 1 select per object = 3 selects
self.assertContains(response, "<select", count=4)
def test_post_messages(self):
        # Ticket 12707: Saving list editable changes should not show admin
        # action warnings.
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": str(self.per1.pk),
"form-1-gender": "2",
"form-1-id": str(self.per2.pk),
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": str(self.per3.pk),
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_person_changelist"), data, follow=True
)
self.assertEqual(len(response.context["messages"]), 1)
def test_post_submission(self):
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": str(self.per1.pk),
"form-1-gender": "2",
"form-1-id": str(self.per2.pk),
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": str(self.per3.pk),
"_save": "Save",
}
self.client.post(reverse("admin:admin_views_person_changelist"), data)
self.assertIs(Person.objects.get(name="John Mauchly").alive, False)
self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 2)
# test a filtered page
data = {
"form-TOTAL_FORMS": "2",
"form-INITIAL_FORMS": "2",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(self.per1.pk),
"form-0-gender": "1",
"form-0-alive": "checked",
"form-1-id": str(self.per3.pk),
"form-1-gender": "1",
"form-1-alive": "checked",
"_save": "Save",
}
self.client.post(
reverse("admin:admin_views_person_changelist") + "?gender__exact=1", data
)
self.assertIs(Person.objects.get(name="John Mauchly").alive, True)
# test a searched page
data = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "1",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(self.per1.pk),
"form-0-gender": "1",
"_save": "Save",
}
self.client.post(
reverse("admin:admin_views_person_changelist") + "?q=john", data
)
self.assertIs(Person.objects.get(name="John Mauchly").alive, False)
def test_non_field_errors(self):
"""
Non-field errors are displayed for each of the forms in the
changelist's formset.
"""
fd1 = FoodDelivery.objects.create(
reference="123", driver="bill", restaurant="thai"
)
fd2 = FoodDelivery.objects.create(
reference="456", driver="bill", restaurant="india"
)
fd3 = FoodDelivery.objects.create(
reference="789", driver="bill", restaurant="pizza"
)
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(fd1.id),
"form-0-reference": "123",
"form-0-driver": "bill",
"form-0-restaurant": "thai",
# Same data as above: Forbidden because of unique_together!
"form-1-id": str(fd2.id),
"form-1-reference": "456",
"form-1-driver": "bill",
"form-1-restaurant": "thai",
"form-2-id": str(fd3.id),
"form-2-reference": "789",
"form-2-driver": "bill",
"form-2-restaurant": "pizza",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_fooddelivery_changelist"), data
)
self.assertContains(
response,
'<tr><td colspan="4"><ul class="errorlist nonfield"><li>Food delivery '
"with this Driver and Restaurant already exists.</li></ul></td></tr>",
1,
html=True,
)
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(fd1.id),
"form-0-reference": "123",
"form-0-driver": "bill",
"form-0-restaurant": "thai",
# Same data as above: Forbidden because of unique_together!
"form-1-id": str(fd2.id),
"form-1-reference": "456",
"form-1-driver": "bill",
"form-1-restaurant": "thai",
            # Same driver/restaurant pair again: also forbidden.
"form-2-id": str(fd3.id),
"form-2-reference": "789",
"form-2-driver": "bill",
"form-2-restaurant": "thai",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_fooddelivery_changelist"), data
)
self.assertContains(
response,
'<tr><td colspan="4"><ul class="errorlist nonfield"><li>Food delivery '
"with this Driver and Restaurant already exists.</li></ul></td></tr>",
2,
html=True,
)
def test_non_form_errors(self):
# test if non-form errors are handled; ticket #12716
data = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "1",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(self.per2.pk),
"form-0-alive": "1",
"form-0-gender": "2",
# The form processing understands this as a list_editable "Save"
# and not an action "Go".
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_person_changelist"), data
)
self.assertContains(response, "Grace is not a Zombie")
def test_non_form_errors_is_errorlist(self):
# test if non-form errors are correctly handled; ticket #12878
data = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "1",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(self.per2.pk),
"form-0-alive": "1",
"form-0-gender": "2",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_person_changelist"), data
)
non_form_errors = response.context["cl"].formset.non_form_errors()
self.assertIsInstance(non_form_errors, ErrorList)
self.assertEqual(
str(non_form_errors),
str(ErrorList(["Grace is not a Zombie"], error_class="nonform")),
)
def test_list_editable_ordering(self):
collector = Collector.objects.create(id=1, name="Frederick Clegg")
Category.objects.create(id=1, order=1, collector=collector)
Category.objects.create(id=2, order=2, collector=collector)
Category.objects.create(id=3, order=0, collector=collector)
Category.objects.create(id=4, order=0, collector=collector)
# NB: The order values must be changed so that the items are reordered.
data = {
"form-TOTAL_FORMS": "4",
"form-INITIAL_FORMS": "4",
"form-MAX_NUM_FORMS": "0",
"form-0-order": "14",
"form-0-id": "1",
"form-0-collector": "1",
"form-1-order": "13",
"form-1-id": "2",
"form-1-collector": "1",
"form-2-order": "1",
"form-2-id": "3",
"form-2-collector": "1",
"form-3-order": "0",
"form-3-id": "4",
"form-3-collector": "1",
# The form processing understands this as a list_editable "Save"
# and not an action "Go".
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_category_changelist"), data
)
# Successful post will redirect
self.assertEqual(response.status_code, 302)
# The order values have been applied to the right objects
self.assertEqual(Category.objects.get(id=1).order, 14)
self.assertEqual(Category.objects.get(id=2).order, 13)
self.assertEqual(Category.objects.get(id=3).order, 1)
self.assertEqual(Category.objects.get(id=4).order, 0)
def test_list_editable_pagination(self):
"""
Pagination works for list_editable items.
"""
UnorderedObject.objects.create(id=1, name="Unordered object #1")
UnorderedObject.objects.create(id=2, name="Unordered object #2")
UnorderedObject.objects.create(id=3, name="Unordered object #3")
response = self.client.get(
reverse("admin:admin_views_unorderedobject_changelist")
)
self.assertContains(response, "Unordered object #3")
self.assertContains(response, "Unordered object #2")
self.assertNotContains(response, "Unordered object #1")
response = self.client.get(
reverse("admin:admin_views_unorderedobject_changelist") + "?p=2"
)
self.assertNotContains(response, "Unordered object #3")
self.assertNotContains(response, "Unordered object #2")
self.assertContains(response, "Unordered object #1")
def test_list_editable_action_submit(self):
# List editable changes should not be executed if the action "Go" button is
# used to submit the form.
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": "1",
"form-1-gender": "2",
"form-1-id": "2",
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": "3",
"index": "0",
"_selected_action": ["3"],
"action": ["", "delete_selected"],
}
self.client.post(reverse("admin:admin_views_person_changelist"), data)
self.assertIs(Person.objects.get(name="John Mauchly").alive, True)
self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 1)
def test_list_editable_action_choices(self):
# List editable changes should be executed if the "Save" button is
# used to submit the form - any action choices should be ignored.
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": str(self.per1.pk),
"form-1-gender": "2",
"form-1-id": str(self.per2.pk),
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": str(self.per3.pk),
"_save": "Save",
"_selected_action": ["1"],
"action": ["", "delete_selected"],
}
self.client.post(reverse("admin:admin_views_person_changelist"), data)
self.assertIs(Person.objects.get(name="John Mauchly").alive, False)
self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 2)
def test_list_editable_popup(self):
"""
Fields should not be list-editable in popups.
"""
response = self.client.get(reverse("admin:admin_views_person_changelist"))
self.assertNotEqual(response.context["cl"].list_editable, ())
response = self.client.get(
reverse("admin:admin_views_person_changelist") + "?%s" % IS_POPUP_VAR
)
self.assertEqual(response.context["cl"].list_editable, ())
def test_pk_hidden_fields(self):
"""
        Hidden pk fields aren't rendered in the table body; the corresponding
        human-readable value is shown instead. The hidden pk fields are
        rendered, but separately from the table and only once each.
"""
story1 = Story.objects.create(
title="The adventures of Guido", content="Once upon a time in Djangoland..."
)
story2 = Story.objects.create(
title="Crouching Tiger, Hidden Python",
content="The Python was sneaking into...",
)
response = self.client.get(reverse("admin:admin_views_story_changelist"))
# Only one hidden field, in a separate place than the table.
self.assertContains(response, 'id="id_form-0-id"', 1)
self.assertContains(response, 'id="id_form-1-id"', 1)
self.assertContains(
response,
'<div class="hiddenfields">\n'
'<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id">'
'<input type="hidden" name="form-1-id" value="%d" id="id_form-1-id">\n'
"</div>" % (story2.id, story1.id),
html=True,
)
self.assertContains(response, '<td class="field-id">%d</td>' % story1.id, 1)
self.assertContains(response, '<td class="field-id">%d</td>' % story2.id, 1)
def test_pk_hidden_fields_with_list_display_links(self):
"""Similarly as test_pk_hidden_fields, but when the hidden pk fields are
referenced in list_display_links.
Refs #12475.
"""
story1 = OtherStory.objects.create(
title="The adventures of Guido",
content="Once upon a time in Djangoland...",
)
story2 = OtherStory.objects.create(
title="Crouching Tiger, Hidden Python",
content="The Python was sneaking into...",
)
link1 = reverse("admin:admin_views_otherstory_change", args=(story1.pk,))
link2 = reverse("admin:admin_views_otherstory_change", args=(story2.pk,))
response = self.client.get(reverse("admin:admin_views_otherstory_changelist"))
# Only one hidden field, in a separate place than the table.
self.assertContains(response, 'id="id_form-0-id"', 1)
self.assertContains(response, 'id="id_form-1-id"', 1)
self.assertContains(
response,
'<div class="hiddenfields">\n'
'<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id">'
'<input type="hidden" name="form-1-id" value="%d" id="id_form-1-id">\n'
"</div>" % (story2.id, story1.id),
html=True,
)
self.assertContains(
response,
'<th class="field-id"><a href="%s">%d</a></th>' % (link1, story1.id),
1,
)
self.assertContains(
response,
'<th class="field-id"><a href="%s">%d</a></th>' % (link2, story2.id),
1,
)
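
# Many tests above hand-build modelformset POST data. The shape comes from the
# formset management form: every POST needs the TOTAL_FORMS, INITIAL_FORMS,
# and MAX_NUM_FORMS keys plus "<prefix>-<i>-<field>" entries per row. A
# hypothetical helper showing the pattern:
def _sketch_formset_payload(rows, prefix="form"):
    data = {
        f"{prefix}-TOTAL_FORMS": str(len(rows)),
        f"{prefix}-INITIAL_FORMS": str(len(rows)),
        f"{prefix}-MAX_NUM_FORMS": "0",
    }
    for i, row in enumerate(rows):
        for field, value in row.items():
            data[f"{prefix}-{i}-{field}"] = value
    return data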
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminSearchTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.joepublicuser = User.objects.create_user(
username="joepublic", password="secret"
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
cls.per1 = Person.objects.create(name="John Mauchly", gender=1, alive=True)
cls.per2 = Person.objects.create(name="Grace Hopper", gender=1, alive=False)
cls.per3 = Person.objects.create(name="Guido van Rossum", gender=1, alive=True)
Person.objects.create(name="John Doe", gender=1)
Person.objects.create(name='John O"Hara', gender=1)
Person.objects.create(name="John O'Hara", gender=1)
cls.t1 = Recommender.objects.create()
cls.t2 = Recommendation.objects.create(the_recommender=cls.t1)
cls.t3 = Recommender.objects.create()
cls.t4 = Recommendation.objects.create(the_recommender=cls.t3)
cls.tt1 = TitleTranslation.objects.create(title=cls.t1, text="Bar")
cls.tt2 = TitleTranslation.objects.create(title=cls.t2, text="Foo")
cls.tt3 = TitleTranslation.objects.create(title=cls.t3, text="Few")
cls.tt4 = TitleTranslation.objects.create(title=cls.t4, text="Bas")
def setUp(self):
self.client.force_login(self.superuser)
def test_search_on_sibling_models(self):
"A search that mentions sibling models"
response = self.client.get(
reverse("admin:admin_views_recommendation_changelist") + "?q=bar"
)
# confirm the search returned 1 object
self.assertContains(response, "\n1 recommendation\n")
def test_with_fk_to_field(self):
"""
The to_field GET parameter is preserved when a search is performed.
Refs #10918.
"""
response = self.client.get(
reverse("admin:auth_user_changelist") + "?q=joe&%s=id" % TO_FIELD_VAR
)
self.assertContains(response, "\n1 user\n")
self.assertContains(
response,
'<input type="hidden" name="%s" value="id">' % TO_FIELD_VAR,
html=True,
)
def test_exact_matches(self):
response = self.client.get(
reverse("admin:admin_views_recommendation_changelist") + "?q=bar"
)
# confirm the search returned one object
self.assertContains(response, "\n1 recommendation\n")
response = self.client.get(
reverse("admin:admin_views_recommendation_changelist") + "?q=ba"
)
# confirm the search returned zero objects
self.assertContains(response, "\n0 recommendations\n")
def test_beginning_matches(self):
response = self.client.get(
reverse("admin:admin_views_person_changelist") + "?q=Gui"
)
# confirm the search returned one object
self.assertContains(response, "\n1 person\n")
self.assertContains(response, "Guido")
response = self.client.get(
reverse("admin:admin_views_person_changelist") + "?q=uido"
)
# confirm the search returned zero objects
self.assertContains(response, "\n0 persons\n")
self.assertNotContains(response, "Guido")
def test_pluggable_search(self):
PluggableSearchPerson.objects.create(name="Bob", age=10)
PluggableSearchPerson.objects.create(name="Amy", age=20)
response = self.client.get(
reverse("admin:admin_views_pluggablesearchperson_changelist") + "?q=Bob"
)
# confirm the search returned one object
self.assertContains(response, "\n1 pluggable search person\n")
self.assertContains(response, "Bob")
response = self.client.get(
reverse("admin:admin_views_pluggablesearchperson_changelist") + "?q=20"
)
# confirm the search returned one object
self.assertContains(response, "\n1 pluggable search person\n")
self.assertContains(response, "Amy")
def test_reset_link(self):
"""
Test presence of reset link in search bar ("1 result (_x total_)").
"""
# 1 query for session + 1 for fetching user
# + 1 for filtered result + 1 for filtered count
# + 1 for total count
with self.assertNumQueries(5):
response = self.client.get(
reverse("admin:admin_views_person_changelist") + "?q=Gui"
)
self.assertContains(
response,
"""<span class="small quiet">1 result (<a href="?">6 total</a>)</span>""",
html=True,
)
def test_no_total_count(self):
"""
#8408 -- "Show all" should be displayed instead of the total count if
ModelAdmin.show_full_result_count is False.
"""
# 1 query for session + 1 for fetching user
# + 1 for filtered result + 1 for filtered count
with self.assertNumQueries(4):
response = self.client.get(
reverse("admin:admin_views_recommendation_changelist") + "?q=bar"
)
self.assertContains(
response,
"""<span class="small quiet">1 result (<a href="?">Show all</a>)</span>""",
html=True,
)
self.assertTrue(response.context["cl"].show_admin_actions)
def test_search_with_spaces(self):
url = reverse("admin:admin_views_person_changelist") + "?q=%s"
tests = [
('"John Doe"', 1),
("'John Doe'", 1),
("John Doe", 0),
('"John Doe" John', 1),
("'John Doe' John", 1),
("John Doe John", 0),
('"John Do"', 1),
("'John Do'", 1),
("'John O'Hara'", 0),
("'John O\\'Hara'", 1),
('"John O"Hara"', 0),
('"John O\\"Hara"', 1),
]
for search, hits in tests:
with self.subTest(search=search):
response = self.client.get(url % search)
self.assertContains(response, "\n%s person" % hits)
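
# test_search_with_spaces mirrors how the changelist splits search input:
# django.utils.text.smart_split() keeps quoted phrases together, and quoted
# bits are unescaped before matching. A standalone illustration of the split:
def _sketch_split_search_term(term):
    from django.utils.text import smart_split, unescape_string_literal

    bits = []
    for bit in smart_split(term):
        if bit.startswith(('"', "'")) and bit[0] == bit[-1]:
            bit = unescape_string_literal(bit)
        bits.append(bit)
    return bits  # '"John Doe" John' -> ['John Doe', 'John']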
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminInheritedInlinesTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_inline(self):
"""
Inline models which inherit from a common parent are correctly handled.
"""
foo_user = "foo username"
bar_user = "bar username"
name_re = re.compile(b'name="(.*?)"')
        # test the add case (GET the form)
response = self.client.get(reverse("admin:admin_views_persona_add"))
names = name_re.findall(response.content)
names.remove(b"csrfmiddlewaretoken")
# make sure we have no duplicate HTML names
self.assertEqual(len(names), len(set(names)))
        # test the add case (POST the data)
post_data = {
"name": "Test Name",
# inline data
"accounts-TOTAL_FORMS": "1",
"accounts-INITIAL_FORMS": "0",
"accounts-MAX_NUM_FORMS": "0",
"accounts-0-username": foo_user,
"accounts-2-TOTAL_FORMS": "1",
"accounts-2-INITIAL_FORMS": "0",
"accounts-2-MAX_NUM_FORMS": "0",
"accounts-2-0-username": bar_user,
}
response = self.client.post(reverse("admin:admin_views_persona_add"), post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
self.assertEqual(Persona.objects.count(), 1)
self.assertEqual(FooAccount.objects.count(), 1)
self.assertEqual(BarAccount.objects.count(), 1)
self.assertEqual(FooAccount.objects.all()[0].username, foo_user)
self.assertEqual(BarAccount.objects.all()[0].username, bar_user)
self.assertEqual(Persona.objects.all()[0].accounts.count(), 2)
persona_id = Persona.objects.all()[0].id
foo_id = FooAccount.objects.all()[0].id
bar_id = BarAccount.objects.all()[0].id
# test the edit case
response = self.client.get(
reverse("admin:admin_views_persona_change", args=(persona_id,))
)
names = name_re.findall(response.content)
names.remove(b"csrfmiddlewaretoken")
# make sure we have no duplicate HTML names
self.assertEqual(len(names), len(set(names)))
post_data = {
"name": "Test Name",
"accounts-TOTAL_FORMS": "2",
"accounts-INITIAL_FORMS": "1",
"accounts-MAX_NUM_FORMS": "0",
"accounts-0-username": "%s-1" % foo_user,
"accounts-0-account_ptr": str(foo_id),
"accounts-0-persona": str(persona_id),
"accounts-2-TOTAL_FORMS": "2",
"accounts-2-INITIAL_FORMS": "1",
"accounts-2-MAX_NUM_FORMS": "0",
"accounts-2-0-username": "%s-1" % bar_user,
"accounts-2-0-account_ptr": str(bar_id),
"accounts-2-0-persona": str(persona_id),
}
response = self.client.post(
reverse("admin:admin_views_persona_change", args=(persona_id,)), post_data
)
self.assertEqual(response.status_code, 302)
self.assertEqual(Persona.objects.count(), 1)
self.assertEqual(FooAccount.objects.count(), 1)
self.assertEqual(BarAccount.objects.count(), 1)
self.assertEqual(FooAccount.objects.all()[0].username, "%s-1" % foo_user)
self.assertEqual(BarAccount.objects.all()[0].username, "%s-1" % bar_user)
self.assertEqual(Persona.objects.all()[0].accounts.count(), 2)
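
# The "accounts" vs. "accounts-2" prefixes above arise because both inlines
# default to the same prefix and the admin de-duplicates repeats by appending
# a counter. A sketch of that numbering scheme (the helper is hypothetical;
# the real logic lives in ModelAdmin._create_formsets):
def _sketch_dedupe_prefixes(default_prefixes):
    from collections import defaultdict

    seen = defaultdict(int)
    result = []
    for prefix in default_prefixes:
        seen[prefix] += 1
        if seen[prefix] > 1:
            prefix = "%s-%s" % (prefix, seen[prefix])
        result.append(prefix)
    return result  # ["accounts", "accounts"] -> ["accounts", "accounts-2"]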
@override_settings(ROOT_URLCONF="admin_views.urls")
class TestCustomChangeList(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_custom_changelist(self):
"""
Validate that a custom ChangeList class can be used (#9749)
"""
# Insert some data
post_data = {"name": "First Gadget"}
response = self.client.post(reverse("admin:admin_views_gadget_add"), post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
        # Hit the page once to consume the queued messages.
response = self.client.get(reverse("admin:admin_views_gadget_changelist"))
# Data is still not visible on the page
response = self.client.get(reverse("admin:admin_views_gadget_changelist"))
self.assertNotContains(response, "First Gadget")
@override_settings(ROOT_URLCONF="admin_views.urls")
class TestInlineNotEditable(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_GET_parent_add(self):
"""
        The parent add view renders even when its inline isn't editable
        (InlineModelAdmin shouldn't break the page).
"""
response = self.client.get(reverse("admin:admin_views_parent_add"))
self.assertEqual(response.status_code, 200)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminCustomQuerysetTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
        cls.pks = [EmptyModel.objects.create().id for _ in range(3)]
def setUp(self):
self.client.force_login(self.superuser)
self.super_login = {
REDIRECT_FIELD_NAME: reverse("admin:index"),
"username": "super",
"password": "secret",
}
def test_changelist_view(self):
response = self.client.get(reverse("admin:admin_views_emptymodel_changelist"))
        # The custom ModelAdmin queryset hides the first object created.
        for i in self.pks:
if i > 1:
self.assertContains(response, "Primary key = %s" % i)
else:
self.assertNotContains(response, "Primary key = %s" % i)
def test_changelist_view_count_queries(self):
# create 2 Person objects
Person.objects.create(name="person1", gender=1)
Person.objects.create(name="person2", gender=2)
changelist_url = reverse("admin:admin_views_person_changelist")
# 5 queries are expected: 1 for the session, 1 for the user,
# 2 for the counts and 1 for the objects on the page
with self.assertNumQueries(5):
resp = self.client.get(changelist_url)
self.assertEqual(resp.context["selection_note"], "0 of 2 selected")
self.assertEqual(resp.context["selection_note_all"], "All 2 selected")
with self.assertNumQueries(5):
extra = {"q": "not_in_name"}
resp = self.client.get(changelist_url, extra)
self.assertEqual(resp.context["selection_note"], "0 of 0 selected")
self.assertEqual(resp.context["selection_note_all"], "All 0 selected")
with self.assertNumQueries(5):
extra = {"q": "person"}
resp = self.client.get(changelist_url, extra)
self.assertEqual(resp.context["selection_note"], "0 of 2 selected")
self.assertEqual(resp.context["selection_note_all"], "All 2 selected")
with self.assertNumQueries(5):
extra = {"gender__exact": "1"}
resp = self.client.get(changelist_url, extra)
self.assertEqual(resp.context["selection_note"], "0 of 1 selected")
self.assertEqual(resp.context["selection_note_all"], "1 selected")
def test_change_view(self):
for i in self.pks:
url = reverse("admin:admin_views_emptymodel_change", args=(i,))
response = self.client.get(url, follow=True)
if i > 1:
self.assertEqual(response.status_code, 200)
else:
self.assertRedirects(response, reverse("admin:index"))
self.assertEqual(
[m.message for m in response.context["messages"]],
["empty model with ID “1” doesn’t exist. Perhaps it was deleted?"],
)
def test_add_model_modeladmin_defer_qs(self):
# Test for #14529. defer() is used in ModelAdmin.get_queryset()
# model has __str__ method
self.assertEqual(CoverLetter.objects.count(), 0)
# Emulate model instance creation via the admin
post_data = {
"author": "Candidate, Best",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_coverletter_add"), post_data, follow=True
)
self.assertEqual(response.status_code, 200)
self.assertEqual(CoverLetter.objects.count(), 1)
# Message should contain non-ugly model verbose name
pk = CoverLetter.objects.all()[0].pk
self.assertContains(
response,
'<li class="success">The cover letter “<a href="%s">'
"Candidate, Best</a>” was added successfully.</li>"
% reverse("admin:admin_views_coverletter_change", args=(pk,)),
html=True,
)
# model has no __str__ method
self.assertEqual(ShortMessage.objects.count(), 0)
# Emulate model instance creation via the admin
post_data = {
"content": "What's this SMS thing?",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_shortmessage_add"), post_data, follow=True
)
self.assertEqual(response.status_code, 200)
self.assertEqual(ShortMessage.objects.count(), 1)
# Message should contain non-ugly model verbose name
sm = ShortMessage.objects.all()[0]
self.assertContains(
response,
'<li class="success">The short message “<a href="%s">'
"%s</a>” was added successfully.</li>"
% (reverse("admin:admin_views_shortmessage_change", args=(sm.pk,)), sm),
html=True,
)
def test_add_model_modeladmin_only_qs(self):
# Test for #14529. only() is used in ModelAdmin.get_queryset()
# model has __str__ method
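        # A minimal sketch of the assumed ModelAdmin configuration (the
        # field name passed to only() is illustrative):
        #     class TelegramAdmin(admin.ModelAdmin):
        #         def get_queryset(self, request):
        #             return super().get_queryset(request).only("title")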
self.assertEqual(Telegram.objects.count(), 0)
# Emulate model instance creation via the admin
post_data = {
"title": "Urgent telegram",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_telegram_add"), post_data, follow=True
)
self.assertEqual(response.status_code, 200)
self.assertEqual(Telegram.objects.count(), 1)
# Message should contain non-ugly model verbose name
pk = Telegram.objects.all()[0].pk
self.assertContains(
response,
'<li class="success">The telegram “<a href="%s">'
"Urgent telegram</a>” was added successfully.</li>"
% reverse("admin:admin_views_telegram_change", args=(pk,)),
html=True,
)
# model has no __str__ method
self.assertEqual(Paper.objects.count(), 0)
# Emulate model instance creation via the admin
post_data = {
"title": "My Modified Paper Title",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_paper_add"), post_data, follow=True
)
self.assertEqual(response.status_code, 200)
self.assertEqual(Paper.objects.count(), 1)
# Message should contain non-ugly model verbose name
p = Paper.objects.all()[0]
self.assertContains(
response,
'<li class="success">The paper “<a href="%s">'
"%s</a>” was added successfully.</li>"
% (reverse("admin:admin_views_paper_change", args=(p.pk,)), p),
html=True,
)
def test_edit_model_modeladmin_defer_qs(self):
# Test for #14529. defer() is used in ModelAdmin.get_queryset()
# model has __str__ method
cl = CoverLetter.objects.create(author="John Doe")
self.assertEqual(CoverLetter.objects.count(), 1)
response = self.client.get(
reverse("admin:admin_views_coverletter_change", args=(cl.pk,))
)
self.assertEqual(response.status_code, 200)
# Emulate model instance edit via the admin
post_data = {
"author": "John Doe II",
"_save": "Save",
}
url = reverse("admin:admin_views_coverletter_change", args=(cl.pk,))
response = self.client.post(url, post_data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(CoverLetter.objects.count(), 1)
# Message should contain non-ugly model verbose name. Instance
# representation is set by model's __str__()
self.assertContains(
response,
'<li class="success">The cover letter “<a href="%s">'
"John Doe II</a>” was changed successfully.</li>"
% reverse("admin:admin_views_coverletter_change", args=(cl.pk,)),
html=True,
)
# model has no __str__ method
sm = ShortMessage.objects.create(content="This is expensive")
self.assertEqual(ShortMessage.objects.count(), 1)
response = self.client.get(
reverse("admin:admin_views_shortmessage_change", args=(sm.pk,))
)
self.assertEqual(response.status_code, 200)
# Emulate model instance edit via the admin
post_data = {
"content": "Too expensive",
"_save": "Save",
}
url = reverse("admin:admin_views_shortmessage_change", args=(sm.pk,))
response = self.client.post(url, post_data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(ShortMessage.objects.count(), 1)
# Message should contain non-ugly model verbose name. The ugly(!)
# instance representation is set by __str__().
self.assertContains(
response,
'<li class="success">The short message “<a href="%s">'
"%s</a>” was changed successfully.</li>"
% (reverse("admin:admin_views_shortmessage_change", args=(sm.pk,)), sm),
html=True,
)
def test_edit_model_modeladmin_only_qs(self):
# Test for #14529. only() is used in ModelAdmin.get_queryset()
# model has __str__ method
t = Telegram.objects.create(title="First Telegram")
self.assertEqual(Telegram.objects.count(), 1)
response = self.client.get(
reverse("admin:admin_views_telegram_change", args=(t.pk,))
)
self.assertEqual(response.status_code, 200)
# Emulate model instance edit via the admin
post_data = {
"title": "Telegram without typo",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_telegram_change", args=(t.pk,)),
post_data,
follow=True,
)
self.assertEqual(response.status_code, 200)
self.assertEqual(Telegram.objects.count(), 1)
# Message should contain non-ugly model verbose name. The instance
# representation is set by model's __str__()
self.assertContains(
response,
'<li class="success">The telegram “<a href="%s">'
"Telegram without typo</a>” was changed successfully.</li>"
% reverse("admin:admin_views_telegram_change", args=(t.pk,)),
html=True,
)
# model has no __str__ method
p = Paper.objects.create(title="My Paper Title")
self.assertEqual(Paper.objects.count(), 1)
response = self.client.get(
reverse("admin:admin_views_paper_change", args=(p.pk,))
)
self.assertEqual(response.status_code, 200)
# Emulate model instance edit via the admin
post_data = {
"title": "My Modified Paper Title",
"_save": "Save",
}
response = self.client.post(
reverse("admin:admin_views_paper_change", args=(p.pk,)),
post_data,
follow=True,
)
self.assertEqual(response.status_code, 200)
self.assertEqual(Paper.objects.count(), 1)
# Message should contain non-ugly model verbose name. The ugly(!)
# instance representation is set by __str__().
self.assertContains(
response,
'<li class="success">The paper “<a href="%s">'
"%s</a>” was changed successfully.</li>"
% (reverse("admin:admin_views_paper_change", args=(p.pk,)), p),
html=True,
)
def test_history_view_custom_qs(self):
"""
Custom querysets are considered for the admin history view.
"""
self.client.post(reverse("admin:login"), self.super_login)
FilteredManager.objects.create(pk=1)
FilteredManager.objects.create(pk=2)
response = self.client.get(
reverse("admin:admin_views_filteredmanager_changelist")
)
self.assertContains(response, "PK=1")
self.assertContains(response, "PK=2")
self.assertEqual(
self.client.get(
reverse("admin:admin_views_filteredmanager_history", args=(1,))
).status_code,
200,
)
self.assertEqual(
self.client.get(
reverse("admin:admin_views_filteredmanager_history", args=(2,))
).status_code,
200,
)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminInlineFileUploadTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
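        # Create a 2 MiB (2**21 bytes) temporary file; only its path is kept
        # and reused below as the Picture's image value.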
file1 = tempfile.NamedTemporaryFile(suffix=".file1")
file1.write(b"a" * (2**21))
filename = file1.name
file1.close()
cls.gallery = Gallery.objects.create(name="Test Gallery")
cls.picture = Picture.objects.create(
name="Test Picture",
image=filename,
gallery=cls.gallery,
)
def setUp(self):
self.client.force_login(self.superuser)
def test_form_has_multipart_enctype(self):
response = self.client.get(
reverse("admin:admin_views_gallery_change", args=(self.gallery.id,))
)
self.assertIs(response.context["has_file_field"], True)
self.assertContains(response, MULTIPART_ENCTYPE)
def test_inline_file_upload_edit_validation_error_post(self):
"""
Inline file uploads correctly display prior data (#10002).
"""
post_data = {
"name": "Test Gallery",
"pictures-TOTAL_FORMS": "2",
"pictures-INITIAL_FORMS": "1",
"pictures-MAX_NUM_FORMS": "0",
"pictures-0-id": str(self.picture.id),
"pictures-0-gallery": str(self.gallery.id),
"pictures-0-name": "Test Picture",
"pictures-0-image": "",
"pictures-1-id": "",
"pictures-1-gallery": str(self.gallery.id),
"pictures-1-name": "Test Picture 2",
"pictures-1-image": "",
}
response = self.client.post(
reverse("admin:admin_views_gallery_change", args=(self.gallery.id,)),
post_data,
)
self.assertContains(response, b"Currently")
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminInlineTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.collector = Collector.objects.create(pk=1, name="John Fowles")
def setUp(self):
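        # Formset management data (TOTAL/INITIAL/MAX_NUM_FORMS) plus three
        # blank forms for each inline; individual tests fill in only the
        # fields they exercise.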
self.post_data = {
"name": "Test Name",
"widget_set-TOTAL_FORMS": "3",
"widget_set-INITIAL_FORMS": "0",
"widget_set-MAX_NUM_FORMS": "0",
"widget_set-0-id": "",
"widget_set-0-owner": "1",
"widget_set-0-name": "",
"widget_set-1-id": "",
"widget_set-1-owner": "1",
"widget_set-1-name": "",
"widget_set-2-id": "",
"widget_set-2-owner": "1",
"widget_set-2-name": "",
"doohickey_set-TOTAL_FORMS": "3",
"doohickey_set-INITIAL_FORMS": "0",
"doohickey_set-MAX_NUM_FORMS": "0",
"doohickey_set-0-owner": "1",
"doohickey_set-0-code": "",
"doohickey_set-0-name": "",
"doohickey_set-1-owner": "1",
"doohickey_set-1-code": "",
"doohickey_set-1-name": "",
"doohickey_set-2-owner": "1",
"doohickey_set-2-code": "",
"doohickey_set-2-name": "",
"grommet_set-TOTAL_FORMS": "3",
"grommet_set-INITIAL_FORMS": "0",
"grommet_set-MAX_NUM_FORMS": "0",
"grommet_set-0-code": "",
"grommet_set-0-owner": "1",
"grommet_set-0-name": "",
"grommet_set-1-code": "",
"grommet_set-1-owner": "1",
"grommet_set-1-name": "",
"grommet_set-2-code": "",
"grommet_set-2-owner": "1",
"grommet_set-2-name": "",
"whatsit_set-TOTAL_FORMS": "3",
"whatsit_set-INITIAL_FORMS": "0",
"whatsit_set-MAX_NUM_FORMS": "0",
"whatsit_set-0-owner": "1",
"whatsit_set-0-index": "",
"whatsit_set-0-name": "",
"whatsit_set-1-owner": "1",
"whatsit_set-1-index": "",
"whatsit_set-1-name": "",
"whatsit_set-2-owner": "1",
"whatsit_set-2-index": "",
"whatsit_set-2-name": "",
"fancydoodad_set-TOTAL_FORMS": "3",
"fancydoodad_set-INITIAL_FORMS": "0",
"fancydoodad_set-MAX_NUM_FORMS": "0",
"fancydoodad_set-0-doodad_ptr": "",
"fancydoodad_set-0-owner": "1",
"fancydoodad_set-0-name": "",
"fancydoodad_set-0-expensive": "on",
"fancydoodad_set-1-doodad_ptr": "",
"fancydoodad_set-1-owner": "1",
"fancydoodad_set-1-name": "",
"fancydoodad_set-1-expensive": "on",
"fancydoodad_set-2-doodad_ptr": "",
"fancydoodad_set-2-owner": "1",
"fancydoodad_set-2-name": "",
"fancydoodad_set-2-expensive": "on",
"category_set-TOTAL_FORMS": "3",
"category_set-INITIAL_FORMS": "0",
"category_set-MAX_NUM_FORMS": "0",
"category_set-0-order": "",
"category_set-0-id": "",
"category_set-0-collector": "1",
"category_set-1-order": "",
"category_set-1-id": "",
"category_set-1-collector": "1",
"category_set-2-order": "",
"category_set-2-id": "",
"category_set-2-collector": "1",
}
self.client.force_login(self.superuser)
def test_simple_inline(self):
"A simple model can be saved as inlines"
# First add a new inline
self.post_data["widget_set-0-name"] = "Widget 1"
collector_url = reverse(
"admin:admin_views_collector_change", args=(self.collector.pk,)
)
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Widget.objects.count(), 1)
self.assertEqual(Widget.objects.all()[0].name, "Widget 1")
widget_id = Widget.objects.all()[0].id
# The PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="widget_set-0-id"')
# No file or image fields, no enctype on the forms
self.assertIs(response.context["has_file_field"], False)
self.assertNotContains(response, MULTIPART_ENCTYPE)
# Now resave that inline
self.post_data["widget_set-INITIAL_FORMS"] = "1"
self.post_data["widget_set-0-id"] = str(widget_id)
self.post_data["widget_set-0-name"] = "Widget 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Widget.objects.count(), 1)
self.assertEqual(Widget.objects.all()[0].name, "Widget 1")
# Now modify that inline
self.post_data["widget_set-INITIAL_FORMS"] = "1"
self.post_data["widget_set-0-id"] = str(widget_id)
self.post_data["widget_set-0-name"] = "Widget 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Widget.objects.count(), 1)
self.assertEqual(Widget.objects.all()[0].name, "Widget 1 Updated")
def test_explicit_autofield_inline(self):
"""
        A model with an explicit autofield primary key can be saved as an inline.
"""
# First add a new inline
self.post_data["grommet_set-0-name"] = "Grommet 1"
collector_url = reverse(
"admin:admin_views_collector_change", args=(self.collector.pk,)
)
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Grommet.objects.count(), 1)
self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1")
# The PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="grommet_set-0-code"')
# Now resave that inline
self.post_data["grommet_set-INITIAL_FORMS"] = "1"
self.post_data["grommet_set-0-code"] = str(Grommet.objects.all()[0].code)
self.post_data["grommet_set-0-name"] = "Grommet 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Grommet.objects.count(), 1)
self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1")
# Now modify that inline
self.post_data["grommet_set-INITIAL_FORMS"] = "1"
self.post_data["grommet_set-0-code"] = str(Grommet.objects.all()[0].code)
self.post_data["grommet_set-0-name"] = "Grommet 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Grommet.objects.count(), 1)
self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1 Updated")
def test_char_pk_inline(self):
"A model with a character PK can be saved as inlines. Regression for #10992"
# First add a new inline
self.post_data["doohickey_set-0-code"] = "DH1"
self.post_data["doohickey_set-0-name"] = "Doohickey 1"
collector_url = reverse(
"admin:admin_views_collector_change", args=(self.collector.pk,)
)
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(DooHickey.objects.count(), 1)
self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1")
# The PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="doohickey_set-0-code"')
# Now resave that inline
self.post_data["doohickey_set-INITIAL_FORMS"] = "1"
self.post_data["doohickey_set-0-code"] = "DH1"
self.post_data["doohickey_set-0-name"] = "Doohickey 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(DooHickey.objects.count(), 1)
self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1")
# Now modify that inline
self.post_data["doohickey_set-INITIAL_FORMS"] = "1"
self.post_data["doohickey_set-0-code"] = "DH1"
self.post_data["doohickey_set-0-name"] = "Doohickey 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(DooHickey.objects.count(), 1)
self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1 Updated")
def test_integer_pk_inline(self):
"A model with an integer PK can be saved as inlines. Regression for #10992"
# First add a new inline
self.post_data["whatsit_set-0-index"] = "42"
self.post_data["whatsit_set-0-name"] = "Whatsit 1"
collector_url = reverse(
"admin:admin_views_collector_change", args=(self.collector.pk,)
)
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Whatsit.objects.count(), 1)
self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1")
# The PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="whatsit_set-0-index"')
# Now resave that inline
self.post_data["whatsit_set-INITIAL_FORMS"] = "1"
self.post_data["whatsit_set-0-index"] = "42"
self.post_data["whatsit_set-0-name"] = "Whatsit 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Whatsit.objects.count(), 1)
self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1")
# Now modify that inline
self.post_data["whatsit_set-INITIAL_FORMS"] = "1"
self.post_data["whatsit_set-0-index"] = "42"
self.post_data["whatsit_set-0-name"] = "Whatsit 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Whatsit.objects.count(), 1)
self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1 Updated")
def test_inherited_inline(self):
"An inherited model can be saved as inlines. Regression for #11042"
# First add a new inline
self.post_data["fancydoodad_set-0-name"] = "Fancy Doodad 1"
collector_url = reverse(
"admin:admin_views_collector_change", args=(self.collector.pk,)
)
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(FancyDoodad.objects.count(), 1)
self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1")
doodad_pk = FancyDoodad.objects.all()[0].pk
# The PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="fancydoodad_set-0-doodad_ptr"')
# Now resave that inline
self.post_data["fancydoodad_set-INITIAL_FORMS"] = "1"
self.post_data["fancydoodad_set-0-doodad_ptr"] = str(doodad_pk)
self.post_data["fancydoodad_set-0-name"] = "Fancy Doodad 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(FancyDoodad.objects.count(), 1)
self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1")
# Now modify that inline
self.post_data["fancydoodad_set-INITIAL_FORMS"] = "1"
self.post_data["fancydoodad_set-0-doodad_ptr"] = str(doodad_pk)
self.post_data["fancydoodad_set-0-name"] = "Fancy Doodad 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(FancyDoodad.objects.count(), 1)
self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1 Updated")
def test_ordered_inline(self):
"""
        An inline with an editable ordering field is updated correctly.
"""
# Create some objects with an initial ordering
Category.objects.create(id=1, order=1, collector=self.collector)
Category.objects.create(id=2, order=2, collector=self.collector)
Category.objects.create(id=3, order=0, collector=self.collector)
Category.objects.create(id=4, order=0, collector=self.collector)
# NB: The order values must be changed so that the items are reordered.
self.post_data.update(
{
"name": "Frederick Clegg",
"category_set-TOTAL_FORMS": "7",
"category_set-INITIAL_FORMS": "4",
"category_set-MAX_NUM_FORMS": "0",
"category_set-0-order": "14",
"category_set-0-id": "1",
"category_set-0-collector": "1",
"category_set-1-order": "13",
"category_set-1-id": "2",
"category_set-1-collector": "1",
"category_set-2-order": "1",
"category_set-2-id": "3",
"category_set-2-collector": "1",
"category_set-3-order": "0",
"category_set-3-id": "4",
"category_set-3-collector": "1",
"category_set-4-order": "",
"category_set-4-id": "",
"category_set-4-collector": "1",
"category_set-5-order": "",
"category_set-5-id": "",
"category_set-5-collector": "1",
"category_set-6-order": "",
"category_set-6-id": "",
"category_set-6-collector": "1",
}
)
collector_url = reverse(
"admin:admin_views_collector_change", args=(self.collector.pk,)
)
response = self.client.post(collector_url, self.post_data)
# Successful post will redirect
self.assertEqual(response.status_code, 302)
# The order values have been applied to the right objects
self.assertEqual(self.collector.category_set.count(), 4)
self.assertEqual(Category.objects.get(id=1).order, 14)
self.assertEqual(Category.objects.get(id=2).order, 13)
self.assertEqual(Category.objects.get(id=3).order, 1)
self.assertEqual(Category.objects.get(id=4).order, 0)
@override_settings(ROOT_URLCONF="admin_views.urls")
class NeverCacheTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.s1 = Section.objects.create(name="Test section")
def setUp(self):
self.client.force_login(self.superuser)
def test_admin_index(self):
"Check the never-cache status of the main index"
response = self.client.get(reverse("admin:index"))
self.assertEqual(get_max_age(response), 0)
def test_app_index(self):
"Check the never-cache status of an application index"
response = self.client.get(reverse("admin:app_list", args=("admin_views",)))
self.assertEqual(get_max_age(response), 0)
def test_model_index(self):
"Check the never-cache status of a model index"
response = self.client.get(reverse("admin:admin_views_fabric_changelist"))
self.assertEqual(get_max_age(response), 0)
def test_model_add(self):
"Check the never-cache status of a model add page"
response = self.client.get(reverse("admin:admin_views_fabric_add"))
self.assertEqual(get_max_age(response), 0)
def test_model_view(self):
"Check the never-cache status of a model edit page"
response = self.client.get(
reverse("admin:admin_views_section_change", args=(self.s1.pk,))
)
self.assertEqual(get_max_age(response), 0)
def test_model_history(self):
"Check the never-cache status of a model history page"
response = self.client.get(
reverse("admin:admin_views_section_history", args=(self.s1.pk,))
)
self.assertEqual(get_max_age(response), 0)
def test_model_delete(self):
"Check the never-cache status of a model delete page"
response = self.client.get(
reverse("admin:admin_views_section_delete", args=(self.s1.pk,))
)
self.assertEqual(get_max_age(response), 0)
def test_login(self):
"Check the never-cache status of login views"
self.client.logout()
response = self.client.get(reverse("admin:index"))
self.assertEqual(get_max_age(response), 0)
def test_logout(self):
"Check the never-cache status of logout view"
response = self.client.post(reverse("admin:logout"))
self.assertEqual(get_max_age(response), 0)
def test_password_change(self):
"Check the never-cache status of the password change view"
self.client.logout()
response = self.client.get(reverse("admin:password_change"))
self.assertIsNone(get_max_age(response))
def test_password_change_done(self):
"Check the never-cache status of the password change done view"
response = self.client.get(reverse("admin:password_change_done"))
self.assertIsNone(get_max_age(response))
def test_JS_i18n(self):
"Check the never-cache status of the JavaScript i18n view"
response = self.client.get(reverse("admin:jsi18n"))
self.assertIsNone(get_max_age(response))
@override_settings(ROOT_URLCONF="admin_views.urls")
class PrePopulatedTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_prepopulated_on(self):
response = self.client.get(reverse("admin:admin_views_prepopulatedpost_add"))
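        # The prepopulated_fields configuration is serialized as JSON into an
        # HTML data attribute, hence the &quot;-escaped snippets below.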
self.assertContains(response, ""id": "#id_slug"")
self.assertContains(
response, ""dependency_ids": ["#id_title"]"
)
self.assertContains(
response,
""id": "#id_prepopulatedsubpost_set-0-subslug"",
)
def test_prepopulated_off(self):
response = self.client.get(
reverse("admin:admin_views_prepopulatedpost_change", args=(self.p1.pk,))
)
self.assertContains(response, "A Long Title")
self.assertNotContains(response, ""id": "#id_slug"")
self.assertNotContains(
response, ""dependency_ids": ["#id_title"]"
)
self.assertNotContains(
response,
""id": "#id_prepopulatedsubpost_set-0-subslug"",
)
@override_settings(USE_THOUSAND_SEPARATOR=True)
def test_prepopulated_maxlength_localized(self):
"""
Regression test for #15938: if USE_THOUSAND_SEPARATOR is set, make sure
that maxLength (in the JavaScript) is rendered without separators.
"""
response = self.client.get(
reverse("admin:admin_views_prepopulatedpostlargeslug_add")
)
self.assertContains(response, ""maxLength": 1000") # instead of 1,000
def test_view_only_add_form(self):
"""
        PrePopulatedPostReadOnlyAdmin.prepopulated_fields includes 'slug',
        which is prepopulated in the add view even though
        ModelAdmin.has_change_permission() returns False.
"""
response = self.client.get(reverse("admin7:admin_views_prepopulatedpost_add"))
self.assertContains(response, "data-prepopulated-fields=")
self.assertContains(response, ""id": "#id_slug"")
def test_view_only_change_form(self):
"""
PrePopulatedPostReadOnlyAdmin.prepopulated_fields includes 'slug'. That
doesn't break a view-only change view.
"""
response = self.client.get(
reverse("admin7:admin_views_prepopulatedpost_change", args=(self.p1.pk,))
)
self.assertContains(response, 'data-prepopulated-fields="[]"')
self.assertContains(response, '<div class="readonly">%s</div>' % self.p1.slug)
@override_settings(ROOT_URLCONF="admin_views.urls")
class SeleniumTests(AdminSeleniumTestCase):
available_apps = ["admin_views"] + AdminSeleniumTestCase.available_apps
def setUp(self):
self.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
self.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
def test_login_button_centered(self):
from selenium.webdriver.common.by import By
self.selenium.get(self.live_server_url + reverse("admin:login"))
button = self.selenium.find_element(By.CSS_SELECTOR, ".submit-row input")
offset_left = button.get_property("offsetLeft")
offset_right = button.get_property("offsetParent").get_property(
"offsetWidth"
) - (offset_left + button.get_property("offsetWidth"))
# Use assertAlmostEqual to avoid pixel rounding errors.
self.assertAlmostEqual(offset_left, offset_right, delta=3)
def test_prepopulated_fields(self):
"""
The JavaScript-automated prepopulated fields work with the main form
and with stacked and tabular inlines.
Refs #13068, #9264, #9983, #9784.
"""
from selenium.webdriver.common.by import By
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
self.selenium.get(
self.live_server_url + reverse("admin:admin_views_mainprepopulated_add")
)
self.wait_for(".select2")
# Main form ----------------------------------------------------------
self.selenium.find_element(By.ID, "id_pubdate").send_keys("2012-02-18")
self.select_option("#id_status", "option two")
self.selenium.find_element(By.ID, "id_name").send_keys(
" the mAin nÀMë and it's awεšomeıııİ"
)
slug1 = self.selenium.find_element(By.ID, "id_slug1").get_attribute("value")
slug2 = self.selenium.find_element(By.ID, "id_slug2").get_attribute("value")
slug3 = self.selenium.find_element(By.ID, "id_slug3").get_attribute("value")
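        # Judging by the assertions below, slug1 is prepopulated from name +
        # pubdate, slug2 from status + name, and slug3 from name alone with
        # Unicode characters preserved (allow_unicode-style slugification).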
self.assertEqual(slug1, "the-main-name-and-its-awesomeiiii-2012-02-18")
self.assertEqual(slug2, "option-two-the-main-name-and-its-awesomeiiii")
self.assertEqual(
slug3, "the-main-n\xe0m\xeb-and-its-aw\u03b5\u0161ome\u0131\u0131\u0131i"
)
# Stacked inlines with fieldsets -------------------------------------
# Initial inline
self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-0-pubdate"
).send_keys("2011-12-17")
self.select_option("#id_relatedprepopulated_set-0-status", "option one")
self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-0-name"
).send_keys(" here is a sŤāÇkeð inline ! ")
slug1 = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-0-slug1"
).get_attribute("value")
slug2 = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-0-slug2"
).get_attribute("value")
self.assertEqual(slug1, "here-is-a-stacked-inline-2011-12-17")
self.assertEqual(slug2, "option-one-here-is-a-stacked-inline")
initial_select2_inputs = self.selenium.find_elements(
By.CLASS_NAME, "select2-selection"
)
# Inline formsets have empty/invisible forms.
# Only the 4 visible select2 inputs are initialized.
num_initial_select2_inputs = len(initial_select2_inputs)
self.assertEqual(num_initial_select2_inputs, 4)
# Add an inline
self.selenium.find_elements(By.LINK_TEXT, "Add another Related prepopulated")[
0
].click()
self.assertEqual(
len(self.selenium.find_elements(By.CLASS_NAME, "select2-selection")),
num_initial_select2_inputs + 2,
)
self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-1-pubdate"
).send_keys("1999-01-25")
self.select_option("#id_relatedprepopulated_set-1-status", "option two")
self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-1-name"
).send_keys(
" now you haVe anöther sŤāÇkeð inline with a very ... "
"loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooog "
"text... "
)
slug1 = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-1-slug1"
).get_attribute("value")
slug2 = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-1-slug2"
).get_attribute("value")
# 50 characters maximum for slug1 field
self.assertEqual(slug1, "now-you-have-another-stacked-inline-with-a-very-lo")
# 60 characters maximum for slug2 field
self.assertEqual(
slug2, "option-two-now-you-have-another-stacked-inline-with-a-very-l"
)
# Tabular inlines ----------------------------------------------------
# Initial inline
element = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-0-status"
)
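        # Scroll the tabular inline into view before interacting with it.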
self.selenium.execute_script("window.scrollTo(0, %s);" % element.location["y"])
self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-0-pubdate"
).send_keys("1234-12-07")
self.select_option("#id_relatedprepopulated_set-2-0-status", "option two")
self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-0-name"
).send_keys("And now, with a tÃbűlaŘ inline !!!")
slug1 = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-0-slug1"
).get_attribute("value")
slug2 = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-0-slug2"
).get_attribute("value")
self.assertEqual(slug1, "and-now-with-a-tabular-inline-1234-12-07")
self.assertEqual(slug2, "option-two-and-now-with-a-tabular-inline")
# Add an inline
# Button may be outside the browser frame.
element = self.selenium.find_elements(
By.LINK_TEXT, "Add another Related prepopulated"
)[1]
self.selenium.execute_script("window.scrollTo(0, %s);" % element.location["y"])
element.click()
self.assertEqual(
len(self.selenium.find_elements(By.CLASS_NAME, "select2-selection")),
num_initial_select2_inputs + 4,
)
self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-1-pubdate"
).send_keys("1981-08-22")
self.select_option("#id_relatedprepopulated_set-2-1-status", "option one")
self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-1-name"
).send_keys(r'tÃbűlaŘ inline with ignored ;"&*^\%$#@-/`~ characters')
slug1 = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-1-slug1"
).get_attribute("value")
slug2 = self.selenium.find_element(
By.ID, "id_relatedprepopulated_set-2-1-slug2"
).get_attribute("value")
self.assertEqual(slug1, "tabular-inline-with-ignored-characters-1981-08-22")
self.assertEqual(slug2, "option-one-tabular-inline-with-ignored-characters")
# Add an inline without an initial inline.
# The button is outside of the browser frame.
self.selenium.execute_script("window.scrollTo(0, document.body.scrollHeight);")
self.selenium.find_elements(By.LINK_TEXT, "Add another Related prepopulated")[
2
].click()
self.assertEqual(
len(self.selenium.find_elements(By.CLASS_NAME, "select2-selection")),
num_initial_select2_inputs + 6,
)
# Stacked Inlines without fieldsets ----------------------------------
# Initial inline.
row_id = "id_relatedprepopulated_set-4-0-"
self.selenium.find_element(By.ID, f"{row_id}pubdate").send_keys("2011-12-12")
self.select_option(f"#{row_id}status", "option one")
self.selenium.find_element(By.ID, f"{row_id}name").send_keys(
" sŤāÇkeð inline ! "
)
slug1 = self.selenium.find_element(By.ID, f"{row_id}slug1").get_attribute(
"value"
)
slug2 = self.selenium.find_element(By.ID, f"{row_id}slug2").get_attribute(
"value"
)
self.assertEqual(slug1, "stacked-inline-2011-12-12")
self.assertEqual(slug2, "option-one")
# Add inline.
self.selenium.find_elements(
By.LINK_TEXT,
"Add another Related prepopulated",
)[3].click()
row_id = "id_relatedprepopulated_set-4-1-"
self.selenium.find_element(By.ID, f"{row_id}pubdate").send_keys("1999-01-20")
self.select_option(f"#{row_id}status", "option two")
self.selenium.find_element(By.ID, f"{row_id}name").send_keys(
" now you haVe anöther sŤāÇkeð inline with a very loooong "
)
slug1 = self.selenium.find_element(By.ID, f"{row_id}slug1").get_attribute(
"value"
)
slug2 = self.selenium.find_element(By.ID, f"{row_id}slug2").get_attribute(
"value"
)
self.assertEqual(slug1, "now-you-have-another-stacked-inline-with-a-very-lo")
self.assertEqual(slug2, "option-two")
# Save and check that everything is properly stored in the database
with self.wait_page_loaded():
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.assertEqual(MainPrepopulated.objects.count(), 1)
MainPrepopulated.objects.get(
name=" the mAin nÀMë and it's awεšomeıııİ",
pubdate="2012-02-18",
status="option two",
slug1="the-main-name-and-its-awesomeiiii-2012-02-18",
slug2="option-two-the-main-name-and-its-awesomeiiii",
slug3="the-main-nàmë-and-its-awεšomeıııi",
)
self.assertEqual(RelatedPrepopulated.objects.count(), 6)
RelatedPrepopulated.objects.get(
name=" here is a sŤāÇkeð inline ! ",
pubdate="2011-12-17",
status="option one",
slug1="here-is-a-stacked-inline-2011-12-17",
slug2="option-one-here-is-a-stacked-inline",
)
RelatedPrepopulated.objects.get(
# 75 characters in name field
name=(
" now you haVe anöther sŤāÇkeð inline with a very ... "
"loooooooooooooooooo"
),
pubdate="1999-01-25",
status="option two",
slug1="now-you-have-another-stacked-inline-with-a-very-lo",
slug2="option-two-now-you-have-another-stacked-inline-with-a-very-l",
)
RelatedPrepopulated.objects.get(
name="And now, with a tÃbűlaŘ inline !!!",
pubdate="1234-12-07",
status="option two",
slug1="and-now-with-a-tabular-inline-1234-12-07",
slug2="option-two-and-now-with-a-tabular-inline",
)
RelatedPrepopulated.objects.get(
name=r'tÃbűlaŘ inline with ignored ;"&*^\%$#@-/`~ characters',
pubdate="1981-08-22",
status="option one",
slug1="tabular-inline-with-ignored-characters-1981-08-22",
slug2="option-one-tabular-inline-with-ignored-characters",
)
def test_populate_existing_object(self):
"""
The prepopulation works for existing objects too, as long as
the original field is empty (#19082).
"""
from selenium.webdriver.common.by import By
# Slugs are empty to start with.
item = MainPrepopulated.objects.create(
name=" this is the mAin nÀMë",
pubdate="2012-02-18",
status="option two",
slug1="",
slug2="",
)
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
object_url = self.live_server_url + reverse(
"admin:admin_views_mainprepopulated_change", args=(item.id,)
)
self.selenium.get(object_url)
self.selenium.find_element(By.ID, "id_name").send_keys(" the best")
# The slugs got prepopulated since they were originally empty
slug1 = self.selenium.find_element(By.ID, "id_slug1").get_attribute("value")
slug2 = self.selenium.find_element(By.ID, "id_slug2").get_attribute("value")
self.assertEqual(slug1, "this-is-the-main-name-the-best-2012-02-18")
self.assertEqual(slug2, "option-two-this-is-the-main-name-the-best")
# Save the object
with self.wait_page_loaded():
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.selenium.get(object_url)
self.selenium.find_element(By.ID, "id_name").send_keys(" hello")
        # The slugs didn't change since they were no longer empty.
slug1 = self.selenium.find_element(By.ID, "id_slug1").get_attribute("value")
slug2 = self.selenium.find_element(By.ID, "id_slug2").get_attribute("value")
self.assertEqual(slug1, "this-is-the-main-name-the-best-2012-02-18")
self.assertEqual(slug2, "option-two-this-is-the-main-name-the-best")
def test_collapsible_fieldset(self):
"""
        The 'collapse' class in a fieldsets definition allows the
        corresponding field section to be shown and hidden.
"""
from selenium.webdriver.common.by import By
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
self.selenium.get(
self.live_server_url + reverse("admin:admin_views_article_add")
)
self.assertFalse(self.selenium.find_element(By.ID, "id_title").is_displayed())
self.selenium.find_elements(By.LINK_TEXT, "Show")[0].click()
self.assertTrue(self.selenium.find_element(By.ID, "id_title").is_displayed())
self.assertEqual(
self.selenium.find_element(By.ID, "fieldsetcollapser0").text, "Hide"
)
def test_selectbox_height_collapsible_fieldset(self):
from selenium.webdriver.common.by import By
self.admin_login(
username="super",
password="secret",
login_url=reverse("admin7:index"),
)
url = self.live_server_url + reverse("admin7:admin_views_pizza_add")
self.selenium.get(url)
self.selenium.find_elements(By.LINK_TEXT, "Show")[0].click()
from_filter_box = self.selenium.find_element(By.ID, "id_toppings_filter")
from_box = self.selenium.find_element(By.ID, "id_toppings_from")
        to_filter_box = self.selenium.find_element(
            By.ID, "id_toppings_filter_selected"
        )
to_box = self.selenium.find_element(By.ID, "id_toppings_to")
self.assertEqual(
(
to_filter_box.get_property("offsetHeight")
+ to_box.get_property("offsetHeight")
),
(
from_filter_box.get_property("offsetHeight")
+ from_box.get_property("offsetHeight")
),
)
def test_selectbox_height_not_collapsible_fieldset(self):
from selenium.webdriver.common.by import By
self.admin_login(
username="super",
password="secret",
login_url=reverse("admin7:index"),
)
url = self.live_server_url + reverse("admin7:admin_views_question_add")
self.selenium.get(url)
from_filter_box = self.selenium.find_element(
By.ID, "id_related_questions_filter"
)
from_box = self.selenium.find_element(By.ID, "id_related_questions_from")
to_filter_box = self.selenium.find_element(
By.ID, "id_related_questions_filter_selected"
)
to_box = self.selenium.find_element(By.ID, "id_related_questions_to")
self.assertEqual(
(
to_filter_box.get_property("offsetHeight")
+ to_box.get_property("offsetHeight")
),
(
from_filter_box.get_property("offsetHeight")
+ from_box.get_property("offsetHeight")
),
)
def test_first_field_focus(self):
"""JavaScript-assisted auto-focus on first usable form field."""
from selenium.webdriver.common.by import By
# First form field has a single widget
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
with self.wait_page_loaded():
self.selenium.get(
self.live_server_url + reverse("admin:admin_views_picture_add")
)
self.assertEqual(
self.selenium.switch_to.active_element,
self.selenium.find_element(By.ID, "id_name"),
)
# First form field has a MultiWidget
with self.wait_page_loaded():
self.selenium.get(
self.live_server_url + reverse("admin:admin_views_reservation_add")
)
self.assertEqual(
self.selenium.switch_to.active_element,
self.selenium.find_element(By.ID, "id_start_date_0"),
)
def test_cancel_delete_confirmation(self):
"Cancelling the deletion of an object takes the user back one page."
from selenium.webdriver.common.by import By
pizza = Pizza.objects.create(name="Double Cheese")
url = reverse("admin:admin_views_pizza_change", args=(pizza.id,))
full_url = self.live_server_url + url
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
self.selenium.get(full_url)
self.selenium.find_element(By.CLASS_NAME, "deletelink").click()
# Click 'cancel' on the delete page.
self.selenium.find_element(By.CLASS_NAME, "cancel-link").click()
# Wait until we're back on the change page.
self.wait_for_text("#content h1", "Change pizza")
self.assertEqual(self.selenium.current_url, full_url)
self.assertEqual(Pizza.objects.count(), 1)
def test_cancel_delete_related_confirmation(self):
"""
Cancelling the deletion of an object with relations takes the user back
one page.
"""
from selenium.webdriver.common.by import By
pizza = Pizza.objects.create(name="Double Cheese")
topping1 = Topping.objects.create(name="Cheddar")
topping2 = Topping.objects.create(name="Mozzarella")
pizza.toppings.add(topping1, topping2)
url = reverse("admin:admin_views_pizza_change", args=(pizza.id,))
full_url = self.live_server_url + url
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
self.selenium.get(full_url)
self.selenium.find_element(By.CLASS_NAME, "deletelink").click()
# Click 'cancel' on the delete page.
self.selenium.find_element(By.CLASS_NAME, "cancel-link").click()
# Wait until we're back on the change page.
self.wait_for_text("#content h1", "Change pizza")
self.assertEqual(self.selenium.current_url, full_url)
self.assertEqual(Pizza.objects.count(), 1)
self.assertEqual(Topping.objects.count(), 2)
def test_list_editable_popups(self):
"""
list_editable foreign keys have add/change popups.
"""
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import Select
s1 = Section.objects.create(name="Test section")
Article.objects.create(
title="foo",
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=s1,
)
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
self.selenium.get(
self.live_server_url + reverse("admin:admin_views_article_changelist")
)
# Change popup
self.selenium.find_element(By.ID, "change_id_form-0-section").click()
self.wait_for_and_switch_to_popup()
self.wait_for_text("#content h1", "Change section")
name_input = self.selenium.find_element(By.ID, "id_name")
name_input.clear()
name_input.send_keys("<i>edited section</i>")
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
# Hide sidebar.
toggle_button = self.selenium.find_element(
By.CSS_SELECTOR, "#toggle-nav-sidebar"
)
toggle_button.click()
select = Select(self.selenium.find_element(By.ID, "id_form-0-section"))
self.assertEqual(select.first_selected_option.text, "<i>edited section</i>")
# Rendered select2 input.
select2_display = self.selenium.find_element(
By.CLASS_NAME, "select2-selection__rendered"
)
# Clear button (×\n) is included in text.
self.assertEqual(select2_display.text, "×\n<i>edited section</i>")
# Add popup
self.selenium.find_element(By.ID, "add_id_form-0-section").click()
self.wait_for_and_switch_to_popup()
self.wait_for_text("#content h1", "Add section")
self.selenium.find_element(By.ID, "id_name").send_keys("new section")
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
select = Select(self.selenium.find_element(By.ID, "id_form-0-section"))
self.assertEqual(select.first_selected_option.text, "new section")
select2_display = self.selenium.find_element(
By.CLASS_NAME, "select2-selection__rendered"
)
# Clear button (×\n) is included in text.
self.assertEqual(select2_display.text, "×\nnew section")
def test_inline_uuid_pk_edit_with_popup(self):
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import Select
parent = ParentWithUUIDPK.objects.create(title="test")
related_with_parent = RelatedWithUUIDPKModel.objects.create(parent=parent)
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
change_url = reverse(
"admin:admin_views_relatedwithuuidpkmodel_change",
args=(related_with_parent.id,),
)
self.selenium.get(self.live_server_url + change_url)
self.selenium.find_element(By.ID, "change_id_parent").click()
self.wait_for_and_switch_to_popup()
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
select = Select(self.selenium.find_element(By.ID, "id_parent"))
self.assertEqual(select.first_selected_option.text, str(parent.id))
self.assertEqual(
select.first_selected_option.get_attribute("value"), str(parent.id)
)
def test_inline_uuid_pk_add_with_popup(self):
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import Select
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
self.selenium.get(
self.live_server_url
+ reverse("admin:admin_views_relatedwithuuidpkmodel_add")
)
self.selenium.find_element(By.ID, "add_id_parent").click()
self.wait_for_and_switch_to_popup()
self.selenium.find_element(By.ID, "id_title").send_keys("test")
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
select = Select(self.selenium.find_element(By.ID, "id_parent"))
uuid_id = str(ParentWithUUIDPK.objects.first().id)
self.assertEqual(select.first_selected_option.text, uuid_id)
self.assertEqual(select.first_selected_option.get_attribute("value"), uuid_id)
def test_inline_uuid_pk_delete_with_popup(self):
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import Select
parent = ParentWithUUIDPK.objects.create(title="test")
related_with_parent = RelatedWithUUIDPKModel.objects.create(parent=parent)
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
change_url = reverse(
"admin:admin_views_relatedwithuuidpkmodel_change",
args=(related_with_parent.id,),
)
self.selenium.get(self.live_server_url + change_url)
self.selenium.find_element(By.ID, "delete_id_parent").click()
self.wait_for_and_switch_to_popup()
self.selenium.find_element(By.XPATH, '//input[@value="Yes, I’m sure"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
select = Select(self.selenium.find_element(By.ID, "id_parent"))
self.assertEqual(ParentWithUUIDPK.objects.count(), 0)
self.assertEqual(select.first_selected_option.text, "---------")
self.assertEqual(select.first_selected_option.get_attribute("value"), "")
def test_inline_with_popup_cancel_delete(self):
"""Clicking ""No, take me back" on a delete popup closes the window."""
from selenium.webdriver.common.by import By
parent = ParentWithUUIDPK.objects.create(title="test")
related_with_parent = RelatedWithUUIDPKModel.objects.create(parent=parent)
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
change_url = reverse(
"admin:admin_views_relatedwithuuidpkmodel_change",
args=(related_with_parent.id,),
)
self.selenium.get(self.live_server_url + change_url)
self.selenium.find_element(By.ID, "delete_id_parent").click()
self.wait_for_and_switch_to_popup()
self.selenium.find_element(By.XPATH, '//a[text()="No, take me back"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
self.assertEqual(len(self.selenium.window_handles), 1)
def test_list_editable_raw_id_fields(self):
from selenium.webdriver.common.by import By
parent = ParentWithUUIDPK.objects.create(title="test")
parent2 = ParentWithUUIDPK.objects.create(title="test2")
RelatedWithUUIDPKModel.objects.create(parent=parent)
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
change_url = reverse(
"admin:admin_views_relatedwithuuidpkmodel_changelist",
current_app=site2.name,
)
self.selenium.get(self.live_server_url + change_url)
self.selenium.find_element(By.ID, "lookup_id_form-0-parent").click()
self.wait_for_and_switch_to_popup()
# Select "parent2" in the popup.
self.selenium.find_element(By.LINK_TEXT, str(parent2.pk)).click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
# The newly selected pk should appear in the raw id input.
value = self.selenium.find_element(By.ID, "id_form-0-parent").get_attribute(
"value"
)
self.assertEqual(value, str(parent2.pk))
def test_input_element_font(self):
"""
Browsers' default stylesheets override the font of inputs. The admin
adds additional CSS to handle this.
"""
from selenium.webdriver.common.by import By
self.selenium.get(self.live_server_url + reverse("admin:login"))
element = self.selenium.find_element(By.ID, "id_username")
        # Some browsers quote the fonts, some don't.
fonts = [
font.strip().strip('"')
for font in element.value_of_css_property("font-family").split(",")
]
self.assertEqual(
fonts,
[
"Segoe UI",
"system-ui",
"Roboto",
"Helvetica Neue",
"Arial",
"sans-serif",
"Apple Color Emoji",
"Segoe UI Emoji",
"Segoe UI Symbol",
"Noto Color Emoji",
],
)
def test_search_input_filtered_page(self):
from selenium.webdriver.common.by import By
Person.objects.create(name="Guido van Rossum", gender=1, alive=True)
Person.objects.create(name="Grace Hopper", gender=1, alive=False)
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
person_url = reverse("admin:admin_views_person_changelist") + "?q=Gui"
self.selenium.get(self.live_server_url + person_url)
self.assertGreater(
self.selenium.find_element(By.ID, "searchbar").rect["width"],
50,
)
def test_related_popup_index(self):
"""
Create a chain of 'self' related objects via popups.
"""
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import Select
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
add_url = reverse("admin:admin_views_box_add", current_app=site.name)
self.selenium.get(self.live_server_url + add_url)
base_window = self.selenium.current_window_handle
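        # Build a chain of popups (base window -> "test" -> "test2" -> "test3")
        # and then save from the innermost popup back out.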
self.selenium.find_element(By.ID, "add_id_next_box").click()
self.wait_for_and_switch_to_popup()
popup_window_test = self.selenium.current_window_handle
self.selenium.find_element(By.ID, "id_title").send_keys("test")
self.selenium.find_element(By.ID, "add_id_next_box").click()
self.wait_for_and_switch_to_popup(num_windows=3)
popup_window_test2 = self.selenium.current_window_handle
self.selenium.find_element(By.ID, "id_title").send_keys("test2")
self.selenium.find_element(By.ID, "add_id_next_box").click()
self.wait_for_and_switch_to_popup(num_windows=4)
self.selenium.find_element(By.ID, "id_title").send_keys("test3")
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.selenium.switch_to.window(popup_window_test2)
select = Select(self.selenium.find_element(By.ID, "id_next_box"))
next_box_id = str(Box.objects.get(title="test3").id)
self.assertEqual(
select.first_selected_option.get_attribute("value"), next_box_id
)
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.selenium.switch_to.window(popup_window_test)
select = Select(self.selenium.find_element(By.ID, "id_next_box"))
next_box_id = str(Box.objects.get(title="test2").id)
self.assertEqual(
select.first_selected_option.get_attribute("value"), next_box_id
)
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.selenium.switch_to.window(base_window)
select = Select(self.selenium.find_element(By.ID, "id_next_box"))
next_box_id = str(Box.objects.get(title="test").id)
self.assertEqual(
select.first_selected_option.get_attribute("value"), next_box_id
)
def test_related_popup_incorrect_close(self):
"""
        Closing a parent popup also cleans up its child popups.
"""
from selenium.webdriver.common.by import By
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
add_url = reverse("admin:admin_views_box_add", current_app=site.name)
self.selenium.get(self.live_server_url + add_url)
self.selenium.find_element(By.ID, "add_id_next_box").click()
self.wait_for_and_switch_to_popup()
test_window = self.selenium.current_window_handle
self.selenium.find_element(By.ID, "id_title").send_keys("test")
self.selenium.find_element(By.ID, "add_id_next_box").click()
self.wait_for_and_switch_to_popup(num_windows=3)
test2_window = self.selenium.current_window_handle
self.selenium.find_element(By.ID, "id_title").send_keys("test2")
self.selenium.find_element(By.ID, "add_id_next_box").click()
self.wait_for_and_switch_to_popup(num_windows=4)
self.assertEqual(len(self.selenium.window_handles), 4)
self.selenium.switch_to.window(test2_window)
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
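        # Saving this popup should close it and its child popup, leaving the
        # base window plus the first popup (two handles).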
self.wait_until(lambda d: len(d.window_handles) == 2, 1)
self.assertEqual(len(self.selenium.window_handles), 2)
# Close final popup to clean up test.
self.selenium.switch_to.window(test_window)
self.selenium.find_element(By.XPATH, '//input[@value="Save"]').click()
self.wait_until(lambda d: len(d.window_handles) == 1, 1)
self.selenium.switch_to.window(self.selenium.window_handles[-1])
def test_hidden_fields_small_window(self):
from selenium.webdriver.common.by import By
self.admin_login(
username="super",
password="secret",
login_url=reverse("admin:index"),
)
        self.selenium.get(
            self.live_server_url + reverse("admin:admin_views_story_add")
        )
field_title = self.selenium.find_element(By.CLASS_NAME, "field-title")
current_size = self.selenium.get_window_size()
try:
self.selenium.set_window_size(1024, 768)
self.assertIs(field_title.is_displayed(), False)
self.selenium.set_window_size(767, 575)
self.assertIs(field_title.is_displayed(), False)
finally:
            self.selenium.set_window_size(
                current_size["width"], current_size["height"]
            )
def test_updating_related_objects_updates_fk_selects_except_autocompletes(self):
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import Select
born_country_select_id = "id_born_country"
living_country_select_id = "id_living_country"
living_country_select2_textbox_id = "select2-id_living_country-container"
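        # select2 renders its visible selection in a container whose id is
        # derived from the underlying select element's id.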
favorite_country_to_vacation_select_id = "id_favorite_country_to_vacation"
continent_select_id = "id_continent"
def _get_HTML_inside_element_by_id(id_):
return self.selenium.find_element(By.ID, id_).get_attribute("innerHTML")
def _get_text_inside_element_by_selector(selector):
return self.selenium.find_element(By.CSS_SELECTOR, selector).get_attribute(
"innerText"
)
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
add_url = reverse("admin:admin_views_traveler_add")
self.selenium.get(self.live_server_url + add_url)
# Add new Country from the born_country select.
self.selenium.find_element(By.ID, f"add_{born_country_select_id}").click()
self.wait_for_and_switch_to_popup()
self.selenium.find_element(By.ID, "id_name").send_keys("Argentina")
continent_select = Select(
self.selenium.find_element(By.ID, continent_select_id)
)
continent_select.select_by_visible_text("South America")
self.selenium.find_element(By.CSS_SELECTOR, '[type="submit"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
self.assertHTMLEqual(
_get_HTML_inside_element_by_id(born_country_select_id),
"""
<option value="" selected="">---------</option>
<option value="1" selected="">Argentina</option>
""",
)
# Argentina isn't added to the living_country select nor selected by
# the select2 widget.
self.assertEqual(
_get_text_inside_element_by_selector(f"#{living_country_select_id}"), ""
)
self.assertEqual(
_get_text_inside_element_by_selector(
f"#{living_country_select2_textbox_id}"
),
"",
)
# Argentina won't appear because favorite_country_to_vacation field has
# limit_choices_to.
self.assertHTMLEqual(
_get_HTML_inside_element_by_id(favorite_country_to_vacation_select_id),
'<option value="" selected="">---------</option>',
)
# Add new Country from the living_country select.
self.selenium.find_element(By.ID, f"add_{living_country_select_id}").click()
self.wait_for_and_switch_to_popup()
self.selenium.find_element(By.ID, "id_name").send_keys("Spain")
continent_select = Select(
self.selenium.find_element(By.ID, continent_select_id)
)
continent_select.select_by_visible_text("Europe")
self.selenium.find_element(By.CSS_SELECTOR, '[type="submit"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
self.assertHTMLEqual(
_get_HTML_inside_element_by_id(born_country_select_id),
"""
<option value="" selected="">---------</option>
<option value="1" selected="">Argentina</option>
<option value="2">Spain</option>
""",
)
# Spain is added to the living_country select and it's also selected by
# the select2 widget.
self.assertEqual(
_get_text_inside_element_by_selector(f"#{living_country_select_id} option"),
"Spain",
)
self.assertEqual(
_get_text_inside_element_by_selector(
f"#{living_country_select2_textbox_id}"
),
"Spain",
)
# Spain won't appear because favorite_country_to_vacation field has
# limit_choices_to.
self.assertHTMLEqual(
_get_HTML_inside_element_by_id(favorite_country_to_vacation_select_id),
'<option value="" selected="">---------</option>',
)
# Edit second Country created from living_country select.
favorite_select = Select(
self.selenium.find_element(By.ID, living_country_select_id)
)
favorite_select.select_by_visible_text("Spain")
self.selenium.find_element(By.ID, f"change_{living_country_select_id}").click()
self.wait_for_and_switch_to_popup()
favorite_name_input = self.selenium.find_element(By.ID, "id_name")
favorite_name_input.clear()
favorite_name_input.send_keys("Italy")
self.selenium.find_element(By.CSS_SELECTOR, '[type="submit"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
self.assertHTMLEqual(
_get_HTML_inside_element_by_id(born_country_select_id),
"""
<option value="" selected="">---------</option>
<option value="1" selected="">Argentina</option>
<option value="2">Italy</option>
""",
)
# Italy is added to the living_country select and it's also selected by
# the select2 widget.
self.assertEqual(
_get_text_inside_element_by_selector(f"#{living_country_select_id} option"),
"Italy",
)
self.assertEqual(
_get_text_inside_element_by_selector(
f"#{living_country_select2_textbox_id}"
),
"Italy",
)
# The favorite_country_to_vacation field still has no selectable options.
self.assertHTMLEqual(
_get_HTML_inside_element_by_id(favorite_country_to_vacation_select_id),
'<option value="" selected="">---------</option>',
)
# Add a new Asian country.
self.selenium.find_element(
By.ID, f"add_{favorite_country_to_vacation_select_id}"
).click()
self.wait_for_and_switch_to_popup()
favorite_name_input = self.selenium.find_element(By.ID, "id_name")
favorite_name_input.send_keys("Qatar")
continent_select = Select(
self.selenium.find_element(By.ID, continent_select_id)
)
continent_select.select_by_visible_text("Asia")
self.selenium.find_element(By.CSS_SELECTOR, '[type="submit"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
# Submit the new Traveler.
self.selenium.find_element(By.CSS_SELECTOR, '[name="_save"]').click()
traveler = Traveler.objects.get()
self.assertEqual(traveler.born_country.name, "Argentina")
self.assertEqual(traveler.living_country.name, "Italy")
self.assertEqual(traveler.favorite_country_to_vacation.name, "Qatar")
def test_redirect_on_add_view_add_another_button(self):
from selenium.webdriver.common.by import By
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
add_url = reverse("admin7:admin_views_section_add")
self.selenium.get(self.live_server_url + add_url)
name_input = self.selenium.find_element(By.ID, "id_name")
name_input.send_keys("Test section 1")
self.selenium.find_element(
By.XPATH, '//input[@value="Save and add another"]'
).click()
self.assertEqual(Section.objects.count(), 1)
name_input = self.selenium.find_element(By.ID, "id_name")
name_input.send_keys("Test section 2")
self.selenium.find_element(
By.XPATH, '//input[@value="Save and add another"]'
).click()
self.assertEqual(Section.objects.count(), 2)
def test_redirect_on_add_view_continue_button(self):
from selenium.webdriver.common.by import By
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
add_url = reverse("admin7:admin_views_section_add")
self.selenium.get(self.live_server_url + add_url)
name_input = self.selenium.find_element(By.ID, "id_name")
name_input.send_keys("Test section 1")
self.selenium.find_element(
By.XPATH, '//input[@value="Save and continue editing"]'
).click()
self.assertEqual(Section.objects.count(), 1)
name_input = self.selenium.find_element(By.ID, "id_name")
name_input_value = name_input.get_attribute("value")
self.assertEqual(name_input_value, "Test section 1")
@override_settings(ROOT_URLCONF="admin_views.urls")
class ReadonlyTest(AdminFieldExtractionMixin, TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
@ignore_warnings(category=RemovedInDjango60Warning)
def test_readonly_get(self):
response = self.client.get(reverse("admin:admin_views_post_add"))
self.assertNotContains(response, 'name="posted"')
# 3 fields + 2 submit buttons + 5 inline management form fields + 2
# hidden fields for inlines + 1 field for the inline + 2 empty form
# fields + 1 logout form.
self.assertContains(response, "<input", count=17)
self.assertContains(response, formats.localize(datetime.date.today()))
self.assertContains(response, "<label>Awesomeness level:</label>")
self.assertContains(response, "Very awesome.")
self.assertContains(response, "Unknown coolness.")
self.assertContains(response, "foo")
# Multiline text in a readonly field gets <br> tags
self.assertContains(response, "Multiline<br>test<br>string")
self.assertContains(
response,
'<div class="readonly">Multiline<br>html<br>content</div>',
html=True,
)
self.assertContains(response, "InlineMultiline<br>test<br>string")
self.assertContains(
response,
formats.localize(datetime.date.today() - datetime.timedelta(days=7)),
)
self.assertContains(response, '<div class="form-row field-coolness">')
self.assertContains(response, '<div class="form-row field-awesomeness_level">')
self.assertContains(response, '<div class="form-row field-posted">')
self.assertContains(response, '<div class="form-row field-value">')
self.assertContains(response, '<div class="form-row">')
self.assertContains(response, '<div class="help"', 3)
self.assertContains(
response,
'<div class="help" id="id_title_helptext"><div>Some help text for the '
"title (with Unicode ŠĐĆŽćžšđ)</div></div>",
html=True,
)
self.assertContains(
response,
'<div class="help" id="id_content_helptext"><div>Some help text for the '
"content (with Unicode ŠĐĆŽćžšđ)</div></div>",
html=True,
)
self.assertContains(
response,
'<div class="help"><div>Some help text for the date (with Unicode ŠĐĆŽćžšđ)'
"</div></div>",
html=True,
)
p = Post.objects.create(
title="I worked on readonly_fields", content="Its good stuff"
)
response = self.client.get(
reverse("admin:admin_views_post_change", args=(p.pk,))
)
self.assertContains(response, "%d amount of cool" % p.pk)
@ignore_warnings(category=RemovedInDjango60Warning)
def test_readonly_text_field(self):
p = Post.objects.create(
title="Readonly test",
content="test",
readonly_content="test\r\n\r\ntest\r\n\r\ntest\r\n\r\ntest",
)
Link.objects.create(
url="http://www.djangoproject.com",
post=p,
readonly_link_content="test\r\nlink",
)
response = self.client.get(
reverse("admin:admin_views_post_change", args=(p.pk,))
)
# Checking readonly field.
self.assertContains(response, "test<br><br>test<br><br>test<br><br>test")
# Checking readonly field in inline.
self.assertContains(response, "test<br>link")
@ignore_warnings(category=RemovedInDjango60Warning)
def test_readonly_post(self):
data = {
"title": "Django Got Readonly Fields",
"content": "This is an incredible development.",
"link_set-TOTAL_FORMS": "1",
"link_set-INITIAL_FORMS": "0",
"link_set-MAX_NUM_FORMS": "0",
}
response = self.client.post(reverse("admin:admin_views_post_add"), data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Post.objects.count(), 1)
p = Post.objects.get()
self.assertEqual(p.posted, datetime.date.today())
data["posted"] = "10-8-1990" # some date that's not today
response = self.client.post(reverse("admin:admin_views_post_add"), data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Post.objects.count(), 2)
p = Post.objects.order_by("-id")[0]
self.assertEqual(p.posted, datetime.date.today())
def test_readonly_manytomany(self):
"Regression test for #13004"
response = self.client.get(reverse("admin:admin_views_pizza_add"))
self.assertEqual(response.status_code, 200)
def test_user_password_change_limited_queryset(self):
su = User.objects.filter(is_superuser=True)[0]
response = self.client.get(
reverse("admin2:auth_user_password_change", args=(su.pk,))
)
self.assertEqual(response.status_code, 404)
def test_change_form_renders_correct_null_choice_value(self):
"""
Regression test for #17911.
"""
choice = Choice.objects.create(choice=None)
response = self.client.get(
reverse("admin:admin_views_choice_change", args=(choice.pk,))
)
self.assertContains(
response, '<div class="readonly">No opinion</div>', html=True
)
def _test_readonly_foreignkey_links(self, admin_site):
"""
ForeignKey readonly fields render as links if the target model is
registered in admin.
"""
chapter = Chapter.objects.create(
title="Chapter 1",
content="content",
book=Book.objects.create(name="Book 1"),
)
language = Language.objects.create(iso="_40", name="Test")
obj = ReadOnlyRelatedField.objects.create(
chapter=chapter,
language=language,
user=self.superuser,
)
response = self.client.get(
reverse(
f"{admin_site}:admin_views_readonlyrelatedfield_change", args=(obj.pk,)
),
)
# Related ForeignKey object registered in admin.
user_url = reverse(f"{admin_site}:auth_user_change", args=(self.superuser.pk,))
self.assertContains(
response,
'<div class="readonly"><a href="%s">super</a></div>' % user_url,
html=True,
)
# Related ForeignKey with the string primary key registered in admin.
language_url = reverse(
f"{admin_site}:admin_views_language_change",
args=(quote(language.pk),),
)
self.assertContains(
response,
'<div class="readonly"><a href="%s">_40</a></div>' % language_url,
html=True,
)
# Related ForeignKey object not registered in admin.
self.assertContains(
response, '<div class="readonly">Chapter 1</div>', html=True
)
def test_readonly_foreignkey_links_default_admin_site(self):
self._test_readonly_foreignkey_links("admin")
def test_readonly_foreignkey_links_custom_admin_site(self):
self._test_readonly_foreignkey_links("namespaced_admin")
def test_readonly_manytomany_backwards_ref(self):
"""
Regression test for #16433 - backwards references for related objects
broke if the related field is read-only due to the help_text attribute
"""
topping = Topping.objects.create(name="Salami")
pizza = Pizza.objects.create(name="Americano")
pizza.toppings.add(topping)
response = self.client.get(reverse("admin:admin_views_topping_add"))
self.assertEqual(response.status_code, 200)
def test_readonly_manytomany_forwards_ref(self):
topping = Topping.objects.create(name="Salami")
pizza = Pizza.objects.create(name="Americano")
pizza.toppings.add(topping)
response = self.client.get(
reverse("admin:admin_views_pizza_change", args=(pizza.pk,))
)
self.assertContains(response, "<label>Toppings:</label>", html=True)
self.assertContains(response, '<div class="readonly">Salami</div>', html=True)
def test_readonly_onetoone_backwards_ref(self):
"""
Can reference a reverse OneToOneField in ModelAdmin.readonly_fields.
"""
v1 = Villain.objects.create(name="Adam")
pl = Plot.objects.create(name="Test Plot", team_leader=v1, contact=v1)
pd = PlotDetails.objects.create(details="Brand New Plot", plot=pl)
response = self.client.get(
reverse("admin:admin_views_plotproxy_change", args=(pl.pk,))
)
field = self.get_admin_readonly_field(response, "plotdetails")
pd_url = reverse("admin:admin_views_plotdetails_change", args=(pd.pk,))
self.assertEqual(field.contents(), '<a href="%s">Brand New Plot</a>' % pd_url)
# The reverse relation also works if the OneToOneField is null.
pd.plot = None
pd.save()
response = self.client.get(
reverse("admin:admin_views_plotproxy_change", args=(pl.pk,))
)
field = self.get_admin_readonly_field(response, "plotdetails")
self.assertEqual(field.contents(), "-") # default empty value
@ignore_warnings(category=RemovedInDjango60Warning)
def test_readonly_field_overrides(self):
"""
Regression test for #22087 - ModelForm Meta overrides are ignored by
AdminReadonlyField
"""
p = FieldOverridePost.objects.create(title="Test Post", content="Test Content")
response = self.client.get(
reverse("admin:admin_views_fieldoverridepost_change", args=(p.pk,))
)
self.assertContains(
response,
'<div class="help"><div>Overridden help text for the date</div></div>',
html=True,
)
self.assertContains(
response,
'<label for="id_public">Overridden public label:</label>',
html=True,
)
self.assertNotContains(
response, "Some help text for the date (with Unicode ŠĐĆŽćžšđ)"
)
def test_correct_autoescaping(self):
"""
Make sure that non-field readonly elements are properly autoescaped (#24461)
"""
section = Section.objects.create(name="<a>evil</a>")
response = self.client.get(
reverse("admin:admin_views_section_change", args=(section.pk,))
)
self.assertNotContains(response, "<a>evil</a>", status_code=200)
self.assertContains(response, "<a>evil</a>", status_code=200)
def test_label_suffix_translated(self):
pizza = Pizza.objects.create(name="Americano")
url = reverse("admin:admin_views_pizza_change", args=(pizza.pk,))
with self.settings(LANGUAGE_CODE="fr"):
response = self.client.get(url)
self.assertContains(response, "<label>Toppings\u00A0:</label>", html=True)
@override_settings(ROOT_URLCONF="admin_views.urls")
class LimitChoicesToInAdminTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_limit_choices_to_as_callable(self):
"""Test for ticket 2445 changes to admin."""
threepwood = Character.objects.create(
username="threepwood",
last_action=datetime.datetime.today() + datetime.timedelta(days=1),
)
marley = Character.objects.create(
username="marley",
last_action=datetime.datetime.today() - datetime.timedelta(days=1),
)
response = self.client.get(reverse("admin:admin_views_stumpjoke_add"))
# The allowed option should appear twice; the limited option should not appear.
self.assertContains(response, threepwood.username, count=2)
self.assertNotContains(response, marley.username)
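# A sketch of the kind of callable limit_choices_to that
# test_limit_choices_to_as_callable assumes (hypothetical name): because it
# is a callable, the cutoff is recomputed every time the form is built, so
# only characters whose last_action lies in the future are offered.
import datetime

def _sketch_today_callable_dict():
    return {"last_action__gte": datetime.datetime.today()}

# Usage sketch on the model field:
#   models.ForeignKey(Character, models.CASCADE,
#                     limit_choices_to=_sketch_today_callable_dict)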
@override_settings(ROOT_URLCONF="admin_views.urls")
class RawIdFieldsTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_limit_choices_to(self):
"""Regression test for 14880"""
actor = Actor.objects.create(name="Palin", age=27)
Inquisition.objects.create(expected=True, leader=actor, country="England")
Inquisition.objects.create(expected=False, leader=actor, country="Spain")
response = self.client.get(reverse("admin:admin_views_sketch_add"))
# Find the link
m = re.search(
rb'<a href="([^"]*)"[^>]* id="lookup_id_inquisition"', response.content
)
self.assertTrue(m) # Got a match
popup_url = m[1].decode().replace("&", "&")
# Handle relative links
popup_url = urljoin(response.request["PATH_INFO"], popup_url)
# Get the popup and verify the correct objects show up in the resulting
# page. This step also tests integers, strings, and booleans in the
# lookup query string; in the model, the inquisition field is defined
# with a limit_choices_to option that includes a filter on a string
# field (inquisition__actor__name), a filter on an integer field
# (inquisition__actor__age), and a filter on a boolean field
# (inquisition__expected).
response2 = self.client.get(popup_url)
self.assertContains(response2, "Spain")
self.assertNotContains(response2, "England")
def test_limit_choices_to_isnull_false(self):
"""Regression test for 20182"""
Actor.objects.create(name="Palin", age=27)
Actor.objects.create(name="Kilbraken", age=50, title="Judge")
response = self.client.get(reverse("admin:admin_views_sketch_add"))
# Find the link
m = re.search(
rb'<a href="([^"]*)"[^>]* id="lookup_id_defendant0"', response.content
)
self.assertTrue(m) # Got a match
popup_url = m[1].decode().replace("&", "&")
# Handle relative links
popup_url = urljoin(response.request["PATH_INFO"], popup_url)
# Get the popup and verify the correct objects show up in the resulting
# page. This step tests that field__isnull=0 is parsed correctly from
# the lookup query string; in the model, the defendant0 field is defined
# with a limit_choices_to option that includes "actor__title__isnull=False".
response2 = self.client.get(popup_url)
self.assertContains(response2, "Kilbraken")
self.assertNotContains(response2, "Palin")
def test_limit_choices_to_isnull_true(self):
"""Regression test for 20182"""
Actor.objects.create(name="Palin", age=27)
Actor.objects.create(name="Kilbraken", age=50, title="Judge")
response = self.client.get(reverse("admin:admin_views_sketch_add"))
# Find the link
m = re.search(
rb'<a href="([^"]*)"[^>]* id="lookup_id_defendant1"', response.content
)
self.assertTrue(m) # Got a match
popup_url = m[1].decode().replace("&", "&")
# Handle relative links
popup_url = urljoin(response.request["PATH_INFO"], popup_url)
# Get the popup and verify the correct objects show up in the resulting
# page. This step tests that field__isnull=1 is parsed correctly from
# the lookup query string; in the model, the defendant1 field is defined
# with a limit_choices_to option that includes "actor__title__isnull=True".
response2 = self.client.get(popup_url)
self.assertNotContains(response2, "Kilbraken")
self.assertContains(response2, "Palin")
def test_list_display_method_same_name_as_reverse_accessor(self):
"""
Should be able to use a ModelAdmin method in list_display that has the
same name as a reverse model field ("sketch" in this case).
"""
actor = Actor.objects.create(name="Palin", age=27)
Inquisition.objects.create(expected=True, leader=actor, country="England")
response = self.client.get(reverse("admin:admin_views_inquisition_changelist"))
self.assertContains(response, "list-display-sketch")
@override_settings(ROOT_URLCONF="admin_views.urls")
class UserAdminTest(TestCase):
"""
Tests user CRUD functionality.
"""
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.adduser = User.objects.create_user(
username="adduser", password="secret", is_staff=True
)
cls.changeuser = User.objects.create_user(
username="changeuser", password="secret", is_staff=True
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
cls.per1 = Person.objects.create(name="John Mauchly", gender=1, alive=True)
cls.per2 = Person.objects.create(name="Grace Hopper", gender=1, alive=False)
cls.per3 = Person.objects.create(name="Guido van Rossum", gender=1, alive=True)
def setUp(self):
self.client.force_login(self.superuser)
def test_save_button(self):
user_count = User.objects.count()
response = self.client.post(
reverse("admin:auth_user_add"),
{
"username": "newuser",
"password1": "newpassword",
"password2": "newpassword",
},
)
new_user = User.objects.get(username="newuser")
self.assertRedirects(
response, reverse("admin:auth_user_change", args=(new_user.pk,))
)
self.assertEqual(User.objects.count(), user_count + 1)
self.assertTrue(new_user.has_usable_password())
def test_save_continue_editing_button(self):
user_count = User.objects.count()
response = self.client.post(
reverse("admin:auth_user_add"),
{
"username": "newuser",
"password1": "newpassword",
"password2": "newpassword",
"_continue": "1",
},
)
new_user = User.objects.get(username="newuser")
new_user_url = reverse("admin:auth_user_change", args=(new_user.pk,))
self.assertRedirects(response, new_user_url, fetch_redirect_response=False)
self.assertEqual(User.objects.count(), user_count + 1)
self.assertTrue(new_user.has_usable_password())
response = self.client.get(new_user_url)
self.assertContains(
response,
'<li class="success">The user “<a href="%s">'
"%s</a>” was added successfully. You may edit it again below.</li>"
% (new_user_url, new_user),
html=True,
)
def test_password_mismatch(self):
response = self.client.post(
reverse("admin:auth_user_add"),
{
"username": "newuser",
"password1": "newpassword",
"password2": "mismatch",
},
)
self.assertEqual(response.status_code, 200)
self.assertFormError(response.context["adminform"], "password1", [])
self.assertFormError(
response.context["adminform"],
"password2",
["The two password fields didn’t match."],
)
def test_user_fk_add_popup(self):
"""
User addition through a FK popup should return the appropriate
JavaScript response.
"""
response = self.client.get(reverse("admin:admin_views_album_add"))
self.assertContains(response, reverse("admin:auth_user_add"))
self.assertContains(
response,
'class="related-widget-wrapper-link add-related" id="add_id_owner"',
)
response = self.client.get(
reverse("admin:auth_user_add") + "?%s=1" % IS_POPUP_VAR
)
self.assertNotContains(response, 'name="_continue"')
self.assertNotContains(response, 'name="_addanother"')
data = {
"username": "newuser",
"password1": "newpassword",
"password2": "newpassword",
IS_POPUP_VAR: "1",
"_save": "1",
}
response = self.client.post(
reverse("admin:auth_user_add") + "?%s=1" % IS_POPUP_VAR, data, follow=True
)
self.assertContains(response, ""obj": "newuser"")
def test_user_fk_change_popup(self):
"""
User change through a FK popup should return the appropriate JavaScript
response.
"""
response = self.client.get(reverse("admin:admin_views_album_add"))
self.assertContains(
response, reverse("admin:auth_user_change", args=("__fk__",))
)
self.assertContains(
response,
'class="related-widget-wrapper-link change-related" id="change_id_owner"',
)
user = User.objects.get(username="changeuser")
url = (
reverse("admin:auth_user_change", args=(user.pk,)) + "?%s=1" % IS_POPUP_VAR
)
response = self.client.get(url)
self.assertNotContains(response, 'name="_continue"')
self.assertNotContains(response, 'name="_addanother"')
data = {
"username": "newuser",
"password1": "newpassword",
"password2": "newpassword",
"last_login_0": "2007-05-30",
"last_login_1": "13:20:10",
"date_joined_0": "2007-05-30",
"date_joined_1": "13:20:10",
IS_POPUP_VAR: "1",
"_save": "1",
}
response = self.client.post(url, data, follow=True)
self.assertContains(response, ""obj": "newuser"")
self.assertContains(response, ""action": "change"")
def test_user_fk_delete_popup(self):
"""
User deletion through a FK popup should return the appropriate
JavaScript response.
"""
response = self.client.get(reverse("admin:admin_views_album_add"))
self.assertContains(
response, reverse("admin:auth_user_delete", args=("__fk__",))
)
self.assertContains(
response,
'class="related-widget-wrapper-link change-related" id="change_id_owner"',
)
user = User.objects.get(username="changeuser")
url = (
reverse("admin:auth_user_delete", args=(user.pk,)) + "?%s=1" % IS_POPUP_VAR
)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
data = {
"post": "yes",
IS_POPUP_VAR: "1",
}
response = self.client.post(url, data, follow=True)
self.assertContains(response, ""action": "delete"")
def test_save_add_another_button(self):
user_count = User.objects.count()
response = self.client.post(
reverse("admin:auth_user_add"),
{
"username": "newuser",
"password1": "newpassword",
"password2": "newpassword",
"_addanother": "1",
},
)
new_user = User.objects.order_by("-id")[0]
self.assertRedirects(response, reverse("admin:auth_user_add"))
self.assertEqual(User.objects.count(), user_count + 1)
self.assertTrue(new_user.has_usable_password())
def test_user_permission_performance(self):
u = User.objects.all()[0]
# Don't depend on a warm cache, see #17377.
ContentType.objects.clear_cache()
expected_num_queries = 10 if connection.features.uses_savepoints else 8
with self.assertNumQueries(expected_num_queries):
response = self.client.get(reverse("admin:auth_user_change", args=(u.pk,)))
self.assertEqual(response.status_code, 200)
def test_form_url_present_in_context(self):
u = User.objects.all()[0]
response = self.client.get(
reverse("admin3:auth_user_password_change", args=(u.pk,))
)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context["form_url"], "pony")
@override_settings(ROOT_URLCONF="admin_views.urls")
class GroupAdminTest(TestCase):
"""
Tests group CRUD functionality.
"""
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_save_button(self):
group_count = Group.objects.count()
response = self.client.post(
reverse("admin:auth_group_add"),
{
"name": "newgroup",
},
)
Group.objects.order_by("-id")[0]
self.assertRedirects(response, reverse("admin:auth_group_changelist"))
self.assertEqual(Group.objects.count(), group_count + 1)
def test_group_permission_performance(self):
g = Group.objects.create(name="test_group")
# Ensure no queries are skipped due to cached content type for Group.
ContentType.objects.clear_cache()
expected_num_queries = 8 if connection.features.uses_savepoints else 6
with self.assertNumQueries(expected_num_queries):
response = self.client.get(reverse("admin:auth_group_change", args=(g.pk,)))
self.assertEqual(response.status_code, 200)
@override_settings(ROOT_URLCONF="admin_views.urls")
class CSSTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.s1 = Section.objects.create(name="Test section")
cls.a1 = Article.objects.create(
content="<p>Middle content</p>",
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a2 = Article.objects.create(
content="<p>Oldest content</p>",
date=datetime.datetime(2000, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.a3 = Article.objects.create(
content="<p>Newest content</p>",
date=datetime.datetime(2009, 3, 18, 11, 54, 58),
section=cls.s1,
)
cls.p1 = PrePopulatedPost.objects.create(
title="A Long Title", published=True, slug="a-long-title"
)
def setUp(self):
self.client.force_login(self.superuser)
@ignore_warnings(category=RemovedInDjango60Warning)
def test_field_prefix_css_classes(self):
"""
Fields have a CSS class name with a 'field-' prefix.
"""
response = self.client.get(reverse("admin:admin_views_post_add"))
# The main form
self.assertContains(response, 'class="form-row field-title"')
self.assertContains(response, 'class="form-row field-content"')
self.assertContains(response, 'class="form-row field-public"')
self.assertContains(response, 'class="form-row field-awesomeness_level"')
self.assertContains(response, 'class="form-row field-coolness"')
self.assertContains(response, 'class="form-row field-value"')
self.assertContains(response, 'class="form-row"') # The lambda function
# The tabular inline
self.assertContains(response, '<td class="field-url">')
self.assertContains(response, '<td class="field-posted">')
def test_index_css_classes(self):
"""
CSS class names are used for each app and model on the admin index
pages (#17050).
"""
# General index page
response = self.client.get(reverse("admin:index"))
self.assertContains(response, '<div class="app-admin_views module')
self.assertContains(response, '<tr class="model-actor">')
self.assertContains(response, '<tr class="model-album">')
# App index page
response = self.client.get(reverse("admin:app_list", args=("admin_views",)))
self.assertContains(response, '<div class="app-admin_views module')
self.assertContains(response, '<tr class="model-actor">')
self.assertContains(response, '<tr class="model-album">')
def test_app_model_in_form_body_class(self):
"""
Ensure the app and model tags are correctly read by the change_form
template.
"""
response = self.client.get(reverse("admin:admin_views_section_add"))
self.assertContains(response, '<body class=" app-admin_views model-section ')
def test_app_model_in_list_body_class(self):
"""
Ensure the app and model tags are correctly read by the change_list
template.
"""
response = self.client.get(reverse("admin:admin_views_section_changelist"))
self.assertContains(response, '<body class=" app-admin_views model-section ')
def test_app_model_in_delete_confirmation_body_class(self):
"""
Ensure the app and model tags are correctly read by the
delete_confirmation template.
"""
response = self.client.get(
reverse("admin:admin_views_section_delete", args=(self.s1.pk,))
)
self.assertContains(response, '<body class=" app-admin_views model-section ')
def test_app_model_in_app_index_body_class(self):
"""
Ensure the app and model tags are correctly read by the app_index
template.
"""
response = self.client.get(reverse("admin:app_list", args=("admin_views",)))
self.assertContains(response, '<body class=" dashboard app-admin_views')
def test_app_model_in_delete_selected_confirmation_body_class(self):
"""
Ensure the app and model tags are correctly read by the
delete_selected_confirmation template.
"""
action_data = {
ACTION_CHECKBOX_NAME: [self.s1.pk],
"action": "delete_selected",
"index": 0,
}
response = self.client.post(
reverse("admin:admin_views_section_changelist"), action_data
)
self.assertContains(response, '<body class=" app-admin_views model-section ')
def test_changelist_field_classes(self):
"""
Cells of the change list table should contain the field name in their
class attribute.
"""
Podcast.objects.create(name="Django Dose", release_date=datetime.date.today())
response = self.client.get(reverse("admin:admin_views_podcast_changelist"))
self.assertContains(response, '<th class="field-name">')
self.assertContains(response, '<td class="field-release_date nowrap">')
self.assertContains(response, '<td class="action-checkbox">')
try:
import docutils
except ImportError:
docutils = None
@unittest.skipUnless(docutils, "no docutils installed.")
@override_settings(ROOT_URLCONF="admin_views.urls")
@modify_settings(
INSTALLED_APPS={"append": ["django.contrib.admindocs", "django.contrib.flatpages"]}
)
class AdminDocsTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_tags(self):
response = self.client.get(reverse("django-admindocs-tags"))
# The builtin tag group exists
self.assertContains(response, "<h2>Built-in tags</h2>", count=2, html=True)
# A builtin tag exists in both the index and detail
self.assertContains(
response, '<h3 id="built_in-autoescape">autoescape</h3>', html=True
)
self.assertContains(
response,
'<li><a href="#built_in-autoescape">autoescape</a></li>',
html=True,
)
# An app tag exists in both the index and detail
self.assertContains(
response, '<h3 id="flatpages-get_flatpages">get_flatpages</h3>', html=True
)
self.assertContains(
response,
'<li><a href="#flatpages-get_flatpages">get_flatpages</a></li>',
html=True,
)
# The admin list tag group exists
self.assertContains(response, "<h2>admin_list</h2>", count=2, html=True)
# An admin list tag exists in both the index and detail
self.assertContains(
response, '<h3 id="admin_list-admin_actions">admin_actions</h3>', html=True
)
self.assertContains(
response,
'<li><a href="#admin_list-admin_actions">admin_actions</a></li>',
html=True,
)
def test_filters(self):
response = self.client.get(reverse("django-admindocs-filters"))
# The builtin filter group exists
self.assertContains(response, "<h2>Built-in filters</h2>", count=2, html=True)
# A builtin filter exists in both the index and detail
self.assertContains(response, '<h3 id="built_in-add">add</h3>', html=True)
self.assertContains(
response, '<li><a href="#built_in-add">add</a></li>', html=True
)
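# A sketch of how admindocs is typically wired up (matching the
# modify_settings above): add django.contrib.admindocs to INSTALLED_APPS and
# include its URLconf, conventionally before the admin's own URLs.
from django.urls import include, path

def _sketch_admindocs_urlpatterns():
    return [path("admin/doc/", include("django.contrib.admindocs.urls"))]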
@override_settings(
ROOT_URLCONF="admin_views.urls",
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
],
},
}
],
)
class ValidXHTMLTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_lang_name_present(self):
with translation.override(None):
response = self.client.get(reverse("admin:app_list", args=("admin_views",)))
self.assertNotContains(response, ' lang=""')
self.assertNotContains(response, ' xml:lang=""')
@override_settings(ROOT_URLCONF="admin_views.urls", USE_THOUSAND_SEPARATOR=True)
class DateHierarchyTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def assert_non_localized_year(self, response, year):
"""
The year is not localized with USE_THOUSAND_SEPARATOR (#15234).
"""
self.assertNotContains(response, formats.number_format(year))
def assert_contains_year_link(self, response, date):
self.assertContains(response, '?release_date__year=%d"' % date.year)
def assert_contains_month_link(self, response, date):
self.assertContains(
response,
'?release_date__month=%d&release_date__year=%d"'
% (date.month, date.year),
)
def assert_contains_day_link(self, response, date):
self.assertContains(
response,
"?release_date__day=%d&"
'release_date__month=%d&release_date__year=%d"'
% (date.day, date.month, date.year),
)
def test_empty(self):
"""
No date hierarchy links display for an empty changelist.
"""
response = self.client.get(reverse("admin:admin_views_podcast_changelist"))
self.assertNotContains(response, "release_date__year=")
self.assertNotContains(response, "release_date__month=")
self.assertNotContains(response, "release_date__day=")
def test_single(self):
"""
A single day-level date hierarchy link appears for a single object.
"""
DATE = datetime.date(2000, 6, 30)
Podcast.objects.create(release_date=DATE)
url = reverse("admin:admin_views_podcast_changelist")
response = self.client.get(url)
self.assert_contains_day_link(response, DATE)
self.assert_non_localized_year(response, 2000)
def test_within_month(self):
"""
Day-level links appear for a changelist within a single month.
"""
DATES = (
datetime.date(2000, 6, 30),
datetime.date(2000, 6, 15),
datetime.date(2000, 6, 3),
)
for date in DATES:
Podcast.objects.create(release_date=date)
url = reverse("admin:admin_views_podcast_changelist")
response = self.client.get(url)
for date in DATES:
self.assert_contains_day_link(response, date)
self.assert_non_localized_year(response, 2000)
def test_within_year(self):
"""
Month-level links appear for a changelist within a single year.
"""
DATES = (
datetime.date(2000, 1, 30),
datetime.date(2000, 3, 15),
datetime.date(2000, 5, 3),
)
for date in DATES:
Podcast.objects.create(release_date=date)
url = reverse("admin:admin_views_podcast_changelist")
response = self.client.get(url)
# no day-level links
self.assertNotContains(response, "release_date__day=")
for date in DATES:
self.assert_contains_month_link(response, date)
self.assert_non_localized_year(response, 2000)
def test_multiple_years(self):
"""
Year-level links appear for a year-spanning changelist.
"""
DATES = (
datetime.date(2001, 1, 30),
datetime.date(2003, 3, 15),
datetime.date(2005, 5, 3),
)
for date in DATES:
Podcast.objects.create(release_date=date)
response = self.client.get(reverse("admin:admin_views_podcast_changelist"))
# no day/month-level links
self.assertNotContains(response, "release_date__day=")
self.assertNotContains(response, "release_date__month=")
for date in DATES:
self.assert_contains_year_link(response, date)
# Make sure GET parameters still behave correctly.
for date in DATES:
url = "%s?release_date__year=%d" % (
reverse("admin:admin_views_podcast_changelist"),
date.year,
)
response = self.client.get(url)
self.assert_contains_month_link(response, date)
self.assert_non_localized_year(response, 2000)
self.assert_non_localized_year(response, 2003)
self.assert_non_localized_year(response, 2005)
url = "%s?release_date__year=%d&release_date__month=%d" % (
reverse("admin:admin_views_podcast_changelist"),
date.year,
date.month,
)
response = self.client.get(url)
self.assert_contains_day_link(response, date)
self.assert_non_localized_year(response, 2000)
self.assert_non_localized_year(response, 2003)
self.assert_non_localized_year(response, 2005)
def test_related_field(self):
questions_data = (
# (posted data, number of answers),
(datetime.date(2001, 1, 30), 0),
(datetime.date(2003, 3, 15), 1),
(datetime.date(2005, 5, 3), 2),
)
for date, answer_count in questions_data:
question = Question.objects.create(posted=date)
for i in range(answer_count):
question.answer_set.create()
response = self.client.get(reverse("admin:admin_views_answer_changelist"))
for date, answer_count in questions_data:
link = '?question__posted__year=%d"' % date.year
if answer_count > 0:
self.assertContains(response, link)
else:
self.assertNotContains(response, link)
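# The drill-down links asserted above come from ModelAdmin.date_hierarchy,
# which also follows relations with the __ lookup syntax (exercised by
# test_related_field). A sketch with illustrative names:
from django.contrib import admin

class _SketchPodcastAdmin(admin.ModelAdmin):
    date_hierarchy = "release_date"

class _SketchAnswerAdmin(admin.ModelAdmin):
    date_hierarchy = "question__posted"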
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminCustomSaveRelatedTests(TestCase):
"""
One can easily customize the way related objects are saved.
Refs #16115.
"""
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_should_be_able_to_edit_related_objects_on_add_view(self):
post = {
"child_set-TOTAL_FORMS": "3",
"child_set-INITIAL_FORMS": "0",
"name": "Josh Stone",
"child_set-0-name": "Paul",
"child_set-1-name": "Catherine",
}
self.client.post(reverse("admin:admin_views_parent_add"), post)
self.assertEqual(1, Parent.objects.count())
self.assertEqual(2, Child.objects.count())
children_names = list(
Child.objects.order_by("name").values_list("name", flat=True)
)
self.assertEqual("Josh Stone", Parent.objects.latest("id").name)
self.assertEqual(["Catherine Stone", "Paul Stone"], children_names)
def test_should_be_able_to_edit_related_objects_on_change_view(self):
parent = Parent.objects.create(name="Josh Stone")
paul = Child.objects.create(parent=parent, name="Paul")
catherine = Child.objects.create(parent=parent, name="Catherine")
post = {
"child_set-TOTAL_FORMS": "5",
"child_set-INITIAL_FORMS": "2",
"name": "Josh Stone",
"child_set-0-name": "Paul",
"child_set-0-id": paul.id,
"child_set-1-name": "Catherine",
"child_set-1-id": catherine.id,
}
self.client.post(
reverse("admin:admin_views_parent_change", args=(parent.id,)), post
)
children_names = list(
Child.objects.order_by("name").values_list("name", flat=True)
)
self.assertEqual("Josh Stone", Parent.objects.latest("id").name)
self.assertEqual(["Catherine Stone", "Paul Stone"], children_names)
def test_should_be_able_to_edit_related_objects_on_changelist_view(self):
parent = Parent.objects.create(name="Josh Rock")
Child.objects.create(parent=parent, name="Paul")
Child.objects.create(parent=parent, name="Catherine")
post = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "1",
"form-MAX_NUM_FORMS": "0",
"form-0-id": parent.id,
"form-0-name": "Josh Stone",
"_save": "Save",
}
self.client.post(reverse("admin:admin_views_parent_changelist"), post)
children_names = list(
Child.objects.order_by("name").values_list("name", flat=True)
)
self.assertEqual("Josh Stone", Parent.objects.latest("id").name)
self.assertEqual(["Catherine Stone", "Paul Stone"], children_names)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminViewLogoutTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def test_logout(self):
self.client.force_login(self.superuser)
response = self.client.post(reverse("admin:logout"))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "registration/logged_out.html")
self.assertEqual(response.request["PATH_INFO"], reverse("admin:logout"))
self.assertFalse(response.context["has_permission"])
self.assertNotContains(
response, "user-tools"
) # The user-tools div shouldn't be visible.
def test_client_logout_url_can_be_used_to_login(self):
response = self.client.post(reverse("admin:logout"))
self.assertEqual(
response.status_code, 302
) # We should be redirected to the login page.
# Follow the redirect and test the results.
response = self.client.post(reverse("admin:logout"), follow=True)
self.assertContains(
response,
'<input type="hidden" name="next" value="%s">' % reverse("admin:index"),
)
self.assertTemplateUsed(response, "admin/login.html")
self.assertEqual(response.request["PATH_INFO"], reverse("admin:login"))
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminUserMessageTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def send_message(self, level):
"""
Helper that sends a POST to the dummy action methods and asserts that
a message with the given level appears in the response.
"""
action_data = {
ACTION_CHECKBOX_NAME: [1],
"action": "message_%s" % level,
"index": 0,
}
response = self.client.post(
reverse("admin:admin_views_usermessenger_changelist"),
action_data,
follow=True,
)
self.assertContains(
response, '<li class="%s">Test %s</li>' % (level, level), html=True
)
@override_settings(MESSAGE_LEVEL=10) # Set to DEBUG for this request
def test_message_debug(self):
self.send_message("debug")
def test_message_info(self):
self.send_message("info")
def test_message_success(self):
self.send_message("success")
def test_message_warning(self):
self.send_message("warning")
def test_message_error(self):
self.send_message("error")
def test_message_extra_tags(self):
action_data = {
ACTION_CHECKBOX_NAME: [1],
"action": "message_extra_tags",
"index": 0,
}
response = self.client.post(
reverse("admin:admin_views_usermessenger_changelist"),
action_data,
follow=True,
)
self.assertContains(
response, '<li class="extra_tag info">Test tags</li>', html=True
)
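# The message_<level> actions exercised above presumably wrap
# ModelAdmin.message_user(); a sketch of one such action (hypothetical name):
from django.contrib import admin, messages

class _SketchUserMessengerAdmin(admin.ModelAdmin):
    actions = ["message_info"]

    @admin.action(description="Message info")
    def message_info(self, request, queryset):
        self.message_user(request, "Test info", messages.INFO)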
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminKeepChangeListFiltersTests(TestCase):
admin_site = site
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.joepublicuser = User.objects.create_user(
username="joepublic", password="secret"
)
def setUp(self):
self.client.force_login(self.superuser)
def assertURLEqual(self, url1, url2, msg_prefix=""):
"""
Assert that two URLs are equal despite the ordering
of their querystring. Refs #22360.
"""
parsed_url1 = urlparse(url1)
path1 = parsed_url1.path
parsed_qs1 = dict(parse_qsl(parsed_url1.query))
parsed_url2 = urlparse(url2)
path2 = parsed_url2.path
parsed_qs2 = dict(parse_qsl(parsed_url2.query))
for parsed_qs in [parsed_qs1, parsed_qs2]:
if "_changelist_filters" in parsed_qs:
changelist_filters = parsed_qs["_changelist_filters"]
parsed_filters = dict(parse_qsl(changelist_filters))
parsed_qs["_changelist_filters"] = parsed_filters
self.assertEqual(path1, path2)
self.assertEqual(parsed_qs1, parsed_qs2)
def test_assert_url_equal(self):
# Test equality.
change_user_url = reverse(
"admin:auth_user_change", args=(self.joepublicuser.pk,)
)
self.assertURLEqual(
"http://testserver{}?_changelist_filters="
"is_staff__exact%3D0%26is_superuser__exact%3D0".format(change_user_url),
"http://testserver{}?_changelist_filters="
"is_staff__exact%3D0%26is_superuser__exact%3D0".format(change_user_url),
)
# Test inequality.
with self.assertRaises(AssertionError):
self.assertURLEqual(
"http://testserver{}?_changelist_filters="
"is_staff__exact%3D0%26is_superuser__exact%3D0".format(change_user_url),
"http://testserver{}?_changelist_filters="
"is_staff__exact%3D1%26is_superuser__exact%3D1".format(change_user_url),
)
# Ignore scheme and host.
self.assertURLEqual(
"http://testserver{}?_changelist_filters="
"is_staff__exact%3D0%26is_superuser__exact%3D0".format(change_user_url),
"{}?_changelist_filters="
"is_staff__exact%3D0%26is_superuser__exact%3D0".format(change_user_url),
)
# Ignore ordering of querystring.
self.assertURLEqual(
"{}?is_staff__exact=0&is_superuser__exact=0".format(
reverse("admin:auth_user_changelist")
),
"{}?is_superuser__exact=0&is_staff__exact=0".format(
reverse("admin:auth_user_changelist")
),
)
# Ignore ordering of _changelist_filters.
self.assertURLEqual(
"{}?_changelist_filters="
"is_staff__exact%3D0%26is_superuser__exact%3D0".format(change_user_url),
"{}?_changelist_filters="
"is_superuser__exact%3D0%26is_staff__exact%3D0".format(change_user_url),
)
def get_changelist_filters(self):
return {
"is_superuser__exact": 0,
"is_staff__exact": 0,
}
def get_changelist_filters_querystring(self):
return urlencode(self.get_changelist_filters())
def get_preserved_filters_querystring(self):
return urlencode(
{"_changelist_filters": self.get_changelist_filters_querystring()}
)
def get_sample_user_id(self):
return self.joepublicuser.pk
def get_changelist_url(self):
return "%s?%s" % (
reverse("admin:auth_user_changelist", current_app=self.admin_site.name),
self.get_changelist_filters_querystring(),
)
def get_add_url(self, add_preserved_filters=True):
url = reverse("admin:auth_user_add", current_app=self.admin_site.name)
if add_preserved_filters:
url = "%s?%s" % (url, self.get_preserved_filters_querystring())
return url
def get_change_url(self, user_id=None, add_preserved_filters=True):
if user_id is None:
user_id = self.get_sample_user_id()
url = reverse(
"admin:auth_user_change", args=(user_id,), current_app=self.admin_site.name
)
if add_preserved_filters:
url = "%s?%s" % (url, self.get_preserved_filters_querystring())
return url
def get_history_url(self, user_id=None):
if user_id is None:
user_id = self.get_sample_user_id()
return "%s?%s" % (
reverse(
"admin:auth_user_history",
args=(user_id,),
current_app=self.admin_site.name,
),
self.get_preserved_filters_querystring(),
)
def get_delete_url(self, user_id=None):
if user_id is None:
user_id = self.get_sample_user_id()
return "%s?%s" % (
reverse(
"admin:auth_user_delete",
args=(user_id,),
current_app=self.admin_site.name,
),
self.get_preserved_filters_querystring(),
)
def test_changelist_view(self):
response = self.client.get(self.get_changelist_url())
self.assertEqual(response.status_code, 200)
# Check the `change_view` link has the correct querystring.
detail_link = re.search(
'<a href="(.*?)">{}</a>'.format(self.joepublicuser.username),
response.content.decode(),
)
self.assertURLEqual(detail_link[1], self.get_change_url())
def test_change_view(self):
# Get the `change_view`.
response = self.client.get(self.get_change_url())
self.assertEqual(response.status_code, 200)
# Check the form action.
form_action = re.search(
'<form action="(.*?)" method="post" id="user_form" novalidate>',
response.content.decode(),
)
self.assertURLEqual(
form_action[1], "?%s" % self.get_preserved_filters_querystring()
)
# Check the history link.
history_link = re.search(
'<a href="(.*?)" class="historylink">History</a>', response.content.decode()
)
self.assertURLEqual(history_link[1], self.get_history_url())
# Check the delete link.
delete_link = re.search(
'<a href="(.*?)" class="deletelink">Delete</a>', response.content.decode()
)
self.assertURLEqual(delete_link[1], self.get_delete_url())
# Test redirect on "Save".
post_data = {
"username": "joepublic",
"last_login_0": "2007-05-30",
"last_login_1": "13:20:10",
"date_joined_0": "2007-05-30",
"date_joined_1": "13:20:10",
}
post_data["_save"] = 1
response = self.client.post(self.get_change_url(), data=post_data)
self.assertRedirects(response, self.get_changelist_url())
post_data.pop("_save")
# Test redirect on "Save and continue".
post_data["_continue"] = 1
response = self.client.post(self.get_change_url(), data=post_data)
self.assertRedirects(response, self.get_change_url())
post_data.pop("_continue")
# Test redirect on "Save and add new".
post_data["_addanother"] = 1
response = self.client.post(self.get_change_url(), data=post_data)
self.assertRedirects(response, self.get_add_url())
post_data.pop("_addanother")
def test_change_view_close_link(self):
viewuser = User.objects.create_user(
username="view", password="secret", is_staff=True
)
viewuser.user_permissions.add(
get_perm(User, get_permission_codename("view", User._meta))
)
self.client.force_login(viewuser)
response = self.client.get(self.get_change_url())
close_link = re.search(
'<a href="(.*?)" class="closelink">Close</a>', response.content.decode()
)
close_link = close_link[1].replace("&", "&")
self.assertURLEqual(close_link, self.get_changelist_url())
def test_change_view_without_preserved_filters(self):
response = self.client.get(self.get_change_url(add_preserved_filters=False))
# The action attribute is omitted.
self.assertContains(response, '<form method="post" id="user_form" novalidate>')
def test_add_view(self):
# Get the `add_view`.
response = self.client.get(self.get_add_url())
self.assertEqual(response.status_code, 200)
# Check the form action.
form_action = re.search(
'<form action="(.*?)" method="post" id="user_form" novalidate>',
response.content.decode(),
)
self.assertURLEqual(
form_action[1], "?%s" % self.get_preserved_filters_querystring()
)
post_data = {
"username": "dummy",
"password1": "test",
"password2": "test",
}
# Test redirect on "Save".
post_data["_save"] = 1
response = self.client.post(self.get_add_url(), data=post_data)
self.assertRedirects(
response, self.get_change_url(User.objects.get(username="dummy").pk)
)
post_data.pop("_save")
# Test redirect on "Save and continue".
post_data["username"] = "dummy2"
post_data["_continue"] = 1
response = self.client.post(self.get_add_url(), data=post_data)
self.assertRedirects(
response, self.get_change_url(User.objects.get(username="dummy2").pk)
)
post_data.pop("_continue")
# Test redirect on "Save and add new".
post_data["username"] = "dummy3"
post_data["_addanother"] = 1
response = self.client.post(self.get_add_url(), data=post_data)
self.assertRedirects(response, self.get_add_url())
post_data.pop("_addanother")
def test_add_view_without_preserved_filters(self):
response = self.client.get(self.get_add_url(add_preserved_filters=False))
# The action attribute is omitted.
self.assertContains(response, '<form method="post" id="user_form" novalidate>')
def test_delete_view(self):
# Test redirect on "Delete".
response = self.client.post(self.get_delete_url(), {"post": "yes"})
self.assertRedirects(response, self.get_changelist_url())
def test_url_prefix(self):
context = {
"preserved_filters": self.get_preserved_filters_querystring(),
"opts": User._meta,
}
prefixes = ("", "/prefix/", "/後台/")
for prefix in prefixes:
with self.subTest(prefix=prefix), override_script_prefix(prefix):
url = reverse(
"admin:auth_user_changelist", current_app=self.admin_site.name
)
self.assertURLEqual(
self.get_changelist_url(),
add_preserved_filters(context, url),
)
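# The preserved-filter round trip tested above relies on
# add_preserved_filters() (from django.contrib.admin.templatetags.admin_urls),
# which re-appends ?_changelist_filters=... to links rendered on the change
# form; test_url_prefix() calls it directly. A sketch (hypothetical helper):
from django.contrib.admin.templatetags.admin_urls import add_preserved_filters
from django.contrib.auth.models import User

def _sketch_preserved_url(url):
    # Attach a fixed set of changelist filters to the given admin URL.
    context = {
        "preserved_filters": "_changelist_filters=is_staff__exact%3D0",
        "opts": User._meta,
    }
    return add_preserved_filters(context, url)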
class NamespacedAdminKeepChangeListFiltersTests(AdminKeepChangeListFiltersTests):
admin_site = site2
@override_settings(ROOT_URLCONF="admin_views.urls")
class TestLabelVisibility(TestCase):
"""#11277 -Labels of hidden fields in admin were not hidden."""
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_all_fields_visible(self):
response = self.client.get(reverse("admin:admin_views_emptymodelvisible_add"))
self.assert_fieldline_visible(response)
self.assert_field_visible(response, "first")
self.assert_field_visible(response, "second")
def test_all_fields_hidden(self):
response = self.client.get(reverse("admin:admin_views_emptymodelhidden_add"))
self.assert_fieldline_hidden(response)
self.assert_field_hidden(response, "first")
self.assert_field_hidden(response, "second")
def test_mixin(self):
response = self.client.get(reverse("admin:admin_views_emptymodelmixin_add"))
self.assert_fieldline_visible(response)
self.assert_field_hidden(response, "first")
self.assert_field_visible(response, "second")
def assert_field_visible(self, response, field_name):
self.assertContains(
response, f'<div class="flex-container fieldBox field-{field_name}">'
)
def assert_field_hidden(self, response, field_name):
self.assertContains(
response, f'<div class="flex-container fieldBox field-{field_name} hidden">'
)
def assert_fieldline_visible(self, response):
self.assertContains(response, '<div class="form-row field-first field-second">')
def assert_fieldline_hidden(self, response):
self.assertContains(response, '<div class="form-row hidden')
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminViewOnSiteTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.s1 = State.objects.create(name="New York")
cls.s2 = State.objects.create(name="Illinois")
cls.s3 = State.objects.create(name="California")
cls.c1 = City.objects.create(state=cls.s1, name="New York")
cls.c2 = City.objects.create(state=cls.s2, name="Chicago")
cls.c3 = City.objects.create(state=cls.s3, name="San Francisco")
cls.r1 = Restaurant.objects.create(city=cls.c1, name="Italian Pizza")
cls.r2 = Restaurant.objects.create(city=cls.c1, name="Boulevard")
cls.r3 = Restaurant.objects.create(city=cls.c2, name="Chinese Dinner")
cls.r4 = Restaurant.objects.create(city=cls.c2, name="Angels")
cls.r5 = Restaurant.objects.create(city=cls.c2, name="Take Away")
cls.r6 = Restaurant.objects.create(city=cls.c3, name="The Unknown Restaurant")
cls.w1 = Worker.objects.create(work_at=cls.r1, name="Mario", surname="Rossi")
cls.w2 = Worker.objects.create(
work_at=cls.r1, name="Antonio", surname="Bianchi"
)
cls.w3 = Worker.objects.create(work_at=cls.r1, name="John", surname="Doe")
def setUp(self):
self.client.force_login(self.superuser)
def test_add_view_form_and_formsets_run_validation(self):
"""
Issue #20522
If the parent form fails validation, the inlines also run validation,
even when that validation is contingent on parent form data. Also,
assertFormError() and assertFormSetError() are usable for admin forms
and formsets.
"""
# The form validation should fail because 'some_required_info' is
# not included on the parent form, and the family_name of the parent
# does not match that of the child
post_data = {
"family_name": "Test1",
"dependentchild_set-TOTAL_FORMS": "1",
"dependentchild_set-INITIAL_FORMS": "0",
"dependentchild_set-MAX_NUM_FORMS": "1",
"dependentchild_set-0-id": "",
"dependentchild_set-0-parent": "",
"dependentchild_set-0-family_name": "Test2",
}
response = self.client.post(
reverse("admin:admin_views_parentwithdependentchildren_add"), post_data
)
self.assertFormError(
response.context["adminform"],
"some_required_info",
["This field is required."],
)
self.assertFormError(response.context["adminform"], None, [])
self.assertFormSetError(
response.context["inline_admin_formset"],
0,
None,
[
"Children must share a family name with their parents in this "
"contrived test case"
],
)
self.assertFormSetError(
response.context["inline_admin_formset"], None, None, []
)
def test_change_view_form_and_formsets_run_validation(self):
"""
Issue #20522
If the parent form fails validation, the inlines also run validation,
even when that validation is contingent on parent form data.
"""
pwdc = ParentWithDependentChildren.objects.create(
some_required_info=6, family_name="Test1"
)
# The form validation should fail because 'some_required_info' is
# not included on the parent form, and the family_name of the parent
# does not match that of the child
post_data = {
"family_name": "Test2",
"dependentchild_set-TOTAL_FORMS": "1",
"dependentchild_set-INITIAL_FORMS": "0",
"dependentchild_set-MAX_NUM_FORMS": "1",
"dependentchild_set-0-id": "",
"dependentchild_set-0-parent": str(pwdc.id),
"dependentchild_set-0-family_name": "Test1",
}
response = self.client.post(
reverse(
"admin:admin_views_parentwithdependentchildren_change", args=(pwdc.id,)
),
post_data,
)
self.assertFormError(
response.context["adminform"],
"some_required_info",
["This field is required."],
)
self.assertFormSetError(
response.context["inline_admin_formset"],
0,
None,
[
"Children must share a family name with their parents in this "
"contrived test case"
],
)
def test_check(self):
"The view_on_site value is either a boolean or a callable"
try:
admin = CityAdmin(City, AdminSite())
CityAdmin.view_on_site = True
self.assertEqual(admin.check(), [])
CityAdmin.view_on_site = False
self.assertEqual(admin.check(), [])
CityAdmin.view_on_site = lambda obj: obj.get_absolute_url()
self.assertEqual(admin.check(), [])
CityAdmin.view_on_site = []
self.assertEqual(
admin.check(),
[
Error(
"The value of 'view_on_site' must be a callable or a boolean "
"value.",
obj=CityAdmin,
id="admin.E025",
),
],
)
finally:
# Restore the original values for the benefit of other tests.
CityAdmin.view_on_site = True
def test_false(self):
"The 'View on site' button is not displayed if view_on_site is False"
response = self.client.get(
reverse("admin:admin_views_restaurant_change", args=(self.r1.pk,))
)
content_type_pk = ContentType.objects.get_for_model(Restaurant).pk
self.assertNotContains(
response, reverse("admin:view_on_site", args=(content_type_pk, 1))
)
def test_true(self):
"The default behavior is followed if view_on_site is True"
response = self.client.get(
reverse("admin:admin_views_city_change", args=(self.c1.pk,))
)
content_type_pk = ContentType.objects.get_for_model(City).pk
self.assertContains(
response, reverse("admin:view_on_site", args=(content_type_pk, self.c1.pk))
)
def test_callable(self):
"The right link is displayed if view_on_site is a callable"
response = self.client.get(
reverse("admin:admin_views_worker_change", args=(self.w1.pk,))
)
self.assertContains(
response, '"/worker/%s/%s/"' % (self.w1.surname, self.w1.name)
)
def test_missing_get_absolute_url(self):
"None is returned if model doesn't have get_absolute_url"
model_admin = ModelAdmin(Worker, None)
self.assertIsNone(model_admin.get_view_on_site_url(Worker()))
def test_custom_admin_site(self):
model_admin = ModelAdmin(City, customadmin.site)
content_type_pk = ContentType.objects.get_for_model(City).pk
redirect_url = model_admin.get_view_on_site_url(self.c1)
self.assertEqual(
redirect_url,
reverse(
f"{customadmin.site.name}:view_on_site",
kwargs={
"content_type_id": content_type_pk,
"object_id": self.c1.pk,
},
),
)
@override_settings(ROOT_URLCONF="admin_views.urls")
class InlineAdminViewOnSiteTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
cls.s1 = State.objects.create(name="New York")
cls.s2 = State.objects.create(name="Illinois")
cls.s3 = State.objects.create(name="California")
cls.c1 = City.objects.create(state=cls.s1, name="New York")
cls.c2 = City.objects.create(state=cls.s2, name="Chicago")
cls.c3 = City.objects.create(state=cls.s3, name="San Francisco")
cls.r1 = Restaurant.objects.create(city=cls.c1, name="Italian Pizza")
cls.r2 = Restaurant.objects.create(city=cls.c1, name="Boulevard")
cls.r3 = Restaurant.objects.create(city=cls.c2, name="Chinese Dinner")
cls.r4 = Restaurant.objects.create(city=cls.c2, name="Angels")
cls.r5 = Restaurant.objects.create(city=cls.c2, name="Take Away")
cls.r6 = Restaurant.objects.create(city=cls.c3, name="The Unknown Restaurant")
cls.w1 = Worker.objects.create(work_at=cls.r1, name="Mario", surname="Rossi")
cls.w2 = Worker.objects.create(
work_at=cls.r1, name="Antonio", surname="Bianchi"
)
cls.w3 = Worker.objects.create(work_at=cls.r1, name="John", surname="Doe")
def setUp(self):
self.client.force_login(self.superuser)
def test_false(self):
"The 'View on site' button is not displayed if view_on_site is False"
response = self.client.get(
reverse("admin:admin_views_state_change", args=(self.s1.pk,))
)
content_type_pk = ContentType.objects.get_for_model(City).pk
self.assertNotContains(
response, reverse("admin:view_on_site", args=(content_type_pk, self.c1.pk))
)
def test_true(self):
"The 'View on site' button is displayed if view_on_site is True"
response = self.client.get(
reverse("admin:admin_views_city_change", args=(self.c1.pk,))
)
content_type_pk = ContentType.objects.get_for_model(Restaurant).pk
self.assertContains(
response, reverse("admin:view_on_site", args=(content_type_pk, self.r1.pk))
)
def test_callable(self):
"The right link is displayed if view_on_site is a callable"
response = self.client.get(
reverse("admin:admin_views_restaurant_change", args=(self.r1.pk,))
)
self.assertContains(
response, '"/worker_inline/%s/%s/"' % (self.w1.surname, self.w1.name)
)
@override_settings(ROOT_URLCONF="admin_views.urls")
class GetFormsetsWithInlinesArgumentTest(TestCase):
"""
#23934 - When adding a new model instance in the admin, the 'obj' argument
of get_formsets_with_inlines() should be None. When changing, it should be
equal to the existing model instance.
The GetFormsetsArgumentCheckingAdmin ModelAdmin throws an exception
if obj is not None during add_view or obj is None during change_view.
"""
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
def setUp(self):
self.client.force_login(self.superuser)
def test_explicitly_provided_pk(self):
post_data = {"name": "1"}
response = self.client.post(
reverse("admin:admin_views_explicitlyprovidedpk_add"), post_data
)
self.assertEqual(response.status_code, 302)
post_data = {"name": "2"}
response = self.client.post(
reverse("admin:admin_views_explicitlyprovidedpk_change", args=(1,)),
post_data,
)
self.assertEqual(response.status_code, 302)
def test_implicitly_generated_pk(self):
post_data = {"name": "1"}
response = self.client.post(
reverse("admin:admin_views_implicitlygeneratedpk_add"), post_data
)
self.assertEqual(response.status_code, 302)
post_data = {"name": "2"}
response = self.client.post(
reverse("admin:admin_views_implicitlygeneratedpk_change", args=(1,)),
post_data,
)
self.assertEqual(response.status_code, 302)
@override_settings(ROOT_URLCONF="admin_views.urls")
class AdminSiteFinalCatchAllPatternTests(TestCase):
"""
Verifies the behaviour of the admin catch-all view.
    * Anonymous/non-staff users are redirected to login for all URLs, whether
otherwise valid or not.
* APPEND_SLASH is applied for staff if needed.
* Otherwise Http404.
* Catch-all view disabled via AdminSite.final_catch_all_view.
"""
@classmethod
def setUpTestData(cls):
cls.staff_user = User.objects.create_user(
username="staff",
password="secret",
email="[email protected]",
is_staff=True,
)
cls.non_staff_user = User.objects.create_user(
username="user",
password="secret",
email="[email protected]",
is_staff=False,
)
def test_unknown_url_redirects_login_if_not_authenticated(self):
unknown_url = "/test_admin/admin/unknown/"
response = self.client.get(unknown_url)
self.assertRedirects(
response, "%s?next=%s" % (reverse("admin:login"), unknown_url)
)
def test_unknown_url_404_if_authenticated(self):
self.client.force_login(self.staff_user)
unknown_url = "/test_admin/admin/unknown/"
response = self.client.get(unknown_url)
self.assertEqual(response.status_code, 404)
def test_known_url_redirects_login_if_not_authenticated(self):
known_url = reverse("admin:admin_views_article_changelist")
response = self.client.get(known_url)
self.assertRedirects(
response, "%s?next=%s" % (reverse("admin:login"), known_url)
)
def test_known_url_missing_slash_redirects_login_if_not_authenticated(self):
known_url = reverse("admin:admin_views_article_changelist")[:-1]
response = self.client.get(known_url)
# Redirects with the next URL also missing the slash.
self.assertRedirects(
response, "%s?next=%s" % (reverse("admin:login"), known_url)
)
def test_non_admin_url_shares_url_prefix(self):
url = reverse("non_admin")[:-1]
response = self.client.get(url)
# Redirects with the next URL also missing the slash.
self.assertRedirects(response, "%s?next=%s" % (reverse("admin:login"), url))
def test_url_without_trailing_slash_if_not_authenticated(self):
url = reverse("admin:article_extra_json")
response = self.client.get(url)
self.assertRedirects(response, "%s?next=%s" % (reverse("admin:login"), url))
    def test_unknown_url_without_trailing_slash_if_not_authenticated(self):
url = reverse("admin:article_extra_json")[:-1]
response = self.client.get(url)
self.assertRedirects(response, "%s?next=%s" % (reverse("admin:login"), url))
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true_unknown_url(self):
self.client.force_login(self.staff_user)
unknown_url = "/test_admin/admin/unknown/"
response = self.client.get(unknown_url[:-1])
self.assertEqual(response.status_code, 404)
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true(self):
self.client.force_login(self.staff_user)
known_url = reverse("admin:admin_views_article_changelist")
response = self.client.get(known_url[:-1])
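        # The redirect target returns 403 because the staff user lacks
        # permissions on the Article model.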
self.assertRedirects(
response, known_url, status_code=301, target_status_code=403
)
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true_query_string(self):
self.client.force_login(self.staff_user)
known_url = reverse("admin:admin_views_article_changelist")
response = self.client.get("%s?id=1" % known_url[:-1])
self.assertRedirects(
response,
f"{known_url}?id=1",
status_code=301,
fetch_redirect_response=False,
)
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true_script_name(self):
self.client.force_login(self.staff_user)
known_url = reverse("admin:admin_views_article_changelist")
response = self.client.get(known_url[:-1], SCRIPT_NAME="/prefix/")
self.assertRedirects(
response,
"/prefix" + known_url,
status_code=301,
fetch_redirect_response=False,
)
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true_script_name_query_string(self):
self.client.force_login(self.staff_user)
known_url = reverse("admin:admin_views_article_changelist")
response = self.client.get("%s?id=1" % known_url[:-1], SCRIPT_NAME="/prefix/")
self.assertRedirects(
response,
f"/prefix{known_url}?id=1",
status_code=301,
fetch_redirect_response=False,
)
@override_settings(APPEND_SLASH=True, FORCE_SCRIPT_NAME="/prefix/")
def test_missing_slash_append_slash_true_force_script_name(self):
self.client.force_login(self.staff_user)
known_url = reverse("admin:admin_views_article_changelist")
response = self.client.get(known_url[:-1])
self.assertRedirects(
response,
"/prefix" + known_url,
status_code=301,
fetch_redirect_response=False,
)
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true_non_staff_user(self):
self.client.force_login(self.non_staff_user)
known_url = reverse("admin:admin_views_article_changelist")
response = self.client.get(known_url[:-1])
self.assertRedirects(
response,
"/test_admin/admin/login/?next=/test_admin/admin/admin_views/article",
)
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true_non_staff_user_query_string(self):
self.client.force_login(self.non_staff_user)
known_url = reverse("admin:admin_views_article_changelist")
response = self.client.get("%s?id=1" % known_url[:-1])
self.assertRedirects(
response,
"/test_admin/admin/login/?next=/test_admin/admin/admin_views/article"
"%3Fid%3D1",
)
@override_settings(APPEND_SLASH=False)
def test_missing_slash_append_slash_false(self):
self.client.force_login(self.staff_user)
known_url = reverse("admin:admin_views_article_changelist")
response = self.client.get(known_url[:-1])
self.assertEqual(response.status_code, 404)
@override_settings(APPEND_SLASH=True)
def test_single_model_no_append_slash(self):
self.client.force_login(self.staff_user)
known_url = reverse("admin9:admin_views_actor_changelist")
response = self.client.get(known_url[:-1])
self.assertEqual(response.status_code, 404)
    # Same tests as above, but with final_catch_all_view=False.
def test_unknown_url_404_if_not_authenticated_without_final_catch_all_view(self):
unknown_url = "/test_admin/admin10/unknown/"
response = self.client.get(unknown_url)
self.assertEqual(response.status_code, 404)
def test_unknown_url_404_if_authenticated_without_final_catch_all_view(self):
self.client.force_login(self.staff_user)
unknown_url = "/test_admin/admin10/unknown/"
response = self.client.get(unknown_url)
self.assertEqual(response.status_code, 404)
def test_known_url_redirects_login_if_not_auth_without_final_catch_all_view(
self,
):
known_url = reverse("admin10:admin_views_article_changelist")
response = self.client.get(known_url)
self.assertRedirects(
response, "%s?next=%s" % (reverse("admin10:login"), known_url)
)
def test_known_url_missing_slash_redirects_with_slash_if_not_auth_no_catch_all_view(
self,
):
known_url = reverse("admin10:admin_views_article_changelist")
response = self.client.get(known_url[:-1])
self.assertRedirects(
response, known_url, status_code=301, fetch_redirect_response=False
)
def test_non_admin_url_shares_url_prefix_without_final_catch_all_view(self):
url = reverse("non_admin10")
response = self.client.get(url[:-1])
self.assertRedirects(response, url, status_code=301)
def test_url_no_trailing_slash_if_not_auth_without_final_catch_all_view(
self,
):
url = reverse("admin10:article_extra_json")
response = self.client.get(url)
self.assertRedirects(response, "%s?next=%s" % (reverse("admin10:login"), url))
def test_unknown_url_no_trailing_slash_if_not_auth_without_final_catch_all_view(
self,
):
url = reverse("admin10:article_extra_json")[:-1]
response = self.client.get(url)
# Matches test_admin/admin10/admin_views/article/<path:object_id>/
self.assertRedirects(
response, url + "/", status_code=301, fetch_redirect_response=False
)
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true_unknown_url_without_final_catch_all_view(
self,
):
self.client.force_login(self.staff_user)
unknown_url = "/test_admin/admin10/unknown/"
response = self.client.get(unknown_url[:-1])
self.assertEqual(response.status_code, 404)
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true_without_final_catch_all_view(self):
self.client.force_login(self.staff_user)
known_url = reverse("admin10:admin_views_article_changelist")
response = self.client.get(known_url[:-1])
self.assertRedirects(
response, known_url, status_code=301, target_status_code=403
)
@override_settings(APPEND_SLASH=True)
def test_missing_slash_append_slash_true_query_without_final_catch_all_view(self):
self.client.force_login(self.staff_user)
known_url = reverse("admin10:admin_views_article_changelist")
response = self.client.get("%s?id=1" % known_url[:-1])
self.assertRedirects(
response,
f"{known_url}?id=1",
status_code=301,
fetch_redirect_response=False,
)
@override_settings(APPEND_SLASH=False)
def test_missing_slash_append_slash_false_without_final_catch_all_view(self):
self.client.force_login(self.staff_user)
known_url = reverse("admin10:admin_views_article_changelist")
response = self.client.get(known_url[:-1])
self.assertEqual(response.status_code, 404)
# Outside admin.
def test_non_admin_url_404_if_not_authenticated(self):
unknown_url = "/unknown/"
response = self.client.get(unknown_url)
# Does not redirect to the admin login.
self.assertEqual(response.status_code, 404)
|
924f7b6b9a0125939c051827f2d8e7471a1f3aa9d20d78fc49ee74d5560768d3 | from django.contrib.admin.tests import AdminSeleniumTestCase
from django.contrib.auth.models import User
from django.test import override_settings
from django.urls import reverse
@override_settings(ROOT_URLCONF="admin_views.urls")
class SeleniumTests(AdminSeleniumTestCase):
available_apps = ["admin_views"] + AdminSeleniumTestCase.available_apps
def setUp(self):
self.superuser = User.objects.create_superuser(
username="super", password="secret", email="[email protected]"
)
self.admin_login(
username="super", password="secret", login_url=reverse("admin:index")
)
def test_related_object_link_images_empty_alt(self):
from selenium.webdriver.common.by import By
album_add_url = reverse("admin:admin_views_album_add")
self.selenium.get(self.live_server_url + album_add_url)
tests = [
"add_id_owner",
"change_id_owner",
"delete_id_owner",
"view_id_owner",
]
for link_id in tests:
with self.subTest(link_id):
link_image = self.selenium.find_element(
By.XPATH, f'//*[@id="{link_id}"]/img'
)
self.assertEqual(link_image.get_attribute("alt"), "")
def test_related_object_lookup_link_initial_state(self):
from selenium.webdriver.common.by import By
album_add_url = reverse("admin:admin_views_album_add")
self.selenium.get(self.live_server_url + album_add_url)
tests = [
"change_id_owner",
"delete_id_owner",
"view_id_owner",
]
for link_id in tests:
with self.subTest(link_id):
link = self.selenium.find_element(By.XPATH, f'//*[@id="{link_id}"]')
self.assertEqual(link.get_attribute("aria-disabled"), "true")
def test_related_object_lookup_link_enabled(self):
from selenium.webdriver.common.by import By
from selenium.webdriver.support.select import Select
album_add_url = reverse("admin:admin_views_album_add")
self.selenium.get(self.live_server_url + album_add_url)
select_element = self.selenium.find_element(By.XPATH, '//*[@id="id_owner"]')
option = Select(select_element).options[-1]
self.assertEqual(option.text, "super")
select_element.click()
option.click()
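        # Picking an owner should enable the related-object links.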
tests = [
"add_id_owner",
"change_id_owner",
"delete_id_owner",
"view_id_owner",
]
for link_id in tests:
with self.subTest(link_id):
link = self.selenium.find_element(By.XPATH, f'//*[@id="{link_id}"]')
self.assertIsNone(link.get_attribute("aria-disabled"))
|
128f7e1a9aa5fe0514ef19ae58e8f8fa77d366867aa277e8d5ea8a6787a36242 | import sys
import unittest
from django.conf import settings
from django.contrib import admin
from django.contrib.admindocs import utils, views
from django.contrib.admindocs.views import get_return_data_type, simplify_regex
from django.contrib.sites.models import Site
from django.db import models
from django.db.models import fields
from django.test import SimpleTestCase, modify_settings, override_settings
from django.test.utils import captured_stderr
from django.urls import include, path, reverse
from django.utils.functional import SimpleLazyObject
from .models import Company, Person
from .tests import AdminDocsTestCase, TestDataMixin
@unittest.skipUnless(utils.docutils_is_available, "no docutils installed.")
class AdminDocViewTests(TestDataMixin, AdminDocsTestCase):
def setUp(self):
self.client.force_login(self.superuser)
def test_index(self):
response = self.client.get(reverse("django-admindocs-docroot"))
self.assertContains(response, "<h1>Documentation</h1>", html=True)
self.assertContains(
response,
'<div id="site-name"><a href="/admin/">Django administration</a></div>',
)
self.client.logout()
response = self.client.get(reverse("django-admindocs-docroot"), follow=True)
# Should display the login screen
self.assertContains(
response, '<input type="hidden" name="next" value="/admindocs/">', html=True
)
def test_bookmarklets(self):
response = self.client.get(reverse("django-admindocs-bookmarklets"))
self.assertContains(response, "/admindocs/views/")
def test_templatetag_index(self):
response = self.client.get(reverse("django-admindocs-tags"))
self.assertContains(
response, '<h3 id="built_in-extends">extends</h3>', html=True
)
def test_templatefilter_index(self):
response = self.client.get(reverse("django-admindocs-filters"))
self.assertContains(response, '<h3 id="built_in-first">first</h3>', html=True)
def test_view_index(self):
response = self.client.get(reverse("django-admindocs-views-index"))
self.assertContains(
response,
'<h3><a href="/admindocs/views/django.contrib.admindocs.views.'
'BaseAdminDocsView/">/admindocs/</a></h3>',
html=True,
)
self.assertContains(response, "Views by namespace test")
self.assertContains(response, "Name: <code>test:func</code>.")
self.assertContains(
response,
'<h3><a href="/admindocs/views/admin_docs.views.XViewCallableObject/">'
"/xview/callable_object_without_xview/</a></h3>",
html=True,
)
def test_view_index_with_method(self):
"""
Views that are methods are listed correctly.
"""
response = self.client.get(reverse("django-admindocs-views-index"))
self.assertContains(
response,
"<h3>"
'<a href="/admindocs/views/django.contrib.admin.sites.AdminSite.index/">'
"/admin/</a></h3>",
html=True,
)
def test_view_detail(self):
url = reverse(
"django-admindocs-views-detail",
args=["django.contrib.admindocs.views.BaseAdminDocsView"],
)
response = self.client.get(url)
# View docstring
self.assertContains(response, "Base view for admindocs views.")
@override_settings(ROOT_URLCONF="admin_docs.namespace_urls")
def test_namespaced_view_detail(self):
url = reverse(
"django-admindocs-views-detail", args=["admin_docs.views.XViewClass"]
)
response = self.client.get(url)
self.assertContains(response, "<h1>admin_docs.views.XViewClass</h1>")
def test_view_detail_illegal_import(self):
url = reverse(
"django-admindocs-views-detail",
args=["urlpatterns_reverse.nonimported_module.view"],
)
response = self.client.get(url)
self.assertEqual(response.status_code, 404)
self.assertNotIn("urlpatterns_reverse.nonimported_module", sys.modules)
def test_view_detail_as_method(self):
"""
Views that are methods can be displayed.
"""
url = reverse(
"django-admindocs-views-detail",
args=["django.contrib.admin.sites.AdminSite.index"],
)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_model_index(self):
response = self.client.get(reverse("django-admindocs-models-index"))
self.assertContains(
response,
'<h2 id="app-auth">Authentication and Authorization (django.contrib.auth)'
"</h2>",
html=True,
)
def test_template_detail(self):
response = self.client.get(
reverse(
"django-admindocs-templates", args=["admin_doc/template_detail.html"]
)
)
self.assertContains(
response,
"<h1>Template: <q>admin_doc/template_detail.html</q></h1>",
html=True,
)
def test_missing_docutils(self):
utils.docutils_is_available = False
try:
response = self.client.get(reverse("django-admindocs-docroot"))
self.assertContains(
response,
"<h3>The admin documentation system requires Python’s "
'<a href="https://docutils.sourceforge.io/">docutils</a> '
"library.</h3>"
"<p>Please ask your administrators to install "
'<a href="https://pypi.org/project/docutils/">docutils</a>.</p>',
html=True,
)
self.assertContains(
response,
'<div id="site-name"><a href="/admin/">Django administration</a></div>',
)
finally:
utils.docutils_is_available = True
@modify_settings(INSTALLED_APPS={"remove": "django.contrib.sites"})
@override_settings(SITE_ID=None) # will restore SITE_ID after the test
def test_no_sites_framework(self):
"""
        Without the sites framework, the view should not access SITE_ID or
        Site objects. Deleting settings is fine here as UserSettingsHolder is
        used.
"""
Site.objects.all().delete()
del settings.SITE_ID
response = self.client.get(reverse("django-admindocs-views-index"))
self.assertContains(response, "View documentation")
def test_callable_urlconf(self):
"""
Index view should correctly resolve view patterns when ROOT_URLCONF is
not a string.
"""
def urlpatterns():
return (
path("admin/doc/", include("django.contrib.admindocs.urls")),
path("admin/", admin.site.urls),
)
with self.settings(ROOT_URLCONF=SimpleLazyObject(urlpatterns)):
response = self.client.get(reverse("django-admindocs-views-index"))
self.assertEqual(response.status_code, 200)
@unittest.skipUnless(utils.docutils_is_available, "no docutils installed.")
class AdminDocViewDefaultEngineOnly(TestDataMixin, AdminDocsTestCase):
def setUp(self):
self.client.force_login(self.superuser)
def test_template_detail_path_traversal(self):
cases = ["/etc/passwd", "../passwd"]
for fpath in cases:
with self.subTest(path=fpath):
response = self.client.get(
reverse("django-admindocs-templates", args=[fpath]),
)
self.assertEqual(response.status_code, 400)
@override_settings(
TEMPLATES=[
{
"NAME": "ONE",
"BACKEND": "django.template.backends.django.DjangoTemplates",
"APP_DIRS": True,
},
{
"NAME": "TWO",
"BACKEND": "django.template.backends.django.DjangoTemplates",
"APP_DIRS": True,
},
]
)
@unittest.skipUnless(utils.docutils_is_available, "no docutils installed.")
class AdminDocViewWithMultipleEngines(AdminDocViewTests):
def test_templatefilter_index(self):
# Overridden because non-trivial TEMPLATES settings aren't supported
# but the page shouldn't crash (#24125).
response = self.client.get(reverse("django-admindocs-filters"))
self.assertContains(response, "<title>Template filters</title>", html=True)
def test_templatetag_index(self):
# Overridden because non-trivial TEMPLATES settings aren't supported
# but the page shouldn't crash (#24125).
response = self.client.get(reverse("django-admindocs-tags"))
self.assertContains(response, "<title>Template tags</title>", html=True)
@unittest.skipUnless(utils.docutils_is_available, "no docutils installed.")
class TestModelDetailView(TestDataMixin, AdminDocsTestCase):
def setUp(self):
self.client.force_login(self.superuser)
with captured_stderr() as self.docutils_stderr:
self.response = self.client.get(
reverse("django-admindocs-models-detail", args=["admin_docs", "Person"])
)
def test_method_excludes(self):
"""
Methods that begin with strings defined in
``django.contrib.admindocs.views.MODEL_METHODS_EXCLUDE``
shouldn't be displayed in the admin docs.
"""
self.assertContains(self.response, "<td>get_full_name</td>")
self.assertNotContains(self.response, "<td>_get_full_name</td>")
self.assertNotContains(self.response, "<td>add_image</td>")
self.assertNotContains(self.response, "<td>delete_image</td>")
self.assertNotContains(self.response, "<td>set_status</td>")
self.assertNotContains(self.response, "<td>save_changes</td>")
def test_methods_with_arguments(self):
"""
        Methods that take arguments should also be displayed.
"""
self.assertContains(self.response, "<h3>Methods with arguments</h3>")
self.assertContains(self.response, "<td>rename_company</td>")
self.assertContains(self.response, "<td>dummy_function</td>")
self.assertContains(self.response, "<td>suffix_company_name</td>")
def test_methods_with_arguments_display_arguments(self):
"""
Methods with arguments should have their arguments displayed.
"""
self.assertContains(self.response, "<td>new_name</td>")
def test_methods_with_arguments_display_arguments_default_value(self):
"""
Methods with keyword arguments should have their arguments displayed.
"""
self.assertContains(self.response, "<td>suffix='ltd'</td>")
def test_methods_with_multiple_arguments_display_arguments(self):
"""
Methods with multiple arguments should have all their arguments
displayed, but omitting 'self'.
"""
self.assertContains(
self.response, "<td>baz, rox, *some_args, **some_kwargs</td>"
)
def test_instance_of_property_methods_are_displayed(self):
"""Model properties are displayed as fields."""
self.assertContains(self.response, "<td>a_property</td>")
def test_instance_of_cached_property_methods_are_displayed(self):
"""Model cached properties are displayed as fields."""
self.assertContains(self.response, "<td>a_cached_property</td>")
def test_method_data_types(self):
company = Company.objects.create(name="Django")
person = Person.objects.create(
first_name="Human", last_name="User", company=company
)
self.assertEqual(
get_return_data_type(person.get_status_count.__name__), "Integer"
)
self.assertEqual(get_return_data_type(person.get_groups_list.__name__), "List")
def test_descriptions_render_correctly(self):
"""
The ``description`` field should render correctly for each field type.
"""
# help text in fields
self.assertContains(
self.response, "<td>first name - The person's first name</td>"
)
self.assertContains(
self.response, "<td>last name - The person's last name</td>"
)
# method docstrings
self.assertContains(self.response, "<p>Get the full name of the person</p>")
link = '<a class="reference external" href="/admindocs/models/%s/">%s</a>'
markup = "<p>the related %s object</p>"
company_markup = markup % (link % ("admin_docs.company", "admin_docs.Company"))
# foreign keys
self.assertContains(self.response, company_markup)
# foreign keys with help text
self.assertContains(self.response, "%s\n - place of work" % company_markup)
# many to many fields
self.assertContains(
self.response,
"number of related %s objects"
% (link % ("admin_docs.group", "admin_docs.Group")),
)
self.assertContains(
self.response,
"all related %s objects"
% (link % ("admin_docs.group", "admin_docs.Group")),
)
# "raw" and "include" directives are disabled
self.assertContains(
self.response,
"<p>"raw" directive disabled.</p>",
)
self.assertContains(
self.response, ".. raw:: html\n :file: admin_docs/evilfile.txt"
)
self.assertContains(
self.response,
"<p>"include" directive disabled.</p>",
)
self.assertContains(self.response, ".. include:: admin_docs/evilfile.txt")
out = self.docutils_stderr.getvalue()
self.assertIn('"raw" directive disabled', out)
self.assertIn('"include" directive disabled', out)
def test_model_with_many_to_one(self):
link = '<a class="reference external" href="/admindocs/models/%s/">%s</a>'
response = self.client.get(
reverse("django-admindocs-models-detail", args=["admin_docs", "company"])
)
self.assertContains(
response,
"number of related %s objects"
% (link % ("admin_docs.person", "admin_docs.Person")),
)
self.assertContains(
response,
"all related %s objects"
% (link % ("admin_docs.person", "admin_docs.Person")),
)
def test_model_with_no_backward_relations_render_only_relevant_fields(self):
"""
A model with ``related_name`` of `+` shouldn't show backward
relationship links.
"""
response = self.client.get(
reverse("django-admindocs-models-detail", args=["admin_docs", "family"])
)
fields = response.context_data.get("fields")
self.assertEqual(len(fields), 2)
def test_model_docstring_renders_correctly(self):
summary = (
'<h2 class="subhead"><p>Stores information about a person, related to '
'<a class="reference external" href="/admindocs/models/myapp.company/">'
"myapp.Company</a>.</p></h2>"
)
subheading = "<p><strong>Notes</strong></p>"
body = (
'<p>Use <tt class="docutils literal">save_changes()</tt> when saving this '
"object.</p>"
)
model_body = (
'<dl class="docutils"><dt><tt class="'
'docutils literal">company</tt></dt><dd>Field storing <a class="'
'reference external" href="/admindocs/models/myapp.company/">'
"myapp.Company</a> where the person works.</dd></dl>"
)
self.assertContains(self.response, "DESCRIPTION")
self.assertContains(self.response, summary, html=True)
self.assertContains(self.response, subheading, html=True)
self.assertContains(self.response, body, html=True)
self.assertContains(self.response, model_body, html=True)
def test_model_detail_title(self):
self.assertContains(self.response, "<h1>admin_docs.Person</h1>", html=True)
def test_app_not_found(self):
response = self.client.get(
reverse("django-admindocs-models-detail", args=["doesnotexist", "Person"])
)
self.assertEqual(response.context["exception"], "App 'doesnotexist' not found")
self.assertEqual(response.status_code, 404)
def test_model_not_found(self):
response = self.client.get(
reverse(
"django-admindocs-models-detail", args=["admin_docs", "doesnotexist"]
)
)
self.assertEqual(
response.context["exception"],
"Model 'doesnotexist' not found in app 'admin_docs'",
)
self.assertEqual(response.status_code, 404)
class CustomField(models.Field):
description = "A custom field type"
class DescriptionLackingField(models.Field):
pass
class TestFieldType(unittest.TestCase):
def test_field_name(self):
with self.assertRaises(AttributeError):
views.get_readable_field_data_type("NotAField")
def test_builtin_fields(self):
self.assertEqual(
views.get_readable_field_data_type(fields.BooleanField()),
"Boolean (Either True or False)",
)
def test_char_fields(self):
self.assertEqual(
views.get_readable_field_data_type(fields.CharField(max_length=255)),
"String (up to 255)",
)
self.assertEqual(
views.get_readable_field_data_type(fields.CharField()),
"String (unlimited)",
)
def test_custom_fields(self):
self.assertEqual(
views.get_readable_field_data_type(CustomField()), "A custom field type"
)
self.assertEqual(
views.get_readable_field_data_type(DescriptionLackingField()),
"Field of type: DescriptionLackingField",
)
class AdminDocViewFunctionsTests(SimpleTestCase):
def test_simplify_regex(self):
tests = (
# Named and unnamed groups.
(r"^(?P<a>\w+)/b/(?P<c>\w+)/$", "/<a>/b/<c>/"),
(r"^(?P<a>\w+)/b/(?P<c>\w+)$", "/<a>/b/<c>"),
(r"^(?P<a>\w+)/b/(?P<c>\w+)", "/<a>/b/<c>"),
(r"^(?P<a>\w+)/b/(\w+)$", "/<a>/b/<var>"),
(r"^(?P<a>\w+)/b/(\w+)", "/<a>/b/<var>"),
(r"^(?P<a>\w+)/b/((x|y)\w+)$", "/<a>/b/<var>"),
(r"^(?P<a>\w+)/b/((x|y)\w+)", "/<a>/b/<var>"),
(r"^(?P<a>(x|y))/b/(?P<c>\w+)$", "/<a>/b/<c>"),
(r"^(?P<a>(x|y))/b/(?P<c>\w+)", "/<a>/b/<c>"),
(r"^(?P<a>(x|y))/b/(?P<c>\w+)ab", "/<a>/b/<c>ab"),
(r"^(?P<a>(x|y)(\(|\)))/b/(?P<c>\w+)ab", "/<a>/b/<c>ab"),
# Non-capturing groups.
(r"^a(?:\w+)b", "/ab"),
(r"^a(?:(x|y))", "/a"),
(r"^(?:\w+(?:\w+))a", "/a"),
(r"^a(?:\w+)/b(?:\w+)", "/a/b"),
(r"(?P<a>\w+)/b/(?:\w+)c(?:\w+)", "/<a>/b/c"),
(r"(?P<a>\w+)/b/(\w+)/(?:\w+)c(?:\w+)", "/<a>/b/<var>/c"),
# Single and repeated metacharacters.
(r"^a", "/a"),
(r"^^a", "/a"),
(r"^^^a", "/a"),
(r"a$", "/a"),
(r"a$$", "/a"),
(r"a$$$", "/a"),
(r"a?", "/a"),
(r"a??", "/a"),
(r"a???", "/a"),
(r"a*", "/a"),
(r"a**", "/a"),
(r"a***", "/a"),
(r"a+", "/a"),
(r"a++", "/a"),
(r"a+++", "/a"),
(r"\Aa", "/a"),
(r"\A\Aa", "/a"),
(r"\A\A\Aa", "/a"),
(r"a\Z", "/a"),
(r"a\Z\Z", "/a"),
(r"a\Z\Z\Z", "/a"),
(r"\ba", "/a"),
(r"\b\ba", "/a"),
(r"\b\b\ba", "/a"),
(r"a\B", "/a"),
(r"a\B\B", "/a"),
(r"a\B\B\B", "/a"),
# Multiple mixed metacharacters.
(r"^a/?$", "/a/"),
(r"\Aa\Z", "/a"),
(r"\ba\B", "/a"),
# Escaped single metacharacters.
(r"\^a", r"/^a"),
(r"\\^a", r"/\\a"),
(r"\\\^a", r"/\\^a"),
(r"\\\\^a", r"/\\\\a"),
(r"\\\\\^a", r"/\\\\^a"),
(r"a\$", r"/a$"),
(r"a\\$", r"/a\\"),
(r"a\\\$", r"/a\\$"),
(r"a\\\\$", r"/a\\\\"),
(r"a\\\\\$", r"/a\\\\$"),
(r"a\?", r"/a?"),
(r"a\\?", r"/a\\"),
(r"a\\\?", r"/a\\?"),
(r"a\\\\?", r"/a\\\\"),
(r"a\\\\\?", r"/a\\\\?"),
(r"a\*", r"/a*"),
(r"a\\*", r"/a\\"),
(r"a\\\*", r"/a\\*"),
(r"a\\\\*", r"/a\\\\"),
(r"a\\\\\*", r"/a\\\\*"),
(r"a\+", r"/a+"),
(r"a\\+", r"/a\\"),
(r"a\\\+", r"/a\\+"),
(r"a\\\\+", r"/a\\\\"),
(r"a\\\\\+", r"/a\\\\+"),
(r"\\Aa", r"/\Aa"),
(r"\\\Aa", r"/\\a"),
(r"\\\\Aa", r"/\\\Aa"),
(r"\\\\\Aa", r"/\\\\a"),
(r"\\\\\\Aa", r"/\\\\\Aa"),
(r"a\\Z", r"/a\Z"),
(r"a\\\Z", r"/a\\"),
(r"a\\\\Z", r"/a\\\Z"),
(r"a\\\\\Z", r"/a\\\\"),
(r"a\\\\\\Z", r"/a\\\\\Z"),
# Escaped mixed metacharacters.
(r"^a\?$", r"/a?"),
(r"^a\\?$", r"/a\\"),
(r"^a\\\?$", r"/a\\?"),
(r"^a\\\\?$", r"/a\\\\"),
(r"^a\\\\\?$", r"/a\\\\?"),
# Adjacent escaped metacharacters.
(r"^a\?\$", r"/a?$"),
(r"^a\\?\\$", r"/a\\\\"),
(r"^a\\\?\\\$", r"/a\\?\\$"),
(r"^a\\\\?\\\\$", r"/a\\\\\\\\"),
(r"^a\\\\\?\\\\\$", r"/a\\\\?\\\\$"),
# Complex examples with metacharacters and (un)named groups.
(r"^\b(?P<slug>\w+)\B/(\w+)?", "/<slug>/<var>"),
(r"^\A(?P<slug>\w+)\Z", "/<slug>"),
)
for pattern, output in tests:
with self.subTest(pattern=pattern):
self.assertEqual(simplify_regex(pattern), output)
|
84c46ac54cf4e402f0f21a2ff5968dce1e0ce2c6abea7d205184cb769c62b682 | import copy
from io import BytesIO
from itertools import chain
from urllib.parse import urlencode
from django.core.exceptions import DisallowedHost
from django.core.handlers.wsgi import LimitedStream, WSGIRequest
from django.http import (
HttpHeaders,
HttpRequest,
RawPostDataException,
UnreadablePostError,
)
from django.http.multipartparser import MultiPartParserError
from django.http.request import split_domain_port
from django.test import RequestFactory, SimpleTestCase, override_settings
from django.test.client import BOUNDARY, MULTIPART_CONTENT, FakePayload
class RequestsTests(SimpleTestCase):
def test_httprequest(self):
request = HttpRequest()
self.assertEqual(list(request.GET), [])
self.assertEqual(list(request.POST), [])
self.assertEqual(list(request.COOKIES), [])
self.assertEqual(list(request.META), [])
# .GET and .POST should be QueryDicts
self.assertEqual(request.GET.urlencode(), "")
self.assertEqual(request.POST.urlencode(), "")
# and FILES should be MultiValueDict
self.assertEqual(request.FILES.getlist("foo"), [])
self.assertIsNone(request.content_type)
self.assertIsNone(request.content_params)
def test_httprequest_full_path(self):
request = HttpRequest()
request.path = "/;some/?awful/=path/foo:bar/"
request.path_info = "/prefix" + request.path
request.META["QUERY_STRING"] = ";some=query&+query=string"
expected = "/%3Bsome/%3Fawful/%3Dpath/foo:bar/?;some=query&+query=string"
self.assertEqual(request.get_full_path(), expected)
self.assertEqual(request.get_full_path_info(), "/prefix" + expected)
def test_httprequest_full_path_with_query_string_and_fragment(self):
request = HttpRequest()
request.path = "/foo#bar"
request.path_info = "/prefix" + request.path
request.META["QUERY_STRING"] = "baz#quux"
self.assertEqual(request.get_full_path(), "/foo%23bar?baz#quux")
self.assertEqual(request.get_full_path_info(), "/prefix/foo%23bar?baz#quux")
def test_httprequest_repr(self):
request = HttpRequest()
request.path = "/somepath/"
request.method = "GET"
request.GET = {"get-key": "get-value"}
request.POST = {"post-key": "post-value"}
request.COOKIES = {"post-key": "post-value"}
request.META = {"post-key": "post-value"}
self.assertEqual(repr(request), "<HttpRequest: GET '/somepath/'>")
def test_httprequest_repr_invalid_method_and_path(self):
request = HttpRequest()
self.assertEqual(repr(request), "<HttpRequest>")
request = HttpRequest()
request.method = "GET"
self.assertEqual(repr(request), "<HttpRequest>")
request = HttpRequest()
request.path = ""
self.assertEqual(repr(request), "<HttpRequest>")
def test_wsgirequest(self):
request = WSGIRequest(
{
"PATH_INFO": "bogus",
"REQUEST_METHOD": "bogus",
"CONTENT_TYPE": "text/html; charset=utf8",
"wsgi.input": BytesIO(b""),
}
)
self.assertEqual(list(request.GET), [])
self.assertEqual(list(request.POST), [])
self.assertEqual(list(request.COOKIES), [])
self.assertEqual(
set(request.META),
{
"PATH_INFO",
"REQUEST_METHOD",
"SCRIPT_NAME",
"CONTENT_TYPE",
"wsgi.input",
},
)
self.assertEqual(request.META["PATH_INFO"], "bogus")
self.assertEqual(request.META["REQUEST_METHOD"], "bogus")
self.assertEqual(request.META["SCRIPT_NAME"], "")
self.assertEqual(request.content_type, "text/html")
self.assertEqual(request.content_params, {"charset": "utf8"})
def test_wsgirequest_with_script_name(self):
"""
The request's path is correctly assembled, regardless of whether or
not the SCRIPT_NAME has a trailing slash (#20169).
"""
# With trailing slash
request = WSGIRequest(
{
"PATH_INFO": "/somepath/",
"SCRIPT_NAME": "/PREFIX/",
"REQUEST_METHOD": "get",
"wsgi.input": BytesIO(b""),
}
)
self.assertEqual(request.path, "/PREFIX/somepath/")
# Without trailing slash
request = WSGIRequest(
{
"PATH_INFO": "/somepath/",
"SCRIPT_NAME": "/PREFIX",
"REQUEST_METHOD": "get",
"wsgi.input": BytesIO(b""),
}
)
self.assertEqual(request.path, "/PREFIX/somepath/")
def test_wsgirequest_script_url_double_slashes(self):
"""
WSGI squashes multiple successive slashes in PATH_INFO, WSGIRequest
should take that into account when populating request.path and
request.META['SCRIPT_NAME'] (#17133).
"""
request = WSGIRequest(
{
"SCRIPT_URL": "/mst/milestones//accounts/login//help",
"PATH_INFO": "/milestones/accounts/login/help",
"REQUEST_METHOD": "get",
"wsgi.input": BytesIO(b""),
}
)
self.assertEqual(request.path, "/mst/milestones/accounts/login/help")
self.assertEqual(request.META["SCRIPT_NAME"], "/mst")
def test_wsgirequest_with_force_script_name(self):
"""
The FORCE_SCRIPT_NAME setting takes precedence over the request's
SCRIPT_NAME environment parameter (#20169).
"""
with override_settings(FORCE_SCRIPT_NAME="/FORCED_PREFIX/"):
request = WSGIRequest(
{
"PATH_INFO": "/somepath/",
"SCRIPT_NAME": "/PREFIX/",
"REQUEST_METHOD": "get",
"wsgi.input": BytesIO(b""),
}
)
self.assertEqual(request.path, "/FORCED_PREFIX/somepath/")
def test_wsgirequest_path_with_force_script_name_trailing_slash(self):
"""
The request's path is correctly assembled, regardless of whether or not
the FORCE_SCRIPT_NAME setting has a trailing slash (#20169).
"""
# With trailing slash
with override_settings(FORCE_SCRIPT_NAME="/FORCED_PREFIX/"):
request = WSGIRequest(
{
"PATH_INFO": "/somepath/",
"REQUEST_METHOD": "get",
"wsgi.input": BytesIO(b""),
}
)
self.assertEqual(request.path, "/FORCED_PREFIX/somepath/")
# Without trailing slash
with override_settings(FORCE_SCRIPT_NAME="/FORCED_PREFIX"):
request = WSGIRequest(
{
"PATH_INFO": "/somepath/",
"REQUEST_METHOD": "get",
"wsgi.input": BytesIO(b""),
}
)
self.assertEqual(request.path, "/FORCED_PREFIX/somepath/")
def test_wsgirequest_repr(self):
request = WSGIRequest({"REQUEST_METHOD": "get", "wsgi.input": BytesIO(b"")})
self.assertEqual(repr(request), "<WSGIRequest: GET '/'>")
request = WSGIRequest(
{
"PATH_INFO": "/somepath/",
"REQUEST_METHOD": "get",
"wsgi.input": BytesIO(b""),
}
)
request.GET = {"get-key": "get-value"}
request.POST = {"post-key": "post-value"}
request.COOKIES = {"post-key": "post-value"}
request.META = {"post-key": "post-value"}
self.assertEqual(repr(request), "<WSGIRequest: GET '/somepath/'>")
def test_wsgirequest_path_info(self):
def wsgi_str(path_info, encoding="utf-8"):
path_info = path_info.encode(
encoding
) # Actual URL sent by the browser (bytestring)
path_info = path_info.decode(
"iso-8859-1"
) # Value in the WSGI environ dict (native string)
return path_info
# Regression for #19468
request = WSGIRequest(
{
"PATH_INFO": wsgi_str("/سلام/"),
"REQUEST_METHOD": "get",
"wsgi.input": BytesIO(b""),
}
)
self.assertEqual(request.path, "/سلام/")
# The URL may be incorrectly encoded in a non-UTF-8 encoding (#26971)
request = WSGIRequest(
{
"PATH_INFO": wsgi_str("/café/", encoding="iso-8859-1"),
"REQUEST_METHOD": "get",
"wsgi.input": BytesIO(b""),
}
)
        # Since it's impossible to determine the (wrong) encoding of the URL,
        # it's left percent-encoded in the path.
self.assertEqual(request.path, "/caf%E9/")
def test_wsgirequest_copy(self):
request = WSGIRequest({"REQUEST_METHOD": "get", "wsgi.input": BytesIO(b"")})
request_copy = copy.copy(request)
self.assertIs(request_copy.environ, request.environ)
def test_limited_stream(self):
# Read all of a limited stream
stream = LimitedStream(BytesIO(b"test"), 2)
self.assertEqual(stream.read(), b"te")
# Reading again returns nothing.
self.assertEqual(stream.read(), b"")
# Read a number of characters greater than the stream has to offer
stream = LimitedStream(BytesIO(b"test"), 2)
self.assertEqual(stream.read(5), b"te")
# Reading again returns nothing.
self.assertEqual(stream.readline(5), b"")
# Read sequentially from a stream
stream = LimitedStream(BytesIO(b"12345678"), 8)
self.assertEqual(stream.read(5), b"12345")
self.assertEqual(stream.read(5), b"678")
# Reading again returns nothing.
self.assertEqual(stream.readline(5), b"")
# Read lines from a stream
stream = LimitedStream(BytesIO(b"1234\n5678\nabcd\nefgh\nijkl"), 24)
# Read a full line, unconditionally
self.assertEqual(stream.readline(), b"1234\n")
# Read a number of characters less than a line
self.assertEqual(stream.readline(2), b"56")
# Read the rest of the partial line
self.assertEqual(stream.readline(), b"78\n")
# Read a full line, with a character limit greater than the line length
self.assertEqual(stream.readline(6), b"abcd\n")
# Read the next line, deliberately terminated at the line end
self.assertEqual(stream.readline(4), b"efgh")
# Read the next line... just the line end
self.assertEqual(stream.readline(), b"\n")
# Read everything else.
self.assertEqual(stream.readline(), b"ijkl")
# Regression for #15018
# If a stream contains a newline, but the provided length
# is less than the number of provided characters, the newline
# doesn't reset the available character count
stream = LimitedStream(BytesIO(b"1234\nabcdef"), 9)
self.assertEqual(stream.readline(10), b"1234\n")
self.assertEqual(stream.readline(3), b"abc")
# Now expire the available characters
self.assertEqual(stream.readline(3), b"d")
# Reading again returns nothing.
self.assertEqual(stream.readline(2), b"")
# Same test, but with read, not readline.
stream = LimitedStream(BytesIO(b"1234\nabcdef"), 9)
self.assertEqual(stream.read(6), b"1234\na")
self.assertEqual(stream.read(2), b"bc")
self.assertEqual(stream.read(2), b"d")
self.assertEqual(stream.read(2), b"")
self.assertEqual(stream.read(), b"")
def test_stream_read(self):
payload = FakePayload("name=value")
request = WSGIRequest(
{
"REQUEST_METHOD": "POST",
"CONTENT_TYPE": "application/x-www-form-urlencoded",
"CONTENT_LENGTH": len(payload),
"wsgi.input": payload,
},
)
self.assertEqual(request.read(), b"name=value")
def test_stream_readline(self):
payload = FakePayload("name=value\nother=string")
request = WSGIRequest(
{
"REQUEST_METHOD": "POST",
"CONTENT_TYPE": "application/x-www-form-urlencoded",
"CONTENT_LENGTH": len(payload),
"wsgi.input": payload,
},
)
self.assertEqual(request.readline(), b"name=value\n")
self.assertEqual(request.readline(), b"other=string")
def test_read_after_value(self):
"""
Reading from request is allowed after accessing request contents as
POST or body.
"""
payload = FakePayload("name=value")
request = WSGIRequest(
{
"REQUEST_METHOD": "POST",
"CONTENT_TYPE": "application/x-www-form-urlencoded",
"CONTENT_LENGTH": len(payload),
"wsgi.input": payload,
}
)
self.assertEqual(request.POST, {"name": ["value"]})
self.assertEqual(request.body, b"name=value")
self.assertEqual(request.read(), b"name=value")
def test_value_after_read(self):
"""
Construction of POST or body is not allowed after reading
from request.
"""
payload = FakePayload("name=value")
request = WSGIRequest(
{
"REQUEST_METHOD": "POST",
"CONTENT_TYPE": "application/x-www-form-urlencoded",
"CONTENT_LENGTH": len(payload),
"wsgi.input": payload,
}
)
self.assertEqual(request.read(2), b"na")
with self.assertRaises(RawPostDataException):
request.body
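        # After a partial read, POST falls back to an empty QueryDict
        # instead of raising.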
self.assertEqual(request.POST, {})
def test_non_ascii_POST(self):
payload = FakePayload(urlencode({"key": "España"}))
request = WSGIRequest(
{
"REQUEST_METHOD": "POST",
"CONTENT_LENGTH": len(payload),
"CONTENT_TYPE": "application/x-www-form-urlencoded",
"wsgi.input": payload,
}
)
self.assertEqual(request.POST, {"key": ["España"]})
def test_alternate_charset_POST(self):
"""
Test a POST with non-utf-8 payload encoding.
"""
payload = FakePayload(urlencode({"key": "España".encode("latin-1")}))
request = WSGIRequest(
{
"REQUEST_METHOD": "POST",
"CONTENT_LENGTH": len(payload),
"CONTENT_TYPE": "application/x-www-form-urlencoded; charset=iso-8859-1",
"wsgi.input": payload,
}
)
self.assertEqual(request.POST, {"key": ["España"]})
def test_body_after_POST_multipart_form_data(self):
"""
Reading body after parsing multipart/form-data is not allowed
"""
        # Because multipart is used for large amounts of data, i.e. file uploads,
# we don't want the data held in memory twice, and we don't want to
# silence the error by setting body = '' either.
payload = FakePayload(
"\r\n".join(
[
"--boundary",
'Content-Disposition: form-data; name="name"',
"",
"value",
"--boundary--",
]
)
)
request = WSGIRequest(
{
"REQUEST_METHOD": "POST",
"CONTENT_TYPE": "multipart/form-data; boundary=boundary",
"CONTENT_LENGTH": len(payload),
"wsgi.input": payload,
}
)
self.assertEqual(request.POST, {"name": ["value"]})
with self.assertRaises(RawPostDataException):
request.body
def test_body_after_POST_multipart_related(self):
"""
Reading body after parsing multipart that isn't form-data is allowed
"""
# Ticket #9054
# There are cases in which the multipart data is related instead of
# being a binary upload, in which case it should still be accessible
# via body.
payload_data = b"\r\n".join(
[
b"--boundary",
b'Content-ID: id; name="name"',
b"",
b"value",
b"--boundary--",
]
)
payload = FakePayload(payload_data)
request = WSGIRequest(
{
"REQUEST_METHOD": "POST",
"CONTENT_TYPE": "multipart/related; boundary=boundary",
"CONTENT_LENGTH": len(payload),
"wsgi.input": payload,
}
)
self.assertEqual(request.POST, {})
self.assertEqual(request.body, payload_data)
def test_POST_multipart_with_content_length_zero(self):
"""
Multipart POST requests with Content-Length >= 0 are valid and need to
be handled.
"""
# According to RFC 9110 Section 8.6 every POST with Content-Length >= 0
# is a valid request, so ensure that we handle Content-Length == 0.
payload = FakePayload(
"\r\n".join(
[
"--boundary",
'Content-Disposition: form-data; name="name"',
"",
"value",
"--boundary--",
]
)
)
request = WSGIRequest(
{
"REQUEST_METHOD": "POST",
"CONTENT_TYPE": "multipart/form-data; boundary=boundary",
"CONTENT_LENGTH": 0,
"wsgi.input": payload,
}
)
self.assertEqual(request.POST, {})
def test_POST_binary_only(self):
payload = b"\r\n\x01\x00\x00\x00ab\x00\x00\xcd\xcc,@"
environ = {
"REQUEST_METHOD": "POST",
"CONTENT_TYPE": "application/octet-stream",
"CONTENT_LENGTH": len(payload),
"wsgi.input": BytesIO(payload),
}
request = WSGIRequest(environ)
self.assertEqual(request.POST, {})
self.assertEqual(request.FILES, {})
self.assertEqual(request.body, payload)
# Same test without specifying content-type
environ.update({"CONTENT_TYPE": "", "wsgi.input": BytesIO(payload)})
request = WSGIRequest(environ)
self.assertEqual(request.POST, {})
self.assertEqual(request.FILES, {})
self.assertEqual(request.body, payload)
def test_read_by_lines(self):
payload = FakePayload("name=value")
request = WSGIRequest(
{
"REQUEST_METHOD": "POST",
"CONTENT_TYPE": "application/x-www-form-urlencoded",
"CONTENT_LENGTH": len(payload),
"wsgi.input": payload,
}
)
self.assertEqual(list(request), [b"name=value"])
def test_POST_after_body_read(self):
"""
POST should be populated even if body is read first
"""
payload = FakePayload("name=value")
request = WSGIRequest(
{
"REQUEST_METHOD": "POST",
"CONTENT_TYPE": "application/x-www-form-urlencoded",
"CONTENT_LENGTH": len(payload),
"wsgi.input": payload,
}
)
request.body # evaluate
self.assertEqual(request.POST, {"name": ["value"]})
def test_POST_after_body_read_and_stream_read(self):
"""
POST should be populated even if body is read first, and then
the stream is read second.
"""
payload = FakePayload("name=value")
request = WSGIRequest(
{
"REQUEST_METHOD": "POST",
"CONTENT_TYPE": "application/x-www-form-urlencoded",
"CONTENT_LENGTH": len(payload),
"wsgi.input": payload,
}
)
request.body # evaluate
self.assertEqual(request.read(1), b"n")
self.assertEqual(request.POST, {"name": ["value"]})
def test_multipart_post_field_with_base64(self):
payload = FakePayload(
"\r\n".join(
[
f"--{BOUNDARY}",
'Content-Disposition: form-data; name="name"',
"Content-Transfer-Encoding: base64",
"",
"dmFsdWU=",
f"--{BOUNDARY}--",
"",
]
)
)
request = WSGIRequest(
{
"REQUEST_METHOD": "POST",
"CONTENT_TYPE": MULTIPART_CONTENT,
"CONTENT_LENGTH": len(payload),
"wsgi.input": payload,
}
)
request.body # evaluate
self.assertEqual(request.POST, {"name": ["value"]})
def test_multipart_post_field_with_invalid_base64(self):
payload = FakePayload(
"\r\n".join(
[
f"--{BOUNDARY}",
'Content-Disposition: form-data; name="name"',
"Content-Transfer-Encoding: base64",
"",
"123",
f"--{BOUNDARY}--",
"",
]
)
)
request = WSGIRequest(
{
"REQUEST_METHOD": "POST",
"CONTENT_TYPE": MULTIPART_CONTENT,
"CONTENT_LENGTH": len(payload),
"wsgi.input": payload,
}
)
request.body # evaluate
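        # Invalid base64 content is passed through undecoded rather than
        # raising an error.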
self.assertEqual(request.POST, {"name": ["123"]})
def test_POST_after_body_read_and_stream_read_multipart(self):
"""
POST should be populated even if body is read first, and then
the stream is read second. Using multipart/form-data instead of urlencoded.
"""
payload = FakePayload(
"\r\n".join(
[
"--boundary",
'Content-Disposition: form-data; name="name"',
"",
"value",
"--boundary--" "",
]
)
)
request = WSGIRequest(
{
"REQUEST_METHOD": "POST",
"CONTENT_TYPE": "multipart/form-data; boundary=boundary",
"CONTENT_LENGTH": len(payload),
"wsgi.input": payload,
}
)
request.body # evaluate
# Consume enough data to mess up the parsing:
self.assertEqual(request.read(13), b"--boundary\r\nC")
self.assertEqual(request.POST, {"name": ["value"]})
def test_POST_immutable_for_multipart(self):
"""
MultiPartParser.parse() leaves request.POST immutable.
"""
payload = FakePayload(
"\r\n".join(
[
"--boundary",
'Content-Disposition: form-data; name="name"',
"",
"value",
"--boundary--",
]
)
)
request = WSGIRequest(
{
"REQUEST_METHOD": "POST",
"CONTENT_TYPE": "multipart/form-data; boundary=boundary",
"CONTENT_LENGTH": len(payload),
"wsgi.input": payload,
}
)
self.assertFalse(request.POST._mutable)
def test_multipart_without_boundary(self):
request = WSGIRequest(
{
"REQUEST_METHOD": "POST",
"CONTENT_TYPE": "multipart/form-data;",
"CONTENT_LENGTH": 0,
"wsgi.input": FakePayload(),
}
)
with self.assertRaisesMessage(
MultiPartParserError, "Invalid boundary in multipart: None"
):
request.POST
def test_multipart_non_ascii_content_type(self):
request = WSGIRequest(
{
"REQUEST_METHOD": "POST",
"CONTENT_TYPE": "multipart/form-data; boundary = \xe0",
"CONTENT_LENGTH": 0,
"wsgi.input": FakePayload(),
}
)
msg = (
"Invalid non-ASCII Content-Type in multipart: multipart/form-data; "
"boundary = à"
)
with self.assertRaisesMessage(MultiPartParserError, msg):
request.POST
def test_POST_connection_error(self):
"""
If wsgi.input.read() raises an exception while trying to read() the
POST, the exception is identifiable (not a generic OSError).
"""
class ExplodingBytesIO(BytesIO):
def read(self, size=-1, /):
raise OSError("kaboom!")
payload = b"name=value"
request = WSGIRequest(
{
"REQUEST_METHOD": "POST",
"CONTENT_TYPE": "application/x-www-form-urlencoded",
"CONTENT_LENGTH": len(payload),
"wsgi.input": ExplodingBytesIO(payload),
}
)
with self.assertRaises(UnreadablePostError):
request.body
def test_set_encoding_clears_POST(self):
payload = FakePayload("name=Hello Günter")
request = WSGIRequest(
{
"REQUEST_METHOD": "POST",
"CONTENT_TYPE": "application/x-www-form-urlencoded",
"CONTENT_LENGTH": len(payload),
"wsgi.input": payload,
}
)
self.assertEqual(request.POST, {"name": ["Hello Günter"]})
request.encoding = "iso-8859-16"
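        # Reinterpreting the UTF-8 encoded payload as ISO-8859-16 yields
        # mojibake.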
self.assertEqual(request.POST, {"name": ["Hello GĂŒnter"]})
def test_set_encoding_clears_GET(self):
payload = FakePayload("")
request = WSGIRequest(
{
"REQUEST_METHOD": "GET",
"wsgi.input": payload,
"QUERY_STRING": "name=Hello%20G%C3%BCnter",
}
)
self.assertEqual(request.GET, {"name": ["Hello Günter"]})
request.encoding = "iso-8859-16"
self.assertEqual(request.GET, {"name": ["Hello G\u0102\u0152nter"]})
def test_FILES_connection_error(self):
"""
If wsgi.input.read() raises an exception while trying to read() the
FILES, the exception is identifiable (not a generic OSError).
"""
class ExplodingBytesIO(BytesIO):
def read(self, size=-1, /):
raise OSError("kaboom!")
payload = b"x"
request = WSGIRequest(
{
"REQUEST_METHOD": "POST",
"CONTENT_TYPE": "multipart/form-data; boundary=foo_",
"CONTENT_LENGTH": len(payload),
"wsgi.input": ExplodingBytesIO(payload),
}
)
with self.assertRaises(UnreadablePostError):
request.FILES
def test_copy(self):
request = HttpRequest()
request_copy = copy.copy(request)
self.assertIs(request_copy.resolver_match, request.resolver_match)
def test_deepcopy(self):
request = RequestFactory().get("/")
request.session = {}
request_copy = copy.deepcopy(request)
request.session["key"] = "value"
self.assertEqual(request_copy.session, {})
class HostValidationTests(SimpleTestCase):
poisoned_hosts = [
"[email protected]",
"example.com:[email protected]",
"example.com:[email protected]:80",
"example.com:80/badpath",
"example.com: recovermypassword.com",
]
@override_settings(
USE_X_FORWARDED_HOST=False,
ALLOWED_HOSTS=[
"forward.com",
"example.com",
"internal.com",
"12.34.56.78",
"[2001:19f0:feee::dead:beef:cafe]",
"xn--4ca9at.com",
".multitenant.com",
"INSENSITIVE.com",
"[::ffff:169.254.169.254]",
],
)
def test_http_get_host(self):
# Check if X_FORWARDED_HOST is provided.
request = HttpRequest()
request.META = {
"HTTP_X_FORWARDED_HOST": "forward.com",
"HTTP_HOST": "example.com",
"SERVER_NAME": "internal.com",
"SERVER_PORT": 80,
}
# X_FORWARDED_HOST is ignored.
self.assertEqual(request.get_host(), "example.com")
# Check if X_FORWARDED_HOST isn't provided.
request = HttpRequest()
request.META = {
"HTTP_HOST": "example.com",
"SERVER_NAME": "internal.com",
"SERVER_PORT": 80,
}
self.assertEqual(request.get_host(), "example.com")
# Check if HTTP_HOST isn't provided.
request = HttpRequest()
request.META = {
"SERVER_NAME": "internal.com",
"SERVER_PORT": 80,
}
self.assertEqual(request.get_host(), "internal.com")
# Check if HTTP_HOST isn't provided, and we're on a nonstandard port
request = HttpRequest()
request.META = {
"SERVER_NAME": "internal.com",
"SERVER_PORT": 8042,
}
self.assertEqual(request.get_host(), "internal.com:8042")
legit_hosts = [
"example.com",
"example.com:80",
"12.34.56.78",
"12.34.56.78:443",
"[2001:19f0:feee::dead:beef:cafe]",
"[2001:19f0:feee::dead:beef:cafe]:8080",
"xn--4ca9at.com", # Punycode for öäü.com
"anything.multitenant.com",
"multitenant.com",
"insensitive.com",
"example.com.",
"example.com.:80",
"[::ffff:169.254.169.254]",
]
for host in legit_hosts:
request = HttpRequest()
request.META = {
"HTTP_HOST": host,
}
request.get_host()
# Poisoned host headers are rejected as suspicious
for host in chain(self.poisoned_hosts, ["other.com", "example.com.."]):
with self.assertRaises(DisallowedHost):
request = HttpRequest()
request.META = {
"HTTP_HOST": host,
}
request.get_host()
@override_settings(USE_X_FORWARDED_HOST=True, ALLOWED_HOSTS=["*"])
def test_http_get_host_with_x_forwarded_host(self):
# Check if X_FORWARDED_HOST is provided.
request = HttpRequest()
request.META = {
"HTTP_X_FORWARDED_HOST": "forward.com",
"HTTP_HOST": "example.com",
"SERVER_NAME": "internal.com",
"SERVER_PORT": 80,
}
# X_FORWARDED_HOST is obeyed.
self.assertEqual(request.get_host(), "forward.com")
# Check if X_FORWARDED_HOST isn't provided.
request = HttpRequest()
request.META = {
"HTTP_HOST": "example.com",
"SERVER_NAME": "internal.com",
"SERVER_PORT": 80,
}
self.assertEqual(request.get_host(), "example.com")
# Check if HTTP_HOST isn't provided.
request = HttpRequest()
request.META = {
"SERVER_NAME": "internal.com",
"SERVER_PORT": 80,
}
self.assertEqual(request.get_host(), "internal.com")
# Check if HTTP_HOST isn't provided, and we're on a nonstandard port
request = HttpRequest()
request.META = {
"SERVER_NAME": "internal.com",
"SERVER_PORT": 8042,
}
self.assertEqual(request.get_host(), "internal.com:8042")
# Poisoned host headers are rejected as suspicious
legit_hosts = [
"example.com",
"example.com:80",
"12.34.56.78",
"12.34.56.78:443",
"[2001:19f0:feee::dead:beef:cafe]",
"[2001:19f0:feee::dead:beef:cafe]:8080",
"xn--4ca9at.com", # Punycode for öäü.com
]
for host in legit_hosts:
request = HttpRequest()
request.META = {
"HTTP_HOST": host,
}
request.get_host()
for host in self.poisoned_hosts:
with self.assertRaises(DisallowedHost):
request = HttpRequest()
request.META = {
"HTTP_HOST": host,
}
request.get_host()
@override_settings(USE_X_FORWARDED_PORT=False)
def test_get_port(self):
request = HttpRequest()
request.META = {
"SERVER_PORT": "8080",
"HTTP_X_FORWARDED_PORT": "80",
}
# Shouldn't use the X-Forwarded-Port header
self.assertEqual(request.get_port(), "8080")
request = HttpRequest()
request.META = {
"SERVER_PORT": "8080",
}
self.assertEqual(request.get_port(), "8080")
@override_settings(USE_X_FORWARDED_PORT=True)
def test_get_port_with_x_forwarded_port(self):
request = HttpRequest()
request.META = {
"SERVER_PORT": "8080",
"HTTP_X_FORWARDED_PORT": "80",
}
# Should use the X-Forwarded-Port header
self.assertEqual(request.get_port(), "80")
request = HttpRequest()
request.META = {
"SERVER_PORT": "8080",
}
self.assertEqual(request.get_port(), "8080")
@override_settings(DEBUG=True, ALLOWED_HOSTS=[])
def test_host_validation_in_debug_mode(self):
"""
If ALLOWED_HOSTS is empty and DEBUG is True, variants of localhost are
allowed.
"""
valid_hosts = ["localhost", "subdomain.localhost", "127.0.0.1", "[::1]"]
for host in valid_hosts:
request = HttpRequest()
request.META = {"HTTP_HOST": host}
self.assertEqual(request.get_host(), host)
# Other hostnames raise a DisallowedHost.
with self.assertRaises(DisallowedHost):
request = HttpRequest()
request.META = {"HTTP_HOST": "example.com"}
request.get_host()
@override_settings(ALLOWED_HOSTS=[])
def test_get_host_suggestion_of_allowed_host(self):
"""
get_host() makes helpful suggestions if a valid-looking host is not in
ALLOWED_HOSTS.
"""
msg_invalid_host = "Invalid HTTP_HOST header: %r."
msg_suggestion = msg_invalid_host + " You may need to add %r to ALLOWED_HOSTS."
msg_suggestion2 = (
msg_invalid_host
+ " The domain name provided is not valid according to RFC 1034/1035"
)
for host in [ # Valid-looking hosts
"example.com",
"12.34.56.78",
"[2001:19f0:feee::dead:beef:cafe]",
"xn--4ca9at.com", # Punycode for öäü.com
]:
request = HttpRequest()
request.META = {"HTTP_HOST": host}
with self.assertRaisesMessage(
DisallowedHost, msg_suggestion % (host, host)
):
request.get_host()
for domain, port in [ # Valid-looking hosts with a port number
("example.com", 80),
("12.34.56.78", 443),
("[2001:19f0:feee::dead:beef:cafe]", 8080),
]:
host = "%s:%s" % (domain, port)
request = HttpRequest()
request.META = {"HTTP_HOST": host}
with self.assertRaisesMessage(
DisallowedHost, msg_suggestion % (host, domain)
):
request.get_host()
for host in self.poisoned_hosts:
request = HttpRequest()
request.META = {"HTTP_HOST": host}
with self.assertRaisesMessage(DisallowedHost, msg_invalid_host % host):
request.get_host()
request = HttpRequest()
request.META = {"HTTP_HOST": "invalid_hostname.com"}
with self.assertRaisesMessage(
DisallowedHost, msg_suggestion2 % "invalid_hostname.com"
):
request.get_host()
def test_split_domain_port_removes_trailing_dot(self):
domain, port = split_domain_port("example.com.:8080")
self.assertEqual(domain, "example.com")
self.assertEqual(port, "8080")
class BuildAbsoluteURITests(SimpleTestCase):
factory = RequestFactory()
def test_absolute_url(self):
request = HttpRequest()
url = "https://www.example.com/asdf"
self.assertEqual(request.build_absolute_uri(location=url), url)
def test_host_retrieval(self):
request = HttpRequest()
request.get_host = lambda: "www.example.com"
request.path = ""
self.assertEqual(
request.build_absolute_uri(location="/path/with:colons"),
"http://www.example.com/path/with:colons",
)
def test_request_path_begins_with_two_slashes(self):
# //// creates a request with a path beginning with //
request = self.factory.get("////absolute-uri")
tests = (
# location isn't provided
(None, "http://testserver//absolute-uri"),
# An absolute URL
("http://example.com/?foo=bar", "http://example.com/?foo=bar"),
# A schema-relative URL
("//example.com/?foo=bar", "http://example.com/?foo=bar"),
# Relative URLs
("/foo/bar/", "http://testserver/foo/bar/"),
("/foo/./bar/", "http://testserver/foo/bar/"),
("/foo/../bar/", "http://testserver/bar/"),
("///foo/bar/", "http://testserver/foo/bar/"),
)
for location, expected_url in tests:
with self.subTest(location=location):
self.assertEqual(
request.build_absolute_uri(location=location), expected_url
)
class RequestHeadersTests(SimpleTestCase):
ENVIRON = {
# Non-headers are ignored.
"PATH_INFO": "/somepath/",
"REQUEST_METHOD": "get",
"wsgi.input": BytesIO(b""),
"SERVER_NAME": "internal.com",
"SERVER_PORT": 80,
# These non-HTTP prefixed headers are included.
"CONTENT_TYPE": "text/html",
"CONTENT_LENGTH": "100",
# All HTTP-prefixed headers are included.
"HTTP_ACCEPT": "*",
"HTTP_HOST": "example.com",
"HTTP_USER_AGENT": "python-requests/1.2.0",
}
def test_base_request_headers(self):
request = HttpRequest()
request.META = self.ENVIRON
self.assertEqual(
dict(request.headers),
{
"Content-Type": "text/html",
"Content-Length": "100",
"Accept": "*",
"Host": "example.com",
"User-Agent": "python-requests/1.2.0",
},
)
def test_wsgi_request_headers(self):
request = WSGIRequest(self.ENVIRON)
self.assertEqual(
dict(request.headers),
{
"Content-Type": "text/html",
"Content-Length": "100",
"Accept": "*",
"Host": "example.com",
"User-Agent": "python-requests/1.2.0",
},
)
def test_wsgi_request_headers_getitem(self):
request = WSGIRequest(self.ENVIRON)
self.assertEqual(request.headers["User-Agent"], "python-requests/1.2.0")
self.assertEqual(request.headers["user-agent"], "python-requests/1.2.0")
self.assertEqual(request.headers["user_agent"], "python-requests/1.2.0")
self.assertEqual(request.headers["Content-Type"], "text/html")
self.assertEqual(request.headers["Content-Length"], "100")
def test_wsgi_request_headers_get(self):
request = WSGIRequest(self.ENVIRON)
self.assertEqual(request.headers.get("User-Agent"), "python-requests/1.2.0")
self.assertEqual(request.headers.get("user-agent"), "python-requests/1.2.0")
self.assertEqual(request.headers.get("Content-Type"), "text/html")
self.assertEqual(request.headers.get("Content-Length"), "100")
class HttpHeadersTests(SimpleTestCase):
def test_basic(self):
environ = {
"CONTENT_TYPE": "text/html",
"CONTENT_LENGTH": "100",
"HTTP_HOST": "example.com",
}
headers = HttpHeaders(environ)
self.assertEqual(sorted(headers), ["Content-Length", "Content-Type", "Host"])
self.assertEqual(
headers,
{
"Content-Type": "text/html",
"Content-Length": "100",
"Host": "example.com",
},
)
def test_parse_header_name(self):
tests = (
("PATH_INFO", None),
("HTTP_ACCEPT", "Accept"),
("HTTP_USER_AGENT", "User-Agent"),
("HTTP_X_FORWARDED_PROTO", "X-Forwarded-Proto"),
("CONTENT_TYPE", "Content-Type"),
("CONTENT_LENGTH", "Content-Length"),
)
for header, expected in tests:
with self.subTest(header=header):
self.assertEqual(HttpHeaders.parse_header_name(header), expected)
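    # A minimal sketch, assuming (not quoting) Django's implementation, of the
    # mapping parse_header_name() performs in the cases exercised above: the
    # HTTP_ prefix is stripped, non-header keys other than CONTENT_TYPE and
    # CONTENT_LENGTH map to None, and the rest are title-cased with hyphens.
    @staticmethod
    def _parse_header_name_sketch(header):
        if header.startswith("HTTP_"):
            header = header.removeprefix("HTTP_")
        elif header not in ("CONTENT_TYPE", "CONTENT_LENGTH"):
            return None
        return "-".join(part.capitalize() for part in header.split("_"))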
|
8e026725505f15a1393dcf644201d2a8d8536d58efa77c2c1fb230ac13a3fa4b | import os
from datetime import datetime
from django.core.serializers.json import DjangoJSONEncoder
from django.test import SimpleTestCase
from django.utils.deprecation import RemovedInDjango60Warning
from django.utils.functional import lazystr
from django.utils.html import (
conditional_escape,
escape,
escapejs,
format_html,
html_safe,
json_script,
linebreaks,
smart_urlquote,
strip_spaces_between_tags,
strip_tags,
urlize,
)
from django.utils.safestring import mark_safe
class TestUtilsHtml(SimpleTestCase):
def check_output(self, function, value, output=None):
"""
function(value) equals output. If output is None, function(value)
equals value.
"""
if output is None:
output = value
self.assertEqual(function(value), output)
def test_escape(self):
        items = (
            ("&", "&amp;"),
            ("<", "&lt;"),
            (">", "&gt;"),
            ('"', "&quot;"),
            ("'", "&#x27;"),
        )
# Substitution patterns for testing the above items.
patterns = ("%s", "asdf%sfdsa", "%s1", "1%sb")
for value, output in items:
with self.subTest(value=value, output=output):
for pattern in patterns:
with self.subTest(value=value, output=output, pattern=pattern):
self.check_output(escape, pattern % value, pattern % output)
self.check_output(
escape, lazystr(pattern % value), pattern % output
)
# Check repeated values.
self.check_output(escape, value * 2, output * 2)
# Verify it doesn't double replace &.
self.check_output(escape, "<&", "<&")
def test_format_html(self):
self.assertEqual(
format_html(
"{} {} {third} {fourth}",
"< Dangerous >",
mark_safe("<b>safe</b>"),
third="< dangerous again",
fourth=mark_safe("<i>safe again</i>"),
),
"< Dangerous > <b>safe</b> < dangerous again <i>safe again</i>",
)
def test_format_html_no_params(self):
msg = "Calling format_html() without passing args or kwargs is deprecated."
# RemovedInDjango60Warning: when the deprecation ends, replace with:
# msg = "args or kwargs must be provided."
# with self.assertRaisesMessage(ValueError, msg):
with self.assertWarnsMessage(RemovedInDjango60Warning, msg):
name = "Adam"
self.assertEqual(format_html(f"<i>{name}</i>"), "<i>Adam</i>")
def test_linebreaks(self):
items = (
("para1\n\npara2\r\rpara3", "<p>para1</p>\n\n<p>para2</p>\n\n<p>para3</p>"),
(
"para1\nsub1\rsub2\n\npara2",
"<p>para1<br>sub1<br>sub2</p>\n\n<p>para2</p>",
),
(
"para1\r\n\r\npara2\rsub1\r\rpara4",
"<p>para1</p>\n\n<p>para2<br>sub1</p>\n\n<p>para4</p>",
),
("para1\tmore\n\npara2", "<p>para1\tmore</p>\n\n<p>para2</p>"),
)
for value, output in items:
with self.subTest(value=value, output=output):
self.check_output(linebreaks, value, output)
self.check_output(linebreaks, lazystr(value), output)
def test_strip_tags(self):
items = (
            (
                "<p>See: &#39;&eacute; is an apostrophe followed by e acute</p>",
                "See: &#39;&eacute; is an apostrophe followed by e acute",
            ),
            (
                "<p>See: &#x27;&#xE9; is an apostrophe followed by e acute</p>",
                "See: &#x27;&#xE9; is an apostrophe followed by e acute",
            ),
("<adf>a", "a"),
("</adf>a", "a"),
("<asdf><asdf>e", "e"),
("hi, <f x", "hi, <f x"),
("234<235, right?", "234<235, right?"),
("a4<a5 right?", "a4<a5 right?"),
("b7>b2!", "b7>b2!"),
("</fe", "</fe"),
("<x>b<y>", "b"),
("a<p onclick=\"alert('<test>')\">b</p>c", "abc"),
("a<p a >b</p>c", "abc"),
("d<a:b c:d>e</p>f", "def"),
('<strong>foo</strong><a href="http://example.com">bar</a>', "foobar"),
# caused infinite loop on Pythons not patched with
# https://bugs.python.org/issue20288
("&gotcha&#;<>", "&gotcha&#;<>"),
("<sc<!-- -->ript>test<<!-- -->/script>", "ript>test"),
("<script>alert()</script>&h", "alert()h"),
("><!" + ("&" * 16000) + "D", "><!" + ("&" * 16000) + "D"),
("X<<<<br>br>br>br>X", "XX"),
)
for value, output in items:
with self.subTest(value=value, output=output):
self.check_output(strip_tags, value, output)
self.check_output(strip_tags, lazystr(value), output)
def test_strip_tags_files(self):
# Test with more lengthy content (also catching performance regressions)
for filename in ("strip_tags1.html", "strip_tags2.txt"):
with self.subTest(filename=filename):
path = os.path.join(os.path.dirname(__file__), "files", filename)
with open(path) as fp:
content = fp.read()
start = datetime.now()
stripped = strip_tags(content)
elapsed = datetime.now() - start
self.assertEqual(elapsed.seconds, 0)
self.assertIn("Test string that has not been stripped.", stripped)
self.assertNotIn("<", stripped)
def test_strip_spaces_between_tags(self):
# Strings that should come out untouched.
items = (" <adf>", "<adf> ", " </adf> ", " <f> x</f>")
for value in items:
with self.subTest(value=value):
self.check_output(strip_spaces_between_tags, value)
self.check_output(strip_spaces_between_tags, lazystr(value))
# Strings that have spaces to strip.
items = (
("<d> </d>", "<d></d>"),
("<p>hello </p>\n<p> world</p>", "<p>hello </p><p> world</p>"),
("\n<p>\t</p>\n<p> </p>\n", "\n<p></p><p></p>\n"),
)
for value, output in items:
with self.subTest(value=value, output=output):
self.check_output(strip_spaces_between_tags, value, output)
self.check_output(strip_spaces_between_tags, lazystr(value), output)
def test_escapejs(self):
items = (
(
"\"double quotes\" and 'single quotes'",
"\\u0022double quotes\\u0022 and \\u0027single quotes\\u0027",
),
(r"\ : backslashes, too", "\\u005C : backslashes, too"),
(
"and lots of whitespace: \r\n\t\v\f\b",
"and lots of whitespace: \\u000D\\u000A\\u0009\\u000B\\u000C\\u0008",
),
(
r"<script>and this</script>",
"\\u003Cscript\\u003Eand this\\u003C/script\\u003E",
),
(
"paragraph separator:\u2029and line separator:\u2028",
"paragraph separator:\\u2029and line separator:\\u2028",
),
("`", "\\u0060"),
)
for value, output in items:
with self.subTest(value=value, output=output):
self.check_output(escapejs, value, output)
self.check_output(escapejs, lazystr(value), output)
def test_json_script(self):
tests = (
# "<", ">" and "&" are quoted inside JSON strings
(
(
"&<>",
'<script id="test_id" type="application/json">'
'"\\u0026\\u003C\\u003E"</script>',
)
),
# "<", ">" and "&" are quoted inside JSON objects
(
{"a": "<script>test&ing</script>"},
'<script id="test_id" type="application/json">'
'{"a": "\\u003Cscript\\u003Etest\\u0026ing\\u003C/script\\u003E"}'
"</script>",
),
# Lazy strings are quoted
(
lazystr("&<>"),
'<script id="test_id" type="application/json">"\\u0026\\u003C\\u003E"'
"</script>",
),
(
{"a": lazystr("<script>test&ing</script>")},
'<script id="test_id" type="application/json">'
'{"a": "\\u003Cscript\\u003Etest\\u0026ing\\u003C/script\\u003E"}'
"</script>",
),
)
for arg, expected in tests:
with self.subTest(arg=arg):
self.assertEqual(json_script(arg, "test_id"), expected)
def test_json_script_custom_encoder(self):
class CustomDjangoJSONEncoder(DjangoJSONEncoder):
def encode(self, o):
return '{"hello": "world"}'
self.assertHTMLEqual(
json_script({}, encoder=CustomDjangoJSONEncoder),
'<script type="application/json">{"hello": "world"}</script>',
)
def test_json_script_without_id(self):
self.assertHTMLEqual(
json_script({"key": "value"}),
'<script type="application/json">{"key": "value"}</script>',
)
def test_smart_urlquote(self):
items = (
("http://öäü.com/", "http://xn--4ca9at.com/"),
("http://öäü.com/öäü/", "http://xn--4ca9at.com/%C3%B6%C3%A4%C3%BC/"),
            # Everything unsafe is quoted, !*'();:@&=+$,/?#[]~ is considered
            # safe as per RFC 3986.
(
"http://example.com/path/öäü/",
"http://example.com/path/%C3%B6%C3%A4%C3%BC/",
),
("http://example.com/%C3%B6/ä/", "http://example.com/%C3%B6/%C3%A4/"),
("http://example.com/?x=1&y=2+3&z=", "http://example.com/?x=1&y=2+3&z="),
("http://example.com/?x=<>\"'", "http://example.com/?x=%3C%3E%22%27"),
(
"http://example.com/?q=http://example.com/?x=1%26q=django",
"http://example.com/?q=http%3A%2F%2Fexample.com%2F%3Fx%3D1%26q%3D"
"django",
),
(
"http://example.com/?q=http%3A%2F%2Fexample.com%2F%3Fx%3D1%26q%3D"
"django",
"http://example.com/?q=http%3A%2F%2Fexample.com%2F%3Fx%3D1%26q%3D"
"django",
),
("http://.www.f oo.bar/", "http://.www.f%20oo.bar/"),
)
# IDNs are properly quoted
for value, output in items:
with self.subTest(value=value, output=output):
self.assertEqual(smart_urlquote(value), output)
def test_conditional_escape(self):
s = "<h1>interop</h1>"
        self.assertEqual(conditional_escape(s), "&lt;h1&gt;interop&lt;/h1&gt;")
self.assertEqual(conditional_escape(mark_safe(s)), s)
self.assertEqual(conditional_escape(lazystr(mark_safe(s))), s)
def test_html_safe(self):
@html_safe
class HtmlClass:
def __str__(self):
return "<h1>I'm a html class!</h1>"
html_obj = HtmlClass()
self.assertTrue(hasattr(HtmlClass, "__html__"))
self.assertTrue(hasattr(html_obj, "__html__"))
self.assertEqual(str(html_obj), html_obj.__html__())
def test_html_safe_subclass(self):
class BaseClass:
def __html__(self):
# defines __html__ on its own
return "some html content"
def __str__(self):
return "some non html content"
@html_safe
class Subclass(BaseClass):
def __str__(self):
# overrides __str__ and is marked as html_safe
return "some html safe content"
subclass_obj = Subclass()
self.assertEqual(str(subclass_obj), subclass_obj.__html__())
def test_html_safe_defines_html_error(self):
msg = "can't apply @html_safe to HtmlClass because it defines __html__()."
with self.assertRaisesMessage(ValueError, msg):
@html_safe
class HtmlClass:
def __html__(self):
return "<h1>I'm a html class!</h1>"
def test_html_safe_doesnt_define_str(self):
msg = "can't apply @html_safe to HtmlClass because it doesn't define __str__()."
with self.assertRaisesMessage(ValueError, msg):
@html_safe
class HtmlClass:
pass
def test_urlize(self):
tests = (
(
"Search for google.com/?q=! and see.",
'Search for <a href="http://google.com/?q=">google.com/?q=</a>! and '
"see.",
),
(
"Search for google.com/?q=1<! and see.",
'Search for <a href="http://google.com/?q=1%3C">google.com/?q=1<'
"</a>! and see.",
),
(
lazystr("Search for google.com/?q=!"),
'Search for <a href="http://google.com/?q=">google.com/?q=</a>!',
),
("[email protected]", '<a href="mailto:[email protected]">[email protected]</a>'),
)
for value, output in tests:
with self.subTest(value=value):
self.assertEqual(urlize(value), output)
def test_urlize_unchanged_inputs(self):
tests = (
("a" + "@a" * 50000) + "a", # simple_email_re catastrophic test
("a" + "." * 1000000) + "a", # trailing_punctuation catastrophic test
"foo@",
"@foo.com",
"[email protected]",
"foo@localhost",
"foo@localhost.",
)
for value in tests:
with self.subTest(value=value):
self.assertEqual(urlize(value), value)
|
c5e49033396ced1fc9499db217073b7feaeeee197f11c650e7f8ce7bb1caf9db | from unittest import mock
from django.test import SimpleTestCase
from django.utils.functional import cached_property, classproperty, lazy
from django.utils.version import PY312
class FunctionalTests(SimpleTestCase):
def test_lazy(self):
t = lazy(lambda: tuple(range(3)), list, tuple)
for a, b in zip(t(), range(3)):
self.assertEqual(a, b)
def test_lazy_base_class(self):
"""lazy also finds base class methods in the proxy object"""
class Base:
def base_method(self):
pass
class Klazz(Base):
pass
t = lazy(lambda: Klazz(), Klazz)()
self.assertIn("base_method", dir(t))
def test_lazy_base_class_override(self):
"""lazy finds the correct (overridden) method implementation"""
class Base:
def method(self):
return "Base"
class Klazz(Base):
def method(self):
return "Klazz"
t = lazy(lambda: Klazz(), Base)()
self.assertEqual(t.method(), "Klazz")
def test_lazy_object_to_string(self):
class Klazz:
def __str__(self):
return "Î am ā Ǩlâzz."
def __bytes__(self):
return b"\xc3\x8e am \xc4\x81 binary \xc7\xa8l\xc3\xa2zz."
t = lazy(lambda: Klazz(), Klazz)()
self.assertEqual(str(t), "Î am ā Ǩlâzz.")
self.assertEqual(bytes(t), b"\xc3\x8e am \xc4\x81 binary \xc7\xa8l\xc3\xa2zz.")
def assertCachedPropertyWorks(self, attr, Class):
with self.subTest(attr=attr):
def get(source):
return getattr(source, attr)
obj = Class()
class SubClass(Class):
pass
subobj = SubClass()
# Docstring is preserved.
self.assertEqual(get(Class).__doc__, "Here is the docstring...")
self.assertEqual(get(SubClass).__doc__, "Here is the docstring...")
# It's cached.
self.assertEqual(get(obj), get(obj))
self.assertEqual(get(subobj), get(subobj))
# The correct value is returned.
self.assertEqual(get(obj)[0], 1)
self.assertEqual(get(subobj)[0], 1)
# State isn't shared between instances.
obj2 = Class()
subobj2 = SubClass()
self.assertNotEqual(get(obj), get(obj2))
self.assertNotEqual(get(subobj), get(subobj2))
# It behaves like a property when there's no instance.
self.assertIsInstance(get(Class), cached_property)
self.assertIsInstance(get(SubClass), cached_property)
# 'other_value' doesn't become a property.
self.assertTrue(callable(obj.other_value))
self.assertTrue(callable(subobj.other_value))
def test_cached_property(self):
"""cached_property caches its value and behaves like a property."""
class Class:
@cached_property
def value(self):
"""Here is the docstring..."""
return 1, object()
@cached_property
def __foo__(self):
"""Here is the docstring..."""
return 1, object()
def other_value(self):
"""Here is the docstring..."""
return 1, object()
other = cached_property(other_value)
attrs = ["value", "other", "__foo__"]
for attr in attrs:
self.assertCachedPropertyWorks(attr, Class)
def test_cached_property_auto_name(self):
"""
cached_property caches its value and behaves like a property
on mangled methods or when the name kwarg isn't set.
"""
class Class:
@cached_property
def __value(self):
"""Here is the docstring..."""
return 1, object()
def other_value(self):
"""Here is the docstring..."""
return 1, object()
other = cached_property(other_value)
attrs = ["_Class__value", "other"]
for attr in attrs:
self.assertCachedPropertyWorks(attr, Class)
def test_cached_property_reuse_different_names(self):
"""Disallow this case because the decorated function wouldn't be cached."""
type_msg = (
"Cannot assign the same cached_property to two different names ('a' and "
"'b')."
)
if PY312:
error_type = TypeError
msg = type_msg
else:
error_type = RuntimeError
msg = "Error calling __set_name__"
with self.assertRaisesMessage(error_type, msg) as ctx:
class ReusedCachedProperty:
@cached_property
def a(self):
pass
b = a
if not PY312:
self.assertEqual(str(ctx.exception.__context__), str(TypeError(type_msg)))
def test_cached_property_reuse_same_name(self):
"""
Reusing a cached_property on different classes under the same name is
allowed.
"""
counter = 0
@cached_property
def _cp(_self):
nonlocal counter
counter += 1
return counter
class A:
cp = _cp
class B:
cp = _cp
a = A()
b = B()
self.assertEqual(a.cp, 1)
self.assertEqual(b.cp, 2)
self.assertEqual(a.cp, 1)
def test_cached_property_set_name_not_called(self):
cp = cached_property(lambda s: None)
class Foo:
pass
Foo.cp = cp
msg = (
"Cannot use cached_property instance without calling __set_name__() on it."
)
with self.assertRaisesMessage(TypeError, msg):
Foo().cp
def test_lazy_add(self):
lazy_4 = lazy(lambda: 4, int)
lazy_5 = lazy(lambda: 5, int)
self.assertEqual(lazy_4() + lazy_5(), 9)
def test_lazy_equality(self):
"""
== and != work correctly for Promises.
"""
lazy_a = lazy(lambda: 4, int)
lazy_b = lazy(lambda: 4, int)
lazy_c = lazy(lambda: 5, int)
self.assertEqual(lazy_a(), lazy_b())
self.assertNotEqual(lazy_b(), lazy_c())
def test_lazy_repr_text(self):
original_object = "Lazy translation text"
lazy_obj = lazy(lambda: original_object, str)
self.assertEqual(repr(original_object), repr(lazy_obj()))
def test_lazy_repr_int(self):
original_object = 15
lazy_obj = lazy(lambda: original_object, int)
self.assertEqual(repr(original_object), repr(lazy_obj()))
def test_lazy_repr_bytes(self):
original_object = b"J\xc3\xbcst a str\xc3\xadng"
lazy_obj = lazy(lambda: original_object, bytes)
self.assertEqual(repr(original_object), repr(lazy_obj()))
def test_lazy_class_preparation_caching(self):
# lazy() should prepare the proxy class only once i.e. the first time
# it's used.
lazified = lazy(lambda: 0, int)
__proxy__ = lazified().__class__
with mock.patch.object(__proxy__, "__prepare_class__") as mocked:
lazified()
mocked.assert_not_called()
def test_lazy_bytes_and_str_result_classes(self):
lazy_obj = lazy(lambda: "test", str, bytes)
self.assertEqual(str(lazy_obj()), "test")
def test_lazy_str_cast_mixed_result_types(self):
lazy_value = lazy(lambda: [1], str, list)()
self.assertEqual(str(lazy_value), "[1]")
def test_lazy_str_cast_mixed_bytes_result_types(self):
lazy_value = lazy(lambda: [1], bytes, list)()
self.assertEqual(str(lazy_value), "[1]")
def test_classproperty_getter(self):
class Foo:
foo_attr = 123
def __init__(self):
self.foo_attr = 456
@classproperty
def foo(cls):
return cls.foo_attr
class Bar:
bar = classproperty()
@bar.getter
def bar(cls):
return 123
self.assertEqual(Foo.foo, 123)
self.assertEqual(Foo().foo, 123)
self.assertEqual(Bar.bar, 123)
self.assertEqual(Bar().bar, 123)
def test_classproperty_override_getter(self):
class Foo:
@classproperty
def foo(cls):
return 123
@foo.getter
def foo(cls):
return 456
self.assertEqual(Foo.foo, 456)
self.assertEqual(Foo().foo, 456)
|
4a29c65f4faf3143ebf7b12ccd3f0d5510299accd9ed00b2a155c016b4bba3ee | from xml.dom import minidom
from django.core import serializers
from django.core.serializers.xml_serializer import DTDForbidden
from django.test import TestCase, TransactionTestCase
from .tests import SerializersTestBase, SerializersTransactionTestBase
class XmlSerializerTestCase(SerializersTestBase, TestCase):
serializer_name = "xml"
pkless_str = """<?xml version="1.0" encoding="utf-8"?>
<django-objects version="1.0">
<object model="serializers.category">
<field type="CharField" name="name">Reference</field>
</object>
<object model="serializers.category">
<field type="CharField" name="name">Non-fiction</field>
</object>
</django-objects>"""
mapping_ordering_str = """<?xml version="1.0" encoding="utf-8"?>
<django-objects version="1.0">
<object model="serializers.article" pk="%(article_pk)s">
<field name="author" rel="ManyToOneRel" to="serializers.author">%(author_pk)s</field>
<field name="headline" type="CharField">Poker has no place on ESPN</field>
<field name="pub_date" type="DateTimeField">2006-06-16T11:00:00</field>
<field name="categories" rel="ManyToManyRel" to="serializers.category"><object pk="%(first_category_pk)s"></object><object pk="%(second_category_pk)s"></object></field>
<field name="meta_data" rel="ManyToManyRel" to="serializers.categorymetadata"></field>
<field name="topics" rel="ManyToManyRel" to="serializers.topic"></field>
</object>
</django-objects>""" # NOQA
@staticmethod
def _validate_output(serial_str):
try:
minidom.parseString(serial_str)
except Exception:
return False
else:
return True
@staticmethod
def _get_pk_values(serial_str):
ret_list = []
dom = minidom.parseString(serial_str)
fields = dom.getElementsByTagName("object")
for field in fields:
ret_list.append(field.getAttribute("pk"))
return ret_list
@staticmethod
def _get_field_values(serial_str, field_name):
ret_list = []
dom = minidom.parseString(serial_str)
fields = dom.getElementsByTagName("field")
for field in fields:
if field.getAttribute("name") == field_name:
temp = []
for child in field.childNodes:
temp.append(child.nodeValue)
ret_list.append("".join(temp))
return ret_list
def test_control_char_failure(self):
"""
Serializing control characters with XML should fail as those characters
are not supported in the XML 1.0 standard (except HT, LF, CR).
"""
self.a1.headline = "This contains \u0001 control \u0011 chars"
msg = "Article.headline (pk:%s) contains unserializable characters" % self.a1.pk
with self.assertRaisesMessage(ValueError, msg):
serializers.serialize(self.serializer_name, [self.a1])
self.a1.headline = "HT \u0009, LF \u000A, and CR \u000D are allowed"
self.assertIn(
"HT \t, LF \n, and CR \r are allowed",
serializers.serialize(self.serializer_name, [self.a1]),
)
def test_no_dtd(self):
"""
The XML deserializer shouldn't allow a DTD.
This is the most straightforward way to prevent all entity definitions
and avoid both external entities and entity-expansion attacks.
"""
xml = (
'<?xml version="1.0" standalone="no"?>'
'<!DOCTYPE example SYSTEM "http://example.com/example.dtd">'
)
with self.assertRaises(DTDForbidden):
next(serializers.deserialize("xml", xml))
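        # For illustration (not part of the original test): forbidding the DTD
        # also blocks entity-expansion payloads such as the classic "billion
        # laughs" document, whose DOCTYPE defines entities that expand
        # recursively, e.g. <!ENTITY lol2 "&lol;&lol;&lol;...">.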
class XmlSerializerTransactionTestCase(
SerializersTransactionTestBase, TransactionTestCase
):
serializer_name = "xml"
fwd_ref_str = """<?xml version="1.0" encoding="utf-8"?>
<django-objects version="1.0">
<object pk="1" model="serializers.article">
<field to="serializers.author" name="author" rel="ManyToOneRel">1</field>
<field type="CharField" name="headline">Forward references pose no problem</field>
<field type="DateTimeField" name="pub_date">2006-06-16T15:00:00</field>
<field to="serializers.category" name="categories" rel="ManyToManyRel">
<object pk="1"></object>
</field>
<field to="serializers.categorymetadata" name="meta_data" rel="ManyToManyRel"></field>
</object>
<object pk="1" model="serializers.author">
<field type="CharField" name="name">Agnes</field>
</object>
<object pk="1" model="serializers.category">
<field type="CharField" name="name">Reference</field></object>
</django-objects>""" # NOQA
|
b24078c6426f90372bdf3efe8b93738cd680e61900d51d601c224d1d23564e38 | import importlib
import unittest
from io import StringIO
from django.core import management, serializers
from django.core.serializers.base import DeserializationError
from django.test import SimpleTestCase, TestCase, TransactionTestCase
from .models import Author
from .tests import SerializersTestBase, SerializersTransactionTestBase
try:
import yaml
HAS_YAML = True
except ImportError:
HAS_YAML = False
YAML_IMPORT_ERROR_MESSAGE = r"No module named yaml"
class YamlImportModuleMock:
"""Provides a wrapped import_module function to simulate yaml ImportError
In order to run tests that verify the behavior of the YAML serializer
when run on a system that has yaml installed (like the django CI server),
mock import_module, so that it raises an ImportError when the yaml
serializer is being imported. The importlib.import_module() call is
being made in the serializers.register_serializer().
Refs: #12756
"""
def __init__(self):
self._import_module = importlib.import_module
def import_module(self, module_path):
if module_path == serializers.BUILTIN_SERIALIZERS["yaml"]:
raise ImportError(YAML_IMPORT_ERROR_MESSAGE)
return self._import_module(module_path)
class NoYamlSerializerTestCase(SimpleTestCase):
"""Not having pyyaml installed provides a misleading error
Refs: #12756
"""
@classmethod
def setUpClass(cls):
"""Removes imported yaml and stubs importlib.import_module"""
super().setUpClass()
cls._import_module_mock = YamlImportModuleMock()
importlib.import_module = cls._import_module_mock.import_module
# clear out cached serializers to emulate yaml missing
serializers._serializers = {}
@classmethod
def tearDownClass(cls):
"""Puts yaml back if necessary"""
super().tearDownClass()
importlib.import_module = cls._import_module_mock._import_module
# clear out cached serializers to clean out BadSerializer instances
serializers._serializers = {}
def test_serializer_pyyaml_error_message(self):
"""Using yaml serializer without pyyaml raises ImportError"""
jane = Author(name="Jane")
with self.assertRaises(ImportError):
serializers.serialize("yaml", [jane])
def test_deserializer_pyyaml_error_message(self):
"""Using yaml deserializer without pyyaml raises ImportError"""
with self.assertRaises(ImportError):
serializers.deserialize("yaml", "")
def test_dumpdata_pyyaml_error_message(self):
"""Calling dumpdata produces an error when yaml package missing"""
with self.assertRaisesMessage(
management.CommandError, YAML_IMPORT_ERROR_MESSAGE
):
management.call_command("dumpdata", format="yaml")
@unittest.skipUnless(HAS_YAML, "No yaml library detected")
class YamlSerializerTestCase(SerializersTestBase, TestCase):
serializer_name = "yaml"
pkless_str = """- model: serializers.category
pk: null
fields:
name: Reference
- model: serializers.category
fields:
name: Non-fiction"""
mapping_ordering_str = (
"""- model: serializers.article
pk: %(article_pk)s
fields:
author: %(author_pk)s
headline: Poker has no place on ESPN
pub_date: 2006-06-16 11:00:00
categories:"""
+ (
" [%(first_category_pk)s, %(second_category_pk)s]"
if HAS_YAML and yaml.__version__ < "5.1"
else "\n - %(first_category_pk)s\n - %(second_category_pk)s"
)
+ """
meta_data: []
topics: []
"""
)
@staticmethod
def _validate_output(serial_str):
try:
yaml.safe_load(StringIO(serial_str))
except Exception:
return False
else:
return True
@staticmethod
def _get_pk_values(serial_str):
ret_list = []
stream = StringIO(serial_str)
for obj_dict in yaml.safe_load(stream):
ret_list.append(obj_dict["pk"])
return ret_list
@staticmethod
def _get_field_values(serial_str, field_name):
ret_list = []
stream = StringIO(serial_str)
for obj_dict in yaml.safe_load(stream):
if "fields" in obj_dict and field_name in obj_dict["fields"]:
field_value = obj_dict["fields"][field_name]
                    # yaml.safe_load will return non-string objects for some
                    # of the fields we are interested in; this ensures that
                    # everything comes back as a string.
if isinstance(field_value, str):
ret_list.append(field_value)
else:
ret_list.append(str(field_value))
return ret_list
def test_yaml_deserializer_exception(self):
with self.assertRaises(DeserializationError):
for obj in serializers.deserialize("yaml", "{"):
pass
@unittest.skipUnless(HAS_YAML, "No yaml library detected")
class YamlSerializerTransactionTestCase(
SerializersTransactionTestBase, TransactionTestCase
):
serializer_name = "yaml"
fwd_ref_str = """- model: serializers.article
pk: 1
fields:
headline: Forward references pose no problem
pub_date: 2006-06-16 15:00:00
categories: [1]
author: 1
- model: serializers.category
pk: 1
fields:
name: Reference
- model: serializers.author
pk: 1
fields:
name: Agnes"""
|
ee47a60465e1836570b96ed96c7798dc9838b4165b7b121e4c18dd89774dd524 | from datetime import datetime
from functools import partialmethod
from io import StringIO
from unittest import mock, skipIf
from django.core import serializers
from django.core.serializers import SerializerDoesNotExist
from django.core.serializers.base import ProgressBar
from django.db import connection, transaction
from django.http import HttpResponse
from django.test import SimpleTestCase, override_settings, skipUnlessDBFeature
from django.test.utils import Approximate
from .models import (
Actor,
Article,
Author,
AuthorProfile,
BaseModel,
Category,
Child,
ComplexModel,
Movie,
Player,
ProxyBaseModel,
ProxyProxyBaseModel,
Score,
Team,
)
@override_settings(
SERIALIZATION_MODULES={
"json2": "django.core.serializers.json",
}
)
class SerializerRegistrationTests(SimpleTestCase):
def setUp(self):
self.old_serializers = serializers._serializers
serializers._serializers = {}
def tearDown(self):
serializers._serializers = self.old_serializers
def test_register(self):
"Registering a new serializer populates the full registry. Refs #14823"
serializers.register_serializer("json3", "django.core.serializers.json")
public_formats = serializers.get_public_serializer_formats()
self.assertIn("json3", public_formats)
self.assertIn("json2", public_formats)
self.assertIn("xml", public_formats)
def test_unregister(self):
"""
Unregistering a serializer doesn't cause the registry to be
repopulated.
"""
serializers.unregister_serializer("xml")
serializers.register_serializer("json3", "django.core.serializers.json")
public_formats = serializers.get_public_serializer_formats()
self.assertNotIn("xml", public_formats)
self.assertIn("json3", public_formats)
def test_unregister_unknown_serializer(self):
with self.assertRaises(SerializerDoesNotExist):
serializers.unregister_serializer("nonsense")
def test_builtin_serializers(self):
"Requesting a list of serializer formats populates the registry"
all_formats = set(serializers.get_serializer_formats())
public_formats = set(serializers.get_public_serializer_formats())
self.assertIn("xml", all_formats),
self.assertIn("xml", public_formats)
self.assertIn("json2", all_formats)
self.assertIn("json2", public_formats)
self.assertIn("python", all_formats)
self.assertNotIn("python", public_formats)
def test_get_unknown_serializer(self):
"""
#15889: get_serializer('nonsense') raises a SerializerDoesNotExist
"""
with self.assertRaises(SerializerDoesNotExist):
serializers.get_serializer("nonsense")
with self.assertRaises(KeyError):
serializers.get_serializer("nonsense")
# SerializerDoesNotExist is instantiated with the nonexistent format
with self.assertRaisesMessage(SerializerDoesNotExist, "nonsense"):
serializers.get_serializer("nonsense")
def test_get_unknown_deserializer(self):
with self.assertRaises(SerializerDoesNotExist):
serializers.get_deserializer("nonsense")
class SerializersTestBase:
serializer_name = None # Set by subclasses to the serialization format name
@classmethod
def setUpTestData(cls):
sports = Category.objects.create(name="Sports")
music = Category.objects.create(name="Music")
op_ed = Category.objects.create(name="Op-Ed")
cls.joe = Author.objects.create(name="Joe")
cls.jane = Author.objects.create(name="Jane")
cls.a1 = Article(
author=cls.jane,
headline="Poker has no place on ESPN",
pub_date=datetime(2006, 6, 16, 11, 00),
)
cls.a1.save()
cls.a1.categories.set([sports, op_ed])
cls.a2 = Article(
author=cls.joe,
headline="Time to reform copyright",
pub_date=datetime(2006, 6, 16, 13, 00, 11, 345),
)
cls.a2.save()
cls.a2.categories.set([music, op_ed])
def test_serialize(self):
"""Basic serialization works."""
serial_str = serializers.serialize(self.serializer_name, Article.objects.all())
self.assertTrue(self._validate_output(serial_str))
def test_serializer_roundtrip(self):
"""Serialized content can be deserialized."""
serial_str = serializers.serialize(self.serializer_name, Article.objects.all())
models = list(serializers.deserialize(self.serializer_name, serial_str))
self.assertEqual(len(models), 2)
def test_serialize_to_stream(self):
obj = ComplexModel(field1="first", field2="second", field3="third")
obj.save_base(raw=True)
# Serialize the test database to a stream
for stream in (StringIO(), HttpResponse()):
serializers.serialize(self.serializer_name, [obj], indent=2, stream=stream)
# Serialize normally for a comparison
string_data = serializers.serialize(self.serializer_name, [obj], indent=2)
# The two are the same
if isinstance(stream, StringIO):
self.assertEqual(string_data, stream.getvalue())
else:
self.assertEqual(string_data, stream.content.decode())
def test_serialize_specific_fields(self):
obj = ComplexModel(field1="first", field2="second", field3="third")
obj.save_base(raw=True)
# Serialize then deserialize the test database
serialized_data = serializers.serialize(
self.serializer_name, [obj], indent=2, fields=("field1", "field3")
)
result = next(serializers.deserialize(self.serializer_name, serialized_data))
# The deserialized object contains data in only the serialized fields.
self.assertEqual(result.object.field1, "first")
self.assertEqual(result.object.field2, "")
self.assertEqual(result.object.field3, "third")
def test_altering_serialized_output(self):
"""
The ability to create new objects by modifying serialized content.
"""
old_headline = "Poker has no place on ESPN"
new_headline = "Poker has no place on television"
serial_str = serializers.serialize(self.serializer_name, Article.objects.all())
serial_str = serial_str.replace(old_headline, new_headline)
models = list(serializers.deserialize(self.serializer_name, serial_str))
# Prior to saving, old headline is in place
self.assertTrue(Article.objects.filter(headline=old_headline))
self.assertFalse(Article.objects.filter(headline=new_headline))
for model in models:
model.save()
# After saving, new headline is in place
self.assertTrue(Article.objects.filter(headline=new_headline))
self.assertFalse(Article.objects.filter(headline=old_headline))
def test_one_to_one_as_pk(self):
"""
If you use your own primary key field (such as a OneToOneField), it
doesn't appear in the serialized field list - it replaces the pk
identifier.
"""
AuthorProfile.objects.create(
author=self.joe, date_of_birth=datetime(1970, 1, 1)
)
serial_str = serializers.serialize(
self.serializer_name, AuthorProfile.objects.all()
)
self.assertFalse(self._get_field_values(serial_str, "author"))
for obj in serializers.deserialize(self.serializer_name, serial_str):
self.assertEqual(obj.object.pk, self.joe.pk)
def test_serialize_field_subset(self):
"""Output can be restricted to a subset of fields"""
valid_fields = ("headline", "pub_date")
invalid_fields = ("author", "categories")
serial_str = serializers.serialize(
self.serializer_name, Article.objects.all(), fields=valid_fields
)
for field_name in invalid_fields:
self.assertFalse(self._get_field_values(serial_str, field_name))
for field_name in valid_fields:
self.assertTrue(self._get_field_values(serial_str, field_name))
def test_serialize_unicode_roundtrip(self):
"""Unicode makes the roundtrip intact"""
actor_name = "Za\u017c\u00f3\u0142\u0107"
movie_title = "G\u0119\u015bl\u0105 ja\u017a\u0144"
ac = Actor(name=actor_name)
mv = Movie(title=movie_title, actor=ac)
ac.save()
mv.save()
serial_str = serializers.serialize(self.serializer_name, [mv])
self.assertEqual(self._get_field_values(serial_str, "title")[0], movie_title)
self.assertEqual(self._get_field_values(serial_str, "actor")[0], actor_name)
obj_list = list(serializers.deserialize(self.serializer_name, serial_str))
mv_obj = obj_list[0].object
self.assertEqual(mv_obj.title, movie_title)
def test_unicode_serialization(self):
unicode_name = "יוניקוד"
data = serializers.serialize(self.serializer_name, [Author(name=unicode_name)])
self.assertIn(unicode_name, data)
objs = list(serializers.deserialize(self.serializer_name, data))
self.assertEqual(objs[0].object.name, unicode_name)
def test_serialize_progressbar(self):
fake_stdout = StringIO()
serializers.serialize(
self.serializer_name,
Article.objects.all(),
progress_output=fake_stdout,
object_count=Article.objects.count(),
)
self.assertTrue(
fake_stdout.getvalue().endswith(
"[" + "." * ProgressBar.progress_width + "]\n"
)
)
def test_serialize_superfluous_queries(self):
"""Ensure no superfluous queries are made when serializing ForeignKeys
#17602
"""
ac = Actor(name="Actor name")
ac.save()
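        # Assigning actor_id directly (rather than an Actor instance) leaves
        # the FK populated without caching the related object, so serializing
        # below must not trigger a query to fetch it.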
mv = Movie(title="Movie title", actor_id=ac.pk)
mv.save()
with self.assertNumQueries(0):
serializers.serialize(self.serializer_name, [mv])
def test_serialize_prefetch_related_m2m(self):
# One query for the Article table and one for each prefetched m2m
# field.
with self.assertNumQueries(4):
serializers.serialize(
self.serializer_name,
Article.objects.prefetch_related("categories", "meta_data", "topics"),
)
# One query for the Article table, and three m2m queries for each
# article.
with self.assertNumQueries(7):
serializers.serialize(self.serializer_name, Article.objects.all())
def test_serialize_with_null_pk(self):
"""
Serialized data with no primary key results
in a model instance with no id
"""
category = Category(name="Reference")
serial_str = serializers.serialize(self.serializer_name, [category])
pk_value = self._get_pk_values(serial_str)[0]
self.assertFalse(pk_value)
cat_obj = list(serializers.deserialize(self.serializer_name, serial_str))[
0
].object
self.assertIsNone(cat_obj.id)
def test_float_serialization(self):
"""Float values serialize and deserialize intact"""
sc = Score(score=3.4)
sc.save()
serial_str = serializers.serialize(self.serializer_name, [sc])
deserial_objs = list(serializers.deserialize(self.serializer_name, serial_str))
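        # Floats can lose precision in transit, so compare approximately.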
self.assertEqual(deserial_objs[0].object.score, Approximate(3.4, places=1))
def test_deferred_field_serialization(self):
author = Author.objects.create(name="Victor Hugo")
author = Author.objects.defer("name").get(pk=author.pk)
serial_str = serializers.serialize(self.serializer_name, [author])
deserial_objs = list(serializers.deserialize(self.serializer_name, serial_str))
self.assertIsInstance(deserial_objs[0].object, Author)
def test_custom_field_serialization(self):
"""Custom fields serialize and deserialize intact"""
team_str = "Spartak Moskva"
player = Player()
player.name = "Soslan Djanaev"
player.rank = 1
player.team = Team(team_str)
player.save()
serial_str = serializers.serialize(self.serializer_name, Player.objects.all())
team = self._get_field_values(serial_str, "team")
self.assertTrue(team)
self.assertEqual(team[0], team_str)
deserial_objs = list(serializers.deserialize(self.serializer_name, serial_str))
self.assertEqual(
deserial_objs[0].object.team.to_string(), player.team.to_string()
)
def test_pre_1000ad_date(self):
"""Year values before 1000AD are properly formatted"""
# Regression for #12524 -- dates before 1000AD get prefixed
# 0's on the year
a = Article.objects.create(
author=self.jane,
headline="Nobody remembers the early years",
pub_date=datetime(1, 2, 3, 4, 5, 6),
)
serial_str = serializers.serialize(self.serializer_name, [a])
date_values = self._get_field_values(serial_str, "pub_date")
self.assertEqual(date_values[0].replace("T", " "), "0001-02-03 04:05:06")
def test_pkless_serialized_strings(self):
"""
Serialized strings without PKs can be turned into models
"""
deserial_objs = list(
serializers.deserialize(self.serializer_name, self.pkless_str)
)
for obj in deserial_objs:
self.assertFalse(obj.object.id)
obj.save()
self.assertEqual(Category.objects.count(), 5)
def test_deterministic_mapping_ordering(self):
"""Mapping such as fields should be deterministically ordered. (#24558)"""
output = serializers.serialize(self.serializer_name, [self.a1], indent=2)
categories = self.a1.categories.values_list("pk", flat=True)
self.assertEqual(
output,
self.mapping_ordering_str
% {
"article_pk": self.a1.pk,
"author_pk": self.a1.author_id,
"first_category_pk": categories[0],
"second_category_pk": categories[1],
},
)
def test_deserialize_force_insert(self):
"""Deserialized content can be saved with force_insert as a parameter."""
serial_str = serializers.serialize(self.serializer_name, [self.a1])
deserial_obj = list(serializers.deserialize(self.serializer_name, serial_str))[
0
]
with mock.patch("django.db.models.Model") as mock_model:
deserial_obj.save(force_insert=False)
mock_model.save_base.assert_called_with(
deserial_obj.object, raw=True, using=None, force_insert=False
)
@skipUnlessDBFeature("can_defer_constraint_checks")
def test_serialize_proxy_model(self):
BaseModel.objects.create(parent_data=1)
base_objects = BaseModel.objects.all()
proxy_objects = ProxyBaseModel.objects.all()
proxy_proxy_objects = ProxyProxyBaseModel.objects.all()
base_data = serializers.serialize("json", base_objects)
proxy_data = serializers.serialize("json", proxy_objects)
proxy_proxy_data = serializers.serialize("json", proxy_proxy_objects)
self.assertEqual(base_data, proxy_data.replace("proxy", ""))
self.assertEqual(base_data, proxy_proxy_data.replace("proxy", ""))
def test_serialize_inherited_fields(self):
child_1 = Child.objects.create(parent_data="a", child_data="b")
child_2 = Child.objects.create(parent_data="c", child_data="d")
child_1.parent_m2m.add(child_2)
child_data = serializers.serialize(self.serializer_name, [child_1, child_2])
self.assertEqual(self._get_field_values(child_data, "parent_m2m"), [])
self.assertEqual(self._get_field_values(child_data, "parent_data"), [])
def test_serialize_only_pk(self):
with self.assertNumQueries(7) as ctx:
serializers.serialize(
self.serializer_name,
Article.objects.all(),
use_natural_foreign_keys=False,
)
categories_sql = ctx[1]["sql"]
self.assertNotIn(connection.ops.quote_name("meta_data_id"), categories_sql)
meta_data_sql = ctx[2]["sql"]
self.assertNotIn(connection.ops.quote_name("kind"), meta_data_sql)
topics_data_sql = ctx[3]["sql"]
self.assertNotIn(connection.ops.quote_name("category_id"), topics_data_sql)
def test_serialize_no_only_pk_with_natural_keys(self):
with self.assertNumQueries(7) as ctx:
serializers.serialize(
self.serializer_name,
Article.objects.all(),
use_natural_foreign_keys=True,
)
categories_sql = ctx[1]["sql"]
self.assertNotIn(connection.ops.quote_name("meta_data_id"), categories_sql)
# CategoryMetaData has natural_key().
meta_data_sql = ctx[2]["sql"]
self.assertIn(connection.ops.quote_name("kind"), meta_data_sql)
topics_data_sql = ctx[3]["sql"]
self.assertNotIn(connection.ops.quote_name("category_id"), topics_data_sql)
class SerializerAPITests(SimpleTestCase):
def test_stream_class(self):
class File:
def __init__(self):
self.lines = []
def write(self, line):
self.lines.append(line)
def getvalue(self):
return "".join(self.lines)
class Serializer(serializers.json.Serializer):
stream_class = File
serializer = Serializer()
data = serializer.serialize([Score(id=1, score=3.4)])
self.assertIs(serializer.stream_class, File)
self.assertIsInstance(serializer.stream, File)
self.assertEqual(
data, '[{"model": "serializers.score", "pk": 1, "fields": {"score": 3.4}}]'
)
class SerializersTransactionTestBase:
available_apps = ["serializers"]
@skipUnlessDBFeature("supports_forward_references")
def test_forward_refs(self):
"""
        Object ids can be referenced before they are
        defined in the serialization data.
"""
# The deserialization process needs to run in a transaction in order
# to test forward reference handling.
with transaction.atomic():
objs = serializers.deserialize(self.serializer_name, self.fwd_ref_str)
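            # Constraint checks are disabled so each object can be saved even
            # though it references rows that only appear later in the stream.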
with connection.constraint_checks_disabled():
for obj in objs:
obj.save()
for model_cls in (Category, Author, Article):
self.assertEqual(model_cls.objects.count(), 1)
art_obj = Article.objects.all()[0]
self.assertEqual(art_obj.categories.count(), 1)
self.assertEqual(art_obj.author.name, "Agnes")
def register_tests(test_class, method_name, test_func, exclude=()):
"""
Dynamically create serializer tests to ensure that all registered
serializers are automatically tested.
"""
for format_ in serializers.get_serializer_formats():
if format_ == "geojson" or format_ in exclude:
continue
decorated_func = skipIf(
isinstance(serializers.get_serializer(format_), serializers.BadSerializer),
"The Python library for the %s serializer is not installed." % format_,
)(test_func)
setattr(
test_class, method_name % format_, partialmethod(decorated_func, format_)
)
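# A hypothetical usage sketch (not part of the original module): callers can
# stamp one test method per registered format onto a test case class, e.g.
#
#   def serializer_test(self, format_):
#       serializers.serialize(format_, [])
#
#   register_tests(SomeTestCase, "test_%s_serializer", serializer_test)
#
# which creates test_json_serializer, test_xml_serializer, and so on for every
# registered format except "geojson" and anything listed in ``exclude``.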
|
f7c86590882a8ed84f65d81e71fbe9807a2a2d5451fb05caf8833ef6180ae13d | import decimal
import json
import re
from django.core import serializers
from django.core.serializers.base import DeserializationError
from django.db import models
from django.test import TestCase, TransactionTestCase
from django.test.utils import isolate_apps
from .models import Score
from .tests import SerializersTestBase, SerializersTransactionTestBase
class JsonlSerializerTestCase(SerializersTestBase, TestCase):
serializer_name = "jsonl"
pkless_str = [
'{"pk": null,"model": "serializers.category","fields": {"name": "Reference"}}',
'{"model": "serializers.category","fields": {"name": "Non-fiction"}}',
]
pkless_str = "\n".join([s.replace("\n", "") for s in pkless_str])
mapping_ordering_str = (
'{"model": "serializers.article","pk": %(article_pk)s,'
'"fields": {'
'"author": %(author_pk)s,'
'"headline": "Poker has no place on ESPN",'
'"pub_date": "2006-06-16T11:00:00",'
'"categories": [%(first_category_pk)s,%(second_category_pk)s],'
'"meta_data": [],'
'"topics": []}}\n'
)
@staticmethod
def _validate_output(serial_str):
try:
for line in serial_str.split("\n"):
if line:
json.loads(line)
except Exception:
return False
else:
return True
@staticmethod
def _get_pk_values(serial_str):
serial_list = [json.loads(line) for line in serial_str.split("\n") if line]
return [obj_dict["pk"] for obj_dict in serial_list]
@staticmethod
def _get_field_values(serial_str, field_name):
serial_list = [json.loads(line) for line in serial_str.split("\n") if line]
return [
obj_dict["fields"][field_name]
for obj_dict in serial_list
if field_name in obj_dict["fields"]
]
def test_no_indentation(self):
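        # JSONL output must keep one object per line, so the indent option
        # shouldn't produce multi-line (comma-terminated) output.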
s = serializers.jsonl.Serializer()
json_data = s.serialize([Score(score=5.0), Score(score=6.0)], indent=2)
for line in json_data.splitlines():
self.assertIsNone(re.search(r".+,\s*$", line))
@isolate_apps("serializers")
def test_custom_encoder(self):
class ScoreDecimal(models.Model):
score = models.DecimalField()
class CustomJSONEncoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, decimal.Decimal):
return str(o)
return super().default(o)
s = serializers.jsonl.Serializer()
json_data = s.serialize(
[ScoreDecimal(score=decimal.Decimal(1.0))],
cls=CustomJSONEncoder,
)
self.assertIn('"fields": {"score": "1"}', json_data)
def test_json_deserializer_exception(self):
with self.assertRaises(DeserializationError):
for obj in serializers.deserialize("jsonl", """[{"pk":1}"""):
pass
def test_helpful_error_message_invalid_pk(self):
"""
If there is an invalid primary key, the error message contains the
model associated with it.
"""
test_string = (
'{"pk": "badpk","model": "serializers.player",'
'"fields": {"name": "Bob","rank": 1,"team": "Team"}}'
)
with self.assertRaisesMessage(
DeserializationError, "(serializers.player:pk=badpk)"
):
list(serializers.deserialize("jsonl", test_string))
def test_helpful_error_message_invalid_field(self):
"""
If there is an invalid field value, the error message contains the
model associated with it.
"""
test_string = (
'{"pk": "1","model": "serializers.player",'
'"fields": {"name": "Bob","rank": "invalidint","team": "Team"}}'
)
expected = "(serializers.player:pk=1) field_value was 'invalidint'"
with self.assertRaisesMessage(DeserializationError, expected):
list(serializers.deserialize("jsonl", test_string))
def test_helpful_error_message_for_foreign_keys(self):
"""
        Invalid foreign keys with a natural key throw a helpful error message
        that includes the failing key.
"""
test_string = (
'{"pk": 1, "model": "serializers.category",'
'"fields": {'
'"name": "Unknown foreign key",'
'"meta_data": ["doesnotexist","metadata"]}}'
)
key = ["doesnotexist", "metadata"]
expected = "(serializers.category:pk=1) field_value was '%r'" % key
with self.assertRaisesMessage(DeserializationError, expected):
list(serializers.deserialize("jsonl", test_string))
def test_helpful_error_message_for_many2many_non_natural(self):
"""
        Invalid many-to-many keys throw a helpful error message.
"""
test_strings = [
"""{
"pk": 1,
"model": "serializers.article",
"fields": {
"author": 1,
"headline": "Unknown many to many",
"pub_date": "2014-09-15T10:35:00",
"categories": [1, "doesnotexist"]
}
}""",
"""{
"pk": 1,
"model": "serializers.author",
"fields": {"name": "Agnes"}
}""",
"""{
"pk": 1,
"model": "serializers.category",
"fields": {"name": "Reference"}
}""",
]
test_string = "\n".join([s.replace("\n", "") for s in test_strings])
expected = "(serializers.article:pk=1) field_value was 'doesnotexist'"
with self.assertRaisesMessage(DeserializationError, expected):
list(serializers.deserialize("jsonl", test_string))
def test_helpful_error_message_for_many2many_natural1(self):
"""
        Invalid many-to-many keys throw a helpful error message when one of a
        list of natural keys is invalid.
"""
test_strings = [
"""{
"pk": 1,
"model": "serializers.categorymetadata",
"fields": {"kind": "author","name": "meta1","value": "Agnes"}
}""",
"""{
"pk": 1,
"model": "serializers.article",
"fields": {
"author": 1,
"headline": "Unknown many to many",
"pub_date": "2014-09-15T10:35:00",
"meta_data": [
["author", "meta1"],
["doesnotexist", "meta1"],
["author", "meta1"]
]
}
}""",
"""{
"pk": 1,
"model": "serializers.author",
"fields": {"name": "Agnes"}
}""",
]
test_string = "\n".join([s.replace("\n", "") for s in test_strings])
key = ["doesnotexist", "meta1"]
expected = "(serializers.article:pk=1) field_value was '%r'" % key
with self.assertRaisesMessage(DeserializationError, expected):
for obj in serializers.deserialize("jsonl", test_string):
obj.save()
def test_helpful_error_message_for_many2many_natural2(self):
"""
        Invalid many-to-many keys throw a helpful error message when a
        natural many-to-many key has only a single value.
"""
test_strings = [
"""{
"pk": 1,
"model": "serializers.article",
"fields": {
"author": 1,
"headline": "Unknown many to many",
"pub_date": "2014-09-15T10:35:00",
"meta_data": [1, "doesnotexist"]
}
}""",
"""{
"pk": 1,
"model": "serializers.categorymetadata",
"fields": {"kind": "author","name": "meta1","value": "Agnes"}
}""",
"""{
"pk": 1,
"model": "serializers.author",
"fields": {"name": "Agnes"}
}""",
]
test_string = "\n".join([s.replace("\n", "") for s in test_strings])
expected = "(serializers.article:pk=1) field_value was 'doesnotexist'"
with self.assertRaisesMessage(DeserializationError, expected):
for obj in serializers.deserialize("jsonl", test_string, ignore=False):
obj.save()
def test_helpful_error_message_for_many2many_not_iterable(self):
"""
        A non-iterable many-to-many field value throws a helpful error message.
"""
test_string = (
'{"pk": 1,"model": "serializers.m2mdata","fields": {"data": null}}'
)
expected = "(serializers.m2mdata:pk=1) field_value was 'None'"
with self.assertRaisesMessage(DeserializationError, expected):
next(serializers.deserialize("jsonl", test_string, ignore=False))
class JsonlSerializerTransactionTestCase(
SerializersTransactionTestBase, TransactionTestCase
):
serializer_name = "jsonl"
fwd_ref_str = [
"""{
"pk": 1,
"model": "serializers.article",
"fields": {
"headline": "Forward references pose no problem",
"pub_date": "2006-06-16T15:00:00",
"categories": [1],
"author": 1
}
}""",
"""{
"pk": 1,
"model": "serializers.category",
"fields": {"name": "Reference"}
}""",
"""{
"pk": 1,
"model": "serializers.author",
"fields": {"name": "Agnes"}
}""",
]
fwd_ref_str = "\n".join([s.replace("\n", "") for s in fwd_ref_str])
|
3699cd116b5c524a6891d17a5897c07a3e0d6f4d79b58f889f8d0ab82fe92d17 | import datetime
import decimal
import json
import re
from django.core import serializers
from django.core.serializers.base import DeserializationError
from django.core.serializers.json import DjangoJSONEncoder
from django.db import models
from django.test import SimpleTestCase, TestCase, TransactionTestCase
from django.test.utils import isolate_apps
from django.utils.translation import gettext_lazy, override
from .models import Score
from .tests import SerializersTestBase, SerializersTransactionTestBase
class JsonSerializerTestCase(SerializersTestBase, TestCase):
serializer_name = "json"
pkless_str = """[
{
"pk": null,
"model": "serializers.category",
"fields": {"name": "Reference"}
}, {
"model": "serializers.category",
"fields": {"name": "Non-fiction"}
}]"""
mapping_ordering_str = """[
{
"model": "serializers.article",
"pk": %(article_pk)s,
"fields": {
"author": %(author_pk)s,
"headline": "Poker has no place on ESPN",
"pub_date": "2006-06-16T11:00:00",
"categories": [
%(first_category_pk)s,
%(second_category_pk)s
],
"meta_data": [],
"topics": []
}
}
]
"""
@staticmethod
def _validate_output(serial_str):
try:
json.loads(serial_str)
except Exception:
return False
else:
return True
@staticmethod
def _get_pk_values(serial_str):
serial_list = json.loads(serial_str)
return [obj_dict["pk"] for obj_dict in serial_list]
@staticmethod
def _get_field_values(serial_str, field_name):
serial_list = json.loads(serial_str)
return [
obj_dict["fields"][field_name]
for obj_dict in serial_list
if field_name in obj_dict["fields"]
]
def test_indentation_whitespace(self):
s = serializers.json.Serializer()
json_data = s.serialize([Score(score=5.0), Score(score=6.0)], indent=2)
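        # With indentation enabled, no serialized line may end in trailing
        # whitespace after its comma.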
for line in json_data.splitlines():
if re.search(r".+,\s*$", line):
self.assertEqual(line, line.rstrip())
@isolate_apps("serializers")
def test_custom_encoder(self):
class ScoreDecimal(models.Model):
score = models.DecimalField()
class CustomJSONEncoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, decimal.Decimal):
return str(o)
return super().default(o)
s = serializers.json.Serializer()
json_data = s.serialize(
[ScoreDecimal(score=decimal.Decimal(1.0))], cls=CustomJSONEncoder
)
self.assertIn('"fields": {"score": "1"}', json_data)
def test_json_deserializer_exception(self):
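        # The stream below is truncated (missing the closing "]"), so the
        # underlying JSON parse error surfaces as a DeserializationError.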
with self.assertRaises(DeserializationError):
for obj in serializers.deserialize("json", """[{"pk":1}"""):
pass
def test_helpful_error_message_invalid_pk(self):
"""
If there is an invalid primary key, the error message should contain
the model associated with it.
"""
test_string = """[{
"pk": "badpk",
"model": "serializers.player",
"fields": {
"name": "Bob",
"rank": 1,
"team": "Team"
}
}]"""
with self.assertRaisesMessage(
DeserializationError, "(serializers.player:pk=badpk)"
):
list(serializers.deserialize("json", test_string))
def test_helpful_error_message_invalid_field(self):
"""
If there is an invalid field value, the error message should contain
the model associated with it.
"""
test_string = """[{
"pk": "1",
"model": "serializers.player",
"fields": {
"name": "Bob",
"rank": "invalidint",
"team": "Team"
}
}]"""
expected = "(serializers.player:pk=1) field_value was 'invalidint'"
with self.assertRaisesMessage(DeserializationError, expected):
list(serializers.deserialize("json", test_string))
def test_helpful_error_message_for_foreign_keys(self):
"""
Invalid foreign keys with a natural key should throw a helpful error
message, such as what the failing key is.
"""
test_string = """[{
"pk": 1,
"model": "serializers.category",
"fields": {
"name": "Unknown foreign key",
"meta_data": [
"doesnotexist",
"metadata"
]
}
}]"""
key = ["doesnotexist", "metadata"]
expected = "(serializers.category:pk=1) field_value was '%r'" % key
with self.assertRaisesMessage(DeserializationError, expected):
list(serializers.deserialize("json", test_string))
def test_helpful_error_message_for_many2many_non_natural(self):
"""
Invalid many-to-many keys should throw a helpful error message.
"""
test_string = """[{
"pk": 1,
"model": "serializers.article",
"fields": {
"author": 1,
"headline": "Unknown many to many",
"pub_date": "2014-09-15T10:35:00",
"categories": [1, "doesnotexist"]
}
}, {
"pk": 1,
"model": "serializers.author",
"fields": {
"name": "Agnes"
}
}, {
"pk": 1,
"model": "serializers.category",
"fields": {
"name": "Reference"
}
}]"""
expected = "(serializers.article:pk=1) field_value was 'doesnotexist'"
with self.assertRaisesMessage(DeserializationError, expected):
list(serializers.deserialize("json", test_string))
def test_helpful_error_message_for_many2many_natural1(self):
"""
Invalid many-to-many keys should throw a helpful error message.
This tests the code path where one of a list of natural keys is invalid.
"""
test_string = """[{
"pk": 1,
"model": "serializers.categorymetadata",
"fields": {
"kind": "author",
"name": "meta1",
"value": "Agnes"
}
}, {
"pk": 1,
"model": "serializers.article",
"fields": {
"author": 1,
"headline": "Unknown many to many",
"pub_date": "2014-09-15T10:35:00",
"meta_data": [
["author", "meta1"],
["doesnotexist", "meta1"],
["author", "meta1"]
]
}
}, {
"pk": 1,
"model": "serializers.author",
"fields": {
"name": "Agnes"
}
}]"""
key = ["doesnotexist", "meta1"]
expected = "(serializers.article:pk=1) field_value was '%r'" % key
with self.assertRaisesMessage(DeserializationError, expected):
for obj in serializers.deserialize("json", test_string):
obj.save()
def test_helpful_error_message_for_many2many_natural2(self):
"""
Invalid many-to-many keys should throw a helpful error message. This
tests the code path where a natural many-to-many key has only a single
value.
"""
test_string = """[{
"pk": 1,
"model": "serializers.article",
"fields": {
"author": 1,
"headline": "Unknown many to many",
"pub_date": "2014-09-15T10:35:00",
"meta_data": [1, "doesnotexist"]
}
}, {
"pk": 1,
"model": "serializers.categorymetadata",
"fields": {
"kind": "author",
"name": "meta1",
"value": "Agnes"
}
}, {
"pk": 1,
"model": "serializers.author",
"fields": {
"name": "Agnes"
}
}]"""
expected = "(serializers.article:pk=1) field_value was 'doesnotexist'"
with self.assertRaisesMessage(DeserializationError, expected):
for obj in serializers.deserialize("json", test_string, ignore=False):
obj.save()
def test_helpful_error_message_for_many2many_not_iterable(self):
"""
        A non-iterable many-to-many field value throws a helpful error message.
"""
test_string = """[{
"pk": 1,
"model": "serializers.m2mdata",
"fields": {"data": null}
}]"""
expected = "(serializers.m2mdata:pk=1) field_value was 'None'"
with self.assertRaisesMessage(DeserializationError, expected):
next(serializers.deserialize("json", test_string, ignore=False))
class JsonSerializerTransactionTestCase(
SerializersTransactionTestBase, TransactionTestCase
):
serializer_name = "json"
fwd_ref_str = """[
{
"pk": 1,
"model": "serializers.article",
"fields": {
"headline": "Forward references pose no problem",
"pub_date": "2006-06-16T15:00:00",
"categories": [1],
"author": 1
}
},
{
"pk": 1,
"model": "serializers.category",
"fields": {
"name": "Reference"
}
},
{
"pk": 1,
"model": "serializers.author",
"fields": {
"name": "Agnes"
}
}]"""
class DjangoJSONEncoderTests(SimpleTestCase):
def test_lazy_string_encoding(self):
self.assertEqual(
json.dumps({"lang": gettext_lazy("French")}, cls=DjangoJSONEncoder),
'{"lang": "French"}',
)
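        # Under the French locale the lazy string resolves to "Français",
        # which json.dumps escapes (ensure_ascii defaults to True).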
with override("fr"):
self.assertEqual(
json.dumps({"lang": gettext_lazy("French")}, cls=DjangoJSONEncoder),
'{"lang": "Fran\\u00e7ais"}',
)
def test_timedelta(self):
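        # DjangoJSONEncoder renders timedeltas as ISO 8601 durations with
        # zero-padded components, e.g. P1DT02H00M03S.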
duration = datetime.timedelta(days=1, hours=2, seconds=3)
self.assertEqual(
json.dumps({"duration": duration}, cls=DjangoJSONEncoder),
'{"duration": "P1DT02H00M03S"}',
)
duration = datetime.timedelta(0)
self.assertEqual(
json.dumps({"duration": duration}, cls=DjangoJSONEncoder),
'{"duration": "P0DT00H00M00S"}',
)
|
1e01e15cac52fd5f6646e5e3a41f2627f7392935589e12864e1a56966f43408f | from unittest import mock
from django.db import transaction
from django.test import TestCase, skipIfDBFeature, skipUnlessDBFeature
from .models import Article, InheritedArticleA, InheritedArticleB, Publication, User
class ManyToManyTests(TestCase):
@classmethod
def setUpTestData(cls):
# Create a couple of Publications.
cls.p1 = Publication.objects.create(title="The Python Journal")
cls.p2 = Publication.objects.create(title="Science News")
cls.p3 = Publication.objects.create(title="Science Weekly")
cls.p4 = Publication.objects.create(title="Highlights for Children")
cls.a1 = Article.objects.create(
headline="Django lets you build web apps easily"
)
cls.a1.publications.add(cls.p1)
cls.a2 = Article.objects.create(headline="NASA uses Python")
cls.a2.publications.add(cls.p1, cls.p2, cls.p3, cls.p4)
cls.a3 = Article.objects.create(headline="NASA finds intelligent life on Earth")
cls.a3.publications.add(cls.p2)
cls.a4 = Article.objects.create(headline="Oxygen-free diet works wonders")
cls.a4.publications.add(cls.p2)
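        # Resulting fixture graph: p1 -> {a1, a2}; p2 -> {a2, a3, a4};
        # p3 -> {a2}; p4 -> {a2}.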
def test_add(self):
# Create an Article.
a5 = Article(headline="Django lets you create web apps easily")
# You can't associate it with a Publication until it's been saved.
msg = (
'"<Article: Django lets you create web apps easily>" needs to have '
'a value for field "id" before this many-to-many relationship can be used.'
)
with self.assertRaisesMessage(ValueError, msg):
getattr(a5, "publications")
# Save it!
a5.save()
# Associate the Article with a Publication.
a5.publications.add(self.p1)
self.assertSequenceEqual(a5.publications.all(), [self.p1])
# Create another Article, and set it to appear in both Publications.
a6 = Article(headline="ESA uses Python")
a6.save()
a6.publications.add(self.p1, self.p2)
a6.publications.add(self.p3)
# Adding a second time is OK
a6.publications.add(self.p3)
self.assertSequenceEqual(
a6.publications.all(),
[self.p2, self.p3, self.p1],
)
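        # The ordering [p2, p3, p1] reflects Publication's default ordering
        # by title (an assumption based on .models), not insertion order.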
# Adding an object of the wrong type raises TypeError
msg = (
"'Publication' instance expected, got <Article: Django lets you create web "
"apps easily>"
)
with self.assertRaisesMessage(TypeError, msg):
with transaction.atomic():
a6.publications.add(a5)
# Add a Publication directly via publications.add by using keyword arguments.
p5 = a6.publications.create(title="Highlights for Adults")
self.assertSequenceEqual(
a6.publications.all(),
[p5, self.p2, self.p3, self.p1],
)
def test_add_remove_set_by_pk(self):
a5 = Article.objects.create(headline="Django lets you create web apps easily")
a5.publications.add(self.p1.pk)
self.assertSequenceEqual(a5.publications.all(), [self.p1])
a5.publications.set([self.p2.pk])
self.assertSequenceEqual(a5.publications.all(), [self.p2])
a5.publications.remove(self.p2.pk)
self.assertSequenceEqual(a5.publications.all(), [])
def test_add_remove_set_by_to_field(self):
user_1 = User.objects.create(username="Jean")
user_2 = User.objects.create(username="Joe")
a5 = Article.objects.create(headline="Django lets you create web apps easily")
a5.authors.add(user_1.username)
self.assertSequenceEqual(a5.authors.all(), [user_1])
a5.authors.set([user_2.username])
self.assertSequenceEqual(a5.authors.all(), [user_2])
a5.authors.remove(user_2.username)
self.assertSequenceEqual(a5.authors.all(), [])
def test_related_manager_refresh(self):
user_1 = User.objects.create(username="Jean")
user_2 = User.objects.create(username="Joe")
self.a3.authors.add(user_1.username)
self.assertSequenceEqual(user_1.article_set.all(), [self.a3])
# Change the username on a different instance of the same user.
user_1_from_db = User.objects.get(pk=user_1.pk)
self.assertSequenceEqual(user_1_from_db.article_set.all(), [self.a3])
user_1_from_db.username = "Paul"
self.a3.authors.set([user_2.username])
user_1_from_db.save()
# Assign a different article.
self.a4.authors.add(user_1_from_db.username)
self.assertSequenceEqual(user_1_from_db.article_set.all(), [self.a4])
# Refresh the instance with an evaluated related manager.
user_1.refresh_from_db()
self.assertEqual(user_1.username, "Paul")
self.assertSequenceEqual(user_1.article_set.all(), [self.a4])
def test_add_remove_invalid_type(self):
msg = "Field 'id' expected a number but got 'invalid'."
for method in ["add", "remove"]:
with self.subTest(method), self.assertRaisesMessage(ValueError, msg):
getattr(self.a1.publications, method)("invalid")
def test_reverse_add(self):
# Adding via the 'other' end of an m2m
a5 = Article(headline="NASA finds intelligent life on Mars")
a5.save()
self.p2.article_set.add(a5)
self.assertSequenceEqual(
self.p2.article_set.all(),
[self.a3, a5, self.a2, self.a4],
)
self.assertSequenceEqual(a5.publications.all(), [self.p2])
# Adding via the other end using keywords
a6 = self.p2.article_set.create(headline="Carbon-free diet works wonders")
self.assertSequenceEqual(
self.p2.article_set.all(),
[a6, self.a3, a5, self.a2, self.a4],
)
a6 = self.p2.article_set.all()[3]
self.assertSequenceEqual(
a6.publications.all(),
[self.p4, self.p2, self.p3, self.p1],
)
@skipUnlessDBFeature("supports_ignore_conflicts")
def test_fast_add_ignore_conflicts(self):
"""
A single query is necessary to add auto-created through instances if
the database backend supports bulk_create(ignore_conflicts) and no
m2m_changed signals receivers are connected.
"""
with self.assertNumQueries(1):
self.a1.publications.add(self.p1, self.p2)
@skipIfDBFeature("supports_ignore_conflicts")
def test_add_existing_different_type(self):
# A single SELECT query is necessary to compare existing values to the
# provided one; no INSERT should be attempted.
with self.assertNumQueries(1):
self.a1.publications.add(str(self.p1.pk))
self.assertEqual(self.a1.publications.get(), self.p1)
@skipUnlessDBFeature("supports_ignore_conflicts")
def test_slow_add_ignore_conflicts(self):
manager_cls = self.a1.publications.__class__
# Simulate a race condition between the missing ids retrieval and
# the bulk insertion attempt.
missing_target_ids = {self.p1.id}
# Disable fast-add to test the case where the slow add path is taken.
add_plan = (True, False, False)
with mock.patch.object(
manager_cls, "_get_missing_target_ids", return_value=missing_target_ids
) as mocked:
with mock.patch.object(manager_cls, "_get_add_plan", return_value=add_plan):
self.a1.publications.add(self.p1)
mocked.assert_called_once()
def test_related_sets(self):
# Article objects have access to their related Publication objects.
self.assertSequenceEqual(self.a1.publications.all(), [self.p1])
self.assertSequenceEqual(
self.a2.publications.all(),
[self.p4, self.p2, self.p3, self.p1],
)
# Publication objects have access to their related Article objects.
self.assertSequenceEqual(
self.p2.article_set.all(),
[self.a3, self.a2, self.a4],
)
self.assertSequenceEqual(
self.p1.article_set.all(),
[self.a1, self.a2],
)
self.assertSequenceEqual(
Publication.objects.get(id=self.p4.id).article_set.all(),
[self.a2],
)
def test_selects(self):
# We can perform kwarg queries across m2m relationships
self.assertSequenceEqual(
Article.objects.filter(publications__id__exact=self.p1.id),
[self.a1, self.a2],
)
self.assertSequenceEqual(
Article.objects.filter(publications__pk=self.p1.id),
[self.a1, self.a2],
)
self.assertSequenceEqual(
Article.objects.filter(publications=self.p1.id),
[self.a1, self.a2],
)
self.assertSequenceEqual(
Article.objects.filter(publications=self.p1),
[self.a1, self.a2],
)
self.assertSequenceEqual(
Article.objects.filter(publications__title__startswith="Science"),
[self.a3, self.a2, self.a2, self.a4],
)
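        # a2 appears twice because it is linked to both "Science" publications;
        # distinct() below removes the duplicate row.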
self.assertSequenceEqual(
Article.objects.filter(
publications__title__startswith="Science"
).distinct(),
[self.a3, self.a2, self.a4],
)
# The count() function respects distinct() as well.
self.assertEqual(
Article.objects.filter(publications__title__startswith="Science").count(), 4
)
self.assertEqual(
Article.objects.filter(publications__title__startswith="Science")
.distinct()
.count(),
3,
)
self.assertSequenceEqual(
Article.objects.filter(
publications__in=[self.p1.id, self.p2.id]
).distinct(),
[self.a1, self.a3, self.a2, self.a4],
)
self.assertSequenceEqual(
Article.objects.filter(publications__in=[self.p1.id, self.p2]).distinct(),
[self.a1, self.a3, self.a2, self.a4],
)
self.assertSequenceEqual(
Article.objects.filter(publications__in=[self.p1, self.p2]).distinct(),
[self.a1, self.a3, self.a2, self.a4],
)
# Excluding a related item works as you would expect, too (although the SQL
# involved is a little complex).
self.assertSequenceEqual(
Article.objects.exclude(publications=self.p2),
[self.a1],
)
def test_reverse_selects(self):
# Reverse m2m queries are supported (i.e., starting at the table that
# doesn't have a ManyToManyField).
python_journal = [self.p1]
self.assertSequenceEqual(
Publication.objects.filter(id__exact=self.p1.id), python_journal
)
self.assertSequenceEqual(
Publication.objects.filter(pk=self.p1.id), python_journal
)
self.assertSequenceEqual(
Publication.objects.filter(article__headline__startswith="NASA"),
[self.p4, self.p2, self.p2, self.p3, self.p1],
)
self.assertSequenceEqual(
Publication.objects.filter(article__id__exact=self.a1.id), python_journal
)
self.assertSequenceEqual(
Publication.objects.filter(article__pk=self.a1.id), python_journal
)
self.assertSequenceEqual(
Publication.objects.filter(article=self.a1.id), python_journal
)
self.assertSequenceEqual(
Publication.objects.filter(article=self.a1), python_journal
)
self.assertSequenceEqual(
Publication.objects.filter(article__in=[self.a1.id, self.a2.id]).distinct(),
[self.p4, self.p2, self.p3, self.p1],
)
self.assertSequenceEqual(
Publication.objects.filter(article__in=[self.a1.id, self.a2]).distinct(),
[self.p4, self.p2, self.p3, self.p1],
)
self.assertSequenceEqual(
Publication.objects.filter(article__in=[self.a1, self.a2]).distinct(),
[self.p4, self.p2, self.p3, self.p1],
)
def test_delete(self):
# If we delete a Publication, its Articles won't be able to access it.
self.p1.delete()
self.assertSequenceEqual(
Publication.objects.all(),
[self.p4, self.p2, self.p3],
)
self.assertSequenceEqual(self.a1.publications.all(), [])
# If we delete an Article, its Publications won't be able to access it.
self.a2.delete()
self.assertSequenceEqual(
Article.objects.all(),
[self.a1, self.a3, self.a4],
)
self.assertSequenceEqual(
self.p2.article_set.all(),
[self.a3, self.a4],
)
def test_bulk_delete(self):
# Bulk delete some Publications - references to deleted publications should go
Publication.objects.filter(title__startswith="Science").delete()
self.assertSequenceEqual(
Publication.objects.all(),
[self.p4, self.p1],
)
self.assertSequenceEqual(
Article.objects.all(),
[self.a1, self.a3, self.a2, self.a4],
)
self.assertSequenceEqual(
self.a2.publications.all(),
[self.p4, self.p1],
)
# Bulk delete some articles - references to deleted objects should go
q = Article.objects.filter(headline__startswith="Django")
self.assertSequenceEqual(q, [self.a1])
q.delete()
# After the delete, the QuerySet cache needs to be cleared,
# and the referenced objects should be gone
self.assertSequenceEqual(q, [])
self.assertSequenceEqual(self.p1.article_set.all(), [self.a2])
def test_remove(self):
# Removing publication from an article:
self.assertSequenceEqual(
self.p2.article_set.all(),
[self.a3, self.a2, self.a4],
)
self.a4.publications.remove(self.p2)
self.assertSequenceEqual(
self.p2.article_set.all(),
[self.a3, self.a2],
)
self.assertSequenceEqual(self.a4.publications.all(), [])
# And from the other end
self.p2.article_set.remove(self.a3)
self.assertSequenceEqual(self.p2.article_set.all(), [self.a2])
self.assertSequenceEqual(self.a3.publications.all(), [])
def test_set(self):
self.p2.article_set.set([self.a4, self.a3])
self.assertSequenceEqual(
self.p2.article_set.all(),
[self.a3, self.a4],
)
self.assertSequenceEqual(self.a4.publications.all(), [self.p2])
self.a4.publications.set([self.p3.id])
self.assertSequenceEqual(self.p2.article_set.all(), [self.a3])
self.assertSequenceEqual(self.a4.publications.all(), [self.p3])
self.p2.article_set.set([])
self.assertSequenceEqual(self.p2.article_set.all(), [])
self.a4.publications.set([])
self.assertSequenceEqual(self.a4.publications.all(), [])
self.p2.article_set.set([self.a4, self.a3], clear=True)
self.assertSequenceEqual(
self.p2.article_set.all(),
[self.a3, self.a4],
)
self.assertSequenceEqual(self.a4.publications.all(), [self.p2])
self.a4.publications.set([self.p3.id], clear=True)
self.assertSequenceEqual(self.p2.article_set.all(), [self.a3])
self.assertSequenceEqual(self.a4.publications.all(), [self.p3])
self.p2.article_set.set([], clear=True)
self.assertSequenceEqual(self.p2.article_set.all(), [])
self.a4.publications.set([], clear=True)
self.assertSequenceEqual(self.a4.publications.all(), [])
def test_set_existing_different_type(self):
# Existing many-to-many relations remain the same for values provided
# with a different type.
ids = set(
Publication.article_set.through.objects.filter(
article__in=[self.a4, self.a3],
publication=self.p2,
).values_list("id", flat=True)
)
self.p2.article_set.set([str(self.a4.pk), str(self.a3.pk)])
new_ids = set(
Publication.article_set.through.objects.filter(
publication=self.p2,
).values_list("id", flat=True)
)
self.assertEqual(ids, new_ids)
def test_assign_forward(self):
msg = (
"Direct assignment to the reverse side of a many-to-many set is "
"prohibited. Use article_set.set() instead."
)
with self.assertRaisesMessage(TypeError, msg):
self.p2.article_set = [self.a4, self.a3]
def test_assign_reverse(self):
msg = (
"Direct assignment to the forward side of a many-to-many "
"set is prohibited. Use publications.set() instead."
)
with self.assertRaisesMessage(TypeError, msg):
self.a1.publications = [self.p1, self.p2]
def test_assign(self):
# Relation sets can be assigned using set().
self.p2.article_set.set([self.a4, self.a3])
self.assertSequenceEqual(
self.p2.article_set.all(),
[self.a3, self.a4],
)
self.assertSequenceEqual(self.a4.publications.all(), [self.p2])
self.a4.publications.set([self.p3.id])
self.assertSequenceEqual(self.p2.article_set.all(), [self.a3])
self.assertSequenceEqual(self.a4.publications.all(), [self.p3])
# An alternate to calling clear() is to set an empty set.
self.p2.article_set.set([])
self.assertSequenceEqual(self.p2.article_set.all(), [])
self.a4.publications.set([])
self.assertSequenceEqual(self.a4.publications.all(), [])
def test_assign_ids(self):
# Relation sets can also be set using primary key values
self.p2.article_set.set([self.a4.id, self.a3.id])
self.assertSequenceEqual(
self.p2.article_set.all(),
[self.a3, self.a4],
)
self.assertSequenceEqual(self.a4.publications.all(), [self.p2])
self.a4.publications.set([self.p3.id])
self.assertSequenceEqual(self.p2.article_set.all(), [self.a3])
self.assertSequenceEqual(self.a4.publications.all(), [self.p3])
def test_forward_assign_with_queryset(self):
# Querysets used in m2m assignments are pre-evaluated so their value
# isn't affected by the clearing operation in ManyRelatedManager.set()
# (#19816).
self.a1.publications.set([self.p1, self.p2])
qs = self.a1.publications.filter(title="The Python Journal")
self.a1.publications.set(qs)
self.assertEqual(1, self.a1.publications.count())
self.assertEqual(1, qs.count())
def test_reverse_assign_with_queryset(self):
# Querysets used in M2M assignments are pre-evaluated so their value
# isn't affected by the clearing operation in ManyRelatedManager.set()
# (#19816).
self.p1.article_set.set([self.a1, self.a2])
qs = self.p1.article_set.filter(
headline="Django lets you build web apps easily"
)
self.p1.article_set.set(qs)
self.assertEqual(1, self.p1.article_set.count())
self.assertEqual(1, qs.count())
def test_clear(self):
# Relation sets can be cleared:
self.p2.article_set.clear()
self.assertSequenceEqual(self.p2.article_set.all(), [])
self.assertSequenceEqual(self.a4.publications.all(), [])
# And you can clear from the other end
self.p2.article_set.add(self.a3, self.a4)
self.assertSequenceEqual(
self.p2.article_set.all(),
[self.a3, self.a4],
)
self.assertSequenceEqual(self.a4.publications.all(), [self.p2])
self.a4.publications.clear()
self.assertSequenceEqual(self.a4.publications.all(), [])
self.assertSequenceEqual(self.p2.article_set.all(), [self.a3])
def test_clear_after_prefetch(self):
a4 = Article.objects.prefetch_related("publications").get(id=self.a4.id)
self.assertSequenceEqual(a4.publications.all(), [self.p2])
a4.publications.clear()
self.assertSequenceEqual(a4.publications.all(), [])
def test_remove_after_prefetch(self):
a4 = Article.objects.prefetch_related("publications").get(id=self.a4.id)
self.assertSequenceEqual(a4.publications.all(), [self.p2])
a4.publications.remove(self.p2)
self.assertSequenceEqual(a4.publications.all(), [])
def test_add_after_prefetch(self):
a4 = Article.objects.prefetch_related("publications").get(id=self.a4.id)
self.assertEqual(a4.publications.count(), 1)
a4.publications.add(self.p1)
self.assertEqual(a4.publications.count(), 2)
def test_create_after_prefetch(self):
a4 = Article.objects.prefetch_related("publications").get(id=self.a4.id)
self.assertSequenceEqual(a4.publications.all(), [self.p2])
p5 = a4.publications.create(title="Django beats")
self.assertCountEqual(a4.publications.all(), [self.p2, p5])
def test_set_after_prefetch(self):
a4 = Article.objects.prefetch_related("publications").get(id=self.a4.id)
self.assertEqual(a4.publications.count(), 1)
a4.publications.set([self.p2, self.p1])
self.assertEqual(a4.publications.count(), 2)
a4.publications.set([self.p1])
self.assertEqual(a4.publications.count(), 1)
def test_add_then_remove_after_prefetch(self):
a4 = Article.objects.prefetch_related("publications").get(id=self.a4.id)
self.assertEqual(a4.publications.count(), 1)
a4.publications.add(self.p1)
self.assertEqual(a4.publications.count(), 2)
a4.publications.remove(self.p1)
self.assertSequenceEqual(a4.publications.all(), [self.p2])
def test_inherited_models_selects(self):
"""
#24156 - Objects from child models where the parent's m2m field uses
related_name='+' should be retrieved correctly.
"""
a = InheritedArticleA.objects.create()
b = InheritedArticleB.objects.create()
a.publications.add(self.p1, self.p2)
self.assertSequenceEqual(
a.publications.all(),
[self.p2, self.p1],
)
self.assertSequenceEqual(b.publications.all(), [])
b.publications.add(self.p3)
self.assertSequenceEqual(
a.publications.all(),
[self.p2, self.p1],
)
self.assertSequenceEqual(b.publications.all(), [self.p3])
def test_custom_default_manager_exists_count(self):
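        # Article's default manager is assumed (see .models) to exclude
        # articles with the headline "deleted", so count() and exists() on the
        # related manager must agree with the filtered queryset.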
a5 = Article.objects.create(headline="deleted")
a5.publications.add(self.p2)
        self.assertEqual(
            self.p2.article_set.count(), self.p2.article_set.all().count()
        )
self.assertEqual(
self.p3.article_set.exists(), self.p3.article_set.all().exists()
)
|
9a579d14f0eca685e600afd76ccd85c2b780e674a85040e8d37a4d4c99e9e2d4 | from django import forms
from django.contrib import admin
from django.contrib.admin import AdminSite
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.middleware import AuthenticationMiddleware
from django.contrib.contenttypes.admin import GenericStackedInline
from django.contrib.messages.middleware import MessageMiddleware
from django.contrib.sessions.middleware import SessionMiddleware
from django.core import checks
from django.test import SimpleTestCase, override_settings
from .models import Album, Author, Book, City, Influence, Song, State, TwoAlbumFKAndAnE
class SongForm(forms.ModelForm):
pass
class ValidFields(admin.ModelAdmin):
form = SongForm
fields = ["title"]
class ValidFormFieldsets(admin.ModelAdmin):
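    # Overriding get_form() causes the fieldsets checks to be skipped; see
    # SystemChecksTestCase.test_custom_get_form_with_fieldsets below.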
def get_form(self, request, obj=None, **kwargs):
class ExtraFieldForm(SongForm):
name = forms.CharField(max_length=50)
return ExtraFieldForm
fieldsets = (
(
None,
{
"fields": ("name",),
},
),
)
class MyAdmin(admin.ModelAdmin):
def check(self, **kwargs):
return ["error!"]
class AuthenticationMiddlewareSubclass(AuthenticationMiddleware):
pass
class MessageMiddlewareSubclass(MessageMiddleware):
pass
class ModelBackendSubclass(ModelBackend):
pass
class SessionMiddlewareSubclass(SessionMiddleware):
pass
@override_settings(
SILENCED_SYSTEM_CHECKS=["fields.W342"], # ForeignKey(unique=True)
INSTALLED_APPS=[
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.messages",
"admin_checks",
],
)
class SystemChecksTestCase(SimpleTestCase):
databases = "__all__"
def test_checks_are_performed(self):
admin.site.register(Song, MyAdmin)
try:
errors = checks.run_checks()
expected = ["error!"]
self.assertEqual(errors, expected)
finally:
admin.site.unregister(Song)
@override_settings(INSTALLED_APPS=["django.contrib.admin"])
def test_apps_dependencies(self):
errors = admin.checks.check_dependencies()
expected = [
checks.Error(
"'django.contrib.contenttypes' must be in "
"INSTALLED_APPS in order to use the admin application.",
id="admin.E401",
),
checks.Error(
"'django.contrib.auth' must be in INSTALLED_APPS in order "
"to use the admin application.",
id="admin.E405",
),
checks.Error(
"'django.contrib.messages' must be in INSTALLED_APPS in order "
"to use the admin application.",
id="admin.E406",
),
]
self.assertEqual(errors, expected)
@override_settings(TEMPLATES=[])
def test_no_template_engines(self):
self.assertEqual(
admin.checks.check_dependencies(),
[
checks.Error(
"A 'django.template.backends.django.DjangoTemplates' "
"instance must be configured in TEMPLATES in order to use "
"the admin application.",
id="admin.E403",
)
],
)
@override_settings(
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [],
},
}
],
)
def test_context_processor_dependencies(self):
expected = [
checks.Error(
"'django.contrib.auth.context_processors.auth' must be "
"enabled in DjangoTemplates (TEMPLATES) if using the default "
"auth backend in order to use the admin application.",
id="admin.E402",
),
checks.Error(
"'django.contrib.messages.context_processors.messages' must "
"be enabled in DjangoTemplates (TEMPLATES) in order to use "
"the admin application.",
id="admin.E404",
),
checks.Warning(
"'django.template.context_processors.request' must be enabled "
"in DjangoTemplates (TEMPLATES) in order to use the admin "
"navigation sidebar.",
id="admin.W411",
),
]
self.assertEqual(admin.checks.check_dependencies(), expected)
# The first error doesn't happen if
# 'django.contrib.auth.backends.ModelBackend' isn't in
# AUTHENTICATION_BACKENDS.
with self.settings(AUTHENTICATION_BACKENDS=[]):
self.assertEqual(admin.checks.check_dependencies(), expected[1:])
@override_settings(
AUTHENTICATION_BACKENDS=["admin_checks.tests.ModelBackendSubclass"],
TEMPLATES=[
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.request",
"django.contrib.messages.context_processors.messages",
],
},
}
],
)
def test_context_processor_dependencies_model_backend_subclass(self):
self.assertEqual(
admin.checks.check_dependencies(),
[
checks.Error(
"'django.contrib.auth.context_processors.auth' must be "
"enabled in DjangoTemplates (TEMPLATES) if using the default "
"auth backend in order to use the admin application.",
id="admin.E402",
),
],
)
@override_settings(
TEMPLATES=[
{
"BACKEND": "django.template.backends.dummy.TemplateStrings",
"DIRS": [],
"APP_DIRS": True,
},
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
],
},
},
],
)
def test_several_templates_backends(self):
self.assertEqual(admin.checks.check_dependencies(), [])
@override_settings(MIDDLEWARE=[])
def test_middleware_dependencies(self):
errors = admin.checks.check_dependencies()
expected = [
checks.Error(
"'django.contrib.auth.middleware.AuthenticationMiddleware' "
"must be in MIDDLEWARE in order to use the admin application.",
id="admin.E408",
),
checks.Error(
"'django.contrib.messages.middleware.MessageMiddleware' "
"must be in MIDDLEWARE in order to use the admin application.",
id="admin.E409",
),
checks.Error(
"'django.contrib.sessions.middleware.SessionMiddleware' "
"must be in MIDDLEWARE in order to use the admin application.",
hint=(
"Insert "
"'django.contrib.sessions.middleware.SessionMiddleware' "
"before "
"'django.contrib.auth.middleware.AuthenticationMiddleware'."
),
id="admin.E410",
),
]
self.assertEqual(errors, expected)
@override_settings(
MIDDLEWARE=[
"admin_checks.tests.AuthenticationMiddlewareSubclass",
"admin_checks.tests.MessageMiddlewareSubclass",
"admin_checks.tests.SessionMiddlewareSubclass",
]
)
def test_middleware_subclasses(self):
self.assertEqual(admin.checks.check_dependencies(), [])
@override_settings(
MIDDLEWARE=[
"django.contrib.does.not.Exist",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
]
)
def test_admin_check_ignores_import_error_in_middleware(self):
self.assertEqual(admin.checks.check_dependencies(), [])
def test_custom_adminsite(self):
class CustomAdminSite(admin.AdminSite):
pass
custom_site = CustomAdminSite()
custom_site.register(Song, MyAdmin)
try:
errors = checks.run_checks()
expected = ["error!"]
self.assertEqual(errors, expected)
finally:
custom_site.unregister(Song)
def test_allows_checks_relying_on_other_modeladmins(self):
class MyBookAdmin(admin.ModelAdmin):
def check(self, **kwargs):
errors = super().check(**kwargs)
author_admin = self.admin_site._registry.get(Author)
if author_admin is None:
errors.append("AuthorAdmin missing!")
return errors
class MyAuthorAdmin(admin.ModelAdmin):
pass
admin.site.register(Book, MyBookAdmin)
admin.site.register(Author, MyAuthorAdmin)
try:
self.assertEqual(admin.site.check(None), [])
finally:
admin.site.unregister(Book)
admin.site.unregister(Author)
def test_field_name_not_in_list_display(self):
class SongAdmin(admin.ModelAdmin):
list_editable = ["original_release"]
errors = SongAdmin(Song, AdminSite()).check()
expected = [
checks.Error(
"The value of 'list_editable[0]' refers to 'original_release', "
"which is not contained in 'list_display'.",
obj=SongAdmin,
id="admin.E122",
)
]
self.assertEqual(errors, expected)
def test_list_editable_not_a_list_or_tuple(self):
class SongAdmin(admin.ModelAdmin):
list_editable = "test"
self.assertEqual(
SongAdmin(Song, AdminSite()).check(),
[
checks.Error(
"The value of 'list_editable' must be a list or tuple.",
obj=SongAdmin,
id="admin.E120",
)
],
)
def test_list_editable_missing_field(self):
class SongAdmin(admin.ModelAdmin):
list_editable = ("test",)
self.assertEqual(
SongAdmin(Song, AdminSite()).check(),
[
checks.Error(
"The value of 'list_editable[0]' refers to 'test', which is "
"not a field of 'admin_checks.Song'.",
obj=SongAdmin,
id="admin.E121",
)
],
)
def test_readonly_and_editable(self):
class SongAdmin(admin.ModelAdmin):
readonly_fields = ["original_release"]
list_display = ["pk", "original_release"]
list_editable = ["original_release"]
fieldsets = [
(
None,
{
"fields": ["title", "original_release"],
},
),
]
errors = SongAdmin(Song, AdminSite()).check()
expected = [
checks.Error(
"The value of 'list_editable[0]' refers to 'original_release', "
"which is not editable through the admin.",
obj=SongAdmin,
id="admin.E125",
)
]
self.assertEqual(errors, expected)
def test_pk_not_editable(self):
# PKs cannot be edited in the list.
class SongAdmin(admin.ModelAdmin):
list_display = ["title", "id"]
list_editable = ["id"]
errors = SongAdmin(Song, AdminSite()).check()
expected = [
checks.Error(
"The value of 'list_editable[0]' refers to 'id', which is not editable "
"through the admin.",
obj=SongAdmin,
id="admin.E125",
)
]
self.assertEqual(errors, expected)
def test_editable(self):
class SongAdmin(admin.ModelAdmin):
list_display = ["pk", "title"]
list_editable = ["title"]
fieldsets = [
(
None,
{
"fields": ["title", "original_release"],
},
),
]
errors = SongAdmin(Song, AdminSite()).check()
self.assertEqual(errors, [])
def test_custom_modelforms_with_fields_fieldsets(self):
"""
        Regression test for #8027: custom ModelForms with fields/fieldsets.
"""
errors = ValidFields(Song, AdminSite()).check()
self.assertEqual(errors, [])
def test_custom_get_form_with_fieldsets(self):
"""
The fieldsets checks are skipped when the ModelAdmin.get_form() method
is overridden.
"""
errors = ValidFormFieldsets(Song, AdminSite()).check()
self.assertEqual(errors, [])
def test_fieldsets_fields_non_tuple(self):
"""
The first fieldset's fields must be a list/tuple.
"""
class NotATupleAdmin(admin.ModelAdmin):
list_display = ["pk", "title"]
list_editable = ["title"]
fieldsets = [
(None, {"fields": "title"}), # not a tuple
]
errors = NotATupleAdmin(Song, AdminSite()).check()
expected = [
checks.Error(
"The value of 'fieldsets[0][1]['fields']' must be a list or tuple.",
obj=NotATupleAdmin,
id="admin.E008",
)
]
self.assertEqual(errors, expected)
def test_nonfirst_fieldset(self):
"""
The second fieldset's fields must be a list/tuple.
"""
class NotATupleAdmin(admin.ModelAdmin):
fieldsets = [
(None, {"fields": ("title",)}),
("foo", {"fields": "author"}), # not a tuple
]
errors = NotATupleAdmin(Song, AdminSite()).check()
expected = [
checks.Error(
"The value of 'fieldsets[1][1]['fields']' must be a list or tuple.",
obj=NotATupleAdmin,
id="admin.E008",
)
]
self.assertEqual(errors, expected)
def test_exclude_values(self):
"""
Tests for basic system checks of 'exclude' option values (#12689)
"""
class ExcludedFields1(admin.ModelAdmin):
exclude = "foo"
errors = ExcludedFields1(Book, AdminSite()).check()
expected = [
checks.Error(
"The value of 'exclude' must be a list or tuple.",
obj=ExcludedFields1,
id="admin.E014",
)
]
self.assertEqual(errors, expected)
def test_exclude_duplicate_values(self):
class ExcludedFields2(admin.ModelAdmin):
exclude = ("name", "name")
errors = ExcludedFields2(Book, AdminSite()).check()
expected = [
checks.Error(
"The value of 'exclude' contains duplicate field(s).",
obj=ExcludedFields2,
id="admin.E015",
)
]
self.assertEqual(errors, expected)
def test_exclude_in_inline(self):
class ExcludedFieldsInline(admin.TabularInline):
model = Song
exclude = "foo"
class ExcludedFieldsAlbumAdmin(admin.ModelAdmin):
model = Album
inlines = [ExcludedFieldsInline]
errors = ExcludedFieldsAlbumAdmin(Album, AdminSite()).check()
expected = [
checks.Error(
"The value of 'exclude' must be a list or tuple.",
obj=ExcludedFieldsInline,
id="admin.E014",
)
]
self.assertEqual(errors, expected)
def test_exclude_inline_model_admin(self):
"""
Regression test for #9932 - exclude in InlineModelAdmin should not
contain the ForeignKey field used in ModelAdmin.model
"""
class SongInline(admin.StackedInline):
model = Song
exclude = ["album"]
class AlbumAdmin(admin.ModelAdmin):
model = Album
inlines = [SongInline]
errors = AlbumAdmin(Album, AdminSite()).check()
expected = [
checks.Error(
"Cannot exclude the field 'album', because it is the foreign key "
"to the parent model 'admin_checks.Album'.",
obj=SongInline,
id="admin.E201",
)
]
self.assertEqual(errors, expected)
def test_valid_generic_inline_model_admin(self):
"""
Regression test for #22034 - check that generic inlines don't look for
normal ForeignKey relations.
"""
class InfluenceInline(GenericStackedInline):
model = Influence
class SongAdmin(admin.ModelAdmin):
inlines = [InfluenceInline]
errors = SongAdmin(Song, AdminSite()).check()
self.assertEqual(errors, [])
def test_generic_inline_model_admin_non_generic_model(self):
"""
A model without a GenericForeignKey raises problems if it's included
in a GenericInlineModelAdmin definition.
"""
class BookInline(GenericStackedInline):
model = Book
class SongAdmin(admin.ModelAdmin):
inlines = [BookInline]
errors = SongAdmin(Song, AdminSite()).check()
expected = [
checks.Error(
"'admin_checks.Book' has no GenericForeignKey.",
obj=BookInline,
id="admin.E301",
)
]
self.assertEqual(errors, expected)
def test_generic_inline_model_admin_bad_ct_field(self):
"""
A GenericInlineModelAdmin errors if the ct_field points to a
nonexistent field.
"""
class InfluenceInline(GenericStackedInline):
model = Influence
ct_field = "nonexistent"
class SongAdmin(admin.ModelAdmin):
inlines = [InfluenceInline]
errors = SongAdmin(Song, AdminSite()).check()
expected = [
checks.Error(
"'ct_field' references 'nonexistent', which is not a field on "
"'admin_checks.Influence'.",
obj=InfluenceInline,
id="admin.E302",
)
]
self.assertEqual(errors, expected)
def test_generic_inline_model_admin_bad_fk_field(self):
"""
A GenericInlineModelAdmin errors if the ct_fk_field points to a
nonexistent field.
"""
class InfluenceInline(GenericStackedInline):
model = Influence
ct_fk_field = "nonexistent"
class SongAdmin(admin.ModelAdmin):
inlines = [InfluenceInline]
errors = SongAdmin(Song, AdminSite()).check()
expected = [
checks.Error(
"'ct_fk_field' references 'nonexistent', which is not a field on "
"'admin_checks.Influence'.",
obj=InfluenceInline,
id="admin.E303",
)
]
self.assertEqual(errors, expected)
def test_generic_inline_model_admin_non_gfk_ct_field(self):
"""
A GenericInlineModelAdmin raises problems if the ct_field points to a
field that isn't part of a GenericForeignKey.
"""
class InfluenceInline(GenericStackedInline):
model = Influence
ct_field = "name"
class SongAdmin(admin.ModelAdmin):
inlines = [InfluenceInline]
errors = SongAdmin(Song, AdminSite()).check()
expected = [
checks.Error(
"'admin_checks.Influence' has no GenericForeignKey using "
"content type field 'name' and object ID field 'object_id'.",
obj=InfluenceInline,
id="admin.E304",
)
]
self.assertEqual(errors, expected)
def test_generic_inline_model_admin_non_gfk_fk_field(self):
"""
A GenericInlineModelAdmin raises problems if the ct_fk_field points to
a field that isn't part of a GenericForeignKey.
"""
class InfluenceInline(GenericStackedInline):
model = Influence
ct_fk_field = "name"
class SongAdmin(admin.ModelAdmin):
inlines = [InfluenceInline]
errors = SongAdmin(Song, AdminSite()).check()
expected = [
checks.Error(
"'admin_checks.Influence' has no GenericForeignKey using "
"content type field 'content_type' and object ID field 'name'.",
obj=InfluenceInline,
id="admin.E304",
)
]
self.assertEqual(errors, expected)
def test_app_label_in_admin_checks(self):
class RawIdNonexistentAdmin(admin.ModelAdmin):
raw_id_fields = ("nonexistent",)
errors = RawIdNonexistentAdmin(Album, AdminSite()).check()
expected = [
checks.Error(
"The value of 'raw_id_fields[0]' refers to 'nonexistent', "
"which is not a field of 'admin_checks.Album'.",
obj=RawIdNonexistentAdmin,
id="admin.E002",
)
]
self.assertEqual(errors, expected)
def test_fk_exclusion(self):
"""
        Regression test for #11709 - when testing fk exclusion (when exclude
        is given), make sure fk_name is honored; otherwise things blow up
        when there is more than one fk to the parent model.
"""
class TwoAlbumFKAndAnEInline(admin.TabularInline):
model = TwoAlbumFKAndAnE
exclude = ("e",)
fk_name = "album1"
class MyAdmin(admin.ModelAdmin):
inlines = [TwoAlbumFKAndAnEInline]
errors = MyAdmin(Album, AdminSite()).check()
self.assertEqual(errors, [])
def test_inline_self_check(self):
class TwoAlbumFKAndAnEInline(admin.TabularInline):
model = TwoAlbumFKAndAnE
class MyAdmin(admin.ModelAdmin):
inlines = [TwoAlbumFKAndAnEInline]
errors = MyAdmin(Album, AdminSite()).check()
expected = [
checks.Error(
"'admin_checks.TwoAlbumFKAndAnE' has more than one ForeignKey "
"to 'admin_checks.Album'. You must specify a 'fk_name' "
"attribute.",
obj=TwoAlbumFKAndAnEInline,
id="admin.E202",
)
]
self.assertEqual(errors, expected)
def test_inline_with_specified(self):
class TwoAlbumFKAndAnEInline(admin.TabularInline):
model = TwoAlbumFKAndAnE
fk_name = "album1"
class MyAdmin(admin.ModelAdmin):
inlines = [TwoAlbumFKAndAnEInline]
errors = MyAdmin(Album, AdminSite()).check()
self.assertEqual(errors, [])
def test_inlines_property(self):
class CitiesInline(admin.TabularInline):
model = City
class StateAdmin(admin.ModelAdmin):
@property
def inlines(self):
return [CitiesInline]
errors = StateAdmin(State, AdminSite()).check()
self.assertEqual(errors, [])
def test_readonly(self):
class SongAdmin(admin.ModelAdmin):
readonly_fields = ("title",)
errors = SongAdmin(Song, AdminSite()).check()
self.assertEqual(errors, [])
def test_readonly_on_method(self):
@admin.display
def my_function(obj):
pass
class SongAdmin(admin.ModelAdmin):
readonly_fields = (my_function,)
errors = SongAdmin(Song, AdminSite()).check()
self.assertEqual(errors, [])
def test_readonly_on_modeladmin(self):
class SongAdmin(admin.ModelAdmin):
readonly_fields = ("readonly_method_on_modeladmin",)
@admin.display
def readonly_method_on_modeladmin(self, obj):
pass
errors = SongAdmin(Song, AdminSite()).check()
self.assertEqual(errors, [])
def test_readonly_dynamic_attribute_on_modeladmin(self):
class SongAdmin(admin.ModelAdmin):
readonly_fields = ("dynamic_method",)
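            # "dynamic_method" only exists via __getattr__ below, so the
            # readonly_fields check must tolerate dynamically resolved
            # attributes.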
def __getattr__(self, item):
if item == "dynamic_method":
@admin.display
def method(obj):
pass
return method
raise AttributeError
errors = SongAdmin(Song, AdminSite()).check()
self.assertEqual(errors, [])
def test_readonly_method_on_model(self):
class SongAdmin(admin.ModelAdmin):
readonly_fields = ("readonly_method_on_model",)
errors = SongAdmin(Song, AdminSite()).check()
self.assertEqual(errors, [])
def test_nonexistent_field(self):
class SongAdmin(admin.ModelAdmin):
readonly_fields = ("title", "nonexistent")
errors = SongAdmin(Song, AdminSite()).check()
expected = [
checks.Error(
"The value of 'readonly_fields[1]' refers to 'nonexistent', which is "
"not a callable, an attribute of 'SongAdmin', or an attribute of "
"'admin_checks.Song'.",
obj=SongAdmin,
id="admin.E035",
)
]
self.assertEqual(errors, expected)
def test_nonexistent_field_on_inline(self):
class CityInline(admin.TabularInline):
model = City
readonly_fields = ["i_dont_exist"] # Missing attribute
errors = CityInline(State, AdminSite()).check()
expected = [
checks.Error(
"The value of 'readonly_fields[0]' refers to 'i_dont_exist', which is "
"not a callable, an attribute of 'CityInline', or an attribute of "
"'admin_checks.City'.",
obj=CityInline,
id="admin.E035",
)
]
self.assertEqual(errors, expected)
def test_readonly_fields_not_list_or_tuple(self):
class SongAdmin(admin.ModelAdmin):
readonly_fields = "test"
self.assertEqual(
SongAdmin(Song, AdminSite()).check(),
[
checks.Error(
"The value of 'readonly_fields' must be a list or tuple.",
obj=SongAdmin,
id="admin.E034",
)
],
)
def test_extra(self):
class SongAdmin(admin.ModelAdmin):
@admin.display
def awesome_song(self, instance):
if instance.title == "Born to Run":
return "Best Ever!"
return "Status unknown."
errors = SongAdmin(Song, AdminSite()).check()
self.assertEqual(errors, [])
def test_readonly_lambda(self):
class SongAdmin(admin.ModelAdmin):
readonly_fields = (lambda obj: "test",)
errors = SongAdmin(Song, AdminSite()).check()
self.assertEqual(errors, [])
def test_graceful_m2m_fail(self):
"""
Regression test for #12203/#12237 - Fail more gracefully when a M2M field that
specifies the 'through' option is included in the 'fields' or the 'fieldsets'
ModelAdmin options.
"""
class BookAdmin(admin.ModelAdmin):
fields = ["authors"]
errors = BookAdmin(Book, AdminSite()).check()
expected = [
checks.Error(
"The value of 'fields' cannot include the ManyToManyField 'authors', "
"because that field manually specifies a relationship model.",
obj=BookAdmin,
id="admin.E013",
)
]
self.assertEqual(errors, expected)
def test_cannot_include_through(self):
class FieldsetBookAdmin(admin.ModelAdmin):
fieldsets = (
("Header 1", {"fields": ("name",)}),
("Header 2", {"fields": ("authors",)}),
)
errors = FieldsetBookAdmin(Book, AdminSite()).check()
expected = [
checks.Error(
"The value of 'fieldsets[1][1][\"fields\"]' cannot include the "
"ManyToManyField 'authors', because that field manually specifies a "
"relationship model.",
obj=FieldsetBookAdmin,
id="admin.E013",
)
]
self.assertEqual(errors, expected)
def test_nested_fields(self):
class NestedFieldsAdmin(admin.ModelAdmin):
fields = ("price", ("name", "subtitle"))
errors = NestedFieldsAdmin(Book, AdminSite()).check()
self.assertEqual(errors, [])
def test_nested_fieldsets(self):
class NestedFieldsetAdmin(admin.ModelAdmin):
fieldsets = (("Main", {"fields": ("price", ("name", "subtitle"))}),)
errors = NestedFieldsetAdmin(Book, AdminSite()).check()
self.assertEqual(errors, [])
def test_explicit_through_override(self):
"""
Regression test for #12209 -- If the explicitly provided through model
        is specified as a string, the admin should still be able to use
        Model.m2m_field.through.
"""
class AuthorsInline(admin.TabularInline):
model = Book.authors.through
class BookAdmin(admin.ModelAdmin):
inlines = [AuthorsInline]
errors = BookAdmin(Book, AdminSite()).check()
self.assertEqual(errors, [])
def test_non_model_fields(self):
"""
        Regression test ensuring ModelAdmin.fields can contain non-model
        fields (behavior that broke with r11737).
"""
class SongForm(forms.ModelForm):
extra_data = forms.CharField()
class FieldsOnFormOnlyAdmin(admin.ModelAdmin):
form = SongForm
fields = ["title", "extra_data"]
errors = FieldsOnFormOnlyAdmin(Song, AdminSite()).check()
self.assertEqual(errors, [])
def test_non_model_first_field(self):
"""
        Regression test ensuring ModelAdmin.fields can handle the first
        element being a non-model field (fix for the UnboundLocalError
        introduced with r16225).
"""
class SongForm(forms.ModelForm):
extra_data = forms.CharField()
class Meta:
model = Song
fields = "__all__"
class FieldsOnFormOnlyAdmin(admin.ModelAdmin):
form = SongForm
fields = ["extra_data", "title"]
errors = FieldsOnFormOnlyAdmin(Song, AdminSite()).check()
self.assertEqual(errors, [])
def test_check_sublists_for_duplicates(self):
class MyModelAdmin(admin.ModelAdmin):
fields = ["state", ["state"]]
errors = MyModelAdmin(Song, AdminSite()).check()
expected = [
checks.Error(
"The value of 'fields' contains duplicate field(s).",
obj=MyModelAdmin,
id="admin.E006",
)
]
self.assertEqual(errors, expected)
def test_check_fieldset_sublists_for_duplicates(self):
class MyModelAdmin(admin.ModelAdmin):
fieldsets = [
(None, {"fields": ["title", "album", ("title", "album")]}),
]
errors = MyModelAdmin(Song, AdminSite()).check()
expected = [
checks.Error(
"There are duplicate field(s) in 'fieldsets[0][1]'.",
obj=MyModelAdmin,
id="admin.E012",
)
]
self.assertEqual(errors, expected)
def test_list_filter_works_on_through_field_even_when_apps_not_ready(self):
"""
Ensure list_filter can access reverse fields even when the app registry
is not ready; refs #24146.
"""
class BookAdminWithListFilter(admin.ModelAdmin):
list_filter = ["authorsbooks__featured"]
# Temporarily pretending apps are not ready yet. This issue can happen
# if the value of 'list_filter' refers to a 'through__field'.
Book._meta.apps.ready = False
try:
errors = BookAdminWithListFilter(Book, AdminSite()).check()
self.assertEqual(errors, [])
finally:
Book._meta.apps.ready = True
|
9b16dae98d1f402b9014cfccba451e2111d90e4623e976fe5faa848c0fd4d803 | from operator import attrgetter
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.db.models import Count
from django.test import TestCase
from .models import (
Base,
Child,
Derived,
Feature,
Item,
ItemAndSimpleItem,
Leaf,
Location,
OneToOneItem,
Proxy,
ProxyRelated,
RelatedItem,
Request,
ResolveThis,
SimpleItem,
SpecialFeature,
)
class DeferRegressionTest(TestCase):
def test_basic(self):
# Deferred fields should really be deferred and not accidentally use
# the field's default value just because they aren't passed to __init__
Item.objects.create(name="first", value=42)
obj = Item.objects.only("name", "other_value").get(name="first")
# Accessing "name" doesn't trigger a new database query. Accessing
# "value" or "text" should.
with self.assertNumQueries(0):
self.assertEqual(obj.name, "first")
self.assertEqual(obj.other_value, 0)
with self.assertNumQueries(1):
self.assertEqual(obj.value, 42)
with self.assertNumQueries(1):
self.assertEqual(obj.text, "xyzzy")
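        # A second access hits the per-instance cache, not the database.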
with self.assertNumQueries(0):
self.assertEqual(obj.text, "xyzzy")
# Regression test for #10695. Make sure different instances don't
# inadvertently share data in the deferred descriptor objects.
i = Item.objects.create(name="no I'm first", value=37)
items = Item.objects.only("value").order_by("-value")
self.assertEqual(items[0].name, "first")
self.assertEqual(items[1].name, "no I'm first")
RelatedItem.objects.create(item=i)
r = RelatedItem.objects.defer("item").get()
self.assertEqual(r.item_id, i.id)
self.assertEqual(r.item, i)
# Some further checks for select_related() and inherited model
# behavior (regression for #10710).
c1 = Child.objects.create(name="c1", value=42)
c2 = Child.objects.create(name="c2", value=37)
Leaf.objects.create(name="l1", child=c1, second_child=c2)
obj = Leaf.objects.only("name", "child").select_related()[0]
self.assertEqual(obj.child.name, "c1")
self.assertQuerySetEqual(
Leaf.objects.select_related().only("child__name", "second_child__name"),
[
"l1",
],
attrgetter("name"),
)
# Models instances with deferred fields should still return the same
# content types as their non-deferred versions (bug #10738).
ctype = ContentType.objects.get_for_model
c1 = ctype(Item.objects.all()[0])
c2 = ctype(Item.objects.defer("name")[0])
c3 = ctype(Item.objects.only("name")[0])
self.assertTrue(c1 is c2 is c3)
# Regression for #10733 - only() can be used on a model with two
# foreign keys.
results = Leaf.objects.only("name", "child", "second_child").select_related()
self.assertEqual(results[0].child.name, "c1")
self.assertEqual(results[0].second_child.name, "c2")
results = Leaf.objects.only(
"name", "child", "second_child", "child__name", "second_child__name"
).select_related()
self.assertEqual(results[0].child.name, "c1")
self.assertEqual(results[0].second_child.name, "c2")
# Regression for #16409 - make sure defer() and only() work with annotate()
self.assertIsInstance(
list(SimpleItem.objects.annotate(Count("feature")).defer("name")), list
)
self.assertIsInstance(
list(SimpleItem.objects.annotate(Count("feature")).only("name")), list
)
def test_ticket_16409(self):
# Regression for #16409 - make sure defer() and only() work with annotate()
self.assertIsInstance(
list(SimpleItem.objects.annotate(Count("feature")).defer("name")), list
)
self.assertIsInstance(
list(SimpleItem.objects.annotate(Count("feature")).only("name")), list
)
def test_ticket_23270(self):
d = Derived.objects.create(text="foo", other_text="bar")
with self.assertNumQueries(1):
obj = Base.objects.select_related("derived").defer("text")[0]
self.assertIsInstance(obj.derived, Derived)
self.assertEqual("bar", obj.derived.other_text)
self.assertNotIn("text", obj.__dict__)
self.assertEqual(d.pk, obj.derived.base_ptr_id)
def test_only_and_defer_usage_on_proxy_models(self):
# Regression for #15790 - only() broken for proxy models
proxy = Proxy.objects.create(name="proxy", value=42)
msg = "QuerySet.only() return bogus results with proxy models"
dp = Proxy.objects.only("other_value").get(pk=proxy.pk)
self.assertEqual(dp.name, proxy.name, msg=msg)
self.assertEqual(dp.value, proxy.value, msg=msg)
# also test things with .defer()
msg = "QuerySet.defer() return bogus results with proxy models"
dp = Proxy.objects.defer("name", "text", "value").get(pk=proxy.pk)
self.assertEqual(dp.name, proxy.name, msg=msg)
self.assertEqual(dp.value, proxy.value, msg=msg)
def test_resolve_columns(self):
ResolveThis.objects.create(num=5.0, name="Foobar")
qs = ResolveThis.objects.defer("num")
self.assertEqual(1, qs.count())
self.assertEqual("Foobar", qs[0].name)
def test_reverse_one_to_one_relations(self):
# Refs #14694. Test reverse relations which are known unique (reverse
# side has o2ofield or unique FK) - the o2o case
item = Item.objects.create(name="first", value=42)
o2o = OneToOneItem.objects.create(item=item, name="second")
self.assertEqual(len(Item.objects.defer("one_to_one_item__name")), 1)
self.assertEqual(len(Item.objects.select_related("one_to_one_item")), 1)
self.assertEqual(
len(
Item.objects.select_related("one_to_one_item").defer(
"one_to_one_item__name"
)
),
1,
)
self.assertEqual(
len(Item.objects.select_related("one_to_one_item").defer("value")), 1
)
# Make sure that `only()` doesn't break when we pass in a unique relation,
# rather than a field on the relation.
self.assertEqual(len(Item.objects.only("one_to_one_item")), 1)
with self.assertNumQueries(1):
i = Item.objects.select_related("one_to_one_item")[0]
self.assertEqual(i.one_to_one_item.pk, o2o.pk)
self.assertEqual(i.one_to_one_item.name, "second")
with self.assertNumQueries(1):
i = Item.objects.select_related("one_to_one_item").defer(
"value", "one_to_one_item__name"
)[0]
self.assertEqual(i.one_to_one_item.pk, o2o.pk)
self.assertEqual(i.name, "first")
with self.assertNumQueries(1):
self.assertEqual(i.one_to_one_item.name, "second")
with self.assertNumQueries(1):
self.assertEqual(i.value, 42)
with self.assertNumQueries(1):
i = Item.objects.select_related("one_to_one_item").only(
"name", "one_to_one_item__item"
)[0]
self.assertEqual(i.one_to_one_item.pk, o2o.pk)
self.assertEqual(i.name, "first")
with self.assertNumQueries(1):
self.assertEqual(i.one_to_one_item.name, "second")
with self.assertNumQueries(1):
self.assertEqual(i.value, 42)
def test_defer_with_select_related(self):
item1 = Item.objects.create(name="first", value=47)
item2 = Item.objects.create(name="second", value=42)
simple = SimpleItem.objects.create(name="simple", value="23")
ItemAndSimpleItem.objects.create(item=item1, simple=simple)
obj = ItemAndSimpleItem.objects.defer("item").select_related("simple").get()
self.assertEqual(obj.item, item1)
self.assertEqual(obj.item_id, item1.id)
obj.item = item2
obj.save()
obj = ItemAndSimpleItem.objects.defer("item").select_related("simple").get()
self.assertEqual(obj.item, item2)
self.assertEqual(obj.item_id, item2.id)
def test_proxy_model_defer_with_select_related(self):
# Regression for #22050
item = Item.objects.create(name="first", value=47)
RelatedItem.objects.create(item=item)
        # Defer all fields other than item__name by using only().
obj = ProxyRelated.objects.select_related().only("item__name")[0]
with self.assertNumQueries(0):
self.assertEqual(obj.item.name, "first")
with self.assertNumQueries(1):
self.assertEqual(obj.item.value, 47)
def test_only_with_select_related(self):
# Test for #17485.
item = SimpleItem.objects.create(name="first", value=47)
feature = Feature.objects.create(item=item)
SpecialFeature.objects.create(feature=feature)
qs = Feature.objects.only("item__name").select_related("item")
self.assertEqual(len(qs), 1)
qs = SpecialFeature.objects.only("feature__item__name").select_related(
"feature__item"
)
self.assertEqual(len(qs), 1)
def test_defer_annotate_select_related(self):
location = Location.objects.create()
Request.objects.create(location=location)
self.assertIsInstance(
list(
Request.objects.annotate(Count("items"))
.select_related("profile", "location")
.only("profile", "location")
),
list,
)
self.assertIsInstance(
list(
Request.objects.annotate(Count("items"))
.select_related("profile", "location")
.only("profile__profile1", "location__location1")
),
list,
)
self.assertIsInstance(
list(
Request.objects.annotate(Count("items"))
.select_related("profile", "location")
.defer("request1", "request2", "request3", "request4")
),
list,
)
def test_common_model_different_mask(self):
child = Child.objects.create(name="Child", value=42)
second_child = Child.objects.create(name="Second", value=64)
Leaf.objects.create(child=child, second_child=second_child)
with self.assertNumQueries(1):
leaf = (
Leaf.objects.select_related("child", "second_child")
.defer("child__name", "second_child__value")
.get()
)
self.assertEqual(leaf.child, child)
self.assertEqual(leaf.second_child, second_child)
self.assertEqual(leaf.child.get_deferred_fields(), {"name"})
self.assertEqual(leaf.second_child.get_deferred_fields(), {"value"})
with self.assertNumQueries(0):
self.assertEqual(leaf.child.value, 42)
self.assertEqual(leaf.second_child.name, "Second")
with self.assertNumQueries(1):
self.assertEqual(leaf.child.name, "Child")
with self.assertNumQueries(1):
self.assertEqual(leaf.second_child.value, 64)
def test_defer_many_to_many_ignored(self):
location = Location.objects.create()
request = Request.objects.create(location=location)
with self.assertNumQueries(1):
self.assertEqual(Request.objects.defer("items").get(), request)
def test_only_many_to_many_ignored(self):
location = Location.objects.create()
request = Request.objects.create(location=location)
with self.assertNumQueries(1):
self.assertEqual(Request.objects.only("items").get(), request)
def test_defer_reverse_many_to_many_ignored(self):
location = Location.objects.create()
request = Request.objects.create(location=location)
item = Item.objects.create(value=1)
request.items.add(item)
with self.assertNumQueries(1):
self.assertEqual(Item.objects.defer("request").get(), item)
def test_only_reverse_many_to_many_ignored(self):
location = Location.objects.create()
request = Request.objects.create(location=location)
item = Item.objects.create(value=1)
request.items.add(item)
with self.assertNumQueries(1):
self.assertEqual(Item.objects.only("request").get(), item)
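# Deleting an instance fetched with only()/defer() must still send the
# pre_delete/post_delete signals with the concrete model (or proxy) as sender.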
class DeferDeletionSignalsTests(TestCase):
senders = [Item, Proxy]
@classmethod
def setUpTestData(cls):
cls.item_pk = Item.objects.create(value=1).pk
def setUp(self):
self.pre_delete_senders = []
self.post_delete_senders = []
for sender in self.senders:
models.signals.pre_delete.connect(self.pre_delete_receiver, sender)
models.signals.post_delete.connect(self.post_delete_receiver, sender)
def tearDown(self):
for sender in self.senders:
models.signals.pre_delete.disconnect(self.pre_delete_receiver, sender)
models.signals.post_delete.disconnect(self.post_delete_receiver, sender)
def pre_delete_receiver(self, sender, **kwargs):
self.pre_delete_senders.append(sender)
def post_delete_receiver(self, sender, **kwargs):
self.post_delete_senders.append(sender)
    def test_delete_deferred_model(self):
Item.objects.only("value").get(pk=self.item_pk).delete()
self.assertEqual(self.pre_delete_senders, [Item])
self.assertEqual(self.post_delete_senders, [Item])
    def test_delete_deferred_proxy_model(self):
Proxy.objects.only("value").get(pk=self.item_pk).delete()
self.assertEqual(self.pre_delete_senders, [Proxy])
self.assertEqual(self.post_delete_senders, [Proxy])
|
cdbe2bf036aa6b6dda8f8479d92423d69493b6d313bed7f8798c5dcc23b7ef49 | import collections.abc
from datetime import datetime
from math import ceil
from operator import attrgetter
from unittest import skipUnless
from django.core.exceptions import FieldError
from django.db import connection, models
from django.db.models import (
BooleanField,
Case,
Exists,
ExpressionWrapper,
F,
Max,
OuterRef,
Q,
Subquery,
Value,
When,
)
from django.db.models.functions import Abs, Cast, Length, Substr
from django.db.models.lookups import (
Exact,
GreaterThan,
GreaterThanOrEqual,
IsNull,
LessThan,
LessThanOrEqual,
)
from django.test import TestCase, skipUnlessDBFeature
from django.test.utils import isolate_apps, register_lookup
from .models import (
Article,
Author,
Freebie,
Game,
IsNullWithNoneAsRHS,
Player,
Product,
Season,
Stock,
Tag,
)
class LookupTests(TestCase):
@classmethod
def setUpTestData(cls):
# Create a few Authors.
cls.au1 = Author.objects.create(name="Author 1", alias="a1", bio="x" * 4001)
cls.au2 = Author.objects.create(name="Author 2", alias="a2")
# Create a few Articles.
cls.a1 = Article.objects.create(
headline="Article 1",
pub_date=datetime(2005, 7, 26),
author=cls.au1,
slug="a1",
)
cls.a2 = Article.objects.create(
headline="Article 2",
pub_date=datetime(2005, 7, 27),
author=cls.au1,
slug="a2",
)
cls.a3 = Article.objects.create(
headline="Article 3",
pub_date=datetime(2005, 7, 27),
author=cls.au1,
slug="a3",
)
cls.a4 = Article.objects.create(
headline="Article 4",
pub_date=datetime(2005, 7, 28),
author=cls.au1,
slug="a4",
)
cls.a5 = Article.objects.create(
headline="Article 5",
pub_date=datetime(2005, 8, 1, 9, 0),
author=cls.au2,
slug="a5",
)
cls.a6 = Article.objects.create(
headline="Article 6",
pub_date=datetime(2005, 8, 1, 8, 0),
author=cls.au2,
slug="a6",
)
cls.a7 = Article.objects.create(
headline="Article 7",
pub_date=datetime(2005, 7, 27),
author=cls.au2,
slug="a7",
)
# Create a few Tags.
cls.t1 = Tag.objects.create(name="Tag 1")
cls.t1.articles.add(cls.a1, cls.a2, cls.a3)
cls.t2 = Tag.objects.create(name="Tag 2")
cls.t2.articles.add(cls.a3, cls.a4, cls.a5)
cls.t3 = Tag.objects.create(name="Tag 3")
cls.t3.articles.add(cls.a5, cls.a6, cls.a7)
def test_exists(self):
        # We can use .exists() to check that there are some articles.
self.assertTrue(Article.objects.exists())
for a in Article.objects.all():
a.delete()
# There should be none now!
self.assertFalse(Article.objects.exists())
def test_lookup_int_as_str(self):
        # An integer value can be queried using a string.
self.assertSequenceEqual(
Article.objects.filter(id__iexact=str(self.a1.id)),
[self.a1],
)
@skipUnlessDBFeature("supports_date_lookup_using_string")
def test_lookup_date_as_str(self):
# A date lookup can be performed using a string search
self.assertSequenceEqual(
Article.objects.filter(pub_date__startswith="2005"),
[self.a5, self.a6, self.a4, self.a2, self.a3, self.a7, self.a1],
)
def test_iterator(self):
# Each QuerySet gets iterator(), which is a generator that "lazily"
# returns results using database-level iteration.
self.assertIsInstance(Article.objects.iterator(), collections.abc.Iterator)
self.assertQuerySetEqual(
Article.objects.iterator(),
[
"Article 5",
"Article 6",
"Article 4",
"Article 2",
"Article 3",
"Article 7",
"Article 1",
],
transform=attrgetter("headline"),
)
# iterator() can be used on any QuerySet.
self.assertQuerySetEqual(
Article.objects.filter(headline__endswith="4").iterator(),
["Article 4"],
transform=attrgetter("headline"),
)
def test_count(self):
# count() returns the number of objects matching search criteria.
self.assertEqual(Article.objects.count(), 7)
self.assertEqual(
Article.objects.filter(pub_date__exact=datetime(2005, 7, 27)).count(), 3
)
self.assertEqual(
Article.objects.filter(headline__startswith="Blah blah").count(), 0
)
# count() should respect sliced query sets.
articles = Article.objects.all()
self.assertEqual(articles.count(), 7)
self.assertEqual(articles[:4].count(), 4)
self.assertEqual(articles[1:100].count(), 6)
self.assertEqual(articles[10:100].count(), 0)
# Date and date/time lookups can also be done with strings.
self.assertEqual(
Article.objects.filter(pub_date__exact="2005-07-27 00:00:00").count(), 3
)
def test_in_bulk(self):
        # in_bulk() takes a list of IDs and returns a dictionary mapping IDs
        # to objects.
arts = Article.objects.in_bulk([self.a1.id, self.a2.id])
self.assertEqual(arts[self.a1.id], self.a1)
self.assertEqual(arts[self.a2.id], self.a2)
self.assertEqual(
Article.objects.in_bulk(),
{
self.a1.id: self.a1,
self.a2.id: self.a2,
self.a3.id: self.a3,
self.a4.id: self.a4,
self.a5.id: self.a5,
self.a6.id: self.a6,
self.a7.id: self.a7,
},
)
self.assertEqual(Article.objects.in_bulk([self.a3.id]), {self.a3.id: self.a3})
self.assertEqual(Article.objects.in_bulk({self.a3.id}), {self.a3.id: self.a3})
self.assertEqual(
Article.objects.in_bulk(frozenset([self.a3.id])), {self.a3.id: self.a3}
)
self.assertEqual(Article.objects.in_bulk((self.a3.id,)), {self.a3.id: self.a3})
self.assertEqual(Article.objects.in_bulk([1000]), {})
self.assertEqual(Article.objects.in_bulk([]), {})
self.assertEqual(
Article.objects.in_bulk(iter([self.a1.id])), {self.a1.id: self.a1}
)
self.assertEqual(Article.objects.in_bulk(iter([])), {})
with self.assertRaises(TypeError):
Article.objects.in_bulk(headline__startswith="Blah")
def test_in_bulk_lots_of_ids(self):
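        # When the id list exceeds the backend's max_query_params, in_bulk()
        # should transparently split the lookup into multiple queries.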
test_range = 2000
max_query_params = connection.features.max_query_params
expected_num_queries = (
ceil(test_range / max_query_params) if max_query_params else 1
)
Author.objects.bulk_create(
[Author() for i in range(test_range - Author.objects.count())]
)
authors = {author.pk: author for author in Author.objects.all()}
with self.assertNumQueries(expected_num_queries):
self.assertEqual(Author.objects.in_bulk(authors), authors)
def test_in_bulk_with_field(self):
self.assertEqual(
Article.objects.in_bulk(
[self.a1.slug, self.a2.slug, self.a3.slug], field_name="slug"
),
{
self.a1.slug: self.a1,
self.a2.slug: self.a2,
self.a3.slug: self.a3,
},
)
def test_in_bulk_meta_constraint(self):
season_2011 = Season.objects.create(year=2011)
season_2012 = Season.objects.create(year=2012)
Season.objects.create(year=2013)
self.assertEqual(
Season.objects.in_bulk(
[season_2011.year, season_2012.year],
field_name="year",
),
{season_2011.year: season_2011, season_2012.year: season_2012},
)
def test_in_bulk_non_unique_field(self):
msg = "in_bulk()'s field_name must be a unique field but 'author' isn't."
with self.assertRaisesMessage(ValueError, msg):
Article.objects.in_bulk([self.au1], field_name="author")
@skipUnlessDBFeature("can_distinct_on_fields")
def test_in_bulk_preserve_ordering(self):
articles = (
Article.objects.order_by("author_id", "-pub_date")
.distinct("author_id")
.in_bulk([self.au1.id, self.au2.id], field_name="author_id")
)
self.assertEqual(
articles,
{self.au1.id: self.a4, self.au2.id: self.a5},
)
@skipUnlessDBFeature("can_distinct_on_fields")
def test_in_bulk_preserve_ordering_with_batch_size(self):
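        # Force batching by pretending only one query parameter is allowed;
        # the ordering established by distinct() must survive across batches.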
old_max_query_params = connection.features.max_query_params
connection.features.max_query_params = 1
try:
articles = (
Article.objects.order_by("author_id", "-pub_date")
.distinct("author_id")
.in_bulk([self.au1.id, self.au2.id], field_name="author_id")
)
self.assertEqual(
articles,
{self.au1.id: self.a4, self.au2.id: self.a5},
)
finally:
connection.features.max_query_params = old_max_query_params
@skipUnlessDBFeature("can_distinct_on_fields")
def test_in_bulk_distinct_field(self):
self.assertEqual(
Article.objects.order_by("headline")
.distinct("headline")
.in_bulk(
[self.a1.headline, self.a5.headline],
field_name="headline",
),
{self.a1.headline: self.a1, self.a5.headline: self.a5},
)
@skipUnlessDBFeature("can_distinct_on_fields")
def test_in_bulk_multiple_distinct_field(self):
msg = "in_bulk()'s field_name must be a unique field but 'pub_date' isn't."
with self.assertRaisesMessage(ValueError, msg):
Article.objects.order_by("headline", "pub_date").distinct(
"headline",
"pub_date",
).in_bulk(field_name="pub_date")
@isolate_apps("lookup")
    def test_in_bulk_non_unique_meta_constraint(self):
class Model(models.Model):
            ean = models.CharField(max_length=100)
            brand = models.CharField(max_length=100)
            name = models.CharField(max_length=80)
            # Referenced by the partial unique constraint's condition below.
            is_active = models.BooleanField(default=True)
class Meta:
constraints = [
models.UniqueConstraint(
fields=["ean"],
name="partial_ean_unique",
condition=models.Q(is_active=True),
),
models.UniqueConstraint(
fields=["brand", "name"],
name="together_brand_name_unique",
),
]
msg = "in_bulk()'s field_name must be a unique field but '%s' isn't."
for field_name in ["brand", "ean"]:
with self.subTest(field_name=field_name):
with self.assertRaisesMessage(ValueError, msg % field_name):
Model.objects.in_bulk(field_name=field_name)
def test_in_bulk_sliced_queryset(self):
msg = "Cannot use 'limit' or 'offset' with in_bulk()."
with self.assertRaisesMessage(TypeError, msg):
Article.objects.all()[0:5].in_bulk([self.a1.id, self.a2.id])
def test_values(self):
# values() returns a list of dictionaries instead of object instances --
# and you can specify which fields you want to retrieve.
self.assertSequenceEqual(
Article.objects.values("headline"),
[
{"headline": "Article 5"},
{"headline": "Article 6"},
{"headline": "Article 4"},
{"headline": "Article 2"},
{"headline": "Article 3"},
{"headline": "Article 7"},
{"headline": "Article 1"},
],
)
self.assertSequenceEqual(
Article.objects.filter(pub_date__exact=datetime(2005, 7, 27)).values("id"),
[{"id": self.a2.id}, {"id": self.a3.id}, {"id": self.a7.id}],
)
self.assertSequenceEqual(
Article.objects.values("id", "headline"),
[
{"id": self.a5.id, "headline": "Article 5"},
{"id": self.a6.id, "headline": "Article 6"},
{"id": self.a4.id, "headline": "Article 4"},
{"id": self.a2.id, "headline": "Article 2"},
{"id": self.a3.id, "headline": "Article 3"},
{"id": self.a7.id, "headline": "Article 7"},
{"id": self.a1.id, "headline": "Article 1"},
],
)
# You can use values() with iterator() for memory savings,
# because iterator() uses database-level iteration.
self.assertSequenceEqual(
list(Article.objects.values("id", "headline").iterator()),
[
{"headline": "Article 5", "id": self.a5.id},
{"headline": "Article 6", "id": self.a6.id},
{"headline": "Article 4", "id": self.a4.id},
{"headline": "Article 2", "id": self.a2.id},
{"headline": "Article 3", "id": self.a3.id},
{"headline": "Article 7", "id": self.a7.id},
{"headline": "Article 1", "id": self.a1.id},
],
)
# The values() method works with "extra" fields specified in extra(select).
self.assertSequenceEqual(
Article.objects.extra(select={"id_plus_one": "id + 1"}).values(
"id", "id_plus_one"
),
[
{"id": self.a5.id, "id_plus_one": self.a5.id + 1},
{"id": self.a6.id, "id_plus_one": self.a6.id + 1},
{"id": self.a4.id, "id_plus_one": self.a4.id + 1},
{"id": self.a2.id, "id_plus_one": self.a2.id + 1},
{"id": self.a3.id, "id_plus_one": self.a3.id + 1},
{"id": self.a7.id, "id_plus_one": self.a7.id + 1},
{"id": self.a1.id, "id_plus_one": self.a1.id + 1},
],
)
data = {
"id_plus_one": "id+1",
"id_plus_two": "id+2",
"id_plus_three": "id+3",
"id_plus_four": "id+4",
"id_plus_five": "id+5",
"id_plus_six": "id+6",
"id_plus_seven": "id+7",
"id_plus_eight": "id+8",
}
self.assertSequenceEqual(
Article.objects.filter(id=self.a1.id).extra(select=data).values(*data),
[
{
"id_plus_one": self.a1.id + 1,
"id_plus_two": self.a1.id + 2,
"id_plus_three": self.a1.id + 3,
"id_plus_four": self.a1.id + 4,
"id_plus_five": self.a1.id + 5,
"id_plus_six": self.a1.id + 6,
"id_plus_seven": self.a1.id + 7,
"id_plus_eight": self.a1.id + 8,
}
],
)
        # You can specify fields from forward and reverse relations, just
        # like filter().
self.assertSequenceEqual(
Article.objects.values("headline", "author__name"),
[
{"headline": self.a5.headline, "author__name": self.au2.name},
{"headline": self.a6.headline, "author__name": self.au2.name},
{"headline": self.a4.headline, "author__name": self.au1.name},
{"headline": self.a2.headline, "author__name": self.au1.name},
{"headline": self.a3.headline, "author__name": self.au1.name},
{"headline": self.a7.headline, "author__name": self.au2.name},
{"headline": self.a1.headline, "author__name": self.au1.name},
],
)
self.assertSequenceEqual(
Author.objects.values("name", "article__headline").order_by(
"name", "article__headline"
),
[
{"name": self.au1.name, "article__headline": self.a1.headline},
{"name": self.au1.name, "article__headline": self.a2.headline},
{"name": self.au1.name, "article__headline": self.a3.headline},
{"name": self.au1.name, "article__headline": self.a4.headline},
{"name": self.au2.name, "article__headline": self.a5.headline},
{"name": self.au2.name, "article__headline": self.a6.headline},
{"name": self.au2.name, "article__headline": self.a7.headline},
],
)
self.assertSequenceEqual(
(
Author.objects.values(
"name", "article__headline", "article__tag__name"
).order_by("name", "article__headline", "article__tag__name")
),
[
{
"name": self.au1.name,
"article__headline": self.a1.headline,
"article__tag__name": self.t1.name,
},
{
"name": self.au1.name,
"article__headline": self.a2.headline,
"article__tag__name": self.t1.name,
},
{
"name": self.au1.name,
"article__headline": self.a3.headline,
"article__tag__name": self.t1.name,
},
{
"name": self.au1.name,
"article__headline": self.a3.headline,
"article__tag__name": self.t2.name,
},
{
"name": self.au1.name,
"article__headline": self.a4.headline,
"article__tag__name": self.t2.name,
},
{
"name": self.au2.name,
"article__headline": self.a5.headline,
"article__tag__name": self.t2.name,
},
{
"name": self.au2.name,
"article__headline": self.a5.headline,
"article__tag__name": self.t3.name,
},
{
"name": self.au2.name,
"article__headline": self.a6.headline,
"article__tag__name": self.t3.name,
},
{
"name": self.au2.name,
"article__headline": self.a7.headline,
"article__tag__name": self.t3.name,
},
],
)
        # However, a FieldError will be raised if you specify a nonexistent
        # field name in values() (a field that is neither in the model nor in
        # extra(select)).
msg = (
"Cannot resolve keyword 'id_plus_two' into field. Choices are: "
"author, author_id, headline, id, id_plus_one, pub_date, slug, tag"
)
with self.assertRaisesMessage(FieldError, msg):
Article.objects.extra(select={"id_plus_one": "id + 1"}).values(
"id", "id_plus_two"
)
# If you don't specify field names to values(), all are returned.
self.assertSequenceEqual(
Article.objects.filter(id=self.a5.id).values(),
[
{
"id": self.a5.id,
"author_id": self.au2.id,
"headline": "Article 5",
"pub_date": datetime(2005, 8, 1, 9, 0),
"slug": "a5",
}
],
)
def test_values_list(self):
# values_list() is similar to values(), except that the results are
# returned as a list of tuples, rather than a list of dictionaries.
# Within each tuple, the order of the elements is the same as the order
# of fields in the values_list() call.
self.assertSequenceEqual(
Article.objects.values_list("headline"),
[
("Article 5",),
("Article 6",),
("Article 4",),
("Article 2",),
("Article 3",),
("Article 7",),
("Article 1",),
],
)
self.assertSequenceEqual(
Article.objects.values_list("id").order_by("id"),
[
(self.a1.id,),
(self.a2.id,),
(self.a3.id,),
(self.a4.id,),
(self.a5.id,),
(self.a6.id,),
(self.a7.id,),
],
)
self.assertSequenceEqual(
Article.objects.values_list("id", flat=True).order_by("id"),
[
self.a1.id,
self.a2.id,
self.a3.id,
self.a4.id,
self.a5.id,
self.a6.id,
self.a7.id,
],
)
self.assertSequenceEqual(
Article.objects.extra(select={"id_plus_one": "id+1"})
.order_by("id")
.values_list("id"),
[
(self.a1.id,),
(self.a2.id,),
(self.a3.id,),
(self.a4.id,),
(self.a5.id,),
(self.a6.id,),
(self.a7.id,),
],
)
self.assertSequenceEqual(
Article.objects.extra(select={"id_plus_one": "id+1"})
.order_by("id")
.values_list("id_plus_one", "id"),
[
(self.a1.id + 1, self.a1.id),
(self.a2.id + 1, self.a2.id),
(self.a3.id + 1, self.a3.id),
(self.a4.id + 1, self.a4.id),
(self.a5.id + 1, self.a5.id),
(self.a6.id + 1, self.a6.id),
(self.a7.id + 1, self.a7.id),
],
)
self.assertSequenceEqual(
Article.objects.extra(select={"id_plus_one": "id+1"})
.order_by("id")
.values_list("id", "id_plus_one"),
[
(self.a1.id, self.a1.id + 1),
(self.a2.id, self.a2.id + 1),
(self.a3.id, self.a3.id + 1),
(self.a4.id, self.a4.id + 1),
(self.a5.id, self.a5.id + 1),
(self.a6.id, self.a6.id + 1),
(self.a7.id, self.a7.id + 1),
],
)
args = ("name", "article__headline", "article__tag__name")
self.assertSequenceEqual(
Author.objects.values_list(*args).order_by(*args),
[
(self.au1.name, self.a1.headline, self.t1.name),
(self.au1.name, self.a2.headline, self.t1.name),
(self.au1.name, self.a3.headline, self.t1.name),
(self.au1.name, self.a3.headline, self.t2.name),
(self.au1.name, self.a4.headline, self.t2.name),
(self.au2.name, self.a5.headline, self.t2.name),
(self.au2.name, self.a5.headline, self.t3.name),
(self.au2.name, self.a6.headline, self.t3.name),
(self.au2.name, self.a7.headline, self.t3.name),
],
)
with self.assertRaises(TypeError):
Article.objects.values_list("id", "headline", flat=True)
def test_get_next_previous_by(self):
# Every DateField and DateTimeField creates get_next_by_FOO() and
# get_previous_by_FOO() methods. In the case of identical date values,
# these methods will use the ID as a fallback check. This guarantees
# that no records are skipped or duplicated.
self.assertEqual(repr(self.a1.get_next_by_pub_date()), "<Article: Article 2>")
self.assertEqual(repr(self.a2.get_next_by_pub_date()), "<Article: Article 3>")
self.assertEqual(
repr(self.a2.get_next_by_pub_date(headline__endswith="6")),
"<Article: Article 6>",
)
self.assertEqual(repr(self.a3.get_next_by_pub_date()), "<Article: Article 7>")
self.assertEqual(repr(self.a4.get_next_by_pub_date()), "<Article: Article 6>")
with self.assertRaises(Article.DoesNotExist):
self.a5.get_next_by_pub_date()
self.assertEqual(repr(self.a6.get_next_by_pub_date()), "<Article: Article 5>")
self.assertEqual(repr(self.a7.get_next_by_pub_date()), "<Article: Article 4>")
self.assertEqual(
repr(self.a7.get_previous_by_pub_date()), "<Article: Article 3>"
)
self.assertEqual(
repr(self.a6.get_previous_by_pub_date()), "<Article: Article 4>"
)
self.assertEqual(
repr(self.a5.get_previous_by_pub_date()), "<Article: Article 6>"
)
self.assertEqual(
repr(self.a4.get_previous_by_pub_date()), "<Article: Article 7>"
)
self.assertEqual(
repr(self.a3.get_previous_by_pub_date()), "<Article: Article 2>"
)
self.assertEqual(
repr(self.a2.get_previous_by_pub_date()), "<Article: Article 1>"
)
def test_escaping(self):
        # Underscores, percent signs and backslashes have special meaning in
        # the underlying SQL code, but Django handles the escaping of them
        # automatically.
a8 = Article.objects.create(
headline="Article_ with underscore", pub_date=datetime(2005, 11, 20)
)
self.assertSequenceEqual(
Article.objects.filter(headline__startswith="Article"),
[a8, self.a5, self.a6, self.a4, self.a2, self.a3, self.a7, self.a1],
)
self.assertSequenceEqual(
Article.objects.filter(headline__startswith="Article_"),
[a8],
)
a9 = Article.objects.create(
headline="Article% with percent sign", pub_date=datetime(2005, 11, 21)
)
self.assertSequenceEqual(
Article.objects.filter(headline__startswith="Article"),
[a9, a8, self.a5, self.a6, self.a4, self.a2, self.a3, self.a7, self.a1],
)
self.assertSequenceEqual(
Article.objects.filter(headline__startswith="Article%"),
[a9],
)
a10 = Article.objects.create(
headline="Article with \\ backslash", pub_date=datetime(2005, 11, 22)
)
self.assertSequenceEqual(
Article.objects.filter(headline__contains="\\"),
[a10],
)
def test_exclude(self):
a8 = Article.objects.create(
headline="Article_ with underscore", pub_date=datetime(2005, 11, 20)
)
a9 = Article.objects.create(
headline="Article% with percent sign", pub_date=datetime(2005, 11, 21)
)
a10 = Article.objects.create(
headline="Article with \\ backslash", pub_date=datetime(2005, 11, 22)
)
# exclude() is the opposite of filter() when doing lookups:
self.assertSequenceEqual(
Article.objects.filter(headline__contains="Article").exclude(
headline__contains="with"
),
[self.a5, self.a6, self.a4, self.a2, self.a3, self.a7, self.a1],
)
self.assertSequenceEqual(
Article.objects.exclude(headline__startswith="Article_"),
[a10, a9, self.a5, self.a6, self.a4, self.a2, self.a3, self.a7, self.a1],
)
self.assertSequenceEqual(
Article.objects.exclude(headline="Article 7"),
[a10, a9, a8, self.a5, self.a6, self.a4, self.a2, self.a3, self.a1],
)
def test_none(self):
# none() returns a QuerySet that behaves like any other QuerySet object
self.assertSequenceEqual(Article.objects.none(), [])
self.assertSequenceEqual(
Article.objects.none().filter(headline__startswith="Article"), []
)
self.assertSequenceEqual(
Article.objects.filter(headline__startswith="Article").none(), []
)
self.assertEqual(Article.objects.none().count(), 0)
self.assertEqual(
Article.objects.none().update(headline="This should not take effect"), 0
)
self.assertSequenceEqual(list(Article.objects.none().iterator()), [])
def test_in(self):
self.assertSequenceEqual(
Article.objects.exclude(id__in=[]),
[self.a5, self.a6, self.a4, self.a2, self.a3, self.a7, self.a1],
)
def test_in_empty_list(self):
self.assertSequenceEqual(Article.objects.filter(id__in=[]), [])
def test_in_different_database(self):
with self.assertRaisesMessage(
ValueError,
"Subqueries aren't allowed across different databases. Force the "
"inner query to be evaluated using `list(inner_query)`.",
):
list(Article.objects.filter(id__in=Article.objects.using("other").all()))
def test_in_keeps_value_ordering(self):
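        # The literal values of an __in lookup appear in the generated SQL in
        # the order they were given.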
query = (
Article.objects.filter(slug__in=["a%d" % i for i in range(1, 8)])
.values("pk")
.query
)
self.assertIn(" IN (a1, a2, a3, a4, a5, a6, a7) ", str(query))
def test_in_ignore_none(self):
with self.assertNumQueries(1) as ctx:
self.assertSequenceEqual(
Article.objects.filter(id__in=[None, self.a1.id]),
[self.a1],
)
sql = ctx.captured_queries[0]["sql"]
self.assertIn("IN (%s)" % self.a1.pk, sql)
def test_in_ignore_solo_none(self):
with self.assertNumQueries(0):
self.assertSequenceEqual(Article.objects.filter(id__in=[None]), [])
def test_in_ignore_none_with_unhashable_items(self):
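        # Stripping None from the __in values must not require the remaining
        # items to be hashable.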
class UnhashableInt(int):
__hash__ = None
with self.assertNumQueries(1) as ctx:
self.assertSequenceEqual(
Article.objects.filter(id__in=[None, UnhashableInt(self.a1.id)]),
[self.a1],
)
sql = ctx.captured_queries[0]["sql"]
self.assertIn("IN (%s)" % self.a1.pk, sql)
def test_error_messages(self):
# Programming errors are pointed out with nice error messages
with self.assertRaisesMessage(
FieldError,
"Cannot resolve keyword 'pub_date_year' into field. Choices are: "
"author, author_id, headline, id, pub_date, slug, tag",
):
Article.objects.filter(pub_date_year="2005").count()
def test_unsupported_lookups(self):
with self.assertRaisesMessage(
FieldError,
"Unsupported lookup 'starts' for CharField or join on the field "
"not permitted, perhaps you meant startswith or istartswith?",
):
Article.objects.filter(headline__starts="Article")
with self.assertRaisesMessage(
FieldError,
"Unsupported lookup 'is_null' for DateTimeField or join on the field "
"not permitted, perhaps you meant isnull?",
):
Article.objects.filter(pub_date__is_null=True)
with self.assertRaisesMessage(
FieldError,
"Unsupported lookup 'gobbledygook' for DateTimeField or join on the field "
"not permitted.",
):
Article.objects.filter(pub_date__gobbledygook="blahblah")
def test_unsupported_lookups_custom_lookups(self):
slug_field = Article._meta.get_field("slug")
msg = (
"Unsupported lookup 'lengtp' for SlugField or join on the field not "
"permitted, perhaps you meant length?"
)
with self.assertRaisesMessage(FieldError, msg):
with register_lookup(slug_field, Length):
Article.objects.filter(slug__lengtp=20)
def test_relation_nested_lookup_error(self):
# An invalid nested lookup on a related field raises a useful error.
msg = (
"Unsupported lookup 'editor' for ForeignKey or join on the field not "
"permitted."
)
with self.assertRaisesMessage(FieldError, msg):
Article.objects.filter(author__editor__name="James")
msg = (
"Unsupported lookup 'foo' for ForeignKey or join on the field not "
"permitted."
)
with self.assertRaisesMessage(FieldError, msg):
Tag.objects.filter(articles__foo="bar")
def test_unsupported_lookup_reverse_foreign_key(self):
msg = (
"Unsupported lookup 'title' for ManyToOneRel or join on the field not "
"permitted."
)
with self.assertRaisesMessage(FieldError, msg):
Author.objects.filter(article__title="Article 1")
def test_unsupported_lookup_reverse_foreign_key_custom_lookups(self):
msg = (
"Unsupported lookup 'abspl' for ManyToOneRel or join on the field not "
"permitted, perhaps you meant abspk?"
)
fk_field = Article._meta.get_field("author")
with self.assertRaisesMessage(FieldError, msg):
with register_lookup(fk_field, Abs, lookup_name="abspk"):
Author.objects.filter(article__abspl=2)
def test_filter_by_reverse_related_field_transform(self):
fk_field = Article._meta.get_field("author")
with register_lookup(fk_field, Abs):
self.assertSequenceEqual(
Author.objects.filter(article__abs=self.a1.pk), [self.au1]
)
def test_regex(self):
# Create some articles with a bit more interesting headlines for
# testing field lookups.
Article.objects.all().delete()
now = datetime.now()
Article.objects.bulk_create(
[
Article(pub_date=now, headline="f"),
Article(pub_date=now, headline="fo"),
Article(pub_date=now, headline="foo"),
Article(pub_date=now, headline="fooo"),
Article(pub_date=now, headline="hey-Foo"),
Article(pub_date=now, headline="bar"),
Article(pub_date=now, headline="AbBa"),
Article(pub_date=now, headline="baz"),
Article(pub_date=now, headline="baxZ"),
]
)
# zero-or-more
self.assertQuerySetEqual(
Article.objects.filter(headline__regex=r"fo*"),
Article.objects.filter(headline__in=["f", "fo", "foo", "fooo"]),
)
self.assertQuerySetEqual(
Article.objects.filter(headline__iregex=r"fo*"),
Article.objects.filter(headline__in=["f", "fo", "foo", "fooo", "hey-Foo"]),
)
# one-or-more
self.assertQuerySetEqual(
Article.objects.filter(headline__regex=r"fo+"),
Article.objects.filter(headline__in=["fo", "foo", "fooo"]),
)
# wildcard
self.assertQuerySetEqual(
Article.objects.filter(headline__regex=r"fooo?"),
Article.objects.filter(headline__in=["foo", "fooo"]),
)
# leading anchor
self.assertQuerySetEqual(
Article.objects.filter(headline__regex=r"^b"),
Article.objects.filter(headline__in=["bar", "baxZ", "baz"]),
)
self.assertQuerySetEqual(
Article.objects.filter(headline__iregex=r"^a"),
Article.objects.filter(headline="AbBa"),
)
# trailing anchor
self.assertQuerySetEqual(
Article.objects.filter(headline__regex=r"z$"),
Article.objects.filter(headline="baz"),
)
self.assertQuerySetEqual(
Article.objects.filter(headline__iregex=r"z$"),
Article.objects.filter(headline__in=["baxZ", "baz"]),
)
# character sets
self.assertQuerySetEqual(
Article.objects.filter(headline__regex=r"ba[rz]"),
Article.objects.filter(headline__in=["bar", "baz"]),
)
self.assertQuerySetEqual(
Article.objects.filter(headline__regex=r"ba.[RxZ]"),
Article.objects.filter(headline="baxZ"),
)
self.assertQuerySetEqual(
Article.objects.filter(headline__iregex=r"ba[RxZ]"),
Article.objects.filter(headline__in=["bar", "baxZ", "baz"]),
)
# and more articles:
Article.objects.bulk_create(
[
Article(pub_date=now, headline="foobar"),
Article(pub_date=now, headline="foobaz"),
Article(pub_date=now, headline="ooF"),
Article(pub_date=now, headline="foobarbaz"),
Article(pub_date=now, headline="zoocarfaz"),
Article(pub_date=now, headline="barfoobaz"),
Article(pub_date=now, headline="bazbaRFOO"),
]
)
# alternation
self.assertQuerySetEqual(
Article.objects.filter(headline__regex=r"oo(f|b)"),
Article.objects.filter(
headline__in=[
"barfoobaz",
"foobar",
"foobarbaz",
"foobaz",
]
),
)
self.assertQuerySetEqual(
Article.objects.filter(headline__iregex=r"oo(f|b)"),
Article.objects.filter(
headline__in=[
"barfoobaz",
"foobar",
"foobarbaz",
"foobaz",
"ooF",
]
),
)
self.assertQuerySetEqual(
Article.objects.filter(headline__regex=r"^foo(f|b)"),
Article.objects.filter(headline__in=["foobar", "foobarbaz", "foobaz"]),
)
# greedy matching
self.assertQuerySetEqual(
Article.objects.filter(headline__regex=r"b.*az"),
Article.objects.filter(
headline__in=[
"barfoobaz",
"baz",
"bazbaRFOO",
"foobarbaz",
"foobaz",
]
),
)
self.assertQuerySetEqual(
Article.objects.filter(headline__iregex=r"b.*ar"),
Article.objects.filter(
headline__in=[
"bar",
"barfoobaz",
"bazbaRFOO",
"foobar",
"foobarbaz",
]
),
)
@skipUnlessDBFeature("supports_regex_backreferencing")
def test_regex_backreferencing(self):
# grouping and backreferences
now = datetime.now()
Article.objects.bulk_create(
[
Article(pub_date=now, headline="foobar"),
Article(pub_date=now, headline="foobaz"),
Article(pub_date=now, headline="ooF"),
Article(pub_date=now, headline="foobarbaz"),
Article(pub_date=now, headline="zoocarfaz"),
Article(pub_date=now, headline="barfoobaz"),
Article(pub_date=now, headline="bazbaRFOO"),
]
)
self.assertQuerySetEqual(
Article.objects.filter(headline__regex=r"b(.).*b\1").values_list(
"headline", flat=True
),
["barfoobaz", "bazbaRFOO", "foobarbaz"],
)
def test_regex_null(self):
"""
A regex lookup does not fail on null/None values
"""
Season.objects.create(year=2012, gt=None)
self.assertQuerySetEqual(Season.objects.filter(gt__regex=r"^$"), [])
def test_textfield_exact_null(self):
with self.assertNumQueries(1) as ctx:
self.assertSequenceEqual(Author.objects.filter(bio=None), [self.au2])
# Columns with IS NULL condition are not wrapped (except PostgreSQL).
bio_column = connection.ops.quote_name(Author._meta.get_field("bio").column)
self.assertIn(f"{bio_column} IS NULL", ctx.captured_queries[0]["sql"])
def test_regex_non_string(self):
"""
A regex lookup does not fail on non-string fields
"""
s = Season.objects.create(year=2013, gt=444)
self.assertQuerySetEqual(Season.objects.filter(gt__regex=r"^444$"), [s])
def test_regex_non_ascii(self):
"""
A regex lookup does not trip on non-ASCII characters.
"""
Player.objects.create(name="\u2660")
Player.objects.get(name__regex="\u2660")
def test_nonfield_lookups(self):
"""
A lookup query containing non-fields raises the proper exception.
"""
msg = (
"Unsupported lookup 'blahblah' for CharField or join on the field not "
"permitted."
)
with self.assertRaisesMessage(FieldError, msg):
Article.objects.filter(headline__blahblah=99)
with self.assertRaisesMessage(FieldError, msg):
Article.objects.filter(headline__blahblah__exact=99)
msg = (
"Cannot resolve keyword 'blahblah' into field. Choices are: "
"author, author_id, headline, id, pub_date, slug, tag"
)
with self.assertRaisesMessage(FieldError, msg):
Article.objects.filter(blahblah=99)
def test_lookup_collision(self):
"""
Genuine field names don't collide with built-in lookup types
('year', 'gt', 'range', 'in' etc.) (#11670).
"""
# 'gt' is used as a code number for the year, e.g. 111=>2009.
season_2009 = Season.objects.create(year=2009, gt=111)
season_2009.games.create(home="Houston Astros", away="St. Louis Cardinals")
season_2010 = Season.objects.create(year=2010, gt=222)
season_2010.games.create(home="Houston Astros", away="Chicago Cubs")
season_2010.games.create(home="Houston Astros", away="Milwaukee Brewers")
season_2010.games.create(home="Houston Astros", away="St. Louis Cardinals")
season_2011 = Season.objects.create(year=2011, gt=333)
season_2011.games.create(home="Houston Astros", away="St. Louis Cardinals")
season_2011.games.create(home="Houston Astros", away="Milwaukee Brewers")
hunter_pence = Player.objects.create(name="Hunter Pence")
hunter_pence.games.set(Game.objects.filter(season__year__in=[2009, 2010]))
pudge = Player.objects.create(name="Ivan Rodriquez")
pudge.games.set(Game.objects.filter(season__year=2009))
pedro_feliz = Player.objects.create(name="Pedro Feliz")
pedro_feliz.games.set(Game.objects.filter(season__year__in=[2011]))
johnson = Player.objects.create(name="Johnson")
johnson.games.set(Game.objects.filter(season__year__in=[2011]))
# Games in 2010
self.assertEqual(Game.objects.filter(season__year=2010).count(), 3)
self.assertEqual(Game.objects.filter(season__year__exact=2010).count(), 3)
self.assertEqual(Game.objects.filter(season__gt=222).count(), 3)
self.assertEqual(Game.objects.filter(season__gt__exact=222).count(), 3)
# Games in 2011
self.assertEqual(Game.objects.filter(season__year=2011).count(), 2)
self.assertEqual(Game.objects.filter(season__year__exact=2011).count(), 2)
self.assertEqual(Game.objects.filter(season__gt=333).count(), 2)
self.assertEqual(Game.objects.filter(season__gt__exact=333).count(), 2)
self.assertEqual(Game.objects.filter(season__year__gt=2010).count(), 2)
self.assertEqual(Game.objects.filter(season__gt__gt=222).count(), 2)
# Games played in 2010 and 2011
self.assertEqual(Game.objects.filter(season__year__in=[2010, 2011]).count(), 5)
self.assertEqual(Game.objects.filter(season__year__gt=2009).count(), 5)
self.assertEqual(Game.objects.filter(season__gt__in=[222, 333]).count(), 5)
self.assertEqual(Game.objects.filter(season__gt__gt=111).count(), 5)
# Players who played in 2009
self.assertEqual(
Player.objects.filter(games__season__year=2009).distinct().count(), 2
)
self.assertEqual(
Player.objects.filter(games__season__year__exact=2009).distinct().count(), 2
)
self.assertEqual(
Player.objects.filter(games__season__gt=111).distinct().count(), 2
)
self.assertEqual(
Player.objects.filter(games__season__gt__exact=111).distinct().count(), 2
)
# Players who played in 2010
self.assertEqual(
Player.objects.filter(games__season__year=2010).distinct().count(), 1
)
self.assertEqual(
Player.objects.filter(games__season__year__exact=2010).distinct().count(), 1
)
self.assertEqual(
Player.objects.filter(games__season__gt=222).distinct().count(), 1
)
self.assertEqual(
Player.objects.filter(games__season__gt__exact=222).distinct().count(), 1
)
# Players who played in 2011
self.assertEqual(
Player.objects.filter(games__season__year=2011).distinct().count(), 2
)
self.assertEqual(
Player.objects.filter(games__season__year__exact=2011).distinct().count(), 2
)
self.assertEqual(
Player.objects.filter(games__season__gt=333).distinct().count(), 2
)
self.assertEqual(
Player.objects.filter(games__season__year__gt=2010).distinct().count(), 2
)
self.assertEqual(
Player.objects.filter(games__season__gt__gt=222).distinct().count(), 2
)
def test_chain_date_time_lookups(self):
self.assertCountEqual(
Article.objects.filter(pub_date__month__gt=7),
[self.a5, self.a6],
)
self.assertCountEqual(
Article.objects.filter(pub_date__day__gte=27),
[self.a2, self.a3, self.a4, self.a7],
)
self.assertCountEqual(
Article.objects.filter(pub_date__hour__lt=8),
[self.a1, self.a2, self.a3, self.a4, self.a7],
)
self.assertCountEqual(
Article.objects.filter(pub_date__minute__lte=0),
[self.a1, self.a2, self.a3, self.a4, self.a5, self.a6, self.a7],
)
def test_exact_none_transform(self):
"""Transforms are used for __exact=None."""
Season.objects.create(year=1, nulled_text_field="not null")
self.assertFalse(Season.objects.filter(nulled_text_field__isnull=True))
self.assertTrue(Season.objects.filter(nulled_text_field__nulled__isnull=True))
self.assertTrue(Season.objects.filter(nulled_text_field__nulled__exact=None))
self.assertTrue(Season.objects.filter(nulled_text_field__nulled=None))
def test_exact_sliced_queryset_limit_one(self):
self.assertCountEqual(
Article.objects.filter(author=Author.objects.all()[:1]),
[self.a1, self.a2, self.a3, self.a4],
)
def test_exact_sliced_queryset_limit_one_offset(self):
self.assertCountEqual(
Article.objects.filter(author=Author.objects.all()[1:2]),
[self.a5, self.a6, self.a7],
)
def test_exact_sliced_queryset_not_limited_to_one(self):
msg = (
"The QuerySet value for an exact lookup must be limited to one "
"result using slicing."
)
with self.assertRaisesMessage(ValueError, msg):
list(Article.objects.filter(author=Author.objects.all()[:2]))
with self.assertRaisesMessage(ValueError, msg):
list(Article.objects.filter(author=Author.objects.all()[1:]))
@skipUnless(connection.vendor == "mysql", "MySQL-specific workaround.")
def test_exact_booleanfield(self):
# MySQL ignores indexes with boolean fields unless they're compared
# directly to a boolean value.
product = Product.objects.create(name="Paper", qty_target=5000)
Stock.objects.create(product=product, short=False, qty_available=5100)
stock_1 = Stock.objects.create(product=product, short=True, qty_available=180)
qs = Stock.objects.filter(short=True)
self.assertSequenceEqual(qs, [stock_1])
self.assertIn(
"%s = True" % connection.ops.quote_name("short"),
str(qs.query),
)
@skipUnless(connection.vendor == "mysql", "MySQL-specific workaround.")
def test_exact_booleanfield_annotation(self):
# MySQL ignores indexes with boolean fields unless they're compared
# directly to a boolean value.
qs = Author.objects.annotate(
case=Case(
When(alias="a1", then=True),
default=False,
output_field=BooleanField(),
)
).filter(case=True)
self.assertSequenceEqual(qs, [self.au1])
self.assertIn(" = True", str(qs.query))
qs = Author.objects.annotate(
wrapped=ExpressionWrapper(Q(alias="a1"), output_field=BooleanField()),
).filter(wrapped=True)
self.assertSequenceEqual(qs, [self.au1])
self.assertIn(" = True", str(qs.query))
# EXISTS(...) shouldn't be compared to a boolean value.
qs = Author.objects.annotate(
exists=Exists(Author.objects.filter(alias="a1", pk=OuterRef("pk"))),
).filter(exists=True)
self.assertSequenceEqual(qs, [self.au1])
self.assertNotIn(" = True", str(qs.query))
def test_custom_field_none_rhs(self):
"""
__exact=value is transformed to __isnull=True if Field.get_prep_value()
converts value to None.
"""
season = Season.objects.create(year=2012, nulled_text_field=None)
self.assertTrue(
Season.objects.filter(pk=season.pk, nulled_text_field__isnull=True)
)
self.assertTrue(Season.objects.filter(pk=season.pk, nulled_text_field=""))
def test_pattern_lookups_with_substr(self):
a = Author.objects.create(name="John Smith", alias="Johx")
b = Author.objects.create(name="Rhonda Simpson", alias="sonx")
tests = (
("startswith", [a]),
("istartswith", [a]),
("contains", [a, b]),
("icontains", [a, b]),
("endswith", [b]),
("iendswith", [b]),
)
for lookup, result in tests:
with self.subTest(lookup=lookup):
authors = Author.objects.filter(
**{"name__%s" % lookup: Substr("alias", 1, 3)}
)
self.assertCountEqual(authors, result)
def test_custom_lookup_none_rhs(self):
"""Lookup.can_use_none_as_rhs=True allows None as a lookup value."""
season = Season.objects.create(year=2012, nulled_text_field=None)
query = Season.objects.get_queryset().query
field = query.model._meta.get_field("nulled_text_field")
self.assertIsInstance(
query.build_lookup(["isnull_none_rhs"], field, None), IsNullWithNoneAsRHS
)
self.assertTrue(
Season.objects.filter(pk=season.pk, nulled_text_field__isnull_none_rhs=True)
)
def test_exact_exists(self):
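        # An Exists() expression works both as an annotation and as the
        # right-hand side of an exact lookup against that annotation.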
qs = Article.objects.filter(pk=OuterRef("pk"))
seasons = Season.objects.annotate(pk_exists=Exists(qs)).filter(
pk_exists=Exists(qs),
)
self.assertCountEqual(seasons, Season.objects.all())
def test_nested_outerref_lhs(self):
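        # OuterRef(OuterRef("name")) resolves two query levels up, against
        # the outermost (Tag) queryset.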
tag = Tag.objects.create(name=self.au1.alias)
tag.articles.add(self.a1)
qs = Tag.objects.annotate(
has_author_alias_match=Exists(
Article.objects.annotate(
author_exists=Exists(
Author.objects.filter(alias=OuterRef(OuterRef("name")))
),
).filter(author_exists=True)
),
)
self.assertEqual(qs.get(has_author_alias_match=True), tag)
def test_exact_query_rhs_with_selected_columns(self):
newest_author = Author.objects.create(name="Author 2")
authors_max_ids = (
Author.objects.filter(
name="Author 2",
)
.values(
"name",
)
.annotate(
max_id=Max("id"),
)
.values("max_id")
)
authors = Author.objects.filter(id=authors_max_ids[:1])
self.assertEqual(authors.get(), newest_author)
def test_isnull_non_boolean_value(self):
msg = "The QuerySet value for an isnull lookup must be True or False."
tests = [
Author.objects.filter(alias__isnull=1),
Article.objects.filter(author__isnull=1),
Season.objects.filter(games__isnull=1),
Freebie.objects.filter(stock__isnull=1),
]
for qs in tests:
with self.subTest(qs=qs):
with self.assertRaisesMessage(ValueError, msg):
qs.exists()
def test_lookup_rhs(self):
product = Product.objects.create(name="GME", qty_target=5000)
stock_1 = Stock.objects.create(product=product, short=True, qty_available=180)
stock_2 = Stock.objects.create(product=product, short=False, qty_available=5100)
Stock.objects.create(product=product, short=False, qty_available=4000)
self.assertCountEqual(
Stock.objects.filter(short=Q(qty_available__lt=F("product__qty_target"))),
[stock_1, stock_2],
)
self.assertCountEqual(
Stock.objects.filter(
short=ExpressionWrapper(
Q(qty_available__lt=F("product__qty_target")),
output_field=BooleanField(),
)
),
[stock_1, stock_2],
)
class LookupQueryingTests(TestCase):
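    # Lookup expressions (Exact, GreaterThan, IsNull, ...) can be used
    # directly in annotate(), alias(), filter(), and order_by().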
@classmethod
def setUpTestData(cls):
cls.s1 = Season.objects.create(year=1942, gt=1942)
cls.s2 = Season.objects.create(year=1842, gt=1942, nulled_text_field="text")
cls.s3 = Season.objects.create(year=2042, gt=1942)
Game.objects.create(season=cls.s1, home="NY", away="Boston")
Game.objects.create(season=cls.s1, home="NY", away="Tampa")
Game.objects.create(season=cls.s3, home="Boston", away="Tampa")
def test_annotate(self):
qs = Season.objects.annotate(equal=Exact(F("year"), 1942))
self.assertCountEqual(
qs.values_list("year", "equal"),
((1942, True), (1842, False), (2042, False)),
)
def test_alias(self):
qs = Season.objects.alias(greater=GreaterThan(F("year"), 1910))
self.assertCountEqual(qs.filter(greater=True), [self.s1, self.s3])
def test_annotate_value_greater_than_value(self):
qs = Season.objects.annotate(greater=GreaterThan(Value(40), Value(30)))
self.assertCountEqual(
qs.values_list("year", "greater"),
((1942, True), (1842, True), (2042, True)),
)
def test_annotate_field_greater_than_field(self):
qs = Season.objects.annotate(greater=GreaterThan(F("year"), F("gt")))
self.assertCountEqual(
qs.values_list("year", "greater"),
((1942, False), (1842, False), (2042, True)),
)
def test_annotate_field_greater_than_value(self):
qs = Season.objects.annotate(greater=GreaterThan(F("year"), Value(1930)))
self.assertCountEqual(
qs.values_list("year", "greater"),
((1942, True), (1842, False), (2042, True)),
)
def test_annotate_field_greater_than_literal(self):
qs = Season.objects.annotate(greater=GreaterThan(F("year"), 1930))
self.assertCountEqual(
qs.values_list("year", "greater"),
((1942, True), (1842, False), (2042, True)),
)
def test_annotate_literal_greater_than_field(self):
qs = Season.objects.annotate(greater=GreaterThan(1930, F("year")))
self.assertCountEqual(
qs.values_list("year", "greater"),
((1942, False), (1842, True), (2042, False)),
)
def test_annotate_less_than_float(self):
qs = Season.objects.annotate(lesser=LessThan(F("year"), 1942.1))
self.assertCountEqual(
qs.values_list("year", "lesser"),
((1942, True), (1842, True), (2042, False)),
)
def test_annotate_greater_than_or_equal(self):
qs = Season.objects.annotate(greater=GreaterThanOrEqual(F("year"), 1942))
self.assertCountEqual(
qs.values_list("year", "greater"),
((1942, True), (1842, False), (2042, True)),
)
def test_annotate_greater_than_or_equal_float(self):
qs = Season.objects.annotate(greater=GreaterThanOrEqual(F("year"), 1942.1))
self.assertCountEqual(
qs.values_list("year", "greater"),
((1942, False), (1842, False), (2042, True)),
)
def test_combined_lookups(self):
expression = Exact(F("year"), 1942) | GreaterThan(F("year"), 1942)
qs = Season.objects.annotate(gte=expression)
self.assertCountEqual(
qs.values_list("year", "gte"),
((1942, True), (1842, False), (2042, True)),
)
def test_lookup_in_filter(self):
qs = Season.objects.filter(GreaterThan(F("year"), 1910))
self.assertCountEqual(qs, [self.s1, self.s3])
def test_isnull_lookup_in_filter(self):
self.assertSequenceEqual(
Season.objects.filter(IsNull(F("nulled_text_field"), False)),
[self.s2],
)
self.assertCountEqual(
Season.objects.filter(IsNull(F("nulled_text_field"), True)),
[self.s1, self.s3],
)
def test_filter_lookup_lhs(self):
qs = Season.objects.annotate(before_20=LessThan(F("year"), 2000)).filter(
before_20=LessThan(F("year"), 1900),
)
self.assertCountEqual(qs, [self.s2, self.s3])
def test_filter_wrapped_lookup_lhs(self):
qs = (
Season.objects.annotate(
before_20=ExpressionWrapper(
Q(year__lt=2000),
output_field=BooleanField(),
)
)
.filter(before_20=LessThan(F("year"), 1900))
.values_list("year", flat=True)
)
self.assertCountEqual(qs, [1842, 2042])
def test_filter_exists_lhs(self):
qs = Season.objects.annotate(
before_20=Exists(
Season.objects.filter(pk=OuterRef("pk"), year__lt=2000),
)
).filter(before_20=LessThan(F("year"), 1900))
self.assertCountEqual(qs, [self.s2, self.s3])
def test_filter_subquery_lhs(self):
qs = Season.objects.annotate(
before_20=Subquery(
Season.objects.filter(pk=OuterRef("pk")).values(
lesser=LessThan(F("year"), 2000),
),
)
).filter(before_20=LessThan(F("year"), 1900))
self.assertCountEqual(qs, [self.s2, self.s3])
def test_combined_lookups_in_filter(self):
expression = Exact(F("year"), 1942) | GreaterThan(F("year"), 1942)
qs = Season.objects.filter(expression)
self.assertCountEqual(qs, [self.s1, self.s3])
def test_combined_annotated_lookups_in_filter(self):
expression = Exact(F("year"), 1942) | GreaterThan(F("year"), 1942)
qs = Season.objects.annotate(gte=expression).filter(gte=True)
self.assertCountEqual(qs, [self.s1, self.s3])
def test_combined_annotated_lookups_in_filter_false(self):
expression = Exact(F("year"), 1942) | GreaterThan(F("year"), 1942)
qs = Season.objects.annotate(gte=expression).filter(gte=False)
self.assertSequenceEqual(qs, [self.s2])
def test_lookup_in_order_by(self):
qs = Season.objects.order_by(LessThan(F("year"), 1910), F("year"))
self.assertSequenceEqual(qs, [self.s1, self.s3, self.s2])
@skipUnlessDBFeature("supports_boolean_expr_in_select_clause")
def test_aggregate_combined_lookup(self):
expression = Cast(GreaterThan(F("year"), 1900), models.IntegerField())
qs = Season.objects.aggregate(modern=models.Sum(expression))
self.assertEqual(qs["modern"], 2)
def test_conditional_expression(self):
qs = Season.objects.annotate(
century=Case(
When(
GreaterThan(F("year"), 1900) & LessThanOrEqual(F("year"), 2000),
then=Value("20th"),
),
default=Value("other"),
)
).values("year", "century")
self.assertCountEqual(
qs,
[
{"year": 1942, "century": "20th"},
{"year": 1842, "century": "other"},
{"year": 2042, "century": "other"},
],
)
def test_multivalued_join_reuse(self):
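        # Conditions on the same multivalued relation passed to a single
        # filter()/get() call must reuse one join, i.e. match the same Game.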
self.assertEqual(
Season.objects.get(Exact(F("games__home"), "NY"), games__away="Boston"),
self.s1,
)
self.assertEqual(
Season.objects.get(Exact(F("games__home"), "NY") & Q(games__away="Boston")),
self.s1,
)
self.assertEqual(
Season.objects.get(
Exact(F("games__home"), "NY") & Exact(F("games__away"), "Boston")
),
self.s1,
)
|
7248882628e89760259b735f683090f220bb3ba1a8e2666363c0d32b043feda9 | """
Serialization
``django.core.serializers`` provides interfaces for converting Django
``QuerySet`` objects to and from "flat" data (i.e. strings).
"""
from decimal import Decimal
from django.db import models
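# get_by_natural_key() pairs with CategoryMetaData.natural_key() below so
# serializers can reference rows by (kind, name) instead of primary key.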
class CategoryMetaDataManager(models.Manager):
def get_by_natural_key(self, kind, name):
return self.get(kind=kind, name=name)
class CategoryMetaData(models.Model):
kind = models.CharField(max_length=10)
name = models.CharField(max_length=10)
value = models.CharField(max_length=10)
objects = CategoryMetaDataManager()
class Meta:
unique_together = (("kind", "name"),)
def __str__(self):
return "[%s:%s]=%s" % (self.kind, self.name, self.value)
def natural_key(self):
return (self.kind, self.name)
class Category(models.Model):
name = models.CharField(max_length=20)
meta_data = models.ForeignKey(
CategoryMetaData, models.SET_NULL, null=True, default=None
)
class Meta:
ordering = ("name",)
def __str__(self):
return self.name
class Author(models.Model):
name = models.CharField(max_length=20)
class Meta:
ordering = ("name",)
def __str__(self):
return self.name
class TopicManager(models.Manager):
def get_queryset(self):
return super().get_queryset().select_related("category")
class Topic(models.Model):
name = models.CharField(max_length=255)
category = models.ForeignKey(Category, models.CASCADE, null=True)
objects = TopicManager()
class Article(models.Model):
author = models.ForeignKey(Author, models.CASCADE)
headline = models.CharField(max_length=50)
pub_date = models.DateTimeField()
categories = models.ManyToManyField(Category)
meta_data = models.ManyToManyField(CategoryMetaData)
topics = models.ManyToManyField(Topic)
class Meta:
ordering = ("pub_date",)
def __str__(self):
return self.headline
class AuthorProfile(models.Model):
author = models.OneToOneField(Author, models.CASCADE, primary_key=True)
date_of_birth = models.DateField()
def __str__(self):
return "Profile of %s" % self.author
class Actor(models.Model):
name = models.CharField(max_length=20, primary_key=True)
class Meta:
ordering = ("name",)
def __str__(self):
return self.name
class Movie(models.Model):
actor = models.ForeignKey(Actor, models.CASCADE)
title = models.CharField(max_length=50)
    price = models.DecimalField(
        max_digits=6, decimal_places=2, default=Decimal("0.00")
    )
class Meta:
ordering = ("title",)
def __str__(self):
return self.title
class Score(models.Model):
score = models.FloatField()
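# A plain (non-model) value object; __str__ raises on purpose so tests can
# verify that serialization goes through TeamField.value_to_string() instead.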
class Team:
def __init__(self, title):
self.title = title
def __str__(self):
raise NotImplementedError("Not so simple")
def to_string(self):
return str(self.title)
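# Custom field that persists a Team as its title string and rebuilds the
# Team instance when loading from the database.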
class TeamField(models.CharField):
def __init__(self):
super().__init__(max_length=100)
def get_db_prep_save(self, value, connection):
return str(value.title)
def to_python(self, value):
if isinstance(value, Team):
return value
return Team(value)
def from_db_value(self, value, expression, connection):
return Team(value)
def value_to_string(self, obj):
return self.value_from_object(obj).to_string()
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
del kwargs["max_length"]
return name, path, args, kwargs
class Player(models.Model):
name = models.CharField(max_length=50)
rank = models.IntegerField()
team = TeamField()
def __str__(self):
return "%s (%d) playing for %s" % (self.name, self.rank, self.team.to_string())
class BaseModel(models.Model):
parent_data = models.IntegerField()
class ProxyBaseModel(BaseModel):
class Meta:
proxy = True
class ProxyProxyBaseModel(ProxyBaseModel):
class Meta:
proxy = True
class ComplexModel(models.Model):
field1 = models.CharField(max_length=10)
field2 = models.CharField(max_length=10)
field3 = models.CharField(max_length=10)
|
473984aea943759a50070fe125722f09886616322fe65f5323c74dc4b59242bf | from django.db import connection
from django.db.models import IntegerField, Value
from django.db.models.functions import Length, Lower, Right
from django.test import TestCase
from ..models import Author
class RightTests(TestCase):
@classmethod
def setUpTestData(cls):
Author.objects.create(name="John Smith", alias="smithj")
Author.objects.create(name="Rhonda")
def test_basic(self):
authors = Author.objects.annotate(name_part=Right("name", 5))
self.assertQuerySetEqual(
authors.order_by("name"), ["Smith", "honda"], lambda a: a.name_part
)
        # If alias is null, set it to the last 2 characters of the name,
        # lowercased.
Author.objects.filter(alias__isnull=True).update(alias=Lower(Right("name", 2)))
self.assertQuerySetEqual(
authors.order_by("name"), ["smithj", "da"], lambda a: a.alias
)
def test_invalid_length(self):
with self.assertRaisesMessage(ValueError, "'length' must be greater than 0"):
Author.objects.annotate(raises=Right("name", 0))
def test_zero_length(self):
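        # Tom's computed length is zero, yielding ""; Rhonda has no alias, so
        # the expression is NULL (surfaced as "" on backends such as Oracle
        # that interpret empty strings as NULL).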
Author.objects.create(name="Tom", alias="tom")
authors = Author.objects.annotate(
name_part=Right("name", Length("name") - Length("alias"))
)
self.assertQuerySetEqual(
authors.order_by("name"),
[
"mith",
"" if connection.features.interprets_empty_strings_as_nulls else None,
"",
],
lambda a: a.name_part,
)
def test_expressions(self):
authors = Author.objects.annotate(
name_part=Right("name", Value(3, output_field=IntegerField()))
)
self.assertQuerySetEqual(
authors.order_by("name"), ["ith", "nda"], lambda a: a.name_part
)
|
2889a1b407af7b5b8e22bdea06d18a11c8bbd0aee27d826ca48b5cc0e481264b | import sys
import unittest
from contextlib import contextmanager
from django.test import LiveServerTestCase, tag
from django.utils.functional import classproperty
from django.utils.module_loading import import_string
from django.utils.text import capfirst
class SeleniumTestCaseBase(type(LiveServerTestCase)):
# List of browsers to dynamically create test classes for.
browsers = []
# A selenium hub URL to test against.
selenium_hub = None
# The external host Selenium Hub can reach.
external_host = None
# Sentinel value to differentiate browser-specific instances.
browser = None
# Run browsers in headless mode.
headless = False
def __new__(cls, name, bases, attrs):
"""
Dynamically create new classes and add them to the test module when
        multiple browser specs are provided (e.g. --selenium=firefox,chrome).
"""
test_class = super().__new__(cls, name, bases, attrs)
# If the test class is either browser-specific or a test base, return it.
if test_class.browser or not any(
name.startswith("test") and callable(value) for name, value in attrs.items()
):
return test_class
elif test_class.browsers:
# Reuse the created test class to make it browser-specific.
# We can't rename it to include the browser name or create a
# subclass like we do with the remaining browsers as it would
# either duplicate tests or prevent pickling of its instances.
first_browser = test_class.browsers[0]
test_class.browser = first_browser
# Listen on an external interface if using a selenium hub.
host = test_class.host if not test_class.selenium_hub else "0.0.0.0"
test_class.host = host
test_class.external_host = cls.external_host
# Create subclasses for each of the remaining browsers and expose
# them through the test's module namespace.
module = sys.modules[test_class.__module__]
for browser in test_class.browsers[1:]:
browser_test_class = cls.__new__(
cls,
"%s%s" % (capfirst(browser), name),
(test_class,),
{
"browser": browser,
"host": host,
"external_host": cls.external_host,
"__module__": test_class.__module__,
},
)
setattr(module, browser_test_class.__name__, browser_test_class)
return test_class
# If no browsers were specified, skip this class (it'll still be discovered).
return unittest.skip("No browsers specified.")(test_class)
@classmethod
def import_webdriver(cls, browser):
return import_string("selenium.webdriver.%s.webdriver.WebDriver" % browser)
@classmethod
def import_options(cls, browser):
return import_string("selenium.webdriver.%s.options.Options" % browser)
@classmethod
def get_capability(cls, browser):
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
return getattr(DesiredCapabilities, browser.upper())
def create_options(self):
options = self.import_options(self.browser)()
if self.headless:
match self.browser:
case "chrome":
options.add_argument("--headless=new")
case "firefox":
options.add_argument("-headless")
return options
def create_webdriver(self):
options = self.create_options()
if self.selenium_hub:
from selenium import webdriver
for key, value in self.get_capability(self.browser).items():
options.set_capability(key, value)
return webdriver.Remote(command_executor=self.selenium_hub, options=options)
return self.import_webdriver(self.browser)(options=options)
@tag("selenium")
class SeleniumTestCase(LiveServerTestCase, metaclass=SeleniumTestCaseBase):
implicit_wait = 10
external_host = None
@classproperty
def live_server_url(cls):
return "http://%s:%s" % (cls.external_host or cls.host, cls.server_thread.port)
@classproperty
def allowed_host(cls):
return cls.external_host or cls.host
@classmethod
def setUpClass(cls):
cls.selenium = cls.create_webdriver()
cls.selenium.implicitly_wait(cls.implicit_wait)
super().setUpClass()
cls.addClassCleanup(cls._quit_selenium)
@classmethod
def _quit_selenium(cls):
# quit() the WebDriver before attempting to terminate and join the
        # single-threaded LiveServerThread to avoid a deadlock if the browser
# kept a connection alive.
if hasattr(cls, "selenium"):
cls.selenium.quit()
@contextmanager
def disable_implicit_wait(self):
"""Disable the default implicit wait."""
self.selenium.implicitly_wait(0)
try:
yield
finally:
self.selenium.implicitly_wait(self.implicit_wait)
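# Hedged usage sketch (not part of this module): a concrete suite subclasses
# SeleniumTestCase, and the metaclass clones it once per entry in "browsers"
# (normally injected by the test runner's --selenium option). The class name
# and assertions below are illustrative, kept as a comment so this module
# doesn't define real test cases:
#
#     class ExampleSeleniumTests(SeleniumTestCase):
#         browsers = ["chrome"]
#
#         def test_index(self):
#             self.selenium.get(self.live_server_url + "/")
#             self.assertIn("Django", self.selenium.title)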
|
c56ef326cb329372acf9f0478f8a7525117e253bcc1d35c407f06ff25ba1d1cd | import json
import mimetypes
import os
import sys
from copy import copy
from functools import partial
from http import HTTPStatus
from importlib import import_module
from io import BytesIO, IOBase
from urllib.parse import unquote_to_bytes, urljoin, urlparse, urlsplit
from asgiref.sync import sync_to_async
from django.conf import settings
from django.core.handlers.asgi import ASGIRequest
from django.core.handlers.base import BaseHandler
from django.core.handlers.wsgi import LimitedStream, WSGIRequest
from django.core.serializers.json import DjangoJSONEncoder
from django.core.signals import got_request_exception, request_finished, request_started
from django.db import close_old_connections
from django.http import HttpHeaders, HttpRequest, QueryDict, SimpleCookie
from django.test import signals
from django.test.utils import ContextList
from django.urls import resolve
from django.utils.encoding import force_bytes
from django.utils.functional import SimpleLazyObject
from django.utils.http import urlencode
from django.utils.itercompat import is_iterable
from django.utils.regex_helper import _lazy_re_compile
__all__ = (
"AsyncClient",
"AsyncRequestFactory",
"Client",
"RedirectCycleError",
"RequestFactory",
"encode_file",
"encode_multipart",
)
BOUNDARY = "BoUnDaRyStRiNg"
MULTIPART_CONTENT = "multipart/form-data; boundary=%s" % BOUNDARY
CONTENT_TYPE_RE = _lazy_re_compile(r".*; charset=([\w-]+);?")
# Structured suffix spec: https://tools.ietf.org/html/rfc6838#section-4.2.8
JSON_CONTENT_TYPE_RE = _lazy_re_compile(r"^application\/(.+\+)?json")
class RedirectCycleError(Exception):
"""The test client has been asked to follow a redirect loop."""
def __init__(self, message, last_response):
super().__init__(message)
self.last_response = last_response
self.redirect_chain = last_response.redirect_chain
class FakePayload(IOBase):
"""
A wrapper around BytesIO that restricts what can be read since data from
the network can't be sought and cannot be read outside of its content
length. This makes sure that views can't do anything under the test client
that wouldn't work in real life.
"""
def __init__(self, initial_bytes=None):
self.__content = BytesIO()
self.__len = 0
self.read_started = False
if initial_bytes is not None:
self.write(initial_bytes)
def __len__(self):
return self.__len
def read(self, size=-1, /):
if not self.read_started:
self.__content.seek(0)
self.read_started = True
if size == -1 or size is None:
size = self.__len
assert (
self.__len >= size
), "Cannot read more than the available bytes from the HTTP incoming data."
content = self.__content.read(size)
self.__len -= len(content)
return content
def readline(self, size=-1, /):
if not self.read_started:
self.__content.seek(0)
self.read_started = True
if size == -1 or size is None:
size = self.__len
assert (
self.__len >= size
), "Cannot read more than the available bytes from the HTTP incoming data."
content = self.__content.readline(size)
self.__len -= len(content)
return content
def write(self, b, /):
if self.read_started:
raise ValueError("Unable to write a payload after it's been read")
content = force_bytes(b)
self.__content.write(content)
self.__len += len(content)
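# Hedged sketch (illustrative, not part of the module): FakePayload enforces
# the "never read past Content-Length" contract that a real server provides.
def _demo_fake_payload():
    payload = FakePayload(b"abc")
    assert len(payload) == 3
    assert payload.read(2) == b"ab"  # reading starts from the beginning
    assert payload.read() == b"c"  # read() without a size drains the rest
    # A further payload.read(1) would fail the internal assertion, just as a
    # real request body never yields more bytes than were sent.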
def closing_iterator_wrapper(iterable, close):
try:
yield from iterable
finally:
request_finished.disconnect(close_old_connections)
close() # will fire request_finished
request_finished.connect(close_old_connections)
async def aclosing_iterator_wrapper(iterable, close):
try:
async for chunk in iterable:
yield chunk
finally:
request_finished.disconnect(close_old_connections)
close() # will fire request_finished
request_finished.connect(close_old_connections)
def conditional_content_removal(request, response):
"""
Simulate the behavior of most web servers by removing the content of
responses for HEAD requests, 1xx, 204, and 304 responses. Ensure
compliance with RFC 9112 Section 6.3.
"""
if 100 <= response.status_code < 200 or response.status_code in (204, 304):
if response.streaming:
response.streaming_content = []
else:
response.content = b""
if request.method == "HEAD":
if response.streaming:
response.streaming_content = []
else:
response.content = b""
return response
class ClientHandler(BaseHandler):
"""
An HTTP Handler that can be used for testing purposes. Use the WSGI
interface to compose requests, but return the raw HttpResponse object with
the originating WSGIRequest attached to its ``wsgi_request`` attribute.
"""
def __init__(self, enforce_csrf_checks=True, *args, **kwargs):
self.enforce_csrf_checks = enforce_csrf_checks
super().__init__(*args, **kwargs)
def __call__(self, environ):
# Set up middleware if needed. We couldn't do this earlier, because
# settings weren't available.
if self._middleware_chain is None:
self.load_middleware()
request_started.disconnect(close_old_connections)
request_started.send(sender=self.__class__, environ=environ)
request_started.connect(close_old_connections)
request = WSGIRequest(environ)
        # Sneaky little hack so that we can easily get round
# CsrfViewMiddleware. This makes life easier, and is probably
# required for backwards compatibility with external tests against
# admin views.
request._dont_enforce_csrf_checks = not self.enforce_csrf_checks
# Request goes through middleware.
response = self.get_response(request)
# Simulate behaviors of most web servers.
conditional_content_removal(request, response)
# Attach the originating request to the response so that it could be
# later retrieved.
response.wsgi_request = request
# Emulate a WSGI server by calling the close method on completion.
if response.streaming:
if response.is_async:
response.streaming_content = aclosing_iterator_wrapper(
response.streaming_content, response.close
)
else:
response.streaming_content = closing_iterator_wrapper(
response.streaming_content, response.close
)
else:
request_finished.disconnect(close_old_connections)
response.close() # will fire request_finished
request_finished.connect(close_old_connections)
return response
class AsyncClientHandler(BaseHandler):
"""An async version of ClientHandler."""
def __init__(self, enforce_csrf_checks=True, *args, **kwargs):
self.enforce_csrf_checks = enforce_csrf_checks
super().__init__(*args, **kwargs)
async def __call__(self, scope):
# Set up middleware if needed. We couldn't do this earlier, because
# settings weren't available.
if self._middleware_chain is None:
self.load_middleware(is_async=True)
# Extract body file from the scope, if provided.
if "_body_file" in scope:
body_file = scope.pop("_body_file")
else:
body_file = FakePayload("")
request_started.disconnect(close_old_connections)
await request_started.asend(sender=self.__class__, scope=scope)
request_started.connect(close_old_connections)
# Wrap FakePayload body_file to allow large read() in test environment.
request = ASGIRequest(scope, LimitedStream(body_file, len(body_file)))
# Sneaky little hack so that we can easily get round
# CsrfViewMiddleware. This makes life easier, and is probably required
# for backwards compatibility with external tests against admin views.
request._dont_enforce_csrf_checks = not self.enforce_csrf_checks
# Request goes through middleware.
response = await self.get_response_async(request)
# Simulate behaviors of most web servers.
conditional_content_removal(request, response)
# Attach the originating ASGI request to the response so that it could
# be later retrieved.
response.asgi_request = request
# Emulate a server by calling the close method on completion.
if response.streaming:
if response.is_async:
response.streaming_content = aclosing_iterator_wrapper(
response.streaming_content, response.close
)
else:
response.streaming_content = closing_iterator_wrapper(
response.streaming_content, response.close
)
else:
request_finished.disconnect(close_old_connections)
# Will fire request_finished.
await sync_to_async(response.close, thread_sensitive=False)()
request_finished.connect(close_old_connections)
return response
def store_rendered_templates(store, signal, sender, template, context, **kwargs):
"""
Store templates and contexts that are rendered.
The context is copied so that it is an accurate representation at the time
of rendering.
"""
store.setdefault("templates", []).append(template)
if "context" not in store:
store["context"] = ContextList()
store["context"].append(copy(context))
def encode_multipart(boundary, data):
"""
Encode multipart POST data from a dictionary of form values.
    The key will be used as the form data name; the value will be transmitted
    as content. If the value is a file, its contents will be sent using the
    file's content type when one can be determined, falling back to
    application/octet-stream; otherwise, str(value) will be sent.
"""
lines = []
def to_bytes(s):
return force_bytes(s, settings.DEFAULT_CHARSET)
# Not by any means perfect, but good enough for our purposes.
def is_file(thing):
return hasattr(thing, "read") and callable(thing.read)
# Each bit of the multipart form data could be either a form value or a
# file, or a *list* of form values and/or files. Remember that HTTP field
# names can be duplicated!
for key, value in data.items():
if value is None:
raise TypeError(
"Cannot encode None for key '%s' as POST data. Did you mean "
"to pass an empty string or omit the value?" % key
)
elif is_file(value):
lines.extend(encode_file(boundary, key, value))
elif not isinstance(value, str) and is_iterable(value):
for item in value:
if is_file(item):
lines.extend(encode_file(boundary, key, item))
else:
lines.extend(
to_bytes(val)
for val in [
"--%s" % boundary,
'Content-Disposition: form-data; name="%s"' % key,
"",
item,
]
)
else:
lines.extend(
to_bytes(val)
for val in [
"--%s" % boundary,
'Content-Disposition: form-data; name="%s"' % key,
"",
value,
]
)
lines.extend(
[
to_bytes("--%s--" % boundary),
b"",
]
)
return b"\r\n".join(lines)
def encode_file(boundary, key, file):
def to_bytes(s):
return force_bytes(s, settings.DEFAULT_CHARSET)
# file.name might not be a string. For example, it's an int for
# tempfile.TemporaryFile().
file_has_string_name = hasattr(file, "name") and isinstance(file.name, str)
filename = os.path.basename(file.name) if file_has_string_name else ""
if hasattr(file, "content_type"):
content_type = file.content_type
elif filename:
content_type = mimetypes.guess_type(filename)[0]
else:
content_type = None
if content_type is None:
content_type = "application/octet-stream"
filename = filename or key
return [
to_bytes("--%s" % boundary),
to_bytes(
'Content-Disposition: form-data; name="%s"; filename="%s"' % (key, filename)
),
to_bytes("Content-Type: %s" % content_type),
b"",
to_bytes(file.read()),
]
class RequestFactory:
"""
Class that lets you create mock Request objects for use in testing.
Usage:
rf = RequestFactory()
get_request = rf.get('/hello/')
post_request = rf.post('/submit/', {'foo': 'bar'})
Once you have a request object you can pass it to any view function,
just as if that view had been hooked up using a URLconf.
"""
def __init__(self, *, json_encoder=DjangoJSONEncoder, headers=None, **defaults):
self.json_encoder = json_encoder
self.defaults = defaults
self.cookies = SimpleCookie()
self.errors = BytesIO()
if headers:
self.defaults.update(HttpHeaders.to_wsgi_names(headers))
def _base_environ(self, **request):
"""
The base environment for a request.
"""
# This is a minimal valid WSGI environ dictionary, plus:
# - HTTP_COOKIE: for cookie support,
# - REMOTE_ADDR: often useful, see #8551.
# See https://www.python.org/dev/peps/pep-3333/#environ-variables
return {
"HTTP_COOKIE": "; ".join(
sorted(
"%s=%s" % (morsel.key, morsel.coded_value)
for morsel in self.cookies.values()
)
),
"PATH_INFO": "/",
"REMOTE_ADDR": "127.0.0.1",
"REQUEST_METHOD": "GET",
"SCRIPT_NAME": "",
"SERVER_NAME": "testserver",
"SERVER_PORT": "80",
"SERVER_PROTOCOL": "HTTP/1.1",
"wsgi.version": (1, 0),
"wsgi.url_scheme": "http",
"wsgi.input": FakePayload(b""),
"wsgi.errors": self.errors,
"wsgi.multiprocess": True,
"wsgi.multithread": False,
"wsgi.run_once": False,
**self.defaults,
**request,
}
def request(self, **request):
"Construct a generic request object."
return WSGIRequest(self._base_environ(**request))
def _encode_data(self, data, content_type):
if content_type is MULTIPART_CONTENT:
return encode_multipart(BOUNDARY, data)
else:
# Encode the content so that the byte representation is correct.
match = CONTENT_TYPE_RE.match(content_type)
if match:
charset = match[1]
else:
charset = settings.DEFAULT_CHARSET
return force_bytes(data, encoding=charset)
def _encode_json(self, data, content_type):
"""
Return encoded JSON if data is a dict, list, or tuple and content_type
is application/json.
"""
should_encode = JSON_CONTENT_TYPE_RE.match(content_type) and isinstance(
data, (dict, list, tuple)
)
return json.dumps(data, cls=self.json_encoder) if should_encode else data
def _get_path(self, parsed):
path = parsed.path
# If there are parameters, add them
if parsed.params:
path += ";" + parsed.params
path = unquote_to_bytes(path)
# Replace the behavior where non-ASCII values in the WSGI environ are
# arbitrarily decoded with ISO-8859-1.
# Refs comment in `get_bytes_from_wsgi()`.
return path.decode("iso-8859-1")
def get(self, path, data=None, secure=False, *, headers=None, **extra):
"""Construct a GET request."""
data = {} if data is None else data
return self.generic(
"GET",
path,
secure=secure,
headers=headers,
**{
"QUERY_STRING": urlencode(data, doseq=True),
**extra,
},
)
def post(
self,
path,
data=None,
content_type=MULTIPART_CONTENT,
secure=False,
*,
headers=None,
**extra,
):
"""Construct a POST request."""
data = self._encode_json({} if data is None else data, content_type)
post_data = self._encode_data(data, content_type)
return self.generic(
"POST",
path,
post_data,
content_type,
secure=secure,
headers=headers,
**extra,
)
def head(self, path, data=None, secure=False, *, headers=None, **extra):
"""Construct a HEAD request."""
data = {} if data is None else data
return self.generic(
"HEAD",
path,
secure=secure,
headers=headers,
**{
"QUERY_STRING": urlencode(data, doseq=True),
**extra,
},
)
def trace(self, path, secure=False, *, headers=None, **extra):
"""Construct a TRACE request."""
return self.generic("TRACE", path, secure=secure, headers=headers, **extra)
def options(
self,
path,
data="",
content_type="application/octet-stream",
secure=False,
*,
headers=None,
**extra,
):
"Construct an OPTIONS request."
return self.generic(
"OPTIONS", path, data, content_type, secure=secure, headers=headers, **extra
)
def put(
self,
path,
data="",
content_type="application/octet-stream",
secure=False,
*,
headers=None,
**extra,
):
"""Construct a PUT request."""
data = self._encode_json(data, content_type)
return self.generic(
"PUT", path, data, content_type, secure=secure, headers=headers, **extra
)
def patch(
self,
path,
data="",
content_type="application/octet-stream",
secure=False,
*,
headers=None,
**extra,
):
"""Construct a PATCH request."""
data = self._encode_json(data, content_type)
return self.generic(
"PATCH", path, data, content_type, secure=secure, headers=headers, **extra
)
def delete(
self,
path,
data="",
content_type="application/octet-stream",
secure=False,
*,
headers=None,
**extra,
):
"""Construct a DELETE request."""
data = self._encode_json(data, content_type)
return self.generic(
"DELETE", path, data, content_type, secure=secure, headers=headers, **extra
)
def generic(
self,
method,
path,
data="",
content_type="application/octet-stream",
secure=False,
*,
headers=None,
**extra,
):
"""Construct an arbitrary HTTP request."""
parsed = urlparse(str(path)) # path can be lazy
data = force_bytes(data, settings.DEFAULT_CHARSET)
r = {
"PATH_INFO": self._get_path(parsed),
"REQUEST_METHOD": method,
"SERVER_PORT": "443" if secure else "80",
"wsgi.url_scheme": "https" if secure else "http",
}
if data:
r.update(
{
"CONTENT_LENGTH": str(len(data)),
"CONTENT_TYPE": content_type,
"wsgi.input": FakePayload(data),
}
)
if headers:
extra.update(HttpHeaders.to_wsgi_names(headers))
r.update(extra)
# If QUERY_STRING is absent or empty, we want to extract it from the URL.
if not r.get("QUERY_STRING"):
# WSGI requires latin-1 encoded strings. See get_path_info().
query_string = parsed[4].encode().decode("iso-8859-1")
r["QUERY_STRING"] = query_string
return self.request(**r)
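# Hedged usage sketch (path, parameter, and header values are illustrative):
# a RequestFactory-built request can be handed straight to a view callable;
# accessing request.GET needs configured settings.
def _demo_request_factory():
    rf = RequestFactory(headers={"user-agent": "test-suite"})
    request = rf.get("/hello/", {"q": "django"})
    assert request.method == "GET"
    assert request.GET["q"] == "django"
    assert request.headers["user-agent"] == "test-suite"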
class AsyncRequestFactory(RequestFactory):
"""
Class that lets you create mock ASGI-like Request objects for use in
testing. Usage:
rf = AsyncRequestFactory()
        get_request = rf.get('/hello/')
        post_request = rf.post('/submit/', {'foo': 'bar'})
    Once you have a request object you can pass it to any view function,
    including synchronous ones. Note that unlike AsyncClient, the request
    methods here are synchronous and must not be awaited. The reason for a
    separate class is:
    a) it builds ASGIRequest instances rather than WSGIRequests, and
    b) AsyncClient can subclass it.
"""
def _base_scope(self, **request):
"""The base scope for a request."""
# This is a minimal valid ASGI scope, plus:
# - headers['cookie'] for cookie support,
# - 'client' often useful, see #8551.
scope = {
"asgi": {"version": "3.0"},
"type": "http",
"http_version": "1.1",
"client": ["127.0.0.1", 0],
"server": ("testserver", "80"),
"scheme": "http",
"method": "GET",
"headers": [],
**self.defaults,
**request,
}
scope["headers"].append(
(
b"cookie",
b"; ".join(
sorted(
("%s=%s" % (morsel.key, morsel.coded_value)).encode("ascii")
for morsel in self.cookies.values()
)
),
)
)
return scope
def request(self, **request):
"""Construct a generic request object."""
# This is synchronous, which means all methods on this class are.
# AsyncClient, however, has an async request function, which makes all
# its methods async.
if "_body_file" in request:
body_file = request.pop("_body_file")
else:
body_file = FakePayload("")
# Wrap FakePayload body_file to allow large read() in test environment.
return ASGIRequest(
self._base_scope(**request), LimitedStream(body_file, len(body_file))
)
def generic(
self,
method,
path,
data="",
content_type="application/octet-stream",
secure=False,
*,
headers=None,
**extra,
):
"""Construct an arbitrary HTTP request."""
parsed = urlparse(str(path)) # path can be lazy.
data = force_bytes(data, settings.DEFAULT_CHARSET)
s = {
"method": method,
"path": self._get_path(parsed),
"server": ("127.0.0.1", "443" if secure else "80"),
"scheme": "https" if secure else "http",
"headers": [(b"host", b"testserver")],
}
if data:
s["headers"].extend(
[
(b"content-length", str(len(data)).encode("ascii")),
(b"content-type", content_type.encode("ascii")),
]
)
s["_body_file"] = FakePayload(data)
follow = extra.pop("follow", None)
if follow is not None:
s["follow"] = follow
if query_string := extra.pop("QUERY_STRING", None):
s["query_string"] = query_string
if headers:
extra.update(HttpHeaders.to_asgi_names(headers))
s["headers"] += [
(key.lower().encode("ascii"), value.encode("latin1"))
for key, value in extra.items()
]
# If QUERY_STRING is absent or empty, we want to extract it from the
# URL.
if not s.get("query_string"):
s["query_string"] = parsed[4]
return self.request(**s)
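# Hedged sketch: unlike AsyncClient, AsyncRequestFactory builds its
# ASGIRequest synchronously, so no await is involved; the path is
# illustrative.
def _demo_async_request_factory():
    arf = AsyncRequestFactory()
    request = arf.get("/hello/")
    assert request.scope["path"] == "/hello/"
    assert request.method == "GET"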
class ClientMixin:
"""
Mixin with common methods between Client and AsyncClient.
"""
def store_exc_info(self, **kwargs):
"""Store exceptions when they are generated by a view."""
self.exc_info = sys.exc_info()
def check_exception(self, response):
"""
Look for a signaled exception, clear the current context exception
data, re-raise the signaled exception, and clear the signaled exception
from the local cache.
"""
response.exc_info = self.exc_info
if self.exc_info:
_, exc_value, _ = self.exc_info
self.exc_info = None
if self.raise_request_exception:
raise exc_value
@property
def session(self):
"""Return the current session variables."""
engine = import_module(settings.SESSION_ENGINE)
cookie = self.cookies.get(settings.SESSION_COOKIE_NAME)
if cookie:
return engine.SessionStore(cookie.value)
session = engine.SessionStore()
session.save()
self.cookies[settings.SESSION_COOKIE_NAME] = session.session_key
return session
async def asession(self):
return await sync_to_async(lambda: self.session)()
def login(self, **credentials):
"""
Set the Factory to appear as if it has successfully logged into a site.
Return True if login is possible or False if the provided credentials
are incorrect.
"""
from django.contrib.auth import authenticate
user = authenticate(**credentials)
if user:
self._login(user)
return True
return False
async def alogin(self, **credentials):
"""See login()."""
from django.contrib.auth import aauthenticate
user = await aauthenticate(**credentials)
if user:
await self._alogin(user)
return True
return False
def force_login(self, user, backend=None):
if backend is None:
backend = self._get_backend()
user.backend = backend
self._login(user, backend)
async def aforce_login(self, user, backend=None):
if backend is None:
backend = self._get_backend()
user.backend = backend
await self._alogin(user, backend)
def _get_backend(self):
from django.contrib.auth import load_backend
for backend_path in settings.AUTHENTICATION_BACKENDS:
backend = load_backend(backend_path)
if hasattr(backend, "get_user"):
return backend_path
def _login(self, user, backend=None):
from django.contrib.auth import login
# Create a fake request to store login details.
request = HttpRequest()
if self.session:
request.session = self.session
else:
engine = import_module(settings.SESSION_ENGINE)
request.session = engine.SessionStore()
login(request, user, backend)
# Save the session values.
request.session.save()
self._set_login_cookies(request)
async def _alogin(self, user, backend=None):
from django.contrib.auth import alogin
# Create a fake request to store login details.
request = HttpRequest()
session = await self.asession()
if session:
request.session = session
else:
engine = import_module(settings.SESSION_ENGINE)
request.session = engine.SessionStore()
await alogin(request, user, backend)
# Save the session values.
await sync_to_async(request.session.save)()
self._set_login_cookies(request)
def _set_login_cookies(self, request):
# Set the cookie to represent the session.
session_cookie = settings.SESSION_COOKIE_NAME
self.cookies[session_cookie] = request.session.session_key
cookie_data = {
"max-age": None,
"path": "/",
"domain": settings.SESSION_COOKIE_DOMAIN,
"secure": settings.SESSION_COOKIE_SECURE or None,
"expires": None,
}
self.cookies[session_cookie].update(cookie_data)
def logout(self):
"""Log out the user by removing the cookies and session object."""
from django.contrib.auth import get_user, logout
request = HttpRequest()
if self.session:
request.session = self.session
request.user = get_user(request)
else:
engine = import_module(settings.SESSION_ENGINE)
request.session = engine.SessionStore()
logout(request)
self.cookies = SimpleCookie()
async def alogout(self):
"""See logout()."""
from django.contrib.auth import aget_user, alogout
request = HttpRequest()
session = await self.asession()
if session:
request.session = session
request.user = await aget_user(request)
else:
engine = import_module(settings.SESSION_ENGINE)
request.session = engine.SessionStore()
await alogout(request)
self.cookies = SimpleCookie()
def _parse_json(self, response, **extra):
if not hasattr(response, "_json"):
if not JSON_CONTENT_TYPE_RE.match(response.get("Content-Type")):
raise ValueError(
'Content-Type header is "%s", not "application/json"'
% response.get("Content-Type")
)
response._json = json.loads(
response.content.decode(response.charset), **extra
)
return response._json
class Client(ClientMixin, RequestFactory):
"""
A class that can act as a client for testing purposes.
It allows the user to compose GET and POST requests, and
obtain the response that the server gave to those requests.
The server Response objects are annotated with the details
of the contexts and templates that were rendered during the
process of serving the request.
Client objects are stateful - they will retain cookie (and
thus session) details for the lifetime of the Client instance.
This is not intended as a replacement for Twill/Selenium or
the like - it is here to allow testing against the
contexts and templates produced by a view, rather than the
HTML rendered to the end-user.
"""
def __init__(
self,
enforce_csrf_checks=False,
raise_request_exception=True,
*,
headers=None,
**defaults,
):
super().__init__(headers=headers, **defaults)
self.handler = ClientHandler(enforce_csrf_checks)
self.raise_request_exception = raise_request_exception
self.exc_info = None
self.extra = None
self.headers = None
def request(self, **request):
"""
Make a generic request. Compose the environment dictionary and pass
to the handler, return the result of the handler. Assume defaults for
the query environment, which can be overridden using the arguments to
the request.
"""
environ = self._base_environ(**request)
# Curry a data dictionary into an instance of the template renderer
# callback function.
data = {}
on_template_render = partial(store_rendered_templates, data)
signal_uid = "template-render-%s" % id(request)
signals.template_rendered.connect(on_template_render, dispatch_uid=signal_uid)
# Capture exceptions created by the handler.
exception_uid = "request-exception-%s" % id(request)
got_request_exception.connect(self.store_exc_info, dispatch_uid=exception_uid)
try:
response = self.handler(environ)
finally:
signals.template_rendered.disconnect(dispatch_uid=signal_uid)
got_request_exception.disconnect(dispatch_uid=exception_uid)
# Check for signaled exceptions.
self.check_exception(response)
# Save the client and request that stimulated the response.
response.client = self
response.request = request
# Add any rendered template detail to the response.
response.templates = data.get("templates", [])
response.context = data.get("context")
response.json = partial(self._parse_json, response)
# Attach the ResolverMatch instance to the response.
urlconf = getattr(response.wsgi_request, "urlconf", None)
response.resolver_match = SimpleLazyObject(
lambda: resolve(request["PATH_INFO"], urlconf=urlconf),
)
# Flatten a single context. Not really necessary anymore thanks to the
# __getattr__ flattening in ContextList, but has some edge case
# backwards compatibility implications.
if response.context and len(response.context) == 1:
response.context = response.context[0]
# Update persistent cookie data.
if response.cookies:
self.cookies.update(response.cookies)
return response
def get(
self,
path,
data=None,
follow=False,
secure=False,
*,
headers=None,
**extra,
):
"""Request a response from the server using GET."""
self.extra = extra
self.headers = headers
response = super().get(path, data=data, secure=secure, headers=headers, **extra)
if follow:
response = self._handle_redirects(
response, data=data, headers=headers, **extra
)
return response
def post(
self,
path,
data=None,
content_type=MULTIPART_CONTENT,
follow=False,
secure=False,
*,
headers=None,
**extra,
):
"""Request a response from the server using POST."""
self.extra = extra
self.headers = headers
response = super().post(
path,
data=data,
content_type=content_type,
secure=secure,
headers=headers,
**extra,
)
if follow:
response = self._handle_redirects(
response, data=data, content_type=content_type, headers=headers, **extra
)
return response
def head(
self,
path,
data=None,
follow=False,
secure=False,
*,
headers=None,
**extra,
):
"""Request a response from the server using HEAD."""
self.extra = extra
self.headers = headers
response = super().head(
path, data=data, secure=secure, headers=headers, **extra
)
if follow:
response = self._handle_redirects(
response, data=data, headers=headers, **extra
)
return response
def options(
self,
path,
data="",
content_type="application/octet-stream",
follow=False,
secure=False,
*,
headers=None,
**extra,
):
"""Request a response from the server using OPTIONS."""
self.extra = extra
self.headers = headers
response = super().options(
path,
data=data,
content_type=content_type,
secure=secure,
headers=headers,
**extra,
)
if follow:
response = self._handle_redirects(
response, data=data, content_type=content_type, headers=headers, **extra
)
return response
def put(
self,
path,
data="",
content_type="application/octet-stream",
follow=False,
secure=False,
*,
headers=None,
**extra,
):
"""Send a resource to the server using PUT."""
self.extra = extra
self.headers = headers
response = super().put(
path,
data=data,
content_type=content_type,
secure=secure,
headers=headers,
**extra,
)
if follow:
response = self._handle_redirects(
response, data=data, content_type=content_type, headers=headers, **extra
)
return response
def patch(
self,
path,
data="",
content_type="application/octet-stream",
follow=False,
secure=False,
*,
headers=None,
**extra,
):
"""Send a resource to the server using PATCH."""
self.extra = extra
self.headers = headers
response = super().patch(
path,
data=data,
content_type=content_type,
secure=secure,
headers=headers,
**extra,
)
if follow:
response = self._handle_redirects(
response, data=data, content_type=content_type, headers=headers, **extra
)
return response
def delete(
self,
path,
data="",
content_type="application/octet-stream",
follow=False,
secure=False,
*,
headers=None,
**extra,
):
"""Send a DELETE request to the server."""
self.extra = extra
self.headers = headers
response = super().delete(
path,
data=data,
content_type=content_type,
secure=secure,
headers=headers,
**extra,
)
if follow:
response = self._handle_redirects(
response, data=data, content_type=content_type, headers=headers, **extra
)
return response
def trace(
self,
path,
data="",
follow=False,
secure=False,
*,
headers=None,
**extra,
):
"""Send a TRACE request to the server."""
self.extra = extra
self.headers = headers
response = super().trace(
path, data=data, secure=secure, headers=headers, **extra
)
if follow:
response = self._handle_redirects(
response, data=data, headers=headers, **extra
)
return response
def _handle_redirects(
self,
response,
data="",
content_type="",
headers=None,
**extra,
):
"""
Follow any redirects by requesting responses from the server using GET.
"""
response.redirect_chain = []
redirect_status_codes = (
HTTPStatus.MOVED_PERMANENTLY,
HTTPStatus.FOUND,
HTTPStatus.SEE_OTHER,
HTTPStatus.TEMPORARY_REDIRECT,
HTTPStatus.PERMANENT_REDIRECT,
)
while response.status_code in redirect_status_codes:
response_url = response.url
redirect_chain = response.redirect_chain
redirect_chain.append((response_url, response.status_code))
url = urlsplit(response_url)
if url.scheme:
extra["wsgi.url_scheme"] = url.scheme
if url.hostname:
extra["SERVER_NAME"] = url.hostname
if url.port:
extra["SERVER_PORT"] = str(url.port)
path = url.path
# RFC 3986 Section 6.2.3: Empty path should be normalized to "/".
if not path and url.netloc:
path = "/"
# Prepend the request path to handle relative path redirects
if not path.startswith("/"):
path = urljoin(response.request["PATH_INFO"], path)
if response.status_code in (
HTTPStatus.TEMPORARY_REDIRECT,
HTTPStatus.PERMANENT_REDIRECT,
):
# Preserve request method and query string (if needed)
# post-redirect for 307/308 responses.
request_method = response.request["REQUEST_METHOD"].lower()
if request_method not in ("get", "head"):
extra["QUERY_STRING"] = url.query
request_method = getattr(self, request_method)
else:
request_method = self.get
data = QueryDict(url.query)
content_type = None
response = request_method(
path,
data=data,
content_type=content_type,
follow=False,
headers=headers,
**extra,
)
response.redirect_chain = redirect_chain
if redirect_chain[-1] in redirect_chain[:-1]:
# Check that we're not redirecting to somewhere we've already
# been to, to prevent loops.
raise RedirectCycleError(
"Redirect loop detected.", last_response=response
)
if len(redirect_chain) > 20:
# Such a lengthy chain likely also means a loop, but one with
# a growing path, changing view, or changing query argument;
# 20 is the value of "network.http.redirection-limit" from Firefox.
raise RedirectCycleError("Too many redirects.", last_response=response)
return response
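# Hedged usage sketch (URLs are illustrative): Client keeps cookies between
# requests and, with follow=True, records every redirect hop.
def _demo_client_follow():
    client = Client()
    response = client.get("/accounts/profile/", follow=True)
    # redirect_chain is a list of (url, status_code) tuples, e.g.
    # [("/accounts/login/?next=/accounts/profile/", 302)].
    return response.redirect_chain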
class AsyncClient(ClientMixin, AsyncRequestFactory):
"""
An async version of Client that creates ASGIRequests and calls through an
async request path.
Does not currently support "follow" on its methods.
"""
def __init__(
self,
enforce_csrf_checks=False,
raise_request_exception=True,
*,
headers=None,
**defaults,
):
super().__init__(headers=headers, **defaults)
self.handler = AsyncClientHandler(enforce_csrf_checks)
self.raise_request_exception = raise_request_exception
self.exc_info = None
self.extra = None
self.headers = None
async def request(self, **request):
"""
Make a generic request. Compose the scope dictionary and pass to the
handler, return the result of the handler. Assume defaults for the
query environment, which can be overridden using the arguments to the
request.
"""
if "follow" in request:
raise NotImplementedError(
"AsyncClient request methods do not accept the follow parameter."
)
scope = self._base_scope(**request)
# Curry a data dictionary into an instance of the template renderer
# callback function.
data = {}
on_template_render = partial(store_rendered_templates, data)
signal_uid = "template-render-%s" % id(request)
signals.template_rendered.connect(on_template_render, dispatch_uid=signal_uid)
# Capture exceptions created by the handler.
exception_uid = "request-exception-%s" % id(request)
got_request_exception.connect(self.store_exc_info, dispatch_uid=exception_uid)
try:
response = await self.handler(scope)
finally:
signals.template_rendered.disconnect(dispatch_uid=signal_uid)
got_request_exception.disconnect(dispatch_uid=exception_uid)
# Check for signaled exceptions.
self.check_exception(response)
# Save the client and request that stimulated the response.
response.client = self
response.request = request
# Add any rendered template detail to the response.
response.templates = data.get("templates", [])
response.context = data.get("context")
response.json = partial(self._parse_json, response)
# Attach the ResolverMatch instance to the response.
urlconf = getattr(response.asgi_request, "urlconf", None)
response.resolver_match = SimpleLazyObject(
lambda: resolve(request["path"], urlconf=urlconf),
)
# Flatten a single context. Not really necessary anymore thanks to the
# __getattr__ flattening in ContextList, but has some edge case
# backwards compatibility implications.
if response.context and len(response.context) == 1:
response.context = response.context[0]
# Update persistent cookie data.
if response.cookies:
self.cookies.update(response.cookies)
return response
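# Hedged usage sketch: AsyncClient's request methods are coroutines, so they
# must be awaited (typically from an async test); the URL is illustrative.
async def _demo_async_client():
    client = AsyncClient()
    response = await client.get("/hello/")
    return response.status_code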
|
add7b138de49ba1280316019bf89360c7d1f45229d3d9a22ebfb538ce5201510 | import functools
import sys
import threading
import warnings
from collections import Counter, defaultdict
from functools import partial
from django.core.exceptions import AppRegistryNotReady, ImproperlyConfigured
from .config import AppConfig
class Apps:
"""
A registry that stores the configuration of installed applications.
It also keeps track of models, e.g. to provide reverse relations.
"""
def __init__(self, installed_apps=()):
# installed_apps is set to None when creating the main registry
# because it cannot be populated at that point. Other registries must
# provide a list of installed apps and are populated immediately.
if installed_apps is None and hasattr(sys.modules[__name__], "apps"):
raise RuntimeError("You must supply an installed_apps argument.")
# Mapping of app labels => model names => model classes. Every time a
# model is imported, ModelBase.__new__ calls apps.register_model which
# creates an entry in all_models. All imported models are registered,
# regardless of whether they're defined in an installed application
# and whether the registry has been populated. Since it isn't possible
# to reimport a module safely (it could reexecute initialization code)
# all_models is never overridden or reset.
self.all_models = defaultdict(dict)
# Mapping of labels to AppConfig instances for installed apps.
self.app_configs = {}
# Stack of app_configs. Used to store the current state in
# set_available_apps and set_installed_apps.
self.stored_app_configs = []
# Whether the registry is populated.
self.apps_ready = self.models_ready = self.ready = False
# For the autoreloader.
self.ready_event = threading.Event()
# Lock for thread-safe population.
self._lock = threading.RLock()
self.loading = False
# Maps ("app_label", "modelname") tuples to lists of functions to be
# called when the corresponding model is ready. Used by this class's
# `lazy_model_operation()` and `do_pending_operations()` methods.
self._pending_operations = defaultdict(list)
# Populate apps and models, unless it's the main registry.
if installed_apps is not None:
self.populate(installed_apps)
def populate(self, installed_apps=None):
"""
Load application configurations and models.
Import each application module and then each model module.
It is thread-safe and idempotent, but not reentrant.
"""
if self.ready:
return
# populate() might be called by two threads in parallel on servers
# that create threads before initializing the WSGI callable.
with self._lock:
if self.ready:
return
# An RLock prevents other threads from entering this section. The
# compare and set operation below is atomic.
if self.loading:
# Prevent reentrant calls to avoid running AppConfig.ready()
# methods twice.
raise RuntimeError("populate() isn't reentrant")
self.loading = True
# Phase 1: initialize app configs and import app modules.
for entry in installed_apps:
if isinstance(entry, AppConfig):
app_config = entry
else:
app_config = AppConfig.create(entry)
if app_config.label in self.app_configs:
raise ImproperlyConfigured(
"Application labels aren't unique, "
"duplicates: %s" % app_config.label
)
self.app_configs[app_config.label] = app_config
app_config.apps = self
# Check for duplicate app names.
counts = Counter(
app_config.name for app_config in self.app_configs.values()
)
duplicates = [name for name, count in counts.most_common() if count > 1]
if duplicates:
raise ImproperlyConfigured(
"Application names aren't unique, "
"duplicates: %s" % ", ".join(duplicates)
)
self.apps_ready = True
# Phase 2: import models modules.
for app_config in self.app_configs.values():
app_config.import_models()
self.clear_cache()
self.models_ready = True
# Phase 3: run ready() methods of app configs.
for app_config in self.get_app_configs():
app_config.ready()
self.ready = True
self.ready_event.set()
def check_apps_ready(self):
"""Raise an exception if all apps haven't been imported yet."""
if not self.apps_ready:
from django.conf import settings
# If "not ready" is due to unconfigured settings, accessing
# INSTALLED_APPS raises a more helpful ImproperlyConfigured
# exception.
settings.INSTALLED_APPS
raise AppRegistryNotReady("Apps aren't loaded yet.")
def check_models_ready(self):
"""Raise an exception if all models haven't been imported yet."""
if not self.models_ready:
raise AppRegistryNotReady("Models aren't loaded yet.")
def get_app_configs(self):
"""Import applications and return an iterable of app configs."""
self.check_apps_ready()
return self.app_configs.values()
def get_app_config(self, app_label):
"""
        Import applications and return an app config for the given label.
Raise LookupError if no application exists with this label.
"""
self.check_apps_ready()
try:
return self.app_configs[app_label]
except KeyError:
message = "No installed app with label '%s'." % app_label
for app_config in self.get_app_configs():
if app_config.name == app_label:
message += " Did you mean '%s'?" % app_config.label
break
raise LookupError(message)
# This method is performance-critical at least for Django's test suite.
@functools.cache
def get_models(self, include_auto_created=False, include_swapped=False):
"""
Return a list of all installed models.
By default, the following models aren't included:
- auto-created models for many-to-many relations without
an explicit intermediate table,
- models that have been swapped out.
Set the corresponding keyword argument to True to include such models.
"""
self.check_models_ready()
result = []
for app_config in self.app_configs.values():
result.extend(app_config.get_models(include_auto_created, include_swapped))
return result
def get_model(self, app_label, model_name=None, require_ready=True):
"""
Return the model matching the given app_label and model_name.
As a shortcut, app_label may be in the form <app_label>.<model_name>.
model_name is case-insensitive.
Raise LookupError if no application exists with this label, or no
model exists with this name in the application. Raise ValueError if
called with a single argument that doesn't contain exactly one dot.
"""
if require_ready:
self.check_models_ready()
else:
self.check_apps_ready()
if model_name is None:
app_label, model_name = app_label.split(".")
app_config = self.get_app_config(app_label)
if not require_ready and app_config.models is None:
app_config.import_models()
return app_config.get_model(model_name, require_ready=require_ready)
def register_model(self, app_label, model):
# Since this method is called when models are imported, it cannot
# perform imports because of the risk of import loops. It mustn't
# call get_app_config().
model_name = model._meta.model_name
app_models = self.all_models[app_label]
if model_name in app_models:
if (
model.__name__ == app_models[model_name].__name__
and model.__module__ == app_models[model_name].__module__
):
warnings.warn(
"Model '%s.%s' was already registered. Reloading models is not "
"advised as it can lead to inconsistencies, most notably with "
"related models." % (app_label, model_name),
RuntimeWarning,
stacklevel=2,
)
else:
raise RuntimeError(
"Conflicting '%s' models in application '%s': %s and %s."
% (model_name, app_label, app_models[model_name], model)
)
app_models[model_name] = model
self.do_pending_operations(model)
self.clear_cache()
def is_installed(self, app_name):
"""
Check whether an application with this name exists in the registry.
app_name is the full name of the app e.g. 'django.contrib.admin'.
"""
self.check_apps_ready()
return any(ac.name == app_name for ac in self.app_configs.values())
def get_containing_app_config(self, object_name):
"""
Look for an app config containing a given object.
object_name is the dotted Python path to the object.
Return the app config for the inner application in case of nesting.
Return None if the object isn't in any registered app config.
"""
self.check_apps_ready()
candidates = []
for app_config in self.app_configs.values():
if object_name.startswith(app_config.name):
subpath = object_name.removeprefix(app_config.name)
if subpath == "" or subpath[0] == ".":
candidates.append(app_config)
if candidates:
return sorted(candidates, key=lambda ac: -len(ac.name))[0]
def get_registered_model(self, app_label, model_name):
"""
Similar to get_model(), but doesn't require that an app exists with
the given app_label.
It's safe to call this method at import time, even while the registry
is being populated.
"""
model = self.all_models[app_label].get(model_name.lower())
if model is None:
raise LookupError("Model '%s.%s' not registered." % (app_label, model_name))
return model
@functools.cache
def get_swappable_settings_name(self, to_string):
"""
        For a given model string (e.g. "auth.User"), return the name of the
        corresponding setting (e.g. "AUTH_USER_MODEL") if it refers to a
        swappable model. If the referred model is not swappable, return None.
This method is decorated with @functools.cache because it's performance
critical when it comes to migrations. Since the swappable settings don't
change after Django has loaded the settings, there is no reason to get
the respective settings attribute over and over again.
"""
to_string = to_string.lower()
for model in self.get_models(include_swapped=True):
swapped = model._meta.swapped
# Is this model swapped out for the model given by to_string?
if swapped and swapped.lower() == to_string:
return model._meta.swappable
# Is this model swappable and the one given by to_string?
if model._meta.swappable and model._meta.label_lower == to_string:
return model._meta.swappable
return None
def set_available_apps(self, available):
"""
Restrict the set of installed apps used by get_app_config[s].
available must be an iterable of application names.
set_available_apps() must be balanced with unset_available_apps().
Primarily used for performance optimization in TransactionTestCase.
This method is safe in the sense that it doesn't trigger any imports.
"""
available = set(available)
installed = {app_config.name for app_config in self.get_app_configs()}
if not available.issubset(installed):
raise ValueError(
"Available apps isn't a subset of installed apps, extra apps: %s"
% ", ".join(available - installed)
)
self.stored_app_configs.append(self.app_configs)
self.app_configs = {
label: app_config
for label, app_config in self.app_configs.items()
if app_config.name in available
}
self.clear_cache()
def unset_available_apps(self):
"""Cancel a previous call to set_available_apps()."""
self.app_configs = self.stored_app_configs.pop()
self.clear_cache()
def set_installed_apps(self, installed):
"""
Enable a different set of installed apps for get_app_config[s].
installed must be an iterable in the same format as INSTALLED_APPS.
set_installed_apps() must be balanced with unset_installed_apps(),
even if it exits with an exception.
Primarily used as a receiver of the setting_changed signal in tests.
This method may trigger new imports, which may add new models to the
registry of all imported models. They will stay in the registry even
after unset_installed_apps(). Since it isn't possible to replay
imports safely (e.g. that could lead to registering listeners twice),
models are registered when they're imported and never removed.
"""
if not self.ready:
raise AppRegistryNotReady("App registry isn't ready yet.")
self.stored_app_configs.append(self.app_configs)
self.app_configs = {}
self.apps_ready = self.models_ready = self.loading = self.ready = False
self.clear_cache()
self.populate(installed)
def unset_installed_apps(self):
"""Cancel a previous call to set_installed_apps()."""
self.app_configs = self.stored_app_configs.pop()
self.apps_ready = self.models_ready = self.ready = True
self.clear_cache()
def clear_cache(self):
"""
Clear all internal caches, for methods that alter the app registry.
This is mostly used in tests.
"""
self.get_swappable_settings_name.cache_clear()
# Call expire cache on each model. This will purge
# the relation tree and the fields cache.
self.get_models.cache_clear()
if self.ready:
# Circumvent self.get_models() to prevent that the cache is refilled.
# This particularly prevents that an empty value is cached while cloning.
for app_config in self.app_configs.values():
for model in app_config.get_models(include_auto_created=True):
model._meta._expire_cache()
def lazy_model_operation(self, function, *model_keys):
"""
Take a function and a number of ("app_label", "modelname") tuples, and
when all the corresponding models have been imported and registered,
call the function with the model classes as its arguments.
The function passed to this method must accept exactly n models as
arguments, where n=len(model_keys).
"""
# Base case: no arguments, just execute the function.
if not model_keys:
function()
# Recursive case: take the head of model_keys, wait for the
# corresponding model class to be imported and registered, then apply
# that argument to the supplied function. Pass the resulting partial
# to lazy_model_operation() along with the remaining model args and
# repeat until all models are loaded and all arguments are applied.
else:
next_model, *more_models = model_keys
# This will be executed after the class corresponding to next_model
# has been imported and registered. The `func` attribute provides
# duck-type compatibility with partials.
def apply_next_model(model):
next_function = partial(apply_next_model.func, model)
self.lazy_model_operation(next_function, *more_models)
apply_next_model.func = function
# If the model has already been imported and registered, partially
# apply it to the function now. If not, add it to the list of
# pending operations for the model, where it will be executed with
# the model class as its sole argument once the model is ready.
try:
model_class = self.get_registered_model(*next_model)
except LookupError:
self._pending_operations[next_model].append(apply_next_model)
else:
apply_next_model(model_class)
def do_pending_operations(self, model):
"""
Take a newly-prepared model and pass it to each function waiting for
it. This is called at the very end of Apps.register_model().
"""
key = model._meta.app_label, model._meta.model_name
for function in self._pending_operations.pop(key, []):
function(model)
apps = Apps(installed_apps=None)
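# Hedged usage sketch (labels are illustrative): the two calling conventions
# of get_model(), plus deferring work with lazy_model_operation().
def _demo_registry():
    user_model = apps.get_model("auth", "User")
    assert user_model is apps.get_model("auth.User")  # dotted shortcut
    def on_user_ready(model):
        print("ready:", model._meta.label)
    # Runs immediately if auth.User is already registered, otherwise as soon
    # as it is; model keys use the lowercased model name.
    apps.lazy_model_operation(on_user_ready, ("auth", "user"))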
|
df20f0c193d222fdc3d45bd2437af66db65f3623ed7d9b79aef7f64be272375c | import functools
import inspect
import itertools
import re
import sys
import types
import warnings
from pathlib import Path
from django.conf import settings
from django.http import Http404, HttpResponse, HttpResponseNotFound
from django.template import Context, Engine, TemplateDoesNotExist
from django.template.defaultfilters import pprint
from django.urls import resolve
from django.utils import timezone
from django.utils.datastructures import MultiValueDict
from django.utils.encoding import force_str
from django.utils.module_loading import import_string
from django.utils.regex_helper import _lazy_re_compile
from django.utils.version import PY311, get_docs_version
from django.views.decorators.debug import coroutine_functions_to_sensitive_variables
# Minimal Django templates engine to render the error templates
# regardless of the project's TEMPLATES setting. Templates are
# read directly from the filesystem so that the error handler
# works even if the template loader is broken.
DEBUG_ENGINE = Engine(
debug=True,
libraries={"i18n": "django.templatetags.i18n"},
)
def builtin_template_path(name):
"""
Return a path to a builtin template.
Avoid calling this function at the module level or in a class-definition
because __file__ may not exist, e.g. in frozen environments.
"""
return Path(__file__).parent / "templates" / name
class ExceptionCycleWarning(UserWarning):
pass
class CallableSettingWrapper:
"""
    Object to wrap a callable appearing in settings.
    * Not to call in the debug page (#21345).
    * Not to break the debug page if the callable forbids setting attributes
      (#23070).
"""
def __init__(self, callable_setting):
self._wrapped = callable_setting
def __repr__(self):
return repr(self._wrapped)
def technical_500_response(request, exc_type, exc_value, tb, status_code=500):
"""
Create a technical server error response. The last three arguments are
the values returned from sys.exc_info() and friends.
"""
reporter = get_exception_reporter_class(request)(request, exc_type, exc_value, tb)
if request.accepts("text/html"):
html = reporter.get_traceback_html()
return HttpResponse(html, status=status_code)
else:
text = reporter.get_traceback_text()
return HttpResponse(
text, status=status_code, content_type="text/plain; charset=utf-8"
)
@functools.lru_cache
def get_default_exception_reporter_filter():
# Instantiate the default filter for the first time and cache it.
return import_string(settings.DEFAULT_EXCEPTION_REPORTER_FILTER)()
def get_exception_reporter_filter(request):
default_filter = get_default_exception_reporter_filter()
return getattr(request, "exception_reporter_filter", default_filter)
def get_exception_reporter_class(request):
default_exception_reporter_class = import_string(
settings.DEFAULT_EXCEPTION_REPORTER
)
return getattr(
request, "exception_reporter_class", default_exception_reporter_class
)
def get_caller(request):
resolver_match = request.resolver_match
if resolver_match is None:
try:
resolver_match = resolve(request.path)
except Http404:
pass
return "" if resolver_match is None else resolver_match._func_path
class SafeExceptionReporterFilter:
"""
Use annotations made by the sensitive_post_parameters and
sensitive_variables decorators to filter out sensitive information.
"""
cleansed_substitute = "********************"
hidden_settings = _lazy_re_compile(
"API|TOKEN|KEY|SECRET|PASS|SIGNATURE|HTTP_COOKIE", flags=re.I
)
def cleanse_setting(self, key, value):
"""
Cleanse an individual setting key/value of sensitive content. If the
value is a dictionary, recursively cleanse the keys in that dictionary.
"""
if key == settings.SESSION_COOKIE_NAME:
is_sensitive = True
else:
try:
is_sensitive = self.hidden_settings.search(key)
except TypeError:
is_sensitive = False
if is_sensitive:
cleansed = self.cleansed_substitute
elif isinstance(value, dict):
cleansed = {k: self.cleanse_setting(k, v) for k, v in value.items()}
elif isinstance(value, list):
cleansed = [self.cleanse_setting("", v) for v in value]
elif isinstance(value, tuple):
cleansed = tuple([self.cleanse_setting("", v) for v in value])
else:
cleansed = value
if callable(cleansed):
cleansed = CallableSettingWrapper(cleansed)
return cleansed
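    # Hedged sketch (illustrative, not part of the class): cleanse_setting()
    # redacts by key name and recurses into containers, e.g.:
    #
    #     f = SafeExceptionReporterFilter()
    #     f.cleanse_setting("API_KEY", "hunter2")  # -> "********************"
    #     f.cleanse_setting("DATABASES", {"PASSWORD": "x"})  # value redacted
    #     f.cleanse_setting("DEBUG", True)  # -> True (key not matched)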
def get_safe_settings(self):
"""
Return a dictionary of the settings module with values of sensitive
settings replaced with stars (*********).
"""
settings_dict = {}
for k in dir(settings):
if k.isupper():
settings_dict[k] = self.cleanse_setting(k, getattr(settings, k))
return settings_dict
def get_safe_request_meta(self, request):
"""
Return a dictionary of request.META with sensitive values redacted.
"""
if not hasattr(request, "META"):
return {}
return {k: self.cleanse_setting(k, v) for k, v in request.META.items()}
def get_safe_cookies(self, request):
"""
Return a dictionary of request.COOKIES with sensitive values redacted.
"""
if not hasattr(request, "COOKIES"):
return {}
return {k: self.cleanse_setting(k, v) for k, v in request.COOKIES.items()}
def is_active(self, request):
"""
This filter adds safety in production environments (i.e. DEBUG is
False). If DEBUG is True, the site is not safe anyway.
This hook is provided as a convenience to easily activate or
deactivate the filter on a per request basis.
"""
return settings.DEBUG is False
def get_cleansed_multivaluedict(self, request, multivaluedict):
"""
Replace the keys in a MultiValueDict marked as sensitive with stars.
This mitigates leaking sensitive POST parameters if something like
request.POST['nonexistent_key'] throws an exception (#21098).
"""
sensitive_post_parameters = getattr(request, "sensitive_post_parameters", [])
if self.is_active(request) and sensitive_post_parameters:
multivaluedict = multivaluedict.copy()
for param in sensitive_post_parameters:
if param in multivaluedict:
multivaluedict[param] = self.cleansed_substitute
return multivaluedict
def get_post_parameters(self, request):
"""
Replace the values of POST parameters marked as sensitive with
stars (*********).
"""
if request is None:
return {}
else:
sensitive_post_parameters = getattr(
request, "sensitive_post_parameters", []
)
if self.is_active(request) and sensitive_post_parameters:
cleansed = request.POST.copy()
if sensitive_post_parameters == "__ALL__":
# Cleanse all parameters.
for k in cleansed:
cleansed[k] = self.cleansed_substitute
return cleansed
else:
# Cleanse only the specified parameters.
for param in sensitive_post_parameters:
if param in cleansed:
cleansed[param] = self.cleansed_substitute
return cleansed
else:
return request.POST
def cleanse_special_types(self, request, value):
try:
# If value is lazy or a complex object of another kind, this check
# might raise an exception. The isinstance() call also forces lazy
# MultiValueDicts, so they are detected and cleansed like plain ones.
is_multivalue_dict = isinstance(value, MultiValueDict)
except Exception as e:
return "{!r} while evaluating {!r}".format(e, value)
if is_multivalue_dict:
# Cleanse MultiValueDicts (request.POST is the one we usually care about)
value = self.get_cleansed_multivaluedict(request, value)
return value
def get_traceback_frame_variables(self, request, tb_frame):
"""
Replace the values of variables marked as sensitive with
stars (*********).
"""
sensitive_variables = None
# Coroutines don't have a proper `f_back` so they need to be inspected
# separately. Handle this by stashing the registered sensitive
# variables in a global dict indexed by `hash(file_path:line_number)`.
if (
tb_frame.f_code.co_flags & inspect.CO_COROUTINE != 0
and tb_frame.f_code.co_name != "sensitive_variables_wrapper"
):
key = hash(
f"{tb_frame.f_code.co_filename}:{tb_frame.f_code.co_firstlineno}"
)
sensitive_variables = coroutine_functions_to_sensitive_variables.get(
key, None
)
if sensitive_variables is None:
# Loop through the frame's callers to see if the
# sensitive_variables decorator was used.
current_frame = tb_frame
while current_frame is not None:
if (
current_frame.f_code.co_name == "sensitive_variables_wrapper"
and "sensitive_variables_wrapper" in current_frame.f_locals
):
# The sensitive_variables decorator was used, so take note
# of the sensitive variables' names.
wrapper = current_frame.f_locals["sensitive_variables_wrapper"]
sensitive_variables = getattr(wrapper, "sensitive_variables", None)
break
current_frame = current_frame.f_back
cleansed = {}
if self.is_active(request) and sensitive_variables:
if sensitive_variables == "__ALL__":
# Cleanse all variables
for name in tb_frame.f_locals:
cleansed[name] = self.cleansed_substitute
else:
# Cleanse specified variables
for name, value in tb_frame.f_locals.items():
if name in sensitive_variables:
value = self.cleansed_substitute
else:
value = self.cleanse_special_types(request, value)
cleansed[name] = value
else:
# Potentially cleanse the request and any MultiValueDicts if they
# are one of the frame variables.
for name, value in tb_frame.f_locals.items():
cleansed[name] = self.cleanse_special_types(request, value)
if (
tb_frame.f_code.co_name == "sensitive_variables_wrapper"
and "sensitive_variables_wrapper" in tb_frame.f_locals
):
# For good measure, obfuscate the decorated function's arguments in
# the sensitive_variables decorator's frame, in case the variables
# associated with those arguments were meant to be obfuscated from
# the decorated function's frame.
cleansed["func_args"] = self.cleansed_substitute
cleansed["func_kwargs"] = self.cleansed_substitute
return cleansed.items()
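# Behavior sketch for cleanse_setting() (illustrative values; assumes a
# configured settings module, since the session cookie name is consulted):
#
#     >>> f = SafeExceptionReporterFilter()
#     >>> f.cleanse_setting("SECRET_KEY", "abc123") == f.cleansed_substitute
#     True
#     >>> f.cleanse_setting("DEBUG", True)  # key doesn't match hidden_settings
#     True
#     >>> cleansed = f.cleanse_setting("DATABASES", {"PASSWORD": "pw"})
#     >>> cleansed["PASSWORD"] == f.cleansed_substitute  # nested keys cleansed
#     True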
class ExceptionReporter:
"""Organize and coordinate reporting on exceptions."""
@property
def html_template_path(self):
return builtin_template_path("technical_500.html")
@property
def text_template_path(self):
return builtin_template_path("technical_500.txt")
def __init__(self, request, exc_type, exc_value, tb, is_email=False):
self.request = request
self.filter = get_exception_reporter_filter(self.request)
self.exc_type = exc_type
self.exc_value = exc_value
self.tb = tb
self.is_email = is_email
self.template_info = getattr(self.exc_value, "template_debug", None)
self.template_does_not_exist = False
self.postmortem = None
def _get_raw_insecure_uri(self):
"""
Return an absolute URI from variables available in this request. Skip
ALLOWED_HOSTS protection, so it may return an insecure URI.
"""
return "{scheme}://{host}{path}".format(
scheme=self.request.scheme,
host=self.request._get_raw_host(),
path=self.request.get_full_path(),
)
def get_traceback_data(self):
"""Return a dictionary containing traceback information."""
if self.exc_type and issubclass(self.exc_type, TemplateDoesNotExist):
self.template_does_not_exist = True
self.postmortem = self.exc_value.chain or [self.exc_value]
frames = self.get_traceback_frames()
for i, frame in enumerate(frames):
if "vars" in frame:
frame_vars = []
for k, v in frame["vars"]:
v = pprint(v)
# Trim large blobs of data
if len(v) > 4096:
v = "%s… <trimmed %d bytes string>" % (v[0:4096], len(v))
frame_vars.append((k, v))
frame["vars"] = frame_vars
frames[i] = frame
unicode_hint = ""
if self.exc_type and issubclass(self.exc_type, UnicodeError):
start = getattr(self.exc_value, "start", None)
end = getattr(self.exc_value, "end", None)
if start is not None and end is not None:
unicode_str = self.exc_value.args[1]
unicode_hint = force_str(
unicode_str[max(start - 5, 0) : min(end + 5, len(unicode_str))],
"ascii",
errors="replace",
)
from django import get_version
if self.request is None:
user_str = None
else:
try:
user_str = str(self.request.user)
except Exception:
# request.user may raise OperationalError if the database is
# unavailable, for example.
user_str = "[unable to retrieve the current user]"
c = {
"is_email": self.is_email,
"unicode_hint": unicode_hint,
"frames": frames,
"request": self.request,
"request_meta": self.filter.get_safe_request_meta(self.request),
"request_COOKIES_items": self.filter.get_safe_cookies(self.request).items(),
"user_str": user_str,
"filtered_POST_items": list(
self.filter.get_post_parameters(self.request).items()
),
"settings": self.filter.get_safe_settings(),
"sys_executable": sys.executable,
"sys_version_info": "%d.%d.%d" % sys.version_info[0:3],
"server_time": timezone.now(),
"django_version_info": get_version(),
"sys_path": sys.path,
"template_info": self.template_info,
"template_does_not_exist": self.template_does_not_exist,
"postmortem": self.postmortem,
}
if self.request is not None:
c["request_GET_items"] = self.request.GET.items()
c["request_FILES_items"] = self.request.FILES.items()
c["request_insecure_uri"] = self._get_raw_insecure_uri()
c["raising_view_name"] = get_caller(self.request)
# Check whether exception info is available
if self.exc_type:
c["exception_type"] = self.exc_type.__name__
if self.exc_value:
c["exception_value"] = str(self.exc_value)
if exc_notes := getattr(self.exc_value, "__notes__", None):
c["exception_notes"] = "\n" + "\n".join(exc_notes)
if frames:
c["lastframe"] = frames[-1]
return c
def get_traceback_html(self):
"""Return HTML version of debug 500 HTTP error page."""
with self.html_template_path.open(encoding="utf-8") as fh:
t = DEBUG_ENGINE.from_string(fh.read())
c = Context(self.get_traceback_data(), use_l10n=False)
return t.render(c)
def get_traceback_text(self):
"""Return plain text version of debug 500 HTTP error page."""
with self.text_template_path.open(encoding="utf-8") as fh:
t = DEBUG_ENGINE.from_string(fh.read())
c = Context(self.get_traceback_data(), autoescape=False, use_l10n=False)
return t.render(c)
def _get_source(self, filename, loader, module_name):
source = None
if hasattr(loader, "get_source"):
try:
source = loader.get_source(module_name)
except ImportError:
pass
if source is not None:
source = source.splitlines()
if source is None:
try:
with open(filename, "rb") as fp:
source = fp.read().splitlines()
except OSError:
pass
return source
def _get_lines_from_file(
self, filename, lineno, context_lines, loader=None, module_name=None
):
"""
Return context_lines before and after lineno from file.
Return (pre_context_lineno, pre_context, context_line, post_context).
"""
source = self._get_source(filename, loader, module_name)
if source is None:
return None, [], None, []
# If we just read the source from a file, or if the loader did not
# apply tokenize.detect_encoding to decode the source into a
# string, then we should do that ourselves.
if isinstance(source[0], bytes):
encoding = "ascii"
for line in source[:2]:
# File coding may be specified. Match pattern from PEP-263
# (https://www.python.org/dev/peps/pep-0263/)
match = re.search(rb"coding[:=]\s*([-\w.]+)", line)
if match:
encoding = match[1].decode("ascii")
break
source = [str(sline, encoding, "replace") for sline in source]
lower_bound = max(0, lineno - context_lines)
upper_bound = lineno + context_lines
try:
pre_context = source[lower_bound:lineno]
context_line = source[lineno]
post_context = source[lineno + 1 : upper_bound]
except IndexError:
return None, [], None, []
return lower_bound, pre_context, context_line, post_context
def _get_explicit_or_implicit_cause(self, exc_value):
explicit = getattr(exc_value, "__cause__", None)
suppress_context = getattr(exc_value, "__suppress_context__", None)
implicit = getattr(exc_value, "__context__", None)
return explicit or (None if suppress_context else implicit)
def get_traceback_frames(self):
# Get the exception and all its causes
exceptions = []
exc_value = self.exc_value
while exc_value:
exceptions.append(exc_value)
exc_value = self._get_explicit_or_implicit_cause(exc_value)
if exc_value in exceptions:
warnings.warn(
"Cycle in the exception chain detected: exception '%s' "
"encountered again." % exc_value,
ExceptionCycleWarning,
)
# Avoid infinite loop if there's a cyclic reference (#29393).
break
frames = []
# No exceptions were supplied to ExceptionReporter
if not exceptions:
return frames
# In case there's just one exception, take the traceback from self.tb
exc_value = exceptions.pop()
tb = self.tb if not exceptions else exc_value.__traceback__
while True:
frames.extend(self.get_exception_traceback_frames(exc_value, tb))
try:
exc_value = exceptions.pop()
except IndexError:
break
tb = exc_value.__traceback__
return frames
def get_exception_traceback_frames(self, exc_value, tb):
exc_cause = self._get_explicit_or_implicit_cause(exc_value)
exc_cause_explicit = getattr(exc_value, "__cause__", True)
if tb is None:
yield {
"exc_cause": exc_cause,
"exc_cause_explicit": exc_cause_explicit,
"tb": None,
"type": "user",
}
while tb is not None:
# Support for __traceback_hide__ which is used by a few libraries
# to hide internal frames.
if tb.tb_frame.f_locals.get("__traceback_hide__"):
tb = tb.tb_next
continue
filename = tb.tb_frame.f_code.co_filename
function = tb.tb_frame.f_code.co_name
lineno = tb.tb_lineno - 1
loader = tb.tb_frame.f_globals.get("__loader__")
module_name = tb.tb_frame.f_globals.get("__name__") or ""
(
pre_context_lineno,
pre_context,
context_line,
post_context,
) = self._get_lines_from_file(
filename,
lineno,
7,
loader,
module_name,
)
if pre_context_lineno is None:
pre_context_lineno = lineno
pre_context = []
context_line = "<source code not available>"
post_context = []
colno = tb_area_colno = ""
if PY311:
_, _, start_column, end_column = next(
itertools.islice(
tb.tb_frame.f_code.co_positions(), tb.tb_lasti // 2, None
)
)
if start_column and end_column:
underline = "^" * (end_column - start_column)
spaces = " " * (start_column + len(str(lineno + 1)) + 2)
colno = f"\n{spaces}{underline}"
tb_area_spaces = " " * (
4
+ start_column
- (len(context_line) - len(context_line.lstrip()))
)
tb_area_colno = f"\n{tb_area_spaces}{underline}"
yield {
"exc_cause": exc_cause,
"exc_cause_explicit": exc_cause_explicit,
"tb": tb,
"type": "django" if module_name.startswith("django.") else "user",
"filename": filename,
"function": function,
"lineno": lineno + 1,
"vars": self.filter.get_traceback_frame_variables(
self.request, tb.tb_frame
),
"id": id(tb),
"pre_context": pre_context,
"context_line": context_line,
"post_context": post_context,
"pre_context_lineno": pre_context_lineno + 1,
"colno": colno,
"tb_area_colno": tb_area_colno,
}
tb = tb.tb_next
def technical_404_response(request, exception):
"""Create a technical 404 error response. `exception` is the Http404."""
try:
error_url = exception.args[0]["path"]
except (IndexError, TypeError, KeyError):
error_url = request.path_info[1:] # Trim leading slash
try:
tried = exception.args[0]["tried"]
except (IndexError, TypeError, KeyError):
resolved = True
tried = request.resolver_match.tried if request.resolver_match else None
else:
resolved = False
if not tried or ( # empty URLconf
request.path == "/"
and len(tried) == 1
and len(tried[0]) == 1 # default URLconf
and getattr(tried[0][0], "app_name", "")
== getattr(tried[0][0], "namespace", "")
== "admin"
):
return default_urlconf(request)
urlconf = getattr(request, "urlconf", settings.ROOT_URLCONF)
if isinstance(urlconf, types.ModuleType):
urlconf = urlconf.__name__
with builtin_template_path("technical_404.html").open(encoding="utf-8") as fh:
t = DEBUG_ENGINE.from_string(fh.read())
reporter_filter = get_default_exception_reporter_filter()
c = Context(
{
"urlconf": urlconf,
"root_urlconf": settings.ROOT_URLCONF,
"request_path": error_url,
"urlpatterns": tried,
"resolved": resolved,
"reason": str(exception),
"request": request,
"settings": reporter_filter.get_safe_settings(),
"raising_view_name": get_caller(request),
}
)
return HttpResponseNotFound(t.render(c))
def default_urlconf(request):
"""Create an empty URLconf 404 error response."""
with builtin_template_path("default_urlconf.html").open(encoding="utf-8") as fh:
t = DEBUG_ENGINE.from_string(fh.read())
c = Context(
{
"version": get_docs_version(),
}
)
return HttpResponse(t.render(c))
|
5111599284cd2663a59bf6b2f7cef6fbf944815d5ec2e9d94bd18a489460f9ef |
import copy
import itertools
import operator
from functools import wraps
class cached_property:
"""
Decorator that converts a method with a single self argument into a
property cached on the instance.
A cached property can be made out of an existing method:
(e.g. ``url = cached_property(get_absolute_url)``).
"""
name = None
@staticmethod
def func(instance):
raise TypeError(
"Cannot use cached_property instance without calling "
"__set_name__() on it."
)
def __init__(self, func):
self.real_func = func
self.__doc__ = getattr(func, "__doc__")
def __set_name__(self, owner, name):
if self.name is None:
self.name = name
self.func = self.real_func
elif name != self.name:
raise TypeError(
"Cannot assign the same cached_property to two different names "
"(%r and %r)." % (self.name, name)
)
def __get__(self, instance, cls=None):
"""
Call the function and put the return value in instance.__dict__ so that
subsequent attribute access on the instance returns the cached value
instead of calling cached_property.__get__().
"""
if instance is None:
return self
res = instance.__dict__[self.name] = self.func(instance)
return res
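# Usage sketch (illustrative):
#
#     >>> class Circle:
#     ...     def __init__(self, r):
#     ...         self.r = r
#     ...     @cached_property
#     ...     def area(self):
#     ...         print("computing")
#     ...         return 3.14159 * self.r**2
#     >>> c = Circle(1)
#     >>> c.area
#     computing
#     3.14159
#     >>> c.area  # now served from c.__dict__ without calling the method
#     3.14159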
class classproperty:
"""
Decorator that converts a method with a single cls argument into a property
that can be accessed directly from the class.
"""
def __init__(self, method=None):
self.fget = method
def __get__(self, instance, cls=None):
return self.fget(cls)
def getter(self, method):
self.fget = method
return self
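# Usage sketch (illustrative):
#
#     >>> class Settings:
#     ...     @classproperty
#     ...     def verbose_name(cls):
#     ...         return cls.__name__.lower()
#     >>> Settings.verbose_name  # accessible on the class itself
#     'settings'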
class Promise:
"""
Base class for the proxy class created in the closure of the lazy function.
It's used to recognize promises in code.
"""
pass
def lazy(func, *resultclasses):
"""
Turn any callable into a lazily evaluated callable. At least one result
class or type is required so that the automatic forcing of the lazy
evaluation code is triggered. Results are not memoized; the function is
evaluated on every access.
"""
class __proxy__(Promise):
"""
Encapsulate a function call and act as a proxy for methods that are
called on the result of that function. The function is not evaluated
until one of the methods on the result is called.
"""
def __init__(self, args, kw):
self._args = args
self._kw = kw
def __reduce__(self):
return (
_lazy_proxy_unpickle,
(func, self._args, self._kw) + resultclasses,
)
def __deepcopy__(self, memo):
# Instances of this class are effectively immutable. It's just a
# collection of functions. So we don't need to do anything
# complicated for copying.
memo[id(self)] = self
return self
def __cast(self):
return func(*self._args, **self._kw)
# Explicitly wrap methods which are defined on object and hence would
# not have been overloaded by the loop over resultclasses below.
def __repr__(self):
return repr(self.__cast())
def __str__(self):
return str(self.__cast())
def __eq__(self, other):
if isinstance(other, Promise):
other = other.__cast()
return self.__cast() == other
def __ne__(self, other):
if isinstance(other, Promise):
other = other.__cast()
return self.__cast() != other
def __lt__(self, other):
if isinstance(other, Promise):
other = other.__cast()
return self.__cast() < other
def __le__(self, other):
if isinstance(other, Promise):
other = other.__cast()
return self.__cast() <= other
def __gt__(self, other):
if isinstance(other, Promise):
other = other.__cast()
return self.__cast() > other
def __ge__(self, other):
if isinstance(other, Promise):
other = other.__cast()
return self.__cast() >= other
def __hash__(self):
return hash(self.__cast())
def __format__(self, format_spec):
return format(self.__cast(), format_spec)
# Explicitly wrap methods which are required for certain operations on
# int/str objects to function correctly.
def __add__(self, other):
return self.__cast() + other
def __radd__(self, other):
return other + self.__cast()
def __mod__(self, other):
return self.__cast() % other
def __mul__(self, other):
return self.__cast() * other
# Add wrappers for all methods from resultclasses which haven't been
# wrapped explicitly above.
for resultclass in resultclasses:
for type_ in resultclass.mro():
for method_name in type_.__dict__:
# Skip methods already defined on __proxy__, either by the explicit
# wrappers above or by an earlier iteration of this loop; each wrapper
# looks up the correct implementation when called.
if hasattr(__proxy__, method_name):
continue
# Builds a wrapper around some method. Pass method_name to
# avoid issues due to late binding.
def __wrapper__(self, *args, __method_name=method_name, **kw):
# Automatically triggers the evaluation of a lazy value and
# applies the given method of the result type.
result = func(*self._args, **self._kw)
return getattr(result, __method_name)(*args, **kw)
setattr(__proxy__, method_name, __wrapper__)
@wraps(func)
def __wrapper__(*args, **kw):
# Creates the proxy object, instead of the actual value.
return __proxy__(args, kw)
return __wrapper__
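# Usage sketch (illustrative):
#
#     >>> upper = lazy(str.upper, str)
#     >>> proxy = upper("hello")  # nothing is evaluated yet
#     >>> isinstance(proxy, Promise)
#     True
#     >>> str(proxy)              # forcing the proxy calls str.upper("hello")
#     'HELLO'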
def _lazy_proxy_unpickle(func, args, kwargs, *resultclasses):
return lazy(func, *resultclasses)(*args, **kwargs)
def lazystr(text):
"""
Shortcut for the common case of a lazy callable that returns str.
"""
return lazy(str, str)(text)
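# For example (illustrative), str methods on the proxy force evaluation:
#
#     >>> lazystr("deferred").upper()
#     'DEFERRED'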
def keep_lazy(*resultclasses):
"""
A decorator that allows a function to be called with one or more lazy
arguments. If none of the args are lazy, the function is evaluated
immediately, otherwise a __proxy__ is returned that will evaluate the
function when needed.
"""
if not resultclasses:
raise TypeError("You must pass at least one argument to keep_lazy().")
def decorator(func):
lazy_func = lazy(func, *resultclasses)
@wraps(func)
def wrapper(*args, **kwargs):
if any(
isinstance(arg, Promise)
for arg in itertools.chain(args, kwargs.values())
):
return lazy_func(*args, **kwargs)
return func(*args, **kwargs)
return wrapper
return decorator
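# Usage sketch (illustrative):
#
#     >>> @keep_lazy(str)
#     ... def shout(text):
#     ...     return str(text).upper()
#     >>> shout("hi")                    # plain args: evaluated eagerly
#     'HI'
#     >>> result = shout(lazystr("hi"))  # lazy arg: returns a Promise
#     >>> isinstance(result, Promise)
#     True
#     >>> str(result)
#     'HI'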
def keep_lazy_text(func):
"""
A decorator for functions that accept lazy arguments and return text.
"""
return keep_lazy(str)(func)
empty = object()
def new_method_proxy(func):
def inner(self, *args):
if (_wrapped := self._wrapped) is empty:
self._setup()
_wrapped = self._wrapped
return func(_wrapped, *args)
inner._mask_wrapped = False
return inner
class LazyObject:
"""
A wrapper for another class that can be used to delay instantiation of the
wrapped class.
By subclassing, you have the opportunity to intercept and alter the
instantiation. If you don't need to do that, use SimpleLazyObject.
"""
# Avoid infinite recursion when tracing __init__ (#19456).
_wrapped = None
def __init__(self):
# Note: if a subclass overrides __init__(), it will likely need to
# override __copy__() and __deepcopy__() as well.
self._wrapped = empty
def __getattribute__(self, name):
if name == "_wrapped":
# Avoid recursion when getting wrapped object.
return super().__getattribute__(name)
value = super().__getattribute__(name)
# If attribute is a proxy method, raise an AttributeError to call
# __getattr__() and use the wrapped object method.
if not getattr(value, "_mask_wrapped", True):
raise AttributeError
return value
__getattr__ = new_method_proxy(getattr)
def __setattr__(self, name, value):
if name == "_wrapped":
# Assign to __dict__ to avoid infinite __setattr__ loops.
self.__dict__["_wrapped"] = value
else:
if self._wrapped is empty:
self._setup()
setattr(self._wrapped, name, value)
def __delattr__(self, name):
if name == "_wrapped":
raise TypeError("can't delete _wrapped.")
if self._wrapped is empty:
self._setup()
delattr(self._wrapped, name)
def _setup(self):
"""
Must be implemented by subclasses to initialize the wrapped object.
"""
raise NotImplementedError(
"subclasses of LazyObject must provide a _setup() method"
)
# Because we have messed with __class__ below, we confuse pickle as to what
# class we are pickling. We're going to have to initialize the wrapped
# object to successfully pickle it, so we might as well just pickle the
# wrapped object since they're supposed to act the same way.
#
# Unfortunately, if we try to simply act like the wrapped object, the ruse
# will break down when pickle gets our id(). Thus we end up with pickle
# thinking, in effect, that we are a distinct object from the wrapped
# object, but with the same __dict__. This can cause problems (see #25389).
#
# So instead, we define our own __reduce__ method and custom unpickler. We
# pickle the wrapped object as the unpickler's argument, so that pickle
# will pickle it normally, and then the unpickler simply returns its
# argument.
def __reduce__(self):
if self._wrapped is empty:
self._setup()
return (unpickle_lazyobject, (self._wrapped,))
def __copy__(self):
if self._wrapped is empty:
# If uninitialized, copy the wrapper. Use type(self), not
# self.__class__, because the latter is proxied.
return type(self)()
else:
# If initialized, return a copy of the wrapped object.
return copy.copy(self._wrapped)
def __deepcopy__(self, memo):
if self._wrapped is empty:
# We have to use type(self), not self.__class__, because the
# latter is proxied.
result = type(self)()
memo[id(self)] = result
return result
return copy.deepcopy(self._wrapped, memo)
__bytes__ = new_method_proxy(bytes)
__str__ = new_method_proxy(str)
__bool__ = new_method_proxy(bool)
# Introspection support
__dir__ = new_method_proxy(dir)
# Need to pretend to be the wrapped class, for the sake of objects that
# care about this (especially in equality tests)
__class__ = property(new_method_proxy(operator.attrgetter("__class__")))
__eq__ = new_method_proxy(operator.eq)
__lt__ = new_method_proxy(operator.lt)
__gt__ = new_method_proxy(operator.gt)
__ne__ = new_method_proxy(operator.ne)
__hash__ = new_method_proxy(hash)
# List/Tuple/Dictionary methods support
__getitem__ = new_method_proxy(operator.getitem)
__setitem__ = new_method_proxy(operator.setitem)
__delitem__ = new_method_proxy(operator.delitem)
__iter__ = new_method_proxy(iter)
__len__ = new_method_proxy(len)
__contains__ = new_method_proxy(operator.contains)
def unpickle_lazyobject(wrapped):
"""
Used to unpickle lazy objects. Just return its argument, which will be the
wrapped object.
"""
return wrapped
class SimpleLazyObject(LazyObject):
"""
A lazy object initialized from any function.
Designed for compound objects of unknown type. For builtins or objects of
known type, use django.utils.functional.lazy.
"""
def __init__(self, func):
"""
Pass in a callable that returns the object to be wrapped.
If copies are made of the resulting SimpleLazyObject, which can happen
in various circumstances within Django, then you must ensure that the
callable can be safely run more than once and will return the same
value.
"""
self.__dict__["_setupfunc"] = func
super().__init__()
def _setup(self):
self._wrapped = self._setupfunc()
# Return a meaningful representation of the lazy object for debugging
# without evaluating the wrapped object.
def __repr__(self):
if self._wrapped is empty:
repr_attr = self._setupfunc
else:
repr_attr = self._wrapped
return "<%s: %r>" % (type(self).__name__, repr_attr)
def __copy__(self):
if self._wrapped is empty:
# If uninitialized, copy the wrapper. Use SimpleLazyObject, not
# self.__class__, because the latter is proxied.
return SimpleLazyObject(self._setupfunc)
else:
# If initialized, return a copy of the wrapped object.
return copy.copy(self._wrapped)
def __deepcopy__(self, memo):
if self._wrapped is empty:
# We have to use SimpleLazyObject, not self.__class__, because the
# latter is proxied.
result = SimpleLazyObject(self._setupfunc)
memo[id(self)] = result
return result
return copy.deepcopy(self._wrapped, memo)
__add__ = new_method_proxy(operator.add)
@new_method_proxy
def __radd__(self, other):
return other + self
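# Usage sketch (illustrative):
#
#     >>> def build():
#     ...     print("building")
#     ...     return {"a": 1}
#     >>> obj = SimpleLazyObject(build)
#     >>> obj["a"]  # first access triggers _setup() and the callable
#     building
#     1
#     >>> obj["a"]  # the wrapped object is reused afterwards
#     1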
def partition(predicate, values):
"""
Split the values into two sets, based on the return value of the function
(True/False). e.g.:
>>> partition(lambda x: x > 3, range(5))
[0, 1, 2, 3], [4]
"""
results = ([], [])
for item in values:
results[predicate(item)].append(item)
return results
|
c0a1295ff48d82bdf8de59341714c48663755f7865f44ed0b0cc8deaf4e2aab7 |
import re
from django.core.exceptions import ValidationError
from django.forms.utils import RenderableFieldMixin, pretty_name
from django.forms.widgets import MultiWidget, Textarea, TextInput
from django.utils.functional import cached_property
from django.utils.html import format_html, html_safe
from django.utils.translation import gettext_lazy as _
__all__ = ("BoundField",)
class BoundField(RenderableFieldMixin):
"A Field plus data"
def __init__(self, form, field, name):
self.form = form
self.field = field
self.name = name
self.html_name = form.add_prefix(name)
self.html_initial_name = form.add_initial_prefix(name)
self.html_initial_id = form.add_initial_prefix(self.auto_id)
if self.field.label is None:
self.label = pretty_name(name)
else:
self.label = self.field.label
self.help_text = field.help_text or ""
self.renderer = form.renderer
@cached_property
def subwidgets(self):
"""
Most widgets yield a single subwidget, but others like RadioSelect and
CheckboxSelectMultiple produce one subwidget for each choice.
This property is cached so that only one database query occurs when
rendering ModelChoiceFields.
"""
id_ = self.field.widget.attrs.get("id") or self.auto_id
attrs = {"id": id_} if id_ else {}
attrs = self.build_widget_attrs(attrs)
return [
BoundWidget(self.field.widget, widget, self.form.renderer)
for widget in self.field.widget.subwidgets(
self.html_name, self.value(), attrs=attrs
)
]
def __bool__(self):
# BoundField evaluates to True even if it doesn't have subwidgets.
return True
def __iter__(self):
return iter(self.subwidgets)
def __len__(self):
return len(self.subwidgets)
def __getitem__(self, idx):
# Prevent unnecessary reevaluation when accessing BoundField's attrs
# from templates.
if not isinstance(idx, (int, slice)):
raise TypeError(
"BoundField indices must be integers or slices, not %s."
% type(idx).__name__
)
return self.subwidgets[idx]
@property
def errors(self):
"""
Return an ErrorList (empty if there are no errors) for this field.
"""
return self.form.errors.get(
self.name, self.form.error_class(renderer=self.form.renderer)
)
@property
def template_name(self):
return self.field.template_name or self.form.renderer.field_template_name
def get_context(self):
return {"field": self}
def as_widget(self, widget=None, attrs=None, only_initial=False):
"""
Render the field by rendering the passed widget, adding any HTML
attributes passed as attrs. If a widget isn't specified, use the
field's default widget.
"""
widget = widget or self.field.widget
if self.field.localize:
widget.is_localized = True
attrs = attrs or {}
attrs = self.build_widget_attrs(attrs, widget)
if self.auto_id and "id" not in widget.attrs:
attrs.setdefault(
"id", self.html_initial_id if only_initial else self.auto_id
)
if only_initial and self.html_initial_name in self.form.data:
# Propagate the hidden initial value.
value = self.form._widget_data_value(
self.field.hidden_widget(),
self.html_initial_name,
)
else:
value = self.value()
return widget.render(
name=self.html_initial_name if only_initial else self.html_name,
value=value,
attrs=attrs,
renderer=self.form.renderer,
)
def as_text(self, attrs=None, **kwargs):
"""
Return a string of HTML for representing this as an <input type="text">.
"""
return self.as_widget(TextInput(), attrs, **kwargs)
def as_textarea(self, attrs=None, **kwargs):
"""Return a string of HTML for representing this as a <textarea>."""
return self.as_widget(Textarea(), attrs, **kwargs)
def as_hidden(self, attrs=None, **kwargs):
"""
Return a string of HTML for representing this as an <input type="hidden">.
"""
return self.as_widget(self.field.hidden_widget(), attrs, **kwargs)
@property
def data(self):
"""
Return the data for this BoundField, or None if it wasn't given.
"""
return self.form._widget_data_value(self.field.widget, self.html_name)
def value(self):
"""
Return the value for this BoundField, using the initial value if
the form is not bound or the data otherwise.
"""
data = self.initial
if self.form.is_bound:
data = self.field.bound_data(self.data, data)
return self.field.prepare_value(data)
def _has_changed(self):
field = self.field
if field.show_hidden_initial:
hidden_widget = field.hidden_widget()
initial_value = self.form._widget_data_value(
hidden_widget,
self.html_initial_name,
)
try:
initial_value = field.to_python(initial_value)
except ValidationError:
# Always assume data has changed if validation fails.
return True
else:
initial_value = self.initial
return field.has_changed(initial_value, self.data)
def label_tag(self, contents=None, attrs=None, label_suffix=None, tag=None):
"""
Wrap the given contents in a <label>, if the field has an ID attribute.
contents should be mark_safe'd to avoid HTML escaping. If contents
aren't given, use the field's HTML-escaped label.
If attrs are given, use them as HTML attributes on the <label> tag.
label_suffix overrides the form's label_suffix.
"""
contents = contents or self.label
if label_suffix is None:
label_suffix = (
self.field.label_suffix
if self.field.label_suffix is not None
else self.form.label_suffix
)
# Only add the suffix if the label does not end in punctuation.
# Translators: If found as last label character, these punctuation
# characters will prevent the default label_suffix to be appended to the label
if label_suffix and contents and contents[-1] not in _(":?.!"):
contents = format_html("{}{}", contents, label_suffix)
widget = self.field.widget
id_ = widget.attrs.get("id") or self.auto_id
if id_:
id_for_label = widget.id_for_label(id_)
if id_for_label:
attrs = {**(attrs or {}), "for": id_for_label}
if self.field.required and hasattr(self.form, "required_css_class"):
attrs = attrs or {}
if "class" in attrs:
attrs["class"] += " " + self.form.required_css_class
else:
attrs["class"] = self.form.required_css_class
context = {
"field": self,
"label": contents,
"attrs": attrs,
"use_tag": bool(id_),
"tag": tag or "label",
}
return self.form.render(self.form.template_name_label, context)
def legend_tag(self, contents=None, attrs=None, label_suffix=None):
"""
Wrap the given contents in a <legend>, if the field has an ID
attribute. Contents should be mark_safe'd to avoid HTML escaping. If
contents aren't given, use the field's HTML-escaped label.
If attrs are given, use them as HTML attributes on the <legend> tag.
label_suffix overrides the form's label_suffix.
"""
return self.label_tag(contents, attrs, label_suffix, tag="legend")
def css_classes(self, extra_classes=None):
"""
Return a string of space-separated CSS classes for this field.
"""
if hasattr(extra_classes, "split"):
extra_classes = extra_classes.split()
extra_classes = set(extra_classes or [])
if self.errors and hasattr(self.form, "error_css_class"):
extra_classes.add(self.form.error_css_class)
if self.field.required and hasattr(self.form, "required_css_class"):
extra_classes.add(self.form.required_css_class)
return " ".join(extra_classes)
@property
def is_hidden(self):
"""Return True if this BoundField's widget is hidden."""
return self.field.widget.is_hidden
@property
def auto_id(self):
"""
Calculate and return the ID attribute for this BoundField, if the
associated Form has specified auto_id. Return an empty string otherwise.
"""
auto_id = self.form.auto_id # Boolean or string
if auto_id and "%s" in str(auto_id):
return auto_id % self.html_name
elif auto_id:
return self.html_name
return ""
@property
def id_for_label(self):
"""
Wrapper around the field widget's `id_for_label` method.
Useful, for example, for focusing on this field regardless of whether
it has a single widget or a MultiWidget.
"""
widget = self.field.widget
id_ = widget.attrs.get("id") or self.auto_id
return widget.id_for_label(id_)
@cached_property
def initial(self):
return self.form.get_initial_for_field(self.field, self.name)
def build_widget_attrs(self, attrs, widget=None):
widget = widget or self.field.widget
attrs = dict(attrs) # Copy attrs to avoid modifying the argument.
if (
widget.use_required_attribute(self.initial)
and self.field.required
and self.form.use_required_attribute
):
# MultiValueField has require_all_fields: if False, fall back
# on subfields.
if (
hasattr(self.field, "require_all_fields")
and not self.field.require_all_fields
and isinstance(self.field.widget, MultiWidget)
):
for subfield, subwidget in zip(self.field.fields, widget.widgets):
subwidget.attrs["required"] = (
subwidget.use_required_attribute(self.initial)
and subfield.required
)
else:
attrs["required"] = True
if self.field.disabled:
attrs["disabled"] = True
# If a custom aria-describedby attribute is given and help_text is
# used, the custom aria-describedby is preserved so the user can set
# the desired order.
if custom_aria_described_by_id := widget.attrs.get("aria-describedby"):
attrs["aria-describedby"] = custom_aria_described_by_id
elif self.field.help_text and self.id_for_label:
attrs["aria-describedby"] = f"{self.id_for_label}_helptext"
return attrs
@property
def widget_type(self):
return re.sub(
r"widget$|input$", "", self.field.widget.__class__.__name__.lower()
)
@property
def use_fieldset(self):
"""
Return the value of this BoundField widget's use_fieldset attribute.
"""
return self.field.widget.use_fieldset
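# Usage sketch (illustrative; assumes a configured Django settings module):
#
#     >>> from django import forms
#     >>> class NameForm(forms.Form):
#     ...     name = forms.CharField(max_length=10)
#     >>> bf = NameForm()["name"]  # a BoundField
#     >>> bf.html_name, bf.auto_id, bf.widget_type
#     ('name', 'id_name', 'text')
#     >>> bf.value() is None       # unbound form, no initial
#     True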
@html_safe
class BoundWidget:
"""
A container class used for iterating over widgets. This is useful for
widgets that have choices. For example, the following can be used in a
template:
{% for radio in myform.beatles %}
<label for="{{ radio.id_for_label }}">
{{ radio.choice_label }}
<span class="radio">{{ radio.tag }}</span>
</label>
{% endfor %}
"""
def __init__(self, parent_widget, data, renderer):
self.parent_widget = parent_widget
self.data = data
self.renderer = renderer
def __str__(self):
return self.tag(wrap_label=True)
def tag(self, wrap_label=False):
context = {"widget": {**self.data, "wrap_label": wrap_label}}
return self.parent_widget._render(self.template_name, context, self.renderer)
@property
def template_name(self):
if "template_name" in self.data:
return self.data["template_name"]
return self.parent_widget.template_name
@property
def id_for_label(self):
return self.data["attrs"].get("id")
@property
def choice_label(self):
return self.data["label"]
|
646525fd6101f1308a10642fa90cf4abd3f72b60929948b84c1f6c4c90039ea0 |
"""
Field classes.
"""
import copy
import datetime
import json
import math
import operator
import os
import re
import uuid
import warnings
from decimal import Decimal, DecimalException
from io import BytesIO
from urllib.parse import urlsplit, urlunsplit
from django.core import validators
from django.core.exceptions import ValidationError
from django.db.models.enums import ChoicesMeta
from django.forms.boundfield import BoundField
from django.forms.utils import from_current_timezone, to_current_timezone
from django.forms.widgets import (
FILE_INPUT_CONTRADICTION,
CheckboxInput,
ClearableFileInput,
DateInput,
DateTimeInput,
EmailInput,
FileInput,
HiddenInput,
MultipleHiddenInput,
NullBooleanSelect,
NumberInput,
Select,
SelectMultiple,
SplitDateTimeWidget,
SplitHiddenDateTimeWidget,
Textarea,
TextInput,
TimeInput,
URLInput,
)
from django.utils import formats
from django.utils.dateparse import parse_datetime, parse_duration
from django.utils.deprecation import RemovedInDjango60Warning
from django.utils.duration import duration_string
from django.utils.ipv6 import clean_ipv6_address
from django.utils.regex_helper import _lazy_re_compile
from django.utils.translation import gettext_lazy as _
from django.utils.translation import ngettext_lazy
__all__ = (
"Field",
"CharField",
"IntegerField",
"DateField",
"TimeField",
"DateTimeField",
"DurationField",
"RegexField",
"EmailField",
"FileField",
"ImageField",
"URLField",
"BooleanField",
"NullBooleanField",
"ChoiceField",
"MultipleChoiceField",
"ComboField",
"MultiValueField",
"FloatField",
"DecimalField",
"SplitDateTimeField",
"GenericIPAddressField",
"FilePathField",
"JSONField",
"SlugField",
"TypedChoiceField",
"TypedMultipleChoiceField",
"UUIDField",
)
class Field:
widget = TextInput # Default widget to use when rendering this type of Field.
hidden_widget = (
HiddenInput # Default widget to use when rendering this as "hidden".
)
default_validators = [] # Default set of validators
# Add an 'invalid' entry to default_error_message if you want a specific
# field error message not raised by the field validators.
default_error_messages = {
"required": _("This field is required."),
}
empty_values = list(validators.EMPTY_VALUES)
def __init__(
self,
*,
required=True,
widget=None,
label=None,
initial=None,
help_text="",
error_messages=None,
show_hidden_initial=False,
validators=(),
localize=False,
disabled=False,
label_suffix=None,
template_name=None,
):
# required -- Boolean that specifies whether the field is required.
# True by default.
# widget -- A Widget class, or instance of a Widget class, that should
# be used for this Field when displaying it. Each Field has a
# default Widget that it'll use if you don't specify this. In
# most cases, the default widget is TextInput.
# label -- A verbose name for this field, for use in displaying this
# field in a form. By default, Django will use a "pretty"
# version of the form field name, if the Field is part of a
# Form.
# initial -- A value to use in this Field's initial display. This value
# is *not* used as a fallback if data isn't given.
# help_text -- An optional string to use as "help text" for this Field.
# error_messages -- An optional dictionary to override the default
# messages that the field will raise.
# show_hidden_initial -- Boolean that specifies whether a hidden widget
#               with the initial value should be rendered after the widget.
# validators -- List of additional validators to use
# localize -- Boolean that specifies if the field should be localized.
# disabled -- Boolean that specifies whether the field is disabled, that
# is its widget is shown in the form but not editable.
# label_suffix -- Suffix to be added to the label. Overrides
# form's label_suffix.
self.required, self.label, self.initial = required, label, initial
self.show_hidden_initial = show_hidden_initial
self.help_text = help_text
self.disabled = disabled
self.label_suffix = label_suffix
widget = widget or self.widget
if isinstance(widget, type):
widget = widget()
else:
widget = copy.deepcopy(widget)
# Trigger the localization machinery if needed.
self.localize = localize
if self.localize:
widget.is_localized = True
# Let the widget know whether it should display as required.
widget.is_required = self.required
# Hook into self.widget_attrs() for any Field-specific HTML attributes.
extra_attrs = self.widget_attrs(widget)
if extra_attrs:
widget.attrs.update(extra_attrs)
self.widget = widget
messages = {}
for c in reversed(self.__class__.__mro__):
messages.update(getattr(c, "default_error_messages", {}))
messages.update(error_messages or {})
self.error_messages = messages
self.validators = [*self.default_validators, *validators]
self.template_name = template_name
super().__init__()
def prepare_value(self, value):
return value
def to_python(self, value):
return value
def validate(self, value):
if value in self.empty_values and self.required:
raise ValidationError(self.error_messages["required"], code="required")
def run_validators(self, value):
if value in self.empty_values:
return
errors = []
for v in self.validators:
try:
v(value)
except ValidationError as e:
if hasattr(e, "code") and e.code in self.error_messages:
e.message = self.error_messages[e.code]
errors.extend(e.error_list)
if errors:
raise ValidationError(errors)
def clean(self, value):
"""
Validate the given value and return its "cleaned" value as an
appropriate Python object. Raise ValidationError for any errors.
"""
value = self.to_python(value)
self.validate(value)
self.run_validators(value)
return value
def bound_data(self, data, initial):
"""
Return the value that should be shown for this field on render of a
bound form, given the submitted POST data for the field and the initial
data, if any.
For most fields, this will simply be data; FileFields need to handle it
a bit differently.
"""
if self.disabled:
return initial
return data
def widget_attrs(self, widget):
"""
Given a Widget instance (*not* a Widget class), return a dictionary of
any HTML attributes that should be added to the Widget, based on this
Field.
"""
return {}
def has_changed(self, initial, data):
"""Return True if data differs from initial."""
# Always return False if the field is disabled since self.bound_data
# always uses the initial value in this case.
if self.disabled:
return False
try:
data = self.to_python(data)
if hasattr(self, "_coerce"):
return self._coerce(data) != self._coerce(initial)
except ValidationError:
return True
# For purposes of seeing whether something has changed, None is
# the same as an empty string, if the data or initial value we get
# is None, replace it with ''.
initial_value = initial if initial is not None else ""
data_value = data if data is not None else ""
return initial_value != data_value
def get_bound_field(self, form, field_name):
"""
Return a BoundField instance that will be used when accessing the form
field in a template.
"""
return BoundField(form, self, field_name)
def __deepcopy__(self, memo):
result = copy.copy(self)
memo[id(self)] = result
result.widget = copy.deepcopy(self.widget, memo)
result.error_messages = self.error_messages.copy()
result.validators = self.validators[:]
return result
class CharField(Field):
def __init__(
self, *, max_length=None, min_length=None, strip=True, empty_value="", **kwargs
):
self.max_length = max_length
self.min_length = min_length
self.strip = strip
self.empty_value = empty_value
super().__init__(**kwargs)
if min_length is not None:
self.validators.append(validators.MinLengthValidator(int(min_length)))
if max_length is not None:
self.validators.append(validators.MaxLengthValidator(int(max_length)))
self.validators.append(validators.ProhibitNullCharactersValidator())
def to_python(self, value):
"""Return a string."""
if value not in self.empty_values:
value = str(value)
if self.strip:
value = value.strip()
if value in self.empty_values:
return self.empty_value
return value
def widget_attrs(self, widget):
attrs = super().widget_attrs(widget)
if self.max_length is not None and not widget.is_hidden:
# The HTML attribute is maxlength, not max_length.
attrs["maxlength"] = str(self.max_length)
if self.min_length is not None and not widget.is_hidden:
# The HTML attribute is minlength, not min_length.
attrs["minlength"] = str(self.min_length)
return attrs
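# Behavior sketch (illustrative):
#
#     >>> f = CharField(max_length=5)
#     >>> f.clean("  abc  ")  # strip=True by default
#     'abc'
#     >>> f.widget_attrs(TextInput())
#     {'maxlength': '5'}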
class IntegerField(Field):
widget = NumberInput
default_error_messages = {
"invalid": _("Enter a whole number."),
}
re_decimal = _lazy_re_compile(r"\.0*\s*$")
def __init__(self, *, max_value=None, min_value=None, step_size=None, **kwargs):
self.max_value, self.min_value, self.step_size = max_value, min_value, step_size
if kwargs.get("localize") and self.widget == NumberInput:
# Localized number input is not well supported on most browsers
kwargs.setdefault("widget", super().widget)
super().__init__(**kwargs)
if max_value is not None:
self.validators.append(validators.MaxValueValidator(max_value))
if min_value is not None:
self.validators.append(validators.MinValueValidator(min_value))
if step_size is not None:
self.validators.append(
validators.StepValueValidator(step_size, offset=min_value)
)
def to_python(self, value):
"""
Validate that int() can be called on the input. Return the result
of int() or None for empty values.
"""
value = super().to_python(value)
if value in self.empty_values:
return None
if self.localize:
value = formats.sanitize_separators(value)
# Strip trailing decimal and zeros.
try:
value = int(self.re_decimal.sub("", str(value)))
except (ValueError, TypeError):
raise ValidationError(self.error_messages["invalid"], code="invalid")
return value
def widget_attrs(self, widget):
attrs = super().widget_attrs(widget)
if isinstance(widget, NumberInput):
if self.min_value is not None:
attrs["min"] = self.min_value
if self.max_value is not None:
attrs["max"] = self.max_value
if self.step_size is not None:
attrs["step"] = self.step_size
return attrs
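# Behavior sketch (illustrative):
#
#     >>> f = IntegerField(min_value=0)
#     >>> f.to_python("42.0")  # a trailing ".0" is stripped
#     42
#     >>> f.widget_attrs(NumberInput())
#     {'min': 0}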
class FloatField(IntegerField):
default_error_messages = {
"invalid": _("Enter a number."),
}
def to_python(self, value):
"""
Validate that float() can be called on the input. Return the result
of float() or None for empty values.
"""
value = super(IntegerField, self).to_python(value)
if value in self.empty_values:
return None
if self.localize:
value = formats.sanitize_separators(value)
try:
value = float(value)
except (ValueError, TypeError):
raise ValidationError(self.error_messages["invalid"], code="invalid")
return value
def validate(self, value):
super().validate(value)
if value in self.empty_values:
return
if not math.isfinite(value):
raise ValidationError(self.error_messages["invalid"], code="invalid")
def widget_attrs(self, widget):
attrs = super().widget_attrs(widget)
if isinstance(widget, NumberInput) and "step" not in widget.attrs:
if self.step_size is not None:
step = str(self.step_size)
else:
step = "any"
attrs.setdefault("step", step)
return attrs
class DecimalField(IntegerField):
default_error_messages = {
"invalid": _("Enter a number."),
}
def __init__(
self,
*,
max_value=None,
min_value=None,
max_digits=None,
decimal_places=None,
**kwargs,
):
self.max_digits, self.decimal_places = max_digits, decimal_places
super().__init__(max_value=max_value, min_value=min_value, **kwargs)
self.validators.append(validators.DecimalValidator(max_digits, decimal_places))
def to_python(self, value):
"""
Validate that the input is a decimal number. Return a Decimal
instance or None for empty values. Ensure that there are no more
than max_digits in the number and no more than decimal_places digits
after the decimal point.
"""
if value in self.empty_values:
return None
if self.localize:
value = formats.sanitize_separators(value)
try:
value = Decimal(str(value))
except DecimalException:
raise ValidationError(self.error_messages["invalid"], code="invalid")
return value
def validate(self, value):
super().validate(value)
if value in self.empty_values:
return
if not value.is_finite():
raise ValidationError(
self.error_messages["invalid"],
code="invalid",
params={"value": value},
)
def widget_attrs(self, widget):
attrs = super().widget_attrs(widget)
if isinstance(widget, NumberInput) and "step" not in widget.attrs:
if self.decimal_places is not None:
# Use exponential notation for small values since they might
# be parsed as 0 otherwise. ref #20765
step = str(Decimal(1).scaleb(-self.decimal_places)).lower()
else:
step = "any"
attrs.setdefault("step", step)
return attrs
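# For example (illustrative), two decimal places map to step="0.01":
#
#     >>> DecimalField(decimal_places=2).widget_attrs(NumberInput())
#     {'step': '0.01'}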
class BaseTemporalField(Field):
def __init__(self, *, input_formats=None, **kwargs):
super().__init__(**kwargs)
if input_formats is not None:
self.input_formats = input_formats
def to_python(self, value):
value = value.strip()
# Try to strptime against each input format.
for format in self.input_formats:
try:
return self.strptime(value, format)
except (ValueError, TypeError):
continue
raise ValidationError(self.error_messages["invalid"], code="invalid")
def strptime(self, value, format):
raise NotImplementedError("Subclasses must define this method.")
class DateField(BaseTemporalField):
widget = DateInput
input_formats = formats.get_format_lazy("DATE_INPUT_FORMATS")
default_error_messages = {
"invalid": _("Enter a valid date."),
}
def to_python(self, value):
"""
Validate that the input can be converted to a date. Return a Python
datetime.date object.
"""
if value in self.empty_values:
return None
if isinstance(value, datetime.datetime):
return value.date()
if isinstance(value, datetime.date):
return value
return super().to_python(value)
def strptime(self, value, format):
return datetime.datetime.strptime(value, format).date()
class TimeField(BaseTemporalField):
widget = TimeInput
input_formats = formats.get_format_lazy("TIME_INPUT_FORMATS")
default_error_messages = {"invalid": _("Enter a valid time.")}
def to_python(self, value):
"""
Validate that the input can be converted to a time. Return a Python
datetime.time object.
"""
if value in self.empty_values:
return None
if isinstance(value, datetime.time):
return value
return super().to_python(value)
def strptime(self, value, format):
return datetime.datetime.strptime(value, format).time()
class DateTimeFormatsIterator:
def __iter__(self):
yield from formats.get_format("DATETIME_INPUT_FORMATS")
yield from formats.get_format("DATE_INPUT_FORMATS")
class DateTimeField(BaseTemporalField):
widget = DateTimeInput
input_formats = DateTimeFormatsIterator()
default_error_messages = {
"invalid": _("Enter a valid date/time."),
}
def prepare_value(self, value):
if isinstance(value, datetime.datetime):
value = to_current_timezone(value)
return value
def to_python(self, value):
"""
Validate that the input can be converted to a datetime. Return a
Python datetime.datetime object.
"""
if value in self.empty_values:
return None
if isinstance(value, datetime.datetime):
return from_current_timezone(value)
if isinstance(value, datetime.date):
result = datetime.datetime(value.year, value.month, value.day)
return from_current_timezone(result)
try:
result = parse_datetime(value.strip())
except ValueError:
raise ValidationError(self.error_messages["invalid"], code="invalid")
if not result:
result = super().to_python(value)
return from_current_timezone(result)
def strptime(self, value, format):
return datetime.datetime.strptime(value, format)
class DurationField(Field):
default_error_messages = {
"invalid": _("Enter a valid duration."),
"overflow": _("The number of days must be between {min_days} and {max_days}."),
}
def prepare_value(self, value):
if isinstance(value, datetime.timedelta):
return duration_string(value)
return value
def to_python(self, value):
if value in self.empty_values:
return None
if isinstance(value, datetime.timedelta):
return value
try:
value = parse_duration(str(value))
except OverflowError:
raise ValidationError(
self.error_messages["overflow"].format(
min_days=datetime.timedelta.min.days,
max_days=datetime.timedelta.max.days,
),
code="overflow",
)
if value is None:
raise ValidationError(self.error_messages["invalid"], code="invalid")
return value
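# Behavior sketch (illustrative):
#
#     >>> import datetime
#     >>> f = DurationField()
#     >>> f.to_python("1 12:00:00")
#     datetime.timedelta(days=1, seconds=43200)
#     >>> f.prepare_value(datetime.timedelta(hours=36))
#     '1 12:00:00'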
class RegexField(CharField):
def __init__(self, regex, **kwargs):
"""
regex can be either a string or a compiled regular expression object.
"""
kwargs.setdefault("strip", False)
super().__init__(**kwargs)
self._set_regex(regex)
def _get_regex(self):
return self._regex
def _set_regex(self, regex):
if isinstance(regex, str):
regex = re.compile(regex)
self._regex = regex
if (
hasattr(self, "_regex_validator")
and self._regex_validator in self.validators
):
self.validators.remove(self._regex_validator)
self._regex_validator = validators.RegexValidator(regex=regex)
self.validators.append(self._regex_validator)
regex = property(_get_regex, _set_regex)
class EmailField(CharField):
widget = EmailInput
default_validators = [validators.validate_email]
def __init__(self, **kwargs):
# The default maximum length of an email is 320 characters per RFC 3696
# section 3.
kwargs.setdefault("max_length", 320)
super().__init__(strip=True, **kwargs)
class FileField(Field):
widget = ClearableFileInput
default_error_messages = {
"invalid": _("No file was submitted. Check the encoding type on the form."),
"missing": _("No file was submitted."),
"empty": _("The submitted file is empty."),
"max_length": ngettext_lazy(
"Ensure this filename has at most %(max)d character (it has %(length)d).",
"Ensure this filename has at most %(max)d characters (it has %(length)d).",
"max",
),
"contradiction": _(
"Please either submit a file or check the clear checkbox, not both."
),
}
def __init__(self, *, max_length=None, allow_empty_file=False, **kwargs):
self.max_length = max_length
self.allow_empty_file = allow_empty_file
super().__init__(**kwargs)
def to_python(self, data):
if data in self.empty_values:
return None
# UploadedFile objects should have name and size attributes.
try:
file_name = data.name
file_size = data.size
except AttributeError:
raise ValidationError(self.error_messages["invalid"], code="invalid")
if self.max_length is not None and len(file_name) > self.max_length:
params = {"max": self.max_length, "length": len(file_name)}
raise ValidationError(
self.error_messages["max_length"], code="max_length", params=params
)
if not file_name:
raise ValidationError(self.error_messages["invalid"], code="invalid")
if not self.allow_empty_file and not file_size:
raise ValidationError(self.error_messages["empty"], code="empty")
return data
def clean(self, data, initial=None):
# If the widget got contradictory inputs, we raise a validation error
if data is FILE_INPUT_CONTRADICTION:
raise ValidationError(
self.error_messages["contradiction"], code="contradiction"
)
# False means the field value should be cleared; further validation is
# not needed.
if data is False:
if not self.required:
return False
# If the field is required, clearing is not possible (the widget
# shouldn't return False data in that case anyway). False is not
# in self.empty_values; if a False value makes it this far,
# it should be validated from here on out as None (so it will be
# caught by the required check).
data = None
if not data and initial:
return initial
return super().clean(data)
def bound_data(self, _, initial):
return initial
def has_changed(self, initial, data):
return not self.disabled and data is not None
class ImageField(FileField):
default_validators = [validators.validate_image_file_extension]
default_error_messages = {
"invalid_image": _(
"Upload a valid image. The file you uploaded was either not an "
"image or a corrupted image."
),
}
def to_python(self, data):
"""
Check that the file-upload field data contains a valid image (GIF, JPG,
PNG, etc. -- whatever Pillow supports).
"""
f = super().to_python(data)
if f is None:
return None
from PIL import Image
# We need to get a file object for Pillow. We might have a path or we might
# have to read the data into memory.
if hasattr(data, "temporary_file_path"):
file = data.temporary_file_path()
else:
if hasattr(data, "read"):
file = BytesIO(data.read())
else:
file = BytesIO(data["content"])
try:
# load() could spot a truncated JPEG, but it loads the entire
# image in memory, which is a DoS vector. See #3848 and #18520.
image = Image.open(file)
# verify() must be called immediately after the constructor.
image.verify()
# Annotating so subclasses can reuse it for their own validation
f.image = image
# Pillow doesn't detect the MIME type of all formats. In those
# cases, content_type will be None.
f.content_type = Image.MIME.get(image.format)
except Exception as exc:
# Pillow doesn't recognize it as an image.
raise ValidationError(
self.error_messages["invalid_image"],
code="invalid_image",
) from exc
if hasattr(f, "seek") and callable(f.seek):
f.seek(0)
return f
def widget_attrs(self, widget):
attrs = super().widget_attrs(widget)
if isinstance(widget, FileInput) and "accept" not in widget.attrs:
attrs.setdefault("accept", "image/*")
return attrs
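# Usage sketch (illustrative, not part of this module): ImageField combines
# Pillow-based server-side validation with a client-side hint, since
# widget_attrs() above adds accept="image/*" to bare FileInput widgets.
#
#     from django import forms
#
#     class AvatarForm(forms.Form):  # hypothetical form
#         avatar = forms.ImageField()
#
#     # Uploads that Pillow cannot verify() as images fail clean() with the
#     # "invalid_image" error.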
class URLField(CharField):
widget = URLInput
default_error_messages = {
"invalid": _("Enter a valid URL."),
}
default_validators = [validators.URLValidator()]
def __init__(self, *, assume_scheme=None, **kwargs):
if assume_scheme is None:
warnings.warn(
"The default scheme will be changed from 'http' to 'https' in Django "
"6.0. Pass the forms.URLField.assume_scheme argument to silence this "
"warning.",
RemovedInDjango60Warning,
stacklevel=2,
)
assume_scheme = "http"
# RemovedInDjango60Warning: When the deprecation ends, replace with:
# self.assume_scheme = assume_scheme or "https"
self.assume_scheme = assume_scheme
super().__init__(strip=True, **kwargs)
def to_python(self, value):
def split_url(url):
"""
            Return a list of url parts via urllib.parse.urlsplit(), or raise
ValidationError for some malformed URLs.
"""
try:
return list(urlsplit(url))
except ValueError:
                # urllib.parse.urlsplit can raise a ValueError with some
                # malformed URLs.
raise ValidationError(self.error_messages["invalid"], code="invalid")
value = super().to_python(value)
if value:
url_fields = split_url(value)
if not url_fields[0]:
# If no URL scheme given, add a scheme.
url_fields[0] = self.assume_scheme
if not url_fields[1]:
                # Assume that if no domain is provided, the path segment
                # contains the domain.
url_fields[1] = url_fields[2]
url_fields[2] = ""
# Rebuild the url_fields list, since the domain segment may now
# contain the path too.
url_fields = split_url(urlunsplit(url_fields))
value = urlunsplit(url_fields)
return value
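# Usage sketch (illustrative): with an explicit assume_scheme, scheme-less
# input is completed, and a bare host that urlsplit() parsed into the path
# segment is promoted to the network location before validators run.
#
#     field = URLField(assume_scheme="https")
#     field.clean("example.com/docs")  # "https://example.com/docs"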
class BooleanField(Field):
widget = CheckboxInput
def to_python(self, value):
"""Return a Python boolean object."""
# Explicitly check for the string 'False', which is what a hidden field
# will submit for False. Also check for '0', since this is what
# RadioSelect will provide. Because bool("True") == bool('1') == True,
# we don't need to handle that explicitly.
if isinstance(value, str) and value.lower() in ("false", "0"):
value = False
else:
value = bool(value)
return super().to_python(value)
def validate(self, value):
if not value and self.required:
raise ValidationError(self.error_messages["required"], code="required")
def has_changed(self, initial, data):
if self.disabled:
return False
# Sometimes data or initial may be a string equivalent of a boolean
# so we should run it through to_python first to get a boolean value
return self.to_python(initial) != self.to_python(data)
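# Usage sketch (illustrative): to_python() special-cases the strings that
# hidden inputs ("False") and RadioSelect ("0") submit; any other non-empty
# string is truthy.
#
#     f = BooleanField(required=False)
#     f.to_python("false")  # False
#     f.to_python("0")      # False
#     f.to_python("on")     # True (checkboxes submit "on")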
class NullBooleanField(BooleanField):
"""
A field whose valid values are None, True, and False. Clean invalid values
to None.
"""
widget = NullBooleanSelect
def to_python(self, value):
"""
        Explicitly check for the strings 'True' and 'False', which is what a
        hidden field will submit for True and False, for 'true' and 'false',
        which are likely to be returned by JavaScript serializations of
        forms, and for '1' and '0', which is what a RadioSelect will submit.
        Unlike BooleanField, this field must check for True because it
        doesn't use the bool() function.
"""
if value in (True, "True", "true", "1"):
return True
elif value in (False, "False", "false", "0"):
return False
else:
return None
def validate(self, value):
pass
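# Usage sketch (illustrative): NullBooleanField maps the recognized strings
# to True/False and cleans anything else, including "unknown", to None.
#
#     f = NullBooleanField()
#     f.to_python("true")     # True
#     f.to_python("0")        # False
#     f.to_python("unknown")  # None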
class CallableChoiceIterator:
def __init__(self, choices_func):
self.choices_func = choices_func
def __iter__(self):
yield from self.choices_func()
class ChoiceField(Field):
widget = Select
default_error_messages = {
"invalid_choice": _(
"Select a valid choice. %(value)s is not one of the available choices."
),
}
def __init__(self, *, choices=(), **kwargs):
super().__init__(**kwargs)
if isinstance(choices, ChoicesMeta):
choices = choices.choices
self.choices = choices
def __deepcopy__(self, memo):
result = super().__deepcopy__(memo)
result._choices = copy.deepcopy(self._choices, memo)
return result
def _get_choices(self):
return self._choices
def _set_choices(self, value):
# Setting choices also sets the choices on the widget.
# choices can be any iterable, but we call list() on it because
# it will be consumed more than once.
if callable(value):
value = CallableChoiceIterator(value)
else:
value = list(value)
self._choices = self.widget.choices = value
choices = property(_get_choices, _set_choices)
def to_python(self, value):
"""Return a string."""
if value in self.empty_values:
return ""
return str(value)
def validate(self, value):
"""Validate that the input is in self.choices."""
super().validate(value)
if value and not self.valid_value(value):
raise ValidationError(
self.error_messages["invalid_choice"],
code="invalid_choice",
params={"value": value},
)
def valid_value(self, value):
"""Check to see if the provided value is a valid choice."""
text_value = str(value)
for k, v in self.choices:
if isinstance(v, (list, tuple)):
# This is an optgroup, so look inside the group for options
for k2, v2 in v:
if value == k2 or text_value == str(k2):
return True
else:
if value == k or text_value == str(k):
return True
return False
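# Usage sketch (illustrative): passing a callable for choices defers their
# evaluation; CallableChoiceIterator re-invokes the callable on every
# iteration, so database-derived choices stay current per form render.
#
#     def year_choices():  # hypothetical helper
#         return [(y, str(y)) for y in range(2020, 2025)]
#
#     field = ChoiceField(choices=year_choices)
#     field.valid_value("2021")  # True; keys are compared via str()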
class TypedChoiceField(ChoiceField):
def __init__(self, *, coerce=lambda val: val, empty_value="", **kwargs):
self.coerce = coerce
self.empty_value = empty_value
super().__init__(**kwargs)
def _coerce(self, value):
"""
Validate that the value can be coerced to the right type (if not empty).
"""
if value == self.empty_value or value in self.empty_values:
return self.empty_value
try:
value = self.coerce(value)
except (ValueError, TypeError, ValidationError):
raise ValidationError(
self.error_messages["invalid_choice"],
code="invalid_choice",
params={"value": value},
)
return value
def clean(self, value):
value = super().clean(value)
return self._coerce(value)
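# Usage sketch (illustrative): the choice check runs on strings, then coerce
# converts the validated value back to the desired Python type.
#
#     field = TypedChoiceField(choices=[(1, "one"), (2, "two")], coerce=int)
#     field.clean("2")  # 2 (an int, not the submitted string)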
class MultipleChoiceField(ChoiceField):
hidden_widget = MultipleHiddenInput
widget = SelectMultiple
default_error_messages = {
"invalid_choice": _(
"Select a valid choice. %(value)s is not one of the available choices."
),
"invalid_list": _("Enter a list of values."),
}
def to_python(self, value):
if not value:
return []
elif not isinstance(value, (list, tuple)):
raise ValidationError(
self.error_messages["invalid_list"], code="invalid_list"
)
return [str(val) for val in value]
def validate(self, value):
"""Validate that the input is a list or tuple."""
if self.required and not value:
raise ValidationError(self.error_messages["required"], code="required")
# Validate that each value in the value list is in self.choices.
for val in value:
if not self.valid_value(val):
raise ValidationError(
self.error_messages["invalid_choice"],
code="invalid_choice",
params={"value": val},
)
def has_changed(self, initial, data):
if self.disabled:
return False
if initial is None:
initial = []
if data is None:
data = []
if len(initial) != len(data):
return True
initial_set = {str(value) for value in initial}
data_set = {str(value) for value in data}
return data_set != initial_set
class TypedMultipleChoiceField(MultipleChoiceField):
def __init__(self, *, coerce=lambda val: val, **kwargs):
self.coerce = coerce
self.empty_value = kwargs.pop("empty_value", [])
super().__init__(**kwargs)
def _coerce(self, value):
"""
Validate that the values are in self.choices and can be coerced to the
right type.
"""
if value == self.empty_value or value in self.empty_values:
return self.empty_value
new_value = []
for choice in value:
try:
new_value.append(self.coerce(choice))
except (ValueError, TypeError, ValidationError):
raise ValidationError(
self.error_messages["invalid_choice"],
code="invalid_choice",
params={"value": choice},
)
return new_value
def clean(self, value):
value = super().clean(value)
return self._coerce(value)
def validate(self, value):
if value != self.empty_value:
super().validate(value)
elif self.required:
raise ValidationError(self.error_messages["required"], code="required")
class ComboField(Field):
"""
A Field whose clean() method calls multiple Field clean() methods.
"""
def __init__(self, fields, **kwargs):
super().__init__(**kwargs)
# Set 'required' to False on the individual fields, because the
# required validation will be handled by ComboField, not by those
# individual fields.
for f in fields:
f.required = False
self.fields = fields
def clean(self, value):
"""
Validate the given value against all of self.fields, which is a
list of Field instances.
"""
super().clean(value)
for field in self.fields:
value = field.clean(value)
return value
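# Usage sketch (illustrative): each subfield's clean() runs in order on the
# same value, so this field enforces both a length cap and email syntax.
#
#     f = ComboField(fields=[CharField(max_length=20), EmailField()])
#     f.clean("test@example.com")              # "test@example.com"
#     f.clean("longemailaddress@example.com")  # ValidationError (too long)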
class MultiValueField(Field):
"""
Aggregate the logic of multiple Fields.
Its clean() method takes a "decompressed" list of values, which are then
cleaned into a single value according to self.fields. Each value in
this list is cleaned by the corresponding field -- the first value is
cleaned by the first field, the second value is cleaned by the second
field, etc. Once all fields are cleaned, the list of clean values is
"compressed" into a single value.
Subclasses should not have to implement clean(). Instead, they must
implement compress(), which takes a list of valid values and returns a
"compressed" version of those values -- a single value.
You'll probably want to use this with MultiWidget.
"""
default_error_messages = {
"invalid": _("Enter a list of values."),
"incomplete": _("Enter a complete value."),
}
def __init__(self, fields, *, require_all_fields=True, **kwargs):
self.require_all_fields = require_all_fields
super().__init__(**kwargs)
for f in fields:
f.error_messages.setdefault("incomplete", self.error_messages["incomplete"])
if self.disabled:
f.disabled = True
if self.require_all_fields:
# Set 'required' to False on the individual fields, because the
# required validation will be handled by MultiValueField, not
# by those individual fields.
f.required = False
self.fields = fields
def __deepcopy__(self, memo):
result = super().__deepcopy__(memo)
result.fields = tuple(x.__deepcopy__(memo) for x in self.fields)
return result
def validate(self, value):
pass
def clean(self, value):
"""
Validate every value in the given list. A value is validated against
the corresponding Field in self.fields.
For example, if this MultiValueField was instantiated with
fields=(DateField(), TimeField()), clean() would call
DateField.clean(value[0]) and TimeField.clean(value[1]).
"""
clean_data = []
errors = []
if self.disabled and not isinstance(value, list):
value = self.widget.decompress(value)
if not value or isinstance(value, (list, tuple)):
if not value or not [v for v in value if v not in self.empty_values]:
if self.required:
raise ValidationError(
self.error_messages["required"], code="required"
)
else:
return self.compress([])
else:
raise ValidationError(self.error_messages["invalid"], code="invalid")
for i, field in enumerate(self.fields):
try:
field_value = value[i]
except IndexError:
field_value = None
if field_value in self.empty_values:
if self.require_all_fields:
# Raise a 'required' error if the MultiValueField is
# required and any field is empty.
if self.required:
raise ValidationError(
self.error_messages["required"], code="required"
)
elif field.required:
# Otherwise, add an 'incomplete' error to the list of
# collected errors and skip field cleaning, if a required
# field is empty.
if field.error_messages["incomplete"] not in errors:
errors.append(field.error_messages["incomplete"])
continue
try:
clean_data.append(field.clean(field_value))
except ValidationError as e:
# Collect all validation errors in a single list, which we'll
# raise at the end of clean(), rather than raising a single
# exception for the first error we encounter. Skip duplicates.
errors.extend(m for m in e.error_list if m not in errors)
if errors:
raise ValidationError(errors)
out = self.compress(clean_data)
self.validate(out)
self.run_validators(out)
return out
def compress(self, data_list):
"""
Return a single value for the given list of values. The values can be
assumed to be valid.
For example, if this MultiValueField was instantiated with
fields=(DateField(), TimeField()), this might return a datetime
object created by combining the date and time in data_list.
"""
raise NotImplementedError("Subclasses must implement this method.")
def has_changed(self, initial, data):
if self.disabled:
return False
if initial is None:
initial = ["" for x in range(0, len(data))]
else:
if not isinstance(initial, list):
initial = self.widget.decompress(initial)
for field, initial, data in zip(self.fields, initial, data):
try:
initial = field.to_python(initial)
except ValidationError:
return True
if field.has_changed(initial, data):
return True
return False
class FilePathField(ChoiceField):
def __init__(
self,
path,
*,
match=None,
recursive=False,
allow_files=True,
allow_folders=False,
**kwargs,
):
self.path, self.match, self.recursive = path, match, recursive
self.allow_files, self.allow_folders = allow_files, allow_folders
super().__init__(choices=(), **kwargs)
if self.required:
self.choices = []
else:
self.choices = [("", "---------")]
if self.match is not None:
self.match_re = re.compile(self.match)
if recursive:
for root, dirs, files in sorted(os.walk(self.path)):
if self.allow_files:
for f in sorted(files):
if self.match is None or self.match_re.search(f):
f = os.path.join(root, f)
self.choices.append((f, f.replace(path, "", 1)))
if self.allow_folders:
for f in sorted(dirs):
if f == "__pycache__":
continue
if self.match is None or self.match_re.search(f):
f = os.path.join(root, f)
self.choices.append((f, f.replace(path, "", 1)))
else:
choices = []
with os.scandir(self.path) as entries:
for f in entries:
if f.name == "__pycache__":
continue
if (
(self.allow_files and f.is_file())
or (self.allow_folders and f.is_dir())
) and (self.match is None or self.match_re.search(f.name)):
choices.append((f.path, f.name))
choices.sort(key=operator.itemgetter(1))
self.choices.extend(choices)
self.widget.choices = self.choices
class SplitDateTimeField(MultiValueField):
widget = SplitDateTimeWidget
hidden_widget = SplitHiddenDateTimeWidget
default_error_messages = {
"invalid_date": _("Enter a valid date."),
"invalid_time": _("Enter a valid time."),
}
def __init__(self, *, input_date_formats=None, input_time_formats=None, **kwargs):
errors = self.default_error_messages.copy()
if "error_messages" in kwargs:
errors.update(kwargs["error_messages"])
localize = kwargs.get("localize", False)
fields = (
DateField(
input_formats=input_date_formats,
error_messages={"invalid": errors["invalid_date"]},
localize=localize,
),
TimeField(
input_formats=input_time_formats,
error_messages={"invalid": errors["invalid_time"]},
localize=localize,
),
)
super().__init__(fields, **kwargs)
def compress(self, data_list):
if data_list:
# Raise a validation error if time or date is empty
# (possible if SplitDateTimeField has required=False).
if data_list[0] in self.empty_values:
raise ValidationError(
self.error_messages["invalid_date"], code="invalid_date"
)
if data_list[1] in self.empty_values:
raise ValidationError(
self.error_messages["invalid_time"], code="invalid_time"
)
result = datetime.datetime.combine(*data_list)
return from_current_timezone(result)
return None
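# Usage sketch (illustrative, assuming default input formats): the widget
# decompresses a datetime into a [date, time] pair, and compress() recombines
# the cleaned halves.
#
#     f = SplitDateTimeField()
#     f.clean(["2023-04-25", "14:30"])  # datetime(2023, 4, 25, 14, 30),
#                                       # made aware if USE_TZ is enabled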
class GenericIPAddressField(CharField):
def __init__(self, *, protocol="both", unpack_ipv4=False, **kwargs):
self.unpack_ipv4 = unpack_ipv4
self.default_validators = validators.ip_address_validators(
protocol, unpack_ipv4
)[0]
super().__init__(**kwargs)
def to_python(self, value):
if value in self.empty_values:
return ""
value = value.strip()
if value and ":" in value:
return clean_ipv6_address(value, self.unpack_ipv4)
return value
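# Usage sketch (illustrative): with protocol="both" and unpack_ipv4=True,
# an IPv4-mapped IPv6 address is normalized back to dotted-quad form.
#
#     f = GenericIPAddressField(protocol="both", unpack_ipv4=True)
#     f.clean("::ffff:192.0.2.1")  # "192.0.2.1"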
class SlugField(CharField):
default_validators = [validators.validate_slug]
def __init__(self, *, allow_unicode=False, **kwargs):
self.allow_unicode = allow_unicode
if self.allow_unicode:
self.default_validators = [validators.validate_unicode_slug]
super().__init__(**kwargs)
class UUIDField(CharField):
default_error_messages = {
"invalid": _("Enter a valid UUID."),
}
def prepare_value(self, value):
if isinstance(value, uuid.UUID):
return str(value)
return value
def to_python(self, value):
value = super().to_python(value)
if value in self.empty_values:
return None
if not isinstance(value, uuid.UUID):
try:
value = uuid.UUID(value)
except ValueError:
raise ValidationError(self.error_messages["invalid"], code="invalid")
return value
class InvalidJSONInput(str):
pass
class JSONString(str):
pass
class JSONField(CharField):
default_error_messages = {
"invalid": _("Enter a valid JSON."),
}
widget = Textarea
def __init__(self, encoder=None, decoder=None, **kwargs):
self.encoder = encoder
self.decoder = decoder
super().__init__(**kwargs)
def to_python(self, value):
if self.disabled:
return value
if value in self.empty_values:
return None
elif isinstance(value, (list, dict, int, float, JSONString)):
return value
try:
converted = json.loads(value, cls=self.decoder)
except json.JSONDecodeError:
raise ValidationError(
self.error_messages["invalid"],
code="invalid",
params={"value": value},
)
if isinstance(converted, str):
return JSONString(converted)
else:
return converted
def bound_data(self, data, initial):
if self.disabled:
return initial
if data is None:
return None
try:
return json.loads(data, cls=self.decoder)
except json.JSONDecodeError:
return InvalidJSONInput(data)
def prepare_value(self, value):
if isinstance(value, InvalidJSONInput):
return value
return json.dumps(value, ensure_ascii=False, cls=self.encoder)
def has_changed(self, initial, data):
if super().has_changed(initial, data):
return True
# For purposes of seeing whether something has changed, True isn't the
# same as 1 and the order of keys doesn't matter.
return json.dumps(initial, sort_keys=True, cls=self.encoder) != json.dumps(
self.to_python(data), sort_keys=True, cls=self.encoder
)
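# Usage sketch (illustrative): valid JSON text decodes to Python objects,
# and a decoded string is wrapped in JSONString so prepare_value() can tell
# it apart from raw, not-yet-decoded input.
#
#     f = JSONField()
#     f.clean('{"a": 1}')  # {"a": 1}
#     f.clean('"hello"')   # JSONString("hello")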
|
c89559655fc182dedf0580b835b726d48b0834cb8444aed441b74645cf09724c | import ipaddress
import math
import re
from pathlib import Path
from urllib.parse import urlsplit, urlunsplit
from django.core.exceptions import ValidationError
from django.utils.deconstruct import deconstructible
from django.utils.encoding import punycode
from django.utils.ipv6 import is_valid_ipv6_address
from django.utils.regex_helper import _lazy_re_compile
from django.utils.translation import gettext_lazy as _
from django.utils.translation import ngettext_lazy
# These values, if given to validate(), will trigger the self.required check.
EMPTY_VALUES = (None, "", [], (), {})
@deconstructible
class RegexValidator:
regex = ""
message = _("Enter a valid value.")
code = "invalid"
inverse_match = False
flags = 0
def __init__(
self, regex=None, message=None, code=None, inverse_match=None, flags=None
):
if regex is not None:
self.regex = regex
if message is not None:
self.message = message
if code is not None:
self.code = code
if inverse_match is not None:
self.inverse_match = inverse_match
if flags is not None:
self.flags = flags
if self.flags and not isinstance(self.regex, str):
raise TypeError(
"If the flags are set, regex must be a regular expression string."
)
self.regex = _lazy_re_compile(self.regex, self.flags)
def __call__(self, value):
"""
Validate that the input contains (or does *not* contain, if
inverse_match is True) a match for the regular expression.
"""
regex_matches = self.regex.search(str(value))
invalid_input = regex_matches if self.inverse_match else not regex_matches
if invalid_input:
raise ValidationError(self.message, code=self.code, params={"value": value})
def __eq__(self, other):
return (
isinstance(other, RegexValidator)
and self.regex.pattern == other.regex.pattern
and self.regex.flags == other.regex.flags
and (self.message == other.message)
and (self.code == other.code)
and (self.inverse_match == other.inverse_match)
)
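# Usage sketch (illustrative): inverse_match flips the test, so this
# validator rejects any value containing a digit. Since search() rather than
# match() is used, the pattern may hit anywhere in the string.
#
#     no_digits = RegexValidator(
#         regex=r"\d", inverse_match=True, message="Digits are not allowed."
#     )
#     no_digits("abc")  # passes silently
#     no_digits("ab1")  # raises ValidationError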
@deconstructible
class URLValidator(RegexValidator):
ul = "\u00a1-\uffff" # Unicode letters range (must not be a raw string).
# IP patterns
ipv4_re = (
r"(?:0|25[0-5]|2[0-4][0-9]|1[0-9]?[0-9]?|[1-9][0-9]?)"
r"(?:\.(?:0|25[0-5]|2[0-4][0-9]|1[0-9]?[0-9]?|[1-9][0-9]?)){3}"
)
ipv6_re = r"\[[0-9a-f:.]+\]" # (simple regex, validated later)
# Host patterns
hostname_re = (
r"[a-z" + ul + r"0-9](?:[a-z" + ul + r"0-9-]{0,61}[a-z" + ul + r"0-9])?"
)
# Max length for domain name labels is 63 characters per RFC 1034 sec. 3.1
domain_re = r"(?:\.(?!-)[a-z" + ul + r"0-9-]{1,63}(?<!-))*"
tld_re = (
r"\." # dot
r"(?!-)" # can't start with a dash
r"(?:[a-z" + ul + "-]{2,63}" # domain label
r"|xn--[a-z0-9]{1,59})" # or punycode label
r"(?<!-)" # can't end with a dash
r"\.?" # may have a trailing dot
)
host_re = "(" + hostname_re + domain_re + tld_re + "|localhost)"
regex = _lazy_re_compile(
r"^(?:[a-z0-9.+-]*)://" # scheme is validated separately
r"(?:[^\s:@/]+(?::[^\s:@/]*)?@)?" # user:pass authentication
r"(?:" + ipv4_re + "|" + ipv6_re + "|" + host_re + ")"
r"(?::[0-9]{1,5})?" # port
r"(?:[/?#][^\s]*)?" # resource path
r"\Z",
re.IGNORECASE,
)
message = _("Enter a valid URL.")
schemes = ["http", "https", "ftp", "ftps"]
unsafe_chars = frozenset("\t\r\n")
max_length = 2048
def __init__(self, schemes=None, **kwargs):
super().__init__(**kwargs)
if schemes is not None:
self.schemes = schemes
def __call__(self, value):
if not isinstance(value, str) or len(value) > self.max_length:
raise ValidationError(self.message, code=self.code, params={"value": value})
if self.unsafe_chars.intersection(value):
raise ValidationError(self.message, code=self.code, params={"value": value})
# Check if the scheme is valid.
scheme = value.split("://")[0].lower()
if scheme not in self.schemes:
raise ValidationError(self.message, code=self.code, params={"value": value})
# Then check full URL
try:
splitted_url = urlsplit(value)
except ValueError:
raise ValidationError(self.message, code=self.code, params={"value": value})
try:
super().__call__(value)
except ValidationError as e:
# Trivial case failed. Try for possible IDN domain
if value:
scheme, netloc, path, query, fragment = splitted_url
try:
netloc = punycode(netloc) # IDN -> ACE
except UnicodeError: # invalid domain part
raise e
url = urlunsplit((scheme, netloc, path, query, fragment))
super().__call__(url)
else:
raise
else:
# Now verify IPv6 in the netloc part
host_match = re.search(r"^\[(.+)\](?::[0-9]{1,5})?$", splitted_url.netloc)
if host_match:
potential_ip = host_match[1]
try:
validate_ipv6_address(potential_ip)
except ValidationError:
raise ValidationError(
self.message, code=self.code, params={"value": value}
)
# The maximum length of a full host name is 253 characters per RFC 1034
# section 3.1. It's defined to be 255 bytes or less, but this includes
# one byte for the length of the name and one byte for the trailing dot
# that's used to indicate absolute names in DNS.
if splitted_url.hostname is None or len(splitted_url.hostname) > 253:
raise ValidationError(self.message, code=self.code, params={"value": value})
integer_validator = RegexValidator(
_lazy_re_compile(r"^-?\d+\Z"),
message=_("Enter a valid integer."),
code="invalid",
)
def validate_integer(value):
return integer_validator(value)
@deconstructible
class EmailValidator:
message = _("Enter a valid email address.")
code = "invalid"
user_regex = _lazy_re_compile(
# dot-atom
r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*\Z"
# quoted-string
r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-\011\013\014\016-\177])'
r'*"\Z)',
re.IGNORECASE,
)
domain_regex = _lazy_re_compile(
# max length for domain name labels is 63 characters per RFC 1034
r"((?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+)(?:[A-Z0-9-]{2,63}(?<!-))\Z",
re.IGNORECASE,
)
literal_regex = _lazy_re_compile(
# literal form, ipv4 or ipv6 address (SMTP 4.1.3)
r"\[([A-F0-9:.]+)\]\Z",
re.IGNORECASE,
)
domain_allowlist = ["localhost"]
def __init__(self, message=None, code=None, allowlist=None):
if message is not None:
self.message = message
if code is not None:
self.code = code
if allowlist is not None:
self.domain_allowlist = allowlist
def __call__(self, value):
# The maximum length of an email is 320 characters per RFC 3696
# section 3.
if not value or "@" not in value or len(value) > 320:
raise ValidationError(self.message, code=self.code, params={"value": value})
user_part, domain_part = value.rsplit("@", 1)
if not self.user_regex.match(user_part):
raise ValidationError(self.message, code=self.code, params={"value": value})
if domain_part not in self.domain_allowlist and not self.validate_domain_part(
domain_part
):
# Try for possible IDN domain-part
try:
domain_part = punycode(domain_part)
except UnicodeError:
pass
else:
if self.validate_domain_part(domain_part):
return
raise ValidationError(self.message, code=self.code, params={"value": value})
def validate_domain_part(self, domain_part):
if self.domain_regex.match(domain_part):
return True
literal_match = self.literal_regex.match(domain_part)
if literal_match:
ip_address = literal_match[1]
try:
validate_ipv46_address(ip_address)
return True
except ValidationError:
pass
return False
def __eq__(self, other):
return (
isinstance(other, EmailValidator)
and (self.domain_allowlist == other.domain_allowlist)
and (self.message == other.message)
and (self.code == other.code)
)
validate_email = EmailValidator()
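# Usage sketch (illustrative): the allowlist bypasses the domain syntax
# check, which is how the default validator accepts the dotless "localhost".
#
#     internal_email = EmailValidator(allowlist=["localhost", "intranet"])
#     internal_email("admin@intranet")  # passes
#     validate_email("admin@intranet")  # raises ValidationError (no dot)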
slug_re = _lazy_re_compile(r"^[-a-zA-Z0-9_]+\Z")
validate_slug = RegexValidator(
slug_re,
# Translators: "letters" means latin letters: a-z and A-Z.
_("Enter a valid “slug” consisting of letters, numbers, underscores or hyphens."),
"invalid",
)
slug_unicode_re = _lazy_re_compile(r"^[-\w]+\Z")
validate_unicode_slug = RegexValidator(
slug_unicode_re,
_(
"Enter a valid “slug” consisting of Unicode letters, numbers, underscores, or "
"hyphens."
),
"invalid",
)
def validate_ipv4_address(value):
try:
ipaddress.IPv4Address(value)
except ValueError:
raise ValidationError(
_("Enter a valid IPv4 address."), code="invalid", params={"value": value}
)
def validate_ipv6_address(value):
if not is_valid_ipv6_address(value):
raise ValidationError(
_("Enter a valid IPv6 address."), code="invalid", params={"value": value}
)
def validate_ipv46_address(value):
try:
validate_ipv4_address(value)
except ValidationError:
try:
validate_ipv6_address(value)
except ValidationError:
raise ValidationError(
_("Enter a valid IPv4 or IPv6 address."),
code="invalid",
params={"value": value},
)
ip_address_validator_map = {
"both": ([validate_ipv46_address], _("Enter a valid IPv4 or IPv6 address.")),
"ipv4": ([validate_ipv4_address], _("Enter a valid IPv4 address.")),
"ipv6": ([validate_ipv6_address], _("Enter a valid IPv6 address.")),
}
def ip_address_validators(protocol, unpack_ipv4):
"""
Depending on the given parameters, return the appropriate validators for
the GenericIPAddressField.
"""
if protocol != "both" and unpack_ipv4:
raise ValueError(
"You can only use `unpack_ipv4` if `protocol` is set to 'both'"
)
try:
return ip_address_validator_map[protocol.lower()]
except KeyError:
raise ValueError(
"The protocol '%s' is unknown. Supported: %s"
% (protocol, list(ip_address_validator_map))
)
def int_list_validator(sep=",", message=None, code="invalid", allow_negative=False):
regexp = _lazy_re_compile(
r"^%(neg)s\d+(?:%(sep)s%(neg)s\d+)*\Z"
% {
"neg": "(-)?" if allow_negative else "",
"sep": re.escape(sep),
}
)
return RegexValidator(regexp, message=message, code=code)
validate_comma_separated_integer_list = int_list_validator(
message=_("Enter only digits separated by commas."),
)
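# Usage sketch (illustrative): int_list_validator() builds a RegexValidator
# for delimiter-separated integers; the separator is re.escape()d, so regex
# metacharacters are safe to use as separators.
#
#     v = int_list_validator(sep=";", allow_negative=True)
#     v("1;-2;3")  # passes
#     v("1,2")     # raises ValidationError (wrong separator)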
@deconstructible
class BaseValidator:
message = _("Ensure this value is %(limit_value)s (it is %(show_value)s).")
code = "limit_value"
def __init__(self, limit_value, message=None):
self.limit_value = limit_value
if message:
self.message = message
def __call__(self, value):
cleaned = self.clean(value)
limit_value = (
self.limit_value() if callable(self.limit_value) else self.limit_value
)
params = {"limit_value": limit_value, "show_value": cleaned, "value": value}
if self.compare(cleaned, limit_value):
raise ValidationError(self.message, code=self.code, params=params)
def __eq__(self, other):
if not isinstance(other, self.__class__):
return NotImplemented
return (
self.limit_value == other.limit_value
and self.message == other.message
and self.code == other.code
)
def compare(self, a, b):
return a is not b
def clean(self, x):
return x
@deconstructible
class MaxValueValidator(BaseValidator):
message = _("Ensure this value is less than or equal to %(limit_value)s.")
code = "max_value"
def compare(self, a, b):
return a > b
@deconstructible
class MinValueValidator(BaseValidator):
message = _("Ensure this value is greater than or equal to %(limit_value)s.")
code = "min_value"
def compare(self, a, b):
return a < b
@deconstructible
class StepValueValidator(BaseValidator):
message = _("Ensure this value is a multiple of step size %(limit_value)s.")
code = "step_size"
def __init__(self, limit_value, message=None, offset=None):
super().__init__(limit_value, message)
if offset is not None:
self.message = _(
"Ensure this value is a multiple of step size %(limit_value)s, "
"starting from %(offset)s, e.g. %(offset)s, %(valid_value1)s, "
"%(valid_value2)s, and so on."
)
self.offset = offset
def __call__(self, value):
if self.offset is None:
super().__call__(value)
else:
cleaned = self.clean(value)
limit_value = (
self.limit_value() if callable(self.limit_value) else self.limit_value
)
if self.compare(cleaned, limit_value):
offset = cleaned.__class__(self.offset)
params = {
"limit_value": limit_value,
"offset": offset,
"valid_value1": offset + limit_value,
"valid_value2": offset + 2 * limit_value,
}
raise ValidationError(self.message, code=self.code, params=params)
def compare(self, a, b):
offset = 0 if self.offset is None else self.offset
return not math.isclose(math.remainder(a - offset, b), 0, abs_tol=1e-9)
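# Usage sketch (illustrative): with an offset, valid values form the series
# offset, offset + step, offset + 2 * step, and so on.
#
#     v = StepValueValidator(3, offset=1)
#     v(7)  # passes: math.remainder(7 - 1, 3) == 0
#     v(9)  # raises ValidationError (valid neighbors are 7 and 10)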
@deconstructible
class MinLengthValidator(BaseValidator):
message = ngettext_lazy(
"Ensure this value has at least %(limit_value)d character (it has "
"%(show_value)d).",
"Ensure this value has at least %(limit_value)d characters (it has "
"%(show_value)d).",
"limit_value",
)
code = "min_length"
def compare(self, a, b):
return a < b
def clean(self, x):
return len(x)
@deconstructible
class MaxLengthValidator(BaseValidator):
message = ngettext_lazy(
"Ensure this value has at most %(limit_value)d character (it has "
"%(show_value)d).",
"Ensure this value has at most %(limit_value)d characters (it has "
"%(show_value)d).",
"limit_value",
)
code = "max_length"
def compare(self, a, b):
return a > b
def clean(self, x):
return len(x)
@deconstructible
class DecimalValidator:
"""
Validate that the input does not exceed the maximum number of digits
expected, otherwise raise ValidationError.
"""
messages = {
"invalid": _("Enter a number."),
"max_digits": ngettext_lazy(
"Ensure that there are no more than %(max)s digit in total.",
"Ensure that there are no more than %(max)s digits in total.",
"max",
),
"max_decimal_places": ngettext_lazy(
"Ensure that there are no more than %(max)s decimal place.",
"Ensure that there are no more than %(max)s decimal places.",
"max",
),
"max_whole_digits": ngettext_lazy(
"Ensure that there are no more than %(max)s digit before the decimal "
"point.",
"Ensure that there are no more than %(max)s digits before the decimal "
"point.",
"max",
),
}
def __init__(self, max_digits, decimal_places):
self.max_digits = max_digits
self.decimal_places = decimal_places
def __call__(self, value):
digit_tuple, exponent = value.as_tuple()[1:]
if exponent in {"F", "n", "N"}:
raise ValidationError(
self.messages["invalid"], code="invalid", params={"value": value}
)
if exponent >= 0:
digits = len(digit_tuple)
if digit_tuple != (0,):
# A positive exponent adds that many trailing zeros.
digits += exponent
decimals = 0
else:
# If the absolute value of the negative exponent is larger than the
# number of digits, then it's the same as the number of digits,
# because it'll consume all of the digits in digit_tuple and then
# add abs(exponent) - len(digit_tuple) leading zeros after the
# decimal point.
if abs(exponent) > len(digit_tuple):
digits = decimals = abs(exponent)
else:
digits = len(digit_tuple)
decimals = abs(exponent)
whole_digits = digits - decimals
if self.max_digits is not None and digits > self.max_digits:
raise ValidationError(
self.messages["max_digits"],
code="max_digits",
params={"max": self.max_digits, "value": value},
)
if self.decimal_places is not None and decimals > self.decimal_places:
raise ValidationError(
self.messages["max_decimal_places"],
code="max_decimal_places",
params={"max": self.decimal_places, "value": value},
)
if (
self.max_digits is not None
and self.decimal_places is not None
and whole_digits > (self.max_digits - self.decimal_places)
):
raise ValidationError(
self.messages["max_whole_digits"],
code="max_whole_digits",
params={"max": (self.max_digits - self.decimal_places), "value": value},
)
def __eq__(self, other):
return (
isinstance(other, self.__class__)
and self.max_digits == other.max_digits
and self.decimal_places == other.decimal_places
)
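# Usage sketch (illustrative): digits are counted from Decimal.as_tuple(),
# so exponents (trailing zeros, or leading zeros after the decimal point)
# are taken into account.
#
#     from decimal import Decimal
#     v = DecimalValidator(max_digits=5, decimal_places=2)
#     v(Decimal("123.45"))  # passes: 5 digits, 2 decimal places
#     v(Decimal("1234.5"))  # raises: 4 whole digits > 5 - 2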
@deconstructible
class FileExtensionValidator:
message = _(
"File extension “%(extension)s” is not allowed. "
"Allowed extensions are: %(allowed_extensions)s."
)
code = "invalid_extension"
def __init__(self, allowed_extensions=None, message=None, code=None):
if allowed_extensions is not None:
allowed_extensions = [
allowed_extension.lower() for allowed_extension in allowed_extensions
]
self.allowed_extensions = allowed_extensions
if message is not None:
self.message = message
if code is not None:
self.code = code
def __call__(self, value):
extension = Path(value.name).suffix[1:].lower()
if (
self.allowed_extensions is not None
and extension not in self.allowed_extensions
):
raise ValidationError(
self.message,
code=self.code,
params={
"extension": extension,
"allowed_extensions": ", ".join(self.allowed_extensions),
"value": value,
},
)
def __eq__(self, other):
return (
isinstance(other, self.__class__)
and self.allowed_extensions == other.allowed_extensions
and self.message == other.message
and self.code == other.code
)
def get_available_image_extensions():
try:
from PIL import Image
except ImportError:
return []
else:
Image.init()
return [ext.lower()[1:] for ext in Image.EXTENSION]
def validate_image_file_extension(value):
return FileExtensionValidator(allowed_extensions=get_available_image_extensions())(
value
)
@deconstructible
class ProhibitNullCharactersValidator:
"""Validate that the string doesn't contain the null character."""
message = _("Null characters are not allowed.")
code = "null_characters_not_allowed"
def __init__(self, message=None, code=None):
if message is not None:
self.message = message
if code is not None:
self.code = code
def __call__(self, value):
if "\x00" in str(value):
raise ValidationError(self.message, code=self.code, params={"value": value})
def __eq__(self, other):
return (
isinstance(other, self.__class__)
and self.message == other.message
and self.code == other.code
)
|
2369811debc3ca816557ed76606cbc97213dd1cb0a1a9c5b24711853863c4f73 | """
Decorators for views based on HTTP headers.
"""
import datetime
from functools import wraps
from asgiref.sync import iscoroutinefunction
from django.http import HttpResponseNotAllowed
from django.middleware.http import ConditionalGetMiddleware
from django.utils import timezone
from django.utils.cache import get_conditional_response
from django.utils.decorators import decorator_from_middleware
from django.utils.http import http_date, quote_etag
from django.utils.log import log_response
conditional_page = decorator_from_middleware(ConditionalGetMiddleware)
def require_http_methods(request_method_list):
"""
Decorator to make a view only accept particular request methods. Usage::
@require_http_methods(["GET", "POST"])
def my_view(request):
# I can assume now that only GET or POST requests make it this far
# ...
Note that request methods should be in uppercase.
"""
def decorator(func):
if iscoroutinefunction(func):
@wraps(func)
async def inner(request, *args, **kwargs):
if request.method not in request_method_list:
response = HttpResponseNotAllowed(request_method_list)
log_response(
"Method Not Allowed (%s): %s",
request.method,
request.path,
response=response,
request=request,
)
return response
return await func(request, *args, **kwargs)
else:
@wraps(func)
def inner(request, *args, **kwargs):
if request.method not in request_method_list:
response = HttpResponseNotAllowed(request_method_list)
log_response(
"Method Not Allowed (%s): %s",
request.method,
request.path,
response=response,
request=request,
)
return response
return func(request, *args, **kwargs)
return inner
return decorator
require_GET = require_http_methods(["GET"])
require_GET.__doc__ = "Decorator to require that a view only accepts the GET method."
require_POST = require_http_methods(["POST"])
require_POST.__doc__ = "Decorator to require that a view only accepts the POST method."
require_safe = require_http_methods(["GET", "HEAD"])
require_safe.__doc__ = (
"Decorator to require that a view only accepts safe methods: GET and HEAD."
)
def condition(etag_func=None, last_modified_func=None):
"""
Decorator to support conditional retrieval (or change) for a view
function.
The parameters are callables to compute the ETag and last modified time for
the requested resource, respectively. The callables are passed the same
parameters as the view itself. The ETag function should return a string (or
None if the resource doesn't exist), while the last_modified function
should return a datetime object (or None if the resource doesn't exist).
The ETag function should return a complete ETag, including quotes (e.g.
'"etag"'), since that's the only way to distinguish between weak and strong
ETags. If an unquoted ETag is returned (e.g. 'etag'), it will be converted
to a strong ETag by adding quotes.
This decorator will either pass control to the wrapped view function or
return an HTTP 304 response (unmodified) or 412 response (precondition
failed), depending upon the request method. In either case, the decorator
will add the generated ETag and Last-Modified headers to the response if
the headers aren't already set and if the request's method is safe.
"""
def decorator(func):
def _pre_process_request(request, *args, **kwargs):
# Compute values (if any) for the requested resource.
res_last_modified = None
if last_modified_func:
if dt := last_modified_func(request, *args, **kwargs):
if not timezone.is_aware(dt):
dt = timezone.make_aware(dt, datetime.timezone.utc)
res_last_modified = int(dt.timestamp())
# The value from etag_func() could be quoted or unquoted.
res_etag = etag_func(request, *args, **kwargs) if etag_func else None
res_etag = quote_etag(res_etag) if res_etag is not None else None
response = get_conditional_response(
request,
etag=res_etag,
last_modified=res_last_modified,
)
return response, res_etag, res_last_modified
def _post_process_request(request, response, res_etag, res_last_modified):
# Set relevant headers on the response if they don't already exist
# and if the request method is safe.
if request.method in ("GET", "HEAD"):
if res_last_modified and not response.has_header("Last-Modified"):
response.headers["Last-Modified"] = http_date(res_last_modified)
if res_etag:
response.headers.setdefault("ETag", res_etag)
if iscoroutinefunction(func):
@wraps(func)
async def inner(request, *args, **kwargs):
response, res_etag, res_last_modified = _pre_process_request(
request, *args, **kwargs
)
if response is None:
response = await func(request, *args, **kwargs)
_post_process_request(request, response, res_etag, res_last_modified)
return response
else:
@wraps(func)
def inner(request, *args, **kwargs):
response, res_etag, res_last_modified = _pre_process_request(
request, *args, **kwargs
)
if response is None:
response = func(request, *args, **kwargs)
_post_process_request(request, response, res_etag, res_last_modified)
return response
return inner
return decorator
# Shortcut decorators for common cases based on ETag or Last-Modified only
def etag(etag_func):
return condition(etag_func=etag_func)
def last_modified(last_modified_func):
return condition(last_modified_func=last_modified_func)
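# Usage sketch (illustrative; Entry is a hypothetical model): the shortcuts
# wrap condition() for the single-header cases. The callable receives the
# same arguments as the view and returns the resource's last modification.
#
#     def latest_entry(request, blog_id):
#         return Entry.objects.filter(blog=blog_id).latest("published").published
#
#     @last_modified(latest_entry)
#     def front_page(request, blog_id):
#         ...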
|
d406fce1a121e814eafc949785c52dbbb1385569011544c9aec8f21c40c1fc17 | import inspect
from functools import wraps
from asgiref.sync import iscoroutinefunction
from django.http import HttpRequest
coroutine_functions_to_sensitive_variables = {}
def sensitive_variables(*variables):
"""
Indicate which variables used in the decorated function are sensitive so
that those variables can later be treated in a special way, for example
by hiding them when logging unhandled exceptions.
Accept two forms:
* with specified variable names:
@sensitive_variables('user', 'password', 'credit_card')
def my_function(user):
password = user.pass_word
credit_card = user.credit_card_number
...
    * without any specified variable names, in which case all variables
      are considered sensitive:
        @sensitive_variables()
        def my_function():
...
"""
if len(variables) == 1 and callable(variables[0]):
raise TypeError(
"sensitive_variables() must be called to use it as a decorator, "
"e.g., use @sensitive_variables(), not @sensitive_variables."
)
def decorator(func):
if iscoroutinefunction(func):
sensitive_variables_wrapper = func
wrapped_func = func
while getattr(wrapped_func, "__wrapped__", None) is not None:
wrapped_func = wrapped_func.__wrapped__
try:
file_path = inspect.getfile(wrapped_func)
_, first_file_line = inspect.getsourcelines(wrapped_func)
except TypeError: # Raises for builtins or native functions.
raise ValueError(
f"{func.__name__} cannot safely be wrapped by "
"@sensitive_variables, make it either non-async or defined in a "
"Python file (not a builtin or from a native extension)."
)
else:
key = hash(f"{file_path}:{first_file_line}")
if variables:
coroutine_functions_to_sensitive_variables[key] = variables
else:
coroutine_functions_to_sensitive_variables[key] = "__ALL__"
else:
@wraps(func)
def sensitive_variables_wrapper(*func_args, **func_kwargs):
if variables:
sensitive_variables_wrapper.sensitive_variables = variables
else:
sensitive_variables_wrapper.sensitive_variables = "__ALL__"
return func(*func_args, **func_kwargs)
return sensitive_variables_wrapper
return decorator
def sensitive_post_parameters(*parameters):
"""
Indicate which POST parameters used in the decorated view are sensitive,
so that those parameters can later be treated in a special way, for example
by hiding them when logging unhandled exceptions.
Accept two forms:
* with specified parameters:
@sensitive_post_parameters('password', 'credit_card')
def my_view(request):
pw = request.POST['password']
cc = request.POST['credit_card']
...
    * without any specified parameters, in which case all POST parameters
      are considered sensitive:
        @sensitive_post_parameters()
        def my_view(request):
...
"""
if len(parameters) == 1 and callable(parameters[0]):
raise TypeError(
"sensitive_post_parameters() must be called to use it as a "
"decorator, e.g., use @sensitive_post_parameters(), not "
"@sensitive_post_parameters."
)
def decorator(view):
if iscoroutinefunction(view):
@wraps(view)
async def sensitive_post_parameters_wrapper(request, *args, **kwargs):
if not isinstance(request, HttpRequest):
raise TypeError(
"sensitive_post_parameters didn't receive an HttpRequest "
"object. If you are decorating a classmethod, make sure to use "
"@method_decorator."
)
if parameters:
request.sensitive_post_parameters = parameters
else:
request.sensitive_post_parameters = "__ALL__"
return await view(request, *args, **kwargs)
else:
@wraps(view)
def sensitive_post_parameters_wrapper(request, *args, **kwargs):
if not isinstance(request, HttpRequest):
raise TypeError(
"sensitive_post_parameters didn't receive an HttpRequest "
"object. If you are decorating a classmethod, make sure to use "
"@method_decorator."
)
if parameters:
request.sensitive_post_parameters = parameters
else:
request.sensitive_post_parameters = "__ALL__"
return view(request, *args, **kwargs)
return sensitive_post_parameters_wrapper
return decorator
|
4ebf285bc324f5a812909a80245b954fe5fcc73b1eb21bb130cec89136ec8f38 | """
The main QuerySet implementation. This provides the public API for the ORM.
"""
import copy
import operator
import warnings
from itertools import chain, islice
from asgiref.sync import sync_to_async
import django
from django.conf import settings
from django.core import exceptions
from django.db import (
DJANGO_VERSION_PICKLE_KEY,
IntegrityError,
NotSupportedError,
connections,
router,
transaction,
)
from django.db.models import AutoField, DateField, DateTimeField, Field, sql
from django.db.models.constants import LOOKUP_SEP, OnConflict
from django.db.models.deletion import Collector
from django.db.models.expressions import Case, F, Value, When
from django.db.models.functions import Cast, Trunc
from django.db.models.query_utils import FilteredRelation, Q
from django.db.models.sql.constants import CURSOR, GET_ITERATOR_CHUNK_SIZE
from django.db.models.utils import (
AltersData,
create_namedtuple_class,
resolve_callables,
)
from django.utils import timezone
from django.utils.functional import cached_property, partition
# The maximum number of results to fetch in a get() query.
MAX_GET_RESULTS = 21
# The maximum number of items to display in a QuerySet.__repr__
REPR_OUTPUT_SIZE = 20
class BaseIterable:
def __init__(
self, queryset, chunked_fetch=False, chunk_size=GET_ITERATOR_CHUNK_SIZE
):
self.queryset = queryset
self.chunked_fetch = chunked_fetch
self.chunk_size = chunk_size
async def _async_generator(self):
# Generators don't actually start running until the first time you call
# next() on them, so make the generator object in the async thread and
# then repeatedly dispatch to it in a sync thread.
sync_generator = self.__iter__()
def next_slice(gen):
return list(islice(gen, self.chunk_size))
while True:
chunk = await sync_to_async(next_slice)(sync_generator)
for item in chunk:
yield item
if len(chunk) < self.chunk_size:
break
# __aiter__() is a *synchronous* method that has to then return an
# *asynchronous* iterator/generator. Thus, nest an async generator inside
# it.
# This is a generic iterable converter for now, and is going to suffer a
# performance penalty on large sets of items due to the cost of crossing
# over the sync barrier for each chunk. Custom __aiter__() methods should
# be added to each Iterable subclass, but that needs some work in the
# Compiler first.
def __aiter__(self):
return self._async_generator()
class ModelIterable(BaseIterable):
"""Iterable that yields a model instance for each row."""
def __iter__(self):
queryset = self.queryset
db = queryset.db
compiler = queryset.query.get_compiler(using=db)
# Execute the query. This will also fill compiler.select, klass_info,
# and annotations.
results = compiler.execute_sql(
chunked_fetch=self.chunked_fetch, chunk_size=self.chunk_size
)
select, klass_info, annotation_col_map = (
compiler.select,
compiler.klass_info,
compiler.annotation_col_map,
)
model_cls = klass_info["model"]
select_fields = klass_info["select_fields"]
model_fields_start, model_fields_end = select_fields[0], select_fields[-1] + 1
init_list = [
f[0].target.attname for f in select[model_fields_start:model_fields_end]
]
related_populators = get_related_populators(klass_info, select, db)
known_related_objects = [
(
field,
related_objs,
operator.attrgetter(
*[
field.attname
if from_field == "self"
else queryset.model._meta.get_field(from_field).attname
for from_field in field.from_fields
]
),
)
for field, related_objs in queryset._known_related_objects.items()
]
for row in compiler.results_iter(results):
obj = model_cls.from_db(
db, init_list, row[model_fields_start:model_fields_end]
)
for rel_populator in related_populators:
rel_populator.populate(row, obj)
if annotation_col_map:
for attr_name, col_pos in annotation_col_map.items():
setattr(obj, attr_name, row[col_pos])
# Add the known related objects to the model.
for field, rel_objs, rel_getter in known_related_objects:
# Avoid overwriting objects loaded by, e.g., select_related().
if field.is_cached(obj):
continue
rel_obj_id = rel_getter(obj)
try:
rel_obj = rel_objs[rel_obj_id]
except KeyError:
pass # May happen in qs1 | qs2 scenarios.
else:
setattr(obj, field.name, rel_obj)
yield obj
class RawModelIterable(BaseIterable):
"""
Iterable that yields a model instance for each row from a raw queryset.
"""
def __iter__(self):
# Cache some things for performance reasons outside the loop.
db = self.queryset.db
query = self.queryset.query
connection = connections[db]
compiler = connection.ops.compiler("SQLCompiler")(query, connection, db)
query_iterator = iter(query)
try:
(
model_init_names,
model_init_pos,
annotation_fields,
) = self.queryset.resolve_model_init_order()
model_cls = self.queryset.model
if model_cls._meta.pk.attname not in model_init_names:
raise exceptions.FieldDoesNotExist(
"Raw query must include the primary key"
)
fields = [self.queryset.model_fields.get(c) for c in self.queryset.columns]
converters = compiler.get_converters(
[f.get_col(f.model._meta.db_table) if f else None for f in fields]
)
if converters:
query_iterator = compiler.apply_converters(query_iterator, converters)
for values in query_iterator:
# Associate fields to values
model_init_values = [values[pos] for pos in model_init_pos]
instance = model_cls.from_db(db, model_init_names, model_init_values)
if annotation_fields:
for column, pos in annotation_fields:
setattr(instance, column, values[pos])
yield instance
finally:
# Done iterating the Query. If it has its own cursor, close it.
if hasattr(query, "cursor") and query.cursor:
query.cursor.close()
class ValuesIterable(BaseIterable):
"""
Iterable returned by QuerySet.values() that yields a dict for each row.
"""
def __iter__(self):
queryset = self.queryset
query = queryset.query
compiler = query.get_compiler(queryset.db)
# extra(select=...) cols are always at the start of the row.
names = [
*query.extra_select,
*query.values_select,
*query.annotation_select,
]
indexes = range(len(names))
for row in compiler.results_iter(
chunked_fetch=self.chunked_fetch, chunk_size=self.chunk_size
):
yield {names[i]: row[i] for i in indexes}
class ValuesListIterable(BaseIterable):
"""
Iterable returned by QuerySet.values_list(flat=False) that yields a tuple
for each row.
"""
def __iter__(self):
queryset = self.queryset
query = queryset.query
compiler = query.get_compiler(queryset.db)
if queryset._fields:
# extra(select=...) cols are always at the start of the row.
names = [
*query.extra_select,
*query.values_select,
*query.annotation_select,
]
fields = [
*queryset._fields,
*(f for f in query.annotation_select if f not in queryset._fields),
]
if fields != names:
# Reorder according to fields.
index_map = {name: idx for idx, name in enumerate(names)}
rowfactory = operator.itemgetter(*[index_map[f] for f in fields])
return map(
rowfactory,
compiler.results_iter(
chunked_fetch=self.chunked_fetch, chunk_size=self.chunk_size
),
)
return compiler.results_iter(
tuple_expected=True,
chunked_fetch=self.chunked_fetch,
chunk_size=self.chunk_size,
)
class NamedValuesListIterable(ValuesListIterable):
"""
Iterable returned by QuerySet.values_list(named=True) that yields a
namedtuple for each row.
"""
def __iter__(self):
queryset = self.queryset
if queryset._fields:
names = queryset._fields
else:
query = queryset.query
names = [
*query.extra_select,
*query.values_select,
*query.annotation_select,
]
tuple_class = create_namedtuple_class(*names)
new = tuple.__new__
for row in super().__iter__():
yield new(tuple_class, row)
class FlatValuesListIterable(BaseIterable):
"""
Iterable returned by QuerySet.values_list(flat=True) that yields single
values.
"""
def __iter__(self):
queryset = self.queryset
compiler = queryset.query.get_compiler(queryset.db)
for row in compiler.results_iter(
chunked_fetch=self.chunked_fetch, chunk_size=self.chunk_size
):
yield row[0]
class QuerySet(AltersData):
"""Represent a lazy database lookup for a set of objects."""
def __init__(self, model=None, query=None, using=None, hints=None):
self.model = model
self._db = using
self._hints = hints or {}
self._query = query or sql.Query(self.model)
self._result_cache = None
self._sticky_filter = False
self._for_write = False
self._prefetch_related_lookups = ()
self._prefetch_done = False
self._known_related_objects = {} # {rel_field: {pk: rel_obj}}
self._iterable_class = ModelIterable
self._fields = None
self._defer_next_filter = False
self._deferred_filter = None
@property
def query(self):
if self._deferred_filter:
negate, args, kwargs = self._deferred_filter
self._filter_or_exclude_inplace(negate, args, kwargs)
self._deferred_filter = None
return self._query
@query.setter
def query(self, value):
if value.values_select:
self._iterable_class = ValuesIterable
self._query = value
def as_manager(cls):
        # Address the circular dependency between `QuerySet` and `Manager`.
from django.db.models.manager import Manager
manager = Manager.from_queryset(cls)()
manager._built_with_as_manager = True
return manager
as_manager.queryset_only = True
as_manager = classmethod(as_manager)
########################
# PYTHON MAGIC METHODS #
########################
def __deepcopy__(self, memo):
"""Don't populate the QuerySet's cache."""
obj = self.__class__()
for k, v in self.__dict__.items():
if k == "_result_cache":
obj.__dict__[k] = None
else:
obj.__dict__[k] = copy.deepcopy(v, memo)
return obj
def __getstate__(self):
# Force the cache to be fully populated.
self._fetch_all()
return {**self.__dict__, DJANGO_VERSION_PICKLE_KEY: django.__version__}
def __setstate__(self, state):
pickled_version = state.get(DJANGO_VERSION_PICKLE_KEY)
if pickled_version:
if pickled_version != django.__version__:
warnings.warn(
"Pickled queryset instance's Django version %s does not "
"match the current version %s."
% (pickled_version, django.__version__),
RuntimeWarning,
stacklevel=2,
)
else:
warnings.warn(
"Pickled queryset instance's Django version is not specified.",
RuntimeWarning,
stacklevel=2,
)
self.__dict__.update(state)
def __repr__(self):
data = list(self[: REPR_OUTPUT_SIZE + 1])
if len(data) > REPR_OUTPUT_SIZE:
data[-1] = "...(remaining elements truncated)..."
return "<%s %r>" % (self.__class__.__name__, data)
def __len__(self):
self._fetch_all()
return len(self._result_cache)
def __iter__(self):
"""
The queryset iterator protocol uses three nested iterators in the
default case:
1. sql.compiler.execute_sql()
           - Returns 100 rows at a time (constants.GET_ITERATOR_CHUNK_SIZE)
using cursor.fetchmany(). This part is responsible for
doing some column masking, and returning the rows in chunks.
2. sql.compiler.results_iter()
           - Returns one row at a time. At this point the rows are still just
tuples. In some cases the return values are converted to
Python values at this location.
3. self.iterator()
- Responsible for turning the rows into model objects.
"""
self._fetch_all()
return iter(self._result_cache)
def __aiter__(self):
# Remember, __aiter__ itself is synchronous, it's the thing it returns
# that is async!
async def generator():
await sync_to_async(self._fetch_all)()
for item in self._result_cache:
yield item
return generator()
def __bool__(self):
self._fetch_all()
return bool(self._result_cache)
def __getitem__(self, k):
"""Retrieve an item or slice from the set of results."""
if not isinstance(k, (int, slice)):
raise TypeError(
"QuerySet indices must be integers or slices, not %s."
% type(k).__name__
)
if (isinstance(k, int) and k < 0) or (
isinstance(k, slice)
and (
(k.start is not None and k.start < 0)
or (k.stop is not None and k.stop < 0)
)
):
raise ValueError("Negative indexing is not supported.")
if self._result_cache is not None:
return self._result_cache[k]
if isinstance(k, slice):
qs = self._chain()
if k.start is not None:
start = int(k.start)
else:
start = None
if k.stop is not None:
stop = int(k.stop)
else:
stop = None
qs.query.set_limits(start, stop)
return list(qs)[:: k.step] if k.step else qs
qs = self._chain()
qs.query.set_limits(k, k + 1)
qs._fetch_all()
return qs._result_cache[0]
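    # Usage sketch (illustrative; Book is a hypothetical model): slicing an
    # unevaluated QuerySet sets SQL LIMIT/OFFSET instead of fetching
    # everything, while a slice step forces immediate evaluation.
    #
    #     qs = Book.objects.all()
    #     qs[5:10]  # still lazy; compiles to LIMIT 5 OFFSET 5
    #     qs[::2]   # evaluated now; returns a list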
def __class_getitem__(cls, *args, **kwargs):
return cls
def __and__(self, other):
self._check_operator_queryset(other, "&")
self._merge_sanity_check(other)
if isinstance(other, EmptyQuerySet):
return other
if isinstance(self, EmptyQuerySet):
return self
combined = self._chain()
combined._merge_known_related_objects(other)
combined.query.combine(other.query, sql.AND)
return combined
def __or__(self, other):
self._check_operator_queryset(other, "|")
self._merge_sanity_check(other)
if isinstance(self, EmptyQuerySet):
return other
if isinstance(other, EmptyQuerySet):
return self
query = (
self
if self.query.can_filter()
else self.model._base_manager.filter(pk__in=self.values("pk"))
)
combined = query._chain()
combined._merge_known_related_objects(other)
if not other.query.can_filter():
other = other.model._base_manager.filter(pk__in=other.values("pk"))
combined.query.combine(other.query, sql.OR)
return combined
def __xor__(self, other):
self._check_operator_queryset(other, "^")
self._merge_sanity_check(other)
if isinstance(self, EmptyQuerySet):
return other
if isinstance(other, EmptyQuerySet):
return self
query = (
self
if self.query.can_filter()
else self.model._base_manager.filter(pk__in=self.values("pk"))
)
combined = query._chain()
combined._merge_known_related_objects(other)
if not other.query.can_filter():
other = other.model._base_manager.filter(pk__in=other.values("pk"))
combined.query.combine(other.query, sql.XOR)
return combined
####################################
# METHODS THAT DO DATABASE QUERIES #
####################################
def _iterator(self, use_chunked_fetch, chunk_size):
iterable = self._iterable_class(
self,
chunked_fetch=use_chunked_fetch,
chunk_size=chunk_size or 2000,
)
if not self._prefetch_related_lookups or chunk_size is None:
yield from iterable
return
iterator = iter(iterable)
while results := list(islice(iterator, chunk_size)):
prefetch_related_objects(results, *self._prefetch_related_lookups)
yield from results
def iterator(self, chunk_size=None):
"""
An iterator over the results from applying this QuerySet to the
database. chunk_size must be provided for QuerySets that prefetch
related objects. Otherwise, a default chunk_size of 2000 is supplied.
"""
if chunk_size is None:
if self._prefetch_related_lookups:
raise ValueError(
"chunk_size must be provided when using QuerySet.iterator() after "
"prefetch_related()."
)
elif chunk_size <= 0:
raise ValueError("Chunk size must be strictly positive.")
use_chunked_fetch = not connections[self.db].settings_dict.get(
"DISABLE_SERVER_SIDE_CURSORS"
)
return self._iterator(use_chunked_fetch, chunk_size)
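# Illustrative sketch (assumes a hypothetical ``Entry`` model with a ``tags``
# relation): iterator() streams rows without populating the QuerySet's result
# cache, bounding memory use on large result sets:
#
#     for entry in Entry.objects.iterator(chunk_size=500):
#         ...  # each chunk is fetched lazily from the cursor
#
#     # chunk_size is mandatory once prefetch_related() is involved:
#     Entry.objects.prefetch_related("tags").iterator(chunk_size=100)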
async def aiterator(self, chunk_size=2000):
"""
An asynchronous iterator over the results from applying this QuerySet
to the database.
"""
if self._prefetch_related_lookups:
raise NotSupportedError(
"Using QuerySet.aiterator() after prefetch_related() is not supported."
)
if chunk_size <= 0:
raise ValueError("Chunk size must be strictly positive.")
use_chunked_fetch = not connections[self.db].settings_dict.get(
"DISABLE_SERVER_SIDE_CURSORS"
)
async for item in self._iterable_class(
self, chunked_fetch=use_chunked_fetch, chunk_size=chunk_size
):
yield item
def aggregate(self, *args, **kwargs):
"""
Return a dictionary containing the calculations (aggregation)
over the current queryset.
If args is present, the expression is passed as a kwarg using
the Aggregate object's default alias.
"""
if self.query.distinct_fields:
raise NotImplementedError("aggregate() + distinct(fields) not implemented.")
self._validate_values_are_expressions(
(*args, *kwargs.values()), method_name="aggregate"
)
for arg in args:
# The default_alias property raises TypeError if default_alias
# can't be set automatically or AttributeError if it isn't an
# attribute.
try:
arg.default_alias
except (AttributeError, TypeError):
raise TypeError("Complex aggregates require an alias")
kwargs[arg.default_alias] = arg
return self.query.chain().get_aggregation(self.db, kwargs)
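# Illustrative sketch (assumes a hypothetical ``Book`` model with a
# ``price`` field): positional aggregates are keyed by their default
# alias, keyword aggregates by the given name:
#
#     from django.db.models import Avg, Max
#     Book.objects.aggregate(Avg("price"))            # {"price__avg": ...}
#     Book.objects.aggregate(max_price=Max("price"))  # {"max_price": ...}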
async def aaggregate(self, *args, **kwargs):
return await sync_to_async(self.aggregate)(*args, **kwargs)
def count(self):
"""
Perform a SELECT COUNT() and return the number of records as an
integer.
If the QuerySet is already fully cached, return the length of the
cached result set to avoid multiple SELECT COUNT(*) calls.
"""
if self._result_cache is not None:
return len(self._result_cache)
return self.query.get_count(using=self.db)
async def acount(self):
return await sync_to_async(self.count)()
def get(self, *args, **kwargs):
"""
Perform the query and return a single object matching the given
keyword arguments.
"""
if self.query.combinator and (args or kwargs):
raise NotSupportedError(
"Calling QuerySet.get(...) with filters after %s() is not "
"supported." % self.query.combinator
)
clone = self._chain() if self.query.combinator else self.filter(*args, **kwargs)
if self.query.can_filter() and not self.query.distinct_fields:
clone = clone.order_by()
limit = None
if (
not clone.query.select_for_update
or connections[clone.db].features.supports_select_for_update_with_limit
):
limit = MAX_GET_RESULTS
clone.query.set_limits(high=limit)
num = len(clone)
if num == 1:
return clone._result_cache[0]
if not num:
raise self.model.DoesNotExist(
"%s matching query does not exist." % self.model._meta.object_name
)
raise self.model.MultipleObjectsReturned(
"get() returned more than one %s -- it returned %s!"
% (
self.model._meta.object_name,
num if not limit or num < limit else "more than %s" % (limit - 1),
)
)
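# Illustrative sketch (assumes a hypothetical ``Entry`` model):
#
#     Entry.objects.get(pk=1)
#     # Raises Entry.DoesNotExist if no row matches and
#     # Entry.MultipleObjectsReturned if more than one does.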
async def aget(self, *args, **kwargs):
return await sync_to_async(self.get)(*args, **kwargs)
def create(self, **kwargs):
"""
Create a new object with the given kwargs, saving it to the database
and returning the created object.
"""
obj = self.model(**kwargs)
self._for_write = True
obj.save(force_insert=True, using=self.db)
return obj
async def acreate(self, **kwargs):
return await sync_to_async(self.create)(**kwargs)
def _prepare_for_bulk_create(self, objs):
from django.db.models.expressions import DatabaseDefault
connection = connections[self.db]
for obj in objs:
if obj.pk is None:
# Populate new PK values.
obj.pk = obj._meta.pk.get_pk_value_on_save(obj)
if not connection.features.supports_default_keyword_in_bulk_insert:
for field in obj._meta.fields:
value = getattr(obj, field.attname)
if isinstance(value, DatabaseDefault):
setattr(obj, field.attname, field.db_default)
obj._prepare_related_fields_for_save(operation_name="bulk_create")
def _check_bulk_create_options(
self, ignore_conflicts, update_conflicts, update_fields, unique_fields
):
if ignore_conflicts and update_conflicts:
raise ValueError(
"ignore_conflicts and update_conflicts are mutually exclusive."
)
db_features = connections[self.db].features
if ignore_conflicts:
if not db_features.supports_ignore_conflicts:
raise NotSupportedError(
"This database backend does not support ignoring conflicts."
)
return OnConflict.IGNORE
elif update_conflicts:
if not db_features.supports_update_conflicts:
raise NotSupportedError(
"This database backend does not support updating conflicts."
)
if not update_fields:
raise ValueError(
"Fields that will be updated when a row insertion fails "
"on conflicts must be provided."
)
if unique_fields and not db_features.supports_update_conflicts_with_target:
raise NotSupportedError(
"This database backend does not support updating "
"conflicts with specifying unique fields that can trigger "
"the upsert."
)
if not unique_fields and db_features.supports_update_conflicts_with_target:
raise ValueError(
"Unique fields that can trigger the upsert must be provided."
)
# Updating primary keys and non-concrete fields is forbidden.
if any(not f.concrete or f.many_to_many for f in update_fields):
raise ValueError(
"bulk_create() can only be used with concrete fields in "
"update_fields."
)
if any(f.primary_key for f in update_fields):
raise ValueError(
"bulk_create() cannot be used with primary keys in "
"update_fields."
)
if unique_fields:
if any(not f.concrete or f.many_to_many for f in unique_fields):
raise ValueError(
"bulk_create() can only be used with concrete fields "
"in unique_fields."
)
return OnConflict.UPDATE
return None
def bulk_create(
self,
objs,
batch_size=None,
ignore_conflicts=False,
update_conflicts=False,
update_fields=None,
unique_fields=None,
):
"""
Insert each of the instances into the database. Do *not* call
save() on each of the instances, do not send any pre/post_save
signals, and do not set the primary key attribute if it is an
autoincrement field (except if features.can_return_rows_from_bulk_insert=True).
Multi-table models are not supported.
"""
# When you bulk insert you don't get the primary keys back (if it's an
# autoincrement, except if can_return_rows_from_bulk_insert=True), so
# you can't insert into the child tables which reference this. There
# are two workarounds:
# 1) This could be implemented if you didn't have an autoincrement pk
# 2) You could do it by doing O(n) normal inserts into the parent
# tables to get the primary keys back and then doing a single bulk
# insert into the childmost table.
# We currently set the primary keys on the objects when using
# PostgreSQL via the RETURNING ID clause. It should be possible for
# Oracle as well, but the semantics for extracting the primary keys are
# trickier so it's not done yet.
if batch_size is not None and batch_size <= 0:
raise ValueError("Batch size must be a positive integer.")
# Check that the parents share the same concrete model with our
# model to detect the inheritance pattern ConcreteGrandParent ->
# MultiTableParent -> ProxyChild. Simply checking self.model._meta.proxy
# would not identify that case as involving multiple tables.
for parent in self.model._meta.get_parent_list():
if parent._meta.concrete_model is not self.model._meta.concrete_model:
raise ValueError("Can't bulk create a multi-table inherited model")
if not objs:
return objs
opts = self.model._meta
if unique_fields:
# Primary key is allowed in unique_fields.
unique_fields = [
self.model._meta.get_field(opts.pk.name if name == "pk" else name)
for name in unique_fields
]
if update_fields:
update_fields = [self.model._meta.get_field(name) for name in update_fields]
on_conflict = self._check_bulk_create_options(
ignore_conflicts,
update_conflicts,
update_fields,
unique_fields,
)
self._for_write = True
fields = opts.concrete_fields
objs = list(objs)
self._prepare_for_bulk_create(objs)
with transaction.atomic(using=self.db, savepoint=False):
objs_with_pk, objs_without_pk = partition(lambda o: o.pk is None, objs)
if objs_with_pk:
returned_columns = self._batched_insert(
objs_with_pk,
fields,
batch_size,
on_conflict=on_conflict,
update_fields=update_fields,
unique_fields=unique_fields,
)
for obj_with_pk, results in zip(objs_with_pk, returned_columns):
for result, field in zip(results, opts.db_returning_fields):
if field != opts.pk:
setattr(obj_with_pk, field.attname, result)
for obj_with_pk in objs_with_pk:
obj_with_pk._state.adding = False
obj_with_pk._state.db = self.db
if objs_without_pk:
fields = [f for f in fields if not isinstance(f, AutoField)]
returned_columns = self._batched_insert(
objs_without_pk,
fields,
batch_size,
on_conflict=on_conflict,
update_fields=update_fields,
unique_fields=unique_fields,
)
connection = connections[self.db]
if (
connection.features.can_return_rows_from_bulk_insert
and on_conflict is None
):
assert len(returned_columns) == len(objs_without_pk)
for obj_without_pk, results in zip(objs_without_pk, returned_columns):
for result, field in zip(results, opts.db_returning_fields):
setattr(obj_without_pk, field.attname, result)
obj_without_pk._state.adding = False
obj_without_pk._state.db = self.db
return objs
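# Illustrative sketch (assumes a hypothetical ``Entry`` model with a
# unique ``slug`` field): a plain bulk insert, then an upsert that
# updates ``title`` whenever a ``slug`` collision occurs:
#
#     Entry.objects.bulk_create([Entry(slug="a"), Entry(slug="b")])
#     Entry.objects.bulk_create(
#         [Entry(slug="a", title="updated")],
#         update_conflicts=True,
#         update_fields=["title"],
#         unique_fields=["slug"],
#     )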
async def abulk_create(
self,
objs,
batch_size=None,
ignore_conflicts=False,
update_conflicts=False,
update_fields=None,
unique_fields=None,
):
return await sync_to_async(self.bulk_create)(
objs=objs,
batch_size=batch_size,
ignore_conflicts=ignore_conflicts,
update_conflicts=update_conflicts,
update_fields=update_fields,
unique_fields=unique_fields,
)
def bulk_update(self, objs, fields, batch_size=None):
"""
Update the given fields in each of the given objects in the database.
"""
if batch_size is not None and batch_size <= 0:
raise ValueError("Batch size must be a positive integer.")
if not fields:
raise ValueError("Field names must be given to bulk_update().")
objs = tuple(objs)
if any(obj.pk is None for obj in objs):
raise ValueError("All bulk_update() objects must have a primary key set.")
fields = [self.model._meta.get_field(name) for name in fields]
if any(not f.concrete or f.many_to_many for f in fields):
raise ValueError("bulk_update() can only be used with concrete fields.")
if any(f.primary_key for f in fields):
raise ValueError("bulk_update() cannot be used with primary key fields.")
if not objs:
return 0
for obj in objs:
obj._prepare_related_fields_for_save(
operation_name="bulk_update", fields=fields
)
# PK is used twice in the resulting update query, once in the filter
# and once in the WHEN. Each field will also have one CAST.
self._for_write = True
connection = connections[self.db]
max_batch_size = connection.ops.bulk_batch_size(["pk", "pk"] + fields, objs)
batch_size = min(batch_size, max_batch_size) if batch_size else max_batch_size
requires_casting = connection.features.requires_casted_case_in_updates
batches = (objs[i : i + batch_size] for i in range(0, len(objs), batch_size))
updates = []
for batch_objs in batches:
update_kwargs = {}
for field in fields:
when_statements = []
for obj in batch_objs:
attr = getattr(obj, field.attname)
if not hasattr(attr, "resolve_expression"):
attr = Value(attr, output_field=field)
when_statements.append(When(pk=obj.pk, then=attr))
case_statement = Case(*when_statements, output_field=field)
if requires_casting:
case_statement = Cast(case_statement, output_field=field)
update_kwargs[field.attname] = case_statement
updates.append(([obj.pk for obj in batch_objs], update_kwargs))
rows_updated = 0
queryset = self.using(self.db)
with transaction.atomic(using=self.db, savepoint=False):
for pks, update_kwargs in updates:
rows_updated += queryset.filter(pk__in=pks).update(**update_kwargs)
return rows_updated
bulk_update.alters_data = True
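# Illustrative sketch (assumes hypothetical saved ``Entry`` instances with
# a ``rating`` field): mutate in Python, then persist the changes via
# batched UPDATE ... CASE queries:
#
#     entries = list(Entry.objects.all())
#     for entry in entries:
#         entry.rating += 1
#     Entry.objects.bulk_update(entries, ["rating"], batch_size=100)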
async def abulk_update(self, objs, fields, batch_size=None):
return await sync_to_async(self.bulk_update)(
objs=objs,
fields=fields,
batch_size=batch_size,
)
abulk_update.alters_data = True
def get_or_create(self, defaults=None, **kwargs):
"""
Look up an object with the given kwargs, creating one if necessary.
Return a tuple of (object, created), where created is a boolean
specifying whether an object was created.
"""
# The get() needs to be targeted at the write database in order
# to avoid potential transaction consistency problems.
self._for_write = True
try:
return self.get(**kwargs), False
except self.model.DoesNotExist:
params = self._extract_model_params(defaults, **kwargs)
# Try to create an object using passed params.
try:
with transaction.atomic(using=self.db):
params = dict(resolve_callables(params))
return self.create(**params), True
except IntegrityError:
try:
return self.get(**kwargs), False
except self.model.DoesNotExist:
pass
raise
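# Illustrative sketch (assumes a hypothetical ``Person`` model): the
# lookup kwargs identify the row; ``defaults`` applies only on creation:
#
#     person, created = Person.objects.get_or_create(
#         email="ada@example.com",
#         defaults={"name": "Ada"},
#     )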
async def aget_or_create(self, defaults=None, **kwargs):
return await sync_to_async(self.get_or_create)(
defaults=defaults,
**kwargs,
)
def update_or_create(self, defaults=None, create_defaults=None, **kwargs):
"""
Look up an object with the given kwargs, updating one with defaults
if it exists, otherwise create a new one. Optionally, an object can
be created with different values than defaults by using
create_defaults.
Return a tuple (object, created), where created is a boolean
specifying whether an object was created.
"""
if create_defaults is None:
update_defaults = create_defaults = defaults or {}
else:
update_defaults = defaults or {}
self._for_write = True
with transaction.atomic(using=self.db):
# Lock the row so that a concurrent update is blocked until
# update_or_create() has performed its save.
obj, created = self.select_for_update().get_or_create(
create_defaults, **kwargs
)
if created:
return obj, created
for k, v in resolve_callables(update_defaults):
setattr(obj, k, v)
update_fields = set(update_defaults)
concrete_field_names = self.model._meta._non_pk_concrete_field_names
# update_fields does not support non-concrete fields.
if concrete_field_names.issuperset(update_fields):
# Add fields which are set on pre_save(), e.g. auto_now fields.
# This is to maintain backward compatibility as these fields
# are not updated unless explicitly specified in the
# update_fields list.
for field in self.model._meta.local_concrete_fields:
if not (
field.primary_key or field.__class__.pre_save is Field.pre_save
):
update_fields.add(field.name)
if field.name != field.attname:
update_fields.add(field.attname)
obj.save(using=self.db, update_fields=update_fields)
else:
obj.save(using=self.db)
return obj, False
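# Illustrative sketch (assumes a hypothetical ``Person`` model):
# ``defaults`` is applied when updating an existing row and
# ``create_defaults``, if given, when creating a new one:
#
#     person, created = Person.objects.update_or_create(
#         email="ada@example.com",
#         defaults={"nickname": "Ada"},
#         create_defaults={"nickname": "Ada", "name": "Ada Lovelace"},
#     )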
async def aupdate_or_create(self, defaults=None, create_defaults=None, **kwargs):
return await sync_to_async(self.update_or_create)(
defaults=defaults,
create_defaults=create_defaults,
**kwargs,
)
def _extract_model_params(self, defaults, **kwargs):
"""
Prepare `params` for creating a model instance based on the given
kwargs; for use by get_or_create().
"""
defaults = defaults or {}
params = {k: v for k, v in kwargs.items() if LOOKUP_SEP not in k}
params.update(defaults)
property_names = self.model._meta._property_names
invalid_params = []
for param in params:
try:
self.model._meta.get_field(param)
except exceptions.FieldDoesNotExist:
# It's okay to use a model's property if it has a setter.
if not (param in property_names and getattr(self.model, param).fset):
invalid_params.append(param)
if invalid_params:
raise exceptions.FieldError(
"Invalid field name(s) for model %s: '%s'."
% (
self.model._meta.object_name,
"', '".join(sorted(invalid_params)),
)
)
return params
def _earliest(self, *fields):
"""
Return the earliest object according to fields (if given) or by the
model's Meta.get_latest_by.
"""
if fields:
order_by = fields
else:
order_by = getattr(self.model._meta, "get_latest_by")
if order_by and not isinstance(order_by, (tuple, list)):
order_by = (order_by,)
if order_by is None:
raise ValueError(
"earliest() and latest() require either fields as positional "
"arguments or 'get_latest_by' in the model's Meta."
)
obj = self._chain()
obj.query.set_limits(high=1)
obj.query.clear_ordering(force=True)
obj.query.add_ordering(*order_by)
return obj.get()
def earliest(self, *fields):
if self.query.is_sliced:
raise TypeError("Cannot change a query once a slice has been taken.")
return self._earliest(*fields)
async def aearliest(self, *fields):
return await sync_to_async(self.earliest)(*fields)
def latest(self, *fields):
"""
Return the latest object according to fields (if given) or by the
model's Meta.get_latest_by.
"""
if self.query.is_sliced:
raise TypeError("Cannot change a query once a slice has been taken.")
return self.reverse()._earliest(*fields)
async def alatest(self, *fields):
return await sync_to_async(self.latest)(*fields)
def first(self):
"""Return the first object of a query or None if no match is found."""
if self.ordered:
queryset = self
else:
self._check_ordering_first_last_queryset_aggregation(method="first")
queryset = self.order_by("pk")
for obj in queryset[:1]:
return obj
async def afirst(self):
return await sync_to_async(self.first)()
def last(self):
"""Return the last object of a query or None if no match is found."""
if self.ordered:
queryset = self.reverse()
else:
self._check_ordering_first_last_queryset_aggregation(method="last")
queryset = self.order_by("-pk")
for obj in queryset[:1]:
return obj
async def alast(self):
return await sync_to_async(self.last)()
def in_bulk(self, id_list=None, *, field_name="pk"):
"""
Return a dictionary mapping each of the given IDs to the object with
that ID. If `id_list` isn't provided, evaluate the entire QuerySet.
"""
if self.query.is_sliced:
raise TypeError("Cannot use 'limit' or 'offset' with in_bulk().")
opts = self.model._meta
unique_fields = [
constraint.fields[0]
for constraint in opts.total_unique_constraints
if len(constraint.fields) == 1
]
if (
field_name != "pk"
and not opts.get_field(field_name).unique
and field_name not in unique_fields
and self.query.distinct_fields != (field_name,)
):
raise ValueError(
"in_bulk()'s field_name must be a unique field but %r isn't."
% field_name
)
if id_list is not None:
if not id_list:
return {}
filter_key = "{}__in".format(field_name)
batch_size = connections[self.db].features.max_query_params
id_list = tuple(id_list)
# If the database has a limit on the number of query parameters
# (e.g. SQLite), retrieve objects in batches if necessary.
if batch_size and batch_size < len(id_list):
qs = ()
for offset in range(0, len(id_list), batch_size):
batch = id_list[offset : offset + batch_size]
qs += tuple(self.filter(**{filter_key: batch}))
else:
qs = self.filter(**{filter_key: id_list})
else:
qs = self._chain()
return {getattr(obj, field_name): obj for obj in qs}
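# Illustrative sketch (assumes a hypothetical ``Entry`` model with a
# unique ``slug`` field):
#
#     Entry.objects.in_bulk([1, 2])                    # {1: <Entry>, 2: <Entry>}
#     Entry.objects.in_bulk(["a"], field_name="slug")  # {"a": <Entry>}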
async def ain_bulk(self, id_list=None, *, field_name="pk"):
return await sync_to_async(self.in_bulk)(
id_list=id_list,
field_name=field_name,
)
def delete(self):
"""Delete the records in the current QuerySet."""
self._not_support_combined_queries("delete")
if self.query.is_sliced:
raise TypeError("Cannot use 'limit' or 'offset' with delete().")
if self.query.distinct_fields:
raise TypeError("Cannot call delete() after .distinct(*fields).")
if self._fields is not None:
raise TypeError("Cannot call delete() after .values() or .values_list()")
del_query = self._chain()
# The delete is actually 2 queries - one to find related objects,
# and one to delete. Make sure that the discovery of related
# objects is performed on the same database as the deletion.
del_query._for_write = True
# Disable non-supported fields.
del_query.query.select_for_update = False
del_query.query.select_related = False
del_query.query.clear_ordering(force=True)
collector = Collector(using=del_query.db, origin=self)
collector.collect(del_query)
deleted, _rows_count = collector.delete()
# Clear the result cache, in case this QuerySet gets reused.
self._result_cache = None
return deleted, _rows_count
delete.alters_data = True
delete.queryset_only = True
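# Illustrative sketch (assumes a hypothetical ``Entry`` model): delete()
# returns the total number of rows removed and a per-model breakdown,
# including cascaded deletions:
#
#     deleted, per_model = Entry.objects.filter(rating=0).delete()
#     # e.g. (3, {"app.Entry": 3}) for a hypothetical app label "app"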
async def adelete(self):
return await sync_to_async(self.delete)()
adelete.alters_data = True
adelete.queryset_only = True
def _raw_delete(self, using):
"""
Delete objects found from the given queryset in a single direct SQL
query. No signals are sent, and there is no protection for cascades.
"""
query = self.query.clone()
query.__class__ = sql.DeleteQuery
cursor = query.get_compiler(using).execute_sql(CURSOR)
if cursor:
with cursor:
return cursor.rowcount
return 0
_raw_delete.alters_data = True
def update(self, **kwargs):
"""
Update all elements in the current QuerySet, setting all the given
fields to the appropriate values.
"""
self._not_support_combined_queries("update")
if self.query.is_sliced:
raise TypeError("Cannot update a query once a slice has been taken.")
self._for_write = True
query = self.query.chain(sql.UpdateQuery)
query.add_update_values(kwargs)
# Inline annotations in order_by(), if possible.
new_order_by = []
for col in query.order_by:
alias = col
descending = False
if isinstance(alias, str) and alias.startswith("-"):
alias = alias.removeprefix("-")
descending = True
if annotation := query.annotations.get(alias):
if getattr(annotation, "contains_aggregate", False):
raise exceptions.FieldError(
f"Cannot update when ordering by an aggregate: {annotation}"
)
if descending:
annotation = annotation.desc()
new_order_by.append(annotation)
else:
new_order_by.append(col)
query.order_by = tuple(new_order_by)
# Clear any annotations so that they won't be present in subqueries.
query.annotations = {}
with transaction.mark_for_rollback_on_error(using=self.db):
rows = query.get_compiler(self.db).execute_sql(CURSOR)
self._result_cache = None
return rows
update.alters_data = True
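# Illustrative sketch (assumes a hypothetical ``Entry`` model): update()
# runs a single UPDATE and returns the number of matched rows, without
# calling save() or sending signals:
#
#     Entry.objects.filter(draft=True).update(published=False)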
async def aupdate(self, **kwargs):
return await sync_to_async(self.update)(**kwargs)
aupdate.alters_data = True
def _update(self, values):
"""
A version of update() that accepts field objects instead of field names.
Used primarily for model saving and not intended for use by general
code (it requires too much poking around at model internals to be
useful at that level).
"""
if self.query.is_sliced:
raise TypeError("Cannot update a query once a slice has been taken.")
query = self.query.chain(sql.UpdateQuery)
query.add_update_fields(values)
# Clear any annotations so that they won't be present in subqueries.
query.annotations = {}
self._result_cache = None
return query.get_compiler(self.db).execute_sql(CURSOR)
_update.alters_data = True
_update.queryset_only = False
def exists(self):
"""
Return True if the QuerySet would have any results, False otherwise.
"""
if self._result_cache is None:
return self.query.has_results(using=self.db)
return bool(self._result_cache)
async def aexists(self):
return await sync_to_async(self.exists)()
def contains(self, obj):
"""
Return True if the QuerySet contains the provided obj,
False otherwise.
"""
self._not_support_combined_queries("contains")
if self._fields is not None:
raise TypeError(
"Cannot call QuerySet.contains() after .values() or .values_list()."
)
try:
if obj._meta.concrete_model != self.model._meta.concrete_model:
return False
except AttributeError:
raise TypeError("'obj' must be a model instance.")
if obj.pk is None:
raise ValueError("QuerySet.contains() cannot be used on unsaved objects.")
if self._result_cache is not None:
return obj in self._result_cache
return self.filter(pk=obj.pk).exists()
async def acontains(self, obj):
return await sync_to_async(self.contains)(obj=obj)
def _prefetch_related_objects(self):
# This method can only be called once the result cache has been filled.
prefetch_related_objects(self._result_cache, *self._prefetch_related_lookups)
self._prefetch_done = True
def explain(self, *, format=None, **options):
"""
Run an EXPLAIN on the SQL query this QuerySet would perform, and
return the results.
"""
return self.query.explain(using=self.db, format=format, **options)
async def aexplain(self, *, format=None, **options):
return await sync_to_async(self.explain)(format=format, **options)
##################################################
# PUBLIC METHODS THAT RETURN A QUERYSET SUBCLASS #
##################################################
def raw(self, raw_query, params=(), translations=None, using=None):
if using is None:
using = self.db
qs = RawQuerySet(
raw_query,
model=self.model,
params=params,
translations=translations,
using=using,
)
qs._prefetch_related_lookups = self._prefetch_related_lookups[:]
return qs
def _values(self, *fields, **expressions):
clone = self._chain()
if expressions:
clone = clone.annotate(**expressions)
clone._fields = fields
clone.query.set_values(fields)
return clone
def values(self, *fields, **expressions):
fields += tuple(expressions)
clone = self._values(*fields, **expressions)
clone._iterable_class = ValuesIterable
return clone
def values_list(self, *fields, flat=False, named=False):
if flat and named:
raise TypeError("'flat' and 'named' can't be used together.")
if flat and len(fields) > 1:
raise TypeError(
"'flat' is not valid when values_list is called with more than one "
"field."
)
field_names = {f for f in fields if not hasattr(f, "resolve_expression")}
_fields = []
expressions = {}
counter = 1
for field in fields:
if hasattr(field, "resolve_expression"):
field_id_prefix = getattr(
field, "default_alias", field.__class__.__name__.lower()
)
while True:
field_id = field_id_prefix + str(counter)
counter += 1
if field_id not in field_names:
break
expressions[field_id] = field
_fields.append(field_id)
else:
_fields.append(field)
clone = self._values(*_fields, **expressions)
clone._iterable_class = (
NamedValuesListIterable
if named
else FlatValuesListIterable
if flat
else ValuesListIterable
)
return clone
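# Illustrative sketch (assumes a hypothetical ``Entry`` model):
#
#     Entry.objects.values_list("id", "title")       # [(1, "First"), ...]
#     Entry.objects.values_list("id", flat=True)     # [1, 2, ...]
#     Entry.objects.values_list("id", named=True)    # [Row(id=1), ...]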
def dates(self, field_name, kind, order="ASC"):
"""
Return a list of date objects representing all available dates for
the given field_name, scoped to 'kind'.
"""
if kind not in ("year", "month", "week", "day"):
raise ValueError("'kind' must be one of 'year', 'month', 'week', or 'day'.")
if order not in ("ASC", "DESC"):
raise ValueError("'order' must be either 'ASC' or 'DESC'.")
return (
self.annotate(
datefield=Trunc(field_name, kind, output_field=DateField()),
plain_field=F(field_name),
)
.values_list("datefield", flat=True)
.distinct()
.filter(plain_field__isnull=False)
.order_by(("-" if order == "DESC" else "") + "datefield")
)
def datetimes(self, field_name, kind, order="ASC", tzinfo=None):
"""
Return a list of datetime objects representing all available
datetimes for the given field_name, scoped to 'kind'.
"""
if kind not in ("year", "month", "week", "day", "hour", "minute", "second"):
raise ValueError(
"'kind' must be one of 'year', 'month', 'week', 'day', "
"'hour', 'minute', or 'second'."
)
if order not in ("ASC", "DESC"):
raise ValueError("'order' must be either 'ASC' or 'DESC'.")
if settings.USE_TZ:
if tzinfo is None:
tzinfo = timezone.get_current_timezone()
else:
tzinfo = None
return (
self.annotate(
datetimefield=Trunc(
field_name,
kind,
output_field=DateTimeField(),
tzinfo=tzinfo,
),
plain_field=F(field_name),
)
.values_list("datetimefield", flat=True)
.distinct()
.filter(plain_field__isnull=False)
.order_by(("-" if order == "DESC" else "") + "datetimefield")
)
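# Illustrative sketch (assumes a hypothetical ``Entry`` model with a
# ``pub_date`` datetime field):
#
#     Entry.objects.dates("pub_date", "month")                  # date objects
#     Entry.objects.datetimes("pub_date", "hour", order="DESC") # datetimes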
def none(self):
"""Return an empty QuerySet."""
clone = self._chain()
clone.query.set_empty()
return clone
##################################################################
# PUBLIC METHODS THAT ALTER ATTRIBUTES AND RETURN A NEW QUERYSET #
##################################################################
def all(self):
"""
Return a new QuerySet that is a copy of the current one. This allows a
QuerySet to proxy for a model manager in some cases.
"""
return self._chain()
def filter(self, *args, **kwargs):
"""
Return a new QuerySet instance with the args ANDed to the existing
set.
"""
self._not_support_combined_queries("filter")
return self._filter_or_exclude(False, args, kwargs)
def exclude(self, *args, **kwargs):
"""
Return a new QuerySet instance with NOT (args) ANDed to the existing
set.
"""
self._not_support_combined_queries("exclude")
return self._filter_or_exclude(True, args, kwargs)
def _filter_or_exclude(self, negate, args, kwargs):
if (args or kwargs) and self.query.is_sliced:
raise TypeError("Cannot filter a query once a slice has been taken.")
clone = self._chain()
if self._defer_next_filter:
self._defer_next_filter = False
clone._deferred_filter = negate, args, kwargs
else:
clone._filter_or_exclude_inplace(negate, args, kwargs)
return clone
def _filter_or_exclude_inplace(self, negate, args, kwargs):
if negate:
self._query.add_q(~Q(*args, **kwargs))
else:
self._query.add_q(Q(*args, **kwargs))
def complex_filter(self, filter_obj):
"""
Return a new QuerySet instance with filter_obj added to the filters.
filter_obj can be a Q object or a dictionary of keyword lookup
arguments.
This exists to support framework features such as 'limit_choices_to',
and usually it will be more natural to use other methods.
"""
if isinstance(filter_obj, Q):
clone = self._chain()
clone.query.add_q(filter_obj)
return clone
else:
return self._filter_or_exclude(False, args=(), kwargs=filter_obj)
def _combinator_query(self, combinator, *other_qs, all=False):
# Clone the query to inherit the select list and everything
clone = self._chain()
# Clear limits and ordering so they can be reapplied
clone.query.clear_ordering(force=True)
clone.query.clear_limits()
clone.query.combined_queries = (self.query,) + tuple(
qs.query for qs in other_qs
)
clone.query.combinator = combinator
clone.query.combinator_all = all
return clone
def union(self, *other_qs, all=False):
# If the query is an EmptyQuerySet, combine all nonempty querysets.
if isinstance(self, EmptyQuerySet):
qs = [q for q in other_qs if not isinstance(q, EmptyQuerySet)]
if not qs:
return self
if len(qs) == 1:
return qs[0]
return qs[0]._combinator_query("union", *qs[1:], all=all)
return self._combinator_query("union", *other_qs, all=all)
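# Illustrative sketch (assumes a hypothetical ``Entry`` model):
#
#     qs1 = Entry.objects.filter(rating=5)
#     qs2 = Entry.objects.filter(featured=True)
#     qs1.union(qs2)             # SELECT ... UNION SELECT ...
#     qs1.union(qs2, all=True)   # UNION ALL keeps duplicate rows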
def intersection(self, *other_qs):
# If any query is an EmptyQuerySet, return it.
if isinstance(self, EmptyQuerySet):
return self
for other in other_qs:
if isinstance(other, EmptyQuerySet):
return other
return self._combinator_query("intersection", *other_qs)
def difference(self, *other_qs):
# If the query is an EmptyQuerySet, return it.
if isinstance(self, EmptyQuerySet):
return self
return self._combinator_query("difference", *other_qs)
def select_for_update(self, nowait=False, skip_locked=False, of=(), no_key=False):
"""
Return a new QuerySet instance that will select objects with a
FOR UPDATE lock.
"""
if nowait and skip_locked:
raise ValueError("The nowait option cannot be used with skip_locked.")
obj = self._chain()
obj._for_write = True
obj.query.select_for_update = True
obj.query.select_for_update_nowait = nowait
obj.query.select_for_update_skip_locked = skip_locked
obj.query.select_for_update_of = of
obj.query.select_for_no_key_update = no_key
return obj
def select_related(self, *fields):
"""
Return a new QuerySet instance that will select related objects.
If fields are specified, they must be ForeignKey fields and only those
related objects are included in the selection.
If select_related(None) is called, clear the list.
"""
self._not_support_combined_queries("select_related")
if self._fields is not None:
raise TypeError(
"Cannot call select_related() after .values() or .values_list()"
)
obj = self._chain()
if fields == (None,):
obj.query.select_related = False
elif fields:
obj.query.add_select_related(fields)
else:
obj.query.select_related = True
return obj
def prefetch_related(self, *lookups):
"""
Return a new QuerySet instance that will prefetch the specified
Many-To-One and Many-To-Many related objects when the QuerySet is
evaluated.
When prefetch_related() is called more than once, append to the list of
prefetch lookups. If prefetch_related(None) is called, clear the list.
"""
self._not_support_combined_queries("prefetch_related")
clone = self._chain()
if lookups == (None,):
clone._prefetch_related_lookups = ()
else:
for lookup in lookups:
if isinstance(lookup, Prefetch):
lookup = lookup.prefetch_to
lookup = lookup.split(LOOKUP_SEP, 1)[0]
if lookup in self.query._filtered_relations:
raise ValueError(
"prefetch_related() is not supported with FilteredRelation."
)
clone._prefetch_related_lookups = clone._prefetch_related_lookups + lookups
return clone
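# Illustrative sketch (assumes hypothetical ``Author``/``Book`` models with
# a ForeignKey from Book to Author): select_related() follows single-valued
# relations via a JOIN in the same query, while prefetch_related() issues
# one extra query per multi-valued relation:
#
#     Book.objects.select_related("author")
#     Author.objects.prefetch_related("book_set")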
def annotate(self, *args, **kwargs):
"""
Return a query set in which the returned objects have been annotated
with extra data or aggregations.
"""
self._not_support_combined_queries("annotate")
return self._annotate(args, kwargs, select=True)
def alias(self, *args, **kwargs):
"""
Return a query set with added aliases for extra data or aggregations.
"""
self._not_support_combined_queries("alias")
return self._annotate(args, kwargs, select=False)
def _annotate(self, args, kwargs, select=True):
self._validate_values_are_expressions(
args + tuple(kwargs.values()), method_name="annotate"
)
annotations = {}
for arg in args:
# The default_alias property may raise a TypeError.
try:
if arg.default_alias in kwargs:
raise ValueError(
"The named annotation '%s' conflicts with the "
"default name for another annotation." % arg.default_alias
)
except TypeError:
raise TypeError("Complex annotations require an alias")
annotations[arg.default_alias] = arg
annotations.update(kwargs)
clone = self._chain()
names = self._fields
if names is None:
names = set(
chain.from_iterable(
(field.name, field.attname)
if hasattr(field, "attname")
else (field.name,)
for field in self.model._meta.get_fields()
)
)
for alias, annotation in annotations.items():
if alias in names:
raise ValueError(
"The annotation '%s' conflicts with a field on "
"the model." % alias
)
if isinstance(annotation, FilteredRelation):
clone.query.add_filtered_relation(annotation, alias)
else:
clone.query.add_annotation(
annotation,
alias,
select=select,
)
for alias, annotation in clone.query.annotations.items():
if alias in annotations and annotation.contains_aggregate:
if clone._fields is None:
clone.query.group_by = True
else:
clone.query.set_group_by()
break
return clone
def order_by(self, *field_names):
"""Return a new QuerySet instance with the ordering changed."""
if self.query.is_sliced:
raise TypeError("Cannot reorder a query once a slice has been taken.")
obj = self._chain()
obj.query.clear_ordering(force=True, clear_default=False)
obj.query.add_ordering(*field_names)
return obj
def distinct(self, *field_names):
"""
Return a new QuerySet instance that will select only distinct results.
"""
self._not_support_combined_queries("distinct")
if self.query.is_sliced:
raise TypeError(
"Cannot create distinct fields once a slice has been taken."
)
obj = self._chain()
obj.query.add_distinct_fields(*field_names)
return obj
def extra(
self,
select=None,
where=None,
params=None,
tables=None,
order_by=None,
select_params=None,
):
"""Add extra SQL fragments to the query."""
self._not_support_combined_queries("extra")
if self.query.is_sliced:
raise TypeError("Cannot change a query once a slice has been taken.")
clone = self._chain()
clone.query.add_extra(select, select_params, where, params, tables, order_by)
return clone
def reverse(self):
"""Reverse the ordering of the QuerySet."""
if self.query.is_sliced:
raise TypeError("Cannot reverse a query once a slice has been taken.")
clone = self._chain()
clone.query.standard_ordering = not clone.query.standard_ordering
return clone
def defer(self, *fields):
"""
Defer the loading of data for certain fields until they are accessed.
Add the set of deferred fields to any existing set of deferred fields.
The only exception to this is if None is passed in as the only
parameter, in which case remove all deferrals.
"""
self._not_support_combined_queries("defer")
if self._fields is not None:
raise TypeError("Cannot call defer() after .values() or .values_list()")
clone = self._chain()
if fields == (None,):
clone.query.clear_deferred_loading()
else:
clone.query.add_deferred_loading(fields)
return clone
def only(self, *fields):
"""
Essentially, the opposite of defer(). Only the fields passed into this
method and that are not already specified as deferred are loaded
immediately when the queryset is evaluated.
"""
self._not_support_combined_queries("only")
if self._fields is not None:
raise TypeError("Cannot call only() after .values() or .values_list()")
if fields == (None,):
# Can only pass None to defer(), not only(), as the rest option.
# That won't stop people trying to do this, so let's be explicit.
raise TypeError("Cannot pass None as an argument to only().")
for field in fields:
field = field.split(LOOKUP_SEP, 1)[0]
if field in self.query._filtered_relations:
raise ValueError("only() is not supported with FilteredRelation.")
clone = self._chain()
clone.query.add_immediate_loading(fields)
return clone
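# Illustrative sketch (assumes a hypothetical ``Entry`` model):
#
#     Entry.objects.defer("body")         # load everything except ``body``
#     Entry.objects.only("pk", "title")   # load just these; defer the rest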
def using(self, alias):
"""Select which database this QuerySet should execute against."""
clone = self._chain()
clone._db = alias
return clone
###################################
# PUBLIC INTROSPECTION ATTRIBUTES #
###################################
@property
def ordered(self):
"""
Return True if the QuerySet is ordered -- i.e. has an order_by()
clause or a default ordering on the model (or is empty).
"""
if isinstance(self, EmptyQuerySet):
return True
if self.query.extra_order_by or self.query.order_by:
return True
elif (
self.query.default_ordering
and self.query.get_meta().ordering
and
# A default ordering doesn't affect GROUP BY queries.
not self.query.group_by
):
return True
else:
return False
@property
def db(self):
"""Return the database used if this query is executed now."""
if self._for_write:
return self._db or router.db_for_write(self.model, **self._hints)
return self._db or router.db_for_read(self.model, **self._hints)
###################
# PRIVATE METHODS #
###################
def _insert(
self,
objs,
fields,
returning_fields=None,
raw=False,
using=None,
on_conflict=None,
update_fields=None,
unique_fields=None,
):
"""
Insert a new record for the given model. This provides an interface to
the InsertQuery class and is how Model.save() is implemented.
"""
self._for_write = True
if using is None:
using = self.db
query = sql.InsertQuery(
self.model,
on_conflict=on_conflict,
update_fields=update_fields,
unique_fields=unique_fields,
)
query.insert_values(fields, objs, raw=raw)
return query.get_compiler(using=using).execute_sql(returning_fields)
_insert.alters_data = True
_insert.queryset_only = False
def _batched_insert(
self,
objs,
fields,
batch_size,
on_conflict=None,
update_fields=None,
unique_fields=None,
):
"""
Helper method for bulk_create() to insert objs one batch at a time.
"""
connection = connections[self.db]
ops = connection.ops
max_batch_size = max(ops.bulk_batch_size(fields, objs), 1)
batch_size = min(batch_size, max_batch_size) if batch_size else max_batch_size
inserted_rows = []
bulk_return = connection.features.can_return_rows_from_bulk_insert
for item in [objs[i : i + batch_size] for i in range(0, len(objs), batch_size)]:
if bulk_return and on_conflict is None:
inserted_rows.extend(
self._insert(
item,
fields=fields,
using=self.db,
returning_fields=self.model._meta.db_returning_fields,
)
)
else:
self._insert(
item,
fields=fields,
using=self.db,
on_conflict=on_conflict,
update_fields=update_fields,
unique_fields=unique_fields,
)
return inserted_rows
def _chain(self):
"""
Return a copy of the current QuerySet that's ready for another
operation.
"""
obj = self._clone()
if obj._sticky_filter:
obj.query.filter_is_sticky = True
obj._sticky_filter = False
return obj
def _clone(self):
"""
Return a copy of the current QuerySet. A lightweight alternative
to deepcopy().
"""
c = self.__class__(
model=self.model,
query=self.query.chain(),
using=self._db,
hints=self._hints,
)
c._sticky_filter = self._sticky_filter
c._for_write = self._for_write
c._prefetch_related_lookups = self._prefetch_related_lookups[:]
c._known_related_objects = self._known_related_objects
c._iterable_class = self._iterable_class
c._fields = self._fields
return c
def _fetch_all(self):
if self._result_cache is None:
self._result_cache = list(self._iterable_class(self))
if self._prefetch_related_lookups and not self._prefetch_done:
self._prefetch_related_objects()
def _next_is_sticky(self):
"""
Indicate that the next filter call and the one following that should
be treated as a single filter. This is only important when it comes to
determining when to reuse tables for many-to-many filters. Required so
that we can filter naturally on the results of related managers.
This doesn't return a clone of the current QuerySet (it returns
"self"). The method is only used internally and should be immediately
followed by a filter() that does create a clone.
"""
self._sticky_filter = True
return self
def _merge_sanity_check(self, other):
"""Check that two QuerySet classes may be merged."""
if self._fields is not None and (
set(self.query.values_select) != set(other.query.values_select)
or set(self.query.extra_select) != set(other.query.extra_select)
or set(self.query.annotation_select) != set(other.query.annotation_select)
):
raise TypeError(
"Merging '%s' classes must involve the same values in each case."
% self.__class__.__name__
)
def _merge_known_related_objects(self, other):
"""
Keep track of all known related objects from either QuerySet instance.
"""
for field, objects in other._known_related_objects.items():
self._known_related_objects.setdefault(field, {}).update(objects)
def resolve_expression(self, *args, **kwargs):
if self._fields and len(self._fields) > 1:
# A values() queryset can only be used as a nested query
# if it is set up to select only a single field.
raise TypeError("Cannot use multi-field values as a filter value.")
query = self.query.resolve_expression(*args, **kwargs)
query._db = self._db
return query
resolve_expression.queryset_only = True
def _add_hints(self, **hints):
"""
Update hinting information for use by routers. Add new key/values or
overwrite existing key/values.
"""
self._hints.update(hints)
def _has_filters(self):
"""
Check if this QuerySet has any filtering going on. This isn't
equivalent to checking if all objects are present in the results; for
example, qs[1:]._has_filters() -> False.
"""
return self.query.has_filters()
@staticmethod
def _validate_values_are_expressions(values, method_name):
invalid_args = sorted(
str(arg) for arg in values if not hasattr(arg, "resolve_expression")
)
if invalid_args:
raise TypeError(
"QuerySet.%s() received non-expression(s): %s."
% (
method_name,
", ".join(invalid_args),
)
)
def _not_support_combined_queries(self, operation_name):
if self.query.combinator:
raise NotSupportedError(
"Calling QuerySet.%s() after %s() is not supported."
% (operation_name, self.query.combinator)
)
def _check_operator_queryset(self, other, operator_):
if self.query.combinator or other.query.combinator:
raise TypeError(f"Cannot use {operator_} operator with combined queryset.")
def _check_ordering_first_last_queryset_aggregation(self, method):
if isinstance(self.query.group_by, tuple) and not any(
col.output_field is self.model._meta.pk for col in self.query.group_by
):
raise TypeError(
f"Cannot use QuerySet.{method}() on an unordered queryset performing "
f"aggregation. Add an ordering with order_by()."
)
class InstanceCheckMeta(type):
def __instancecheck__(self, instance):
return isinstance(instance, QuerySet) and instance.query.is_empty()
class EmptyQuerySet(metaclass=InstanceCheckMeta):
"""
Marker class for checking whether a queryset is empty, as produced by .none():
isinstance(qs.none(), EmptyQuerySet) -> True
"""
def __init__(self, *args, **kwargs):
raise TypeError("EmptyQuerySet can't be instantiated")
class RawQuerySet:
"""
Provide an iterator which converts the results of raw SQL queries into
annotated model instances.
"""
def __init__(
self,
raw_query,
model=None,
query=None,
params=(),
translations=None,
using=None,
hints=None,
):
self.raw_query = raw_query
self.model = model
self._db = using
self._hints = hints or {}
self.query = query or sql.RawQuery(sql=raw_query, using=self.db, params=params)
self.params = params
self.translations = translations or {}
self._result_cache = None
self._prefetch_related_lookups = ()
self._prefetch_done = False
def resolve_model_init_order(self):
"""Resolve the init field names and value positions."""
converter = connections[self.db].introspection.identifier_converter
model_init_fields = [
f for f in self.model._meta.fields if converter(f.column) in self.columns
]
annotation_fields = [
(column, pos)
for pos, column in enumerate(self.columns)
if column not in self.model_fields
]
model_init_order = [
self.columns.index(converter(f.column)) for f in model_init_fields
]
model_init_names = [f.attname for f in model_init_fields]
return model_init_names, model_init_order, annotation_fields
def prefetch_related(self, *lookups):
"""Same as QuerySet.prefetch_related()"""
clone = self._clone()
if lookups == (None,):
clone._prefetch_related_lookups = ()
else:
clone._prefetch_related_lookups = clone._prefetch_related_lookups + lookups
return clone
def _prefetch_related_objects(self):
prefetch_related_objects(self._result_cache, *self._prefetch_related_lookups)
self._prefetch_done = True
def _clone(self):
"""Same as QuerySet._clone()"""
c = self.__class__(
self.raw_query,
model=self.model,
query=self.query,
params=self.params,
translations=self.translations,
using=self._db,
hints=self._hints,
)
c._prefetch_related_lookups = self._prefetch_related_lookups[:]
return c
def _fetch_all(self):
if self._result_cache is None:
self._result_cache = list(self.iterator())
if self._prefetch_related_lookups and not self._prefetch_done:
self._prefetch_related_objects()
def __len__(self):
self._fetch_all()
return len(self._result_cache)
def __bool__(self):
self._fetch_all()
return bool(self._result_cache)
def __iter__(self):
self._fetch_all()
return iter(self._result_cache)
def __aiter__(self):
# Remember: __aiter__ itself is synchronous; it's the thing it returns
# that is async!
async def generator():
await sync_to_async(self._fetch_all)()
for item in self._result_cache:
yield item
return generator()
def iterator(self):
yield from RawModelIterable(self)
def __repr__(self):
return "<%s: %s>" % (self.__class__.__name__, self.query)
def __getitem__(self, k):
return list(self)[k]
@property
def db(self):
"""Return the database used if this query is executed now."""
return self._db or router.db_for_read(self.model, **self._hints)
def using(self, alias):
"""Select the database this RawQuerySet should execute against."""
return RawQuerySet(
self.raw_query,
model=self.model,
query=self.query.chain(using=alias),
params=self.params,
translations=self.translations,
using=alias,
)
@cached_property
def columns(self):
"""
A list of model field names in the order they'll appear in the
query results.
"""
columns = self.query.get_columns()
# Adjust any column names which don't match field names
for query_name, model_name in self.translations.items():
# Ignore translations for nonexistent column names
try:
index = columns.index(query_name)
except ValueError:
pass
else:
columns[index] = model_name
return columns
@cached_property
def model_fields(self):
"""A dict mapping column names to model field names."""
converter = connections[self.db].introspection.identifier_converter
model_fields = {}
for field in self.model._meta.fields:
name, column = field.get_attname_column()
model_fields[converter(column)] = field
return model_fields
class Prefetch:
def __init__(self, lookup, queryset=None, to_attr=None):
# `prefetch_through` is the path we traverse to perform the prefetch.
self.prefetch_through = lookup
# `prefetch_to` is the path to the attribute that stores the result.
self.prefetch_to = lookup
if queryset is not None and (
isinstance(queryset, RawQuerySet)
or (
hasattr(queryset, "_iterable_class")
and not issubclass(queryset._iterable_class, ModelIterable)
)
):
raise ValueError(
"Prefetch querysets cannot use raw(), values(), and values_list()."
)
if to_attr:
self.prefetch_to = LOOKUP_SEP.join(
lookup.split(LOOKUP_SEP)[:-1] + [to_attr]
)
self.queryset = queryset
self.to_attr = to_attr
def __getstate__(self):
obj_dict = self.__dict__.copy()
if self.queryset is not None:
queryset = self.queryset._chain()
# Prevent the QuerySet from being evaluated
queryset._result_cache = []
queryset._prefetch_done = True
obj_dict["queryset"] = queryset
return obj_dict
def add_prefix(self, prefix):
self.prefetch_through = prefix + LOOKUP_SEP + self.prefetch_through
self.prefetch_to = prefix + LOOKUP_SEP + self.prefetch_to
def get_current_prefetch_to(self, level):
return LOOKUP_SEP.join(self.prefetch_to.split(LOOKUP_SEP)[: level + 1])
def get_current_to_attr(self, level):
parts = self.prefetch_to.split(LOOKUP_SEP)
to_attr = parts[level]
as_attr = self.to_attr and level == len(parts) - 1
return to_attr, as_attr
def get_current_queryset(self, level):
if self.get_current_prefetch_to(level) == self.prefetch_to:
return self.queryset
return None
def __eq__(self, other):
if not isinstance(other, Prefetch):
return NotImplemented
return self.prefetch_to == other.prefetch_to
def __hash__(self):
return hash((self.__class__, self.prefetch_to))
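# Illustrative sketch (assumes hypothetical ``Author``/``Book`` models):
# Prefetch customizes the queryset used for a lookup and, via ``to_attr``,
# stores the result as a plain list on a separate attribute:
#
#     Author.objects.prefetch_related(
#         Prefetch(
#             "book_set",
#             queryset=Book.objects.filter(published=True),
#             to_attr="published_books",
#         )
#     )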
def normalize_prefetch_lookups(lookups, prefix=None):
"""Normalize lookups into Prefetch objects."""
ret = []
for lookup in lookups:
if not isinstance(lookup, Prefetch):
lookup = Prefetch(lookup)
if prefix:
lookup.add_prefix(prefix)
ret.append(lookup)
return ret
def prefetch_related_objects(model_instances, *related_lookups):
"""
Populate prefetched object caches for a list of model instances based on
the lookups/Prefetch instances given.
"""
if not model_instances:
return # nothing to do
# We need to be able to dynamically add to the list of prefetch_related
# lookups that we look up (see below). So we need some bookkeeping to
# ensure we don't do duplicate work.
done_queries = {} # dictionary of things like 'foo__bar': [results]
auto_lookups = set() # we add to this as we go through.
followed_descriptors = set() # recursion protection
all_lookups = normalize_prefetch_lookups(reversed(related_lookups))
while all_lookups:
lookup = all_lookups.pop()
if lookup.prefetch_to in done_queries:
if lookup.queryset is not None:
raise ValueError(
"'%s' lookup was already seen with a different queryset. "
"You may need to adjust the ordering of your lookups."
% lookup.prefetch_to
)
continue
# At the top level, the list of objects to decorate is the result cache
# from the primary QuerySet. It won't be for deeper levels.
obj_list = model_instances
through_attrs = lookup.prefetch_through.split(LOOKUP_SEP)
for level, through_attr in enumerate(through_attrs):
# Prepare main instances
if not obj_list:
break
prefetch_to = lookup.get_current_prefetch_to(level)
if prefetch_to in done_queries:
# Skip any prefetching, and any object preparation
obj_list = done_queries[prefetch_to]
continue
# Prepare objects:
good_objects = True
for obj in obj_list:
# Since prefetching can re-use instances, it is possible to have
# the same instance multiple times in obj_list, so obj might
# already be prepared.
if not hasattr(obj, "_prefetched_objects_cache"):
try:
obj._prefetched_objects_cache = {}
except (AttributeError, TypeError):
# Must be an immutable object from
# values_list(flat=True), for example (TypeError) or
# a QuerySet subclass that isn't returning Model
# instances (AttributeError), either in Django or a 3rd
# party. prefetch_related() doesn't make sense, so quit.
good_objects = False
break
if not good_objects:
break
# Descend down tree
# We assume that objects retrieved are homogeneous (which is the premise
# of prefetch_related), so what applies to the first object applies to all.
first_obj = obj_list[0]
to_attr = lookup.get_current_to_attr(level)[0]
prefetcher, descriptor, attr_found, is_fetched = get_prefetcher(
first_obj, through_attr, to_attr
)
if not attr_found:
raise AttributeError(
"Cannot find '%s' on %s object, '%s' is an invalid "
"parameter to prefetch_related()"
% (
through_attr,
first_obj.__class__.__name__,
lookup.prefetch_through,
)
)
if level == len(through_attrs) - 1 and prefetcher is None:
# Last one, this *must* resolve to something that supports
# prefetching, otherwise there is no point adding it and the
# developer asking for it has made a mistake.
raise ValueError(
"'%s' does not resolve to an item that supports "
"prefetching - this is an invalid parameter to "
"prefetch_related()." % lookup.prefetch_through
)
obj_to_fetch = None
if prefetcher is not None:
obj_to_fetch = [obj for obj in obj_list if not is_fetched(obj)]
if obj_to_fetch:
obj_list, additional_lookups = prefetch_one_level(
obj_to_fetch,
prefetcher,
lookup,
level,
)
# We need to ensure we don't keep adding lookups from the
# same relationships to stop infinite recursion. So, if we
# are already on an automatically added lookup, don't add
# the new lookups from relationships we've seen already.
if not (
prefetch_to in done_queries
and lookup in auto_lookups
and descriptor in followed_descriptors
):
done_queries[prefetch_to] = obj_list
new_lookups = normalize_prefetch_lookups(
reversed(additional_lookups), prefetch_to
)
auto_lookups.update(new_lookups)
all_lookups.extend(new_lookups)
followed_descriptors.add(descriptor)
else:
# Either a singly related object that has already been fetched
# (e.g. via select_related), or hopefully some other property
# that doesn't support prefetching but needs to be traversed.
# We replace the current list of parent objects with the list
# of related objects, filtering out empty or missing values so
# that we can continue with nullable or reverse relations.
new_obj_list = []
for obj in obj_list:
if through_attr in getattr(obj, "_prefetched_objects_cache", ()):
# If related objects have been prefetched, use the
# cache rather than the object's through_attr.
new_obj = list(obj._prefetched_objects_cache.get(through_attr))
else:
try:
new_obj = getattr(obj, through_attr)
except exceptions.ObjectDoesNotExist:
continue
if new_obj is None:
continue
# We special-case `list` rather than something more generic
# like `Iterable` because we don't want to accidentally match
# user models that define __iter__.
if isinstance(new_obj, list):
new_obj_list.extend(new_obj)
else:
new_obj_list.append(new_obj)
obj_list = new_obj_list
def get_prefetcher(instance, through_attr, to_attr):
"""
For the attribute 'through_attr' on the given instance, find
an object that has a get_prefetch_queryset().
Return a 4-tuple containing:
(the object with get_prefetch_queryset (or None),
the descriptor object representing this relationship (or None),
a boolean that is False if the attribute was not found at all,
a function that takes an instance and returns a boolean that is True if
the attribute has already been fetched for that instance)
"""
def has_to_attr_attribute(instance):
return hasattr(instance, to_attr)
prefetcher = None
is_fetched = has_to_attr_attribute
# For singly related objects, we have to avoid getting the attribute
# from the object, as this will trigger the query. So we first try
# on the class, in order to get the descriptor object.
rel_obj_descriptor = getattr(instance.__class__, through_attr, None)
if rel_obj_descriptor is None:
attr_found = hasattr(instance, through_attr)
else:
attr_found = True
if rel_obj_descriptor:
# singly related object, descriptor object has the
# get_prefetch_queryset() method.
if hasattr(rel_obj_descriptor, "get_prefetch_queryset"):
prefetcher = rel_obj_descriptor
is_fetched = rel_obj_descriptor.is_cached
else:
# descriptor doesn't support prefetching, so we go ahead and get
# the attribute on the instance rather than the class to
# support many related managers
rel_obj = getattr(instance, through_attr)
if hasattr(rel_obj, "get_prefetch_queryset"):
prefetcher = rel_obj
if through_attr != to_attr:
# Special case cached_property instances because hasattr
# triggers attribute computation and assignment.
if isinstance(
getattr(instance.__class__, to_attr, None), cached_property
):
def has_cached_property(instance):
return to_attr in instance.__dict__
is_fetched = has_cached_property
else:
def in_prefetched_cache(instance):
return through_attr in instance._prefetched_objects_cache
is_fetched = in_prefetched_cache
return prefetcher, rel_obj_descriptor, attr_found, is_fetched
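# A minimal sketch of how the 4-tuple above is consumed (illustrative only;
# "author", "objs", and the "books" relation are hypothetical):
#
#     prefetcher, descriptor, attr_found, is_fetched = get_prefetcher(
#         author, "books", "books"
#     )
#     if attr_found and prefetcher is not None:
#         todo = [obj for obj in objs if not is_fetched(obj)]
#         # prefetch_one_level() would then batch-fetch for `todo`.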
def prefetch_one_level(instances, prefetcher, lookup, level):
"""
Helper function for prefetch_related_objects().
Run prefetches on all instances using the prefetcher object,
assigning results to relevant caches in instance.
Return the prefetched objects along with any additional prefetches that
must be done due to prefetch_related lookups found from default managers.
"""
# prefetcher must have a method get_prefetch_queryset() which takes a list
# of instances, and returns a tuple:
# (queryset of instances of self.model that are related to passed in instances,
# callable that gets value to be matched for returned instances,
# callable that gets value to be matched for passed in instances,
# boolean that is True for singly related objects,
# cache or field name to assign to,
# boolean that is True when the previous argument is a cache name vs a field name).
# The 'values to be matched' must be hashable as they will be used
# in a dictionary.
(
rel_qs,
rel_obj_attr,
instance_attr,
single,
cache_name,
is_descriptor,
) = prefetcher.get_prefetch_queryset(instances, lookup.get_current_queryset(level))
# We have to handle the possibility that the QuerySet we just got back
# contains some prefetch_related lookups. We don't want to trigger the
# prefetch_related functionality by evaluating the query. Rather, we need
# to merge in the prefetch_related lookups.
# Copy the lookups in case it is a Prefetch object which could be reused
# later (happens in nested prefetch_related).
additional_lookups = [
copy.copy(additional_lookup)
for additional_lookup in getattr(rel_qs, "_prefetch_related_lookups", ())
]
if additional_lookups:
        # Don't need to clone because the manager should have given us a fresh
        # instance, so we access an internal attribute instead of using the
        # public interface, for performance reasons.
rel_qs._prefetch_related_lookups = ()
all_related_objects = list(rel_qs)
rel_obj_cache = {}
for rel_obj in all_related_objects:
rel_attr_val = rel_obj_attr(rel_obj)
rel_obj_cache.setdefault(rel_attr_val, []).append(rel_obj)
to_attr, as_attr = lookup.get_current_to_attr(level)
# Make sure `to_attr` does not conflict with a field.
if as_attr and instances:
# We assume that objects retrieved are homogeneous (which is the premise
# of prefetch_related), so what applies to first object applies to all.
model = instances[0].__class__
try:
model._meta.get_field(to_attr)
except exceptions.FieldDoesNotExist:
pass
else:
msg = "to_attr={} conflicts with a field on the {} model."
raise ValueError(msg.format(to_attr, model.__name__))
# Whether or not we're prefetching the last part of the lookup.
leaf = len(lookup.prefetch_through.split(LOOKUP_SEP)) - 1 == level
for obj in instances:
instance_attr_val = instance_attr(obj)
vals = rel_obj_cache.get(instance_attr_val, [])
if single:
val = vals[0] if vals else None
if as_attr:
# A to_attr has been given for the prefetch.
setattr(obj, to_attr, val)
elif is_descriptor:
# cache_name points to a field name in obj.
# This field is a descriptor for a related object.
setattr(obj, cache_name, val)
else:
# No to_attr has been given for this prefetch operation and the
# cache_name does not point to a descriptor. Store the value of
# the field in the object's field cache.
obj._state.fields_cache[cache_name] = val
else:
if as_attr:
setattr(obj, to_attr, vals)
else:
manager = getattr(obj, to_attr)
if leaf and lookup.queryset is not None:
qs = manager._apply_rel_filters(lookup.queryset)
else:
qs = manager.get_queryset()
qs._result_cache = vals
# We don't want the individual qs doing prefetch_related now,
# since we have merged this into the current work.
qs._prefetch_done = True
obj._prefetched_objects_cache[cache_name] = qs
return all_related_objects, additional_lookups
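# Sketch of the contract a prefetcher must satisfy (illustrative only; a
# related manager for a reverse FK is the typical prefetcher):
#
#     rel_qs, rel_obj_attr, instance_attr, single, cache_name, is_descriptor = (
#         prefetcher.get_prefetch_queryset(instances, queryset)
#     )
#     # rel_obj_attr(related_obj) and instance_attr(instance) must return
#     # equal, hashable keys for matching pairs, since prefetch_one_level()
#     # builds a dict keyed on those values.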
class RelatedPopulator:
"""
RelatedPopulator is used for select_related() object instantiation.
The idea is that each select_related() model will be populated by a
different RelatedPopulator instance. The RelatedPopulator instances get
klass_info and select (computed in SQLCompiler) plus the used db as
input for initialization. That data is used to compute which columns
to use, how to instantiate the model, and how to populate the links
between the objects.
The actual creation of the objects is done in populate() method. This
method gets row and from_obj as input and populates the select_related()
model instance.
"""
def __init__(self, klass_info, select, db):
self.db = db
# Pre-compute needed attributes. The attributes are:
# - model_cls: the possibly deferred model class to instantiate
# - either:
# - cols_start, cols_end: usually the columns in the row are
# in the same order model_cls.__init__ expects them, so we
# can instantiate by model_cls(*row[cols_start:cols_end])
# - reorder_for_init: When select_related descends to a child
# class, then we want to reuse the already selected parent
# data. However, in this case the parent data isn't necessarily
# in the same order that Model.__init__ expects it to be, so
# we have to reorder the parent data. The reorder_for_init
# attribute contains a function used to reorder the field data
# in the order __init__ expects it.
# - pk_idx: the index of the primary key field in the reordered
# model data. Used to check if a related object exists at all.
# - init_list: the field attnames fetched from the database. For
# deferred models this isn't the same as all attnames of the
# model's fields.
# - related_populators: a list of RelatedPopulator instances if
# select_related() descends to related models from this model.
# - local_setter, remote_setter: Methods to set cached values on
# the object being populated and on the remote object. Usually
# these are Field.set_cached_value() methods.
select_fields = klass_info["select_fields"]
from_parent = klass_info["from_parent"]
if not from_parent:
self.cols_start = select_fields[0]
self.cols_end = select_fields[-1] + 1
self.init_list = [
f[0].target.attname for f in select[self.cols_start : self.cols_end]
]
self.reorder_for_init = None
else:
attname_indexes = {
select[idx][0].target.attname: idx for idx in select_fields
}
model_init_attnames = (
f.attname for f in klass_info["model"]._meta.concrete_fields
)
self.init_list = [
attname for attname in model_init_attnames if attname in attname_indexes
]
self.reorder_for_init = operator.itemgetter(
*[attname_indexes[attname] for attname in self.init_list]
)
self.model_cls = klass_info["model"]
self.pk_idx = self.init_list.index(self.model_cls._meta.pk.attname)
self.related_populators = get_related_populators(klass_info, select, self.db)
self.local_setter = klass_info["local_setter"]
self.remote_setter = klass_info["remote_setter"]
def populate(self, row, from_obj):
if self.reorder_for_init:
obj_data = self.reorder_for_init(row)
else:
obj_data = row[self.cols_start : self.cols_end]
if obj_data[self.pk_idx] is None:
obj = None
else:
obj = self.model_cls.from_db(self.db, self.init_list, obj_data)
for rel_iter in self.related_populators:
rel_iter.populate(row, obj)
self.local_setter(from_obj, obj)
if obj is not None:
self.remote_setter(obj, from_obj)
def get_related_populators(klass_info, select, db):
iterators = []
related_klass_infos = klass_info.get("related_klass_infos", [])
for rel_klass_info in related_klass_infos:
rel_cls = RelatedPopulator(rel_klass_info, select, db)
iterators.append(rel_cls)
return iterators
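# Sketch of how the populators above are driven (illustrative; SQLCompiler
# wires this up internally for each select_related() branch):
#
#     # For every result row, populate(row, obj) slices (or reorders) the
#     # row, instantiates the related model via from_db(), recurses into
#     # nested populators, and links both objects' relation caches.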
import bisect
import copy
import inspect
import warnings
from collections import defaultdict
from django.apps import apps
from django.conf import settings
from django.core.exceptions import FieldDoesNotExist, ImproperlyConfigured
from django.db import connections
from django.db.models import AutoField, Manager, OrderWrt, UniqueConstraint
from django.db.models.query_utils import PathInfo
from django.utils.datastructures import ImmutableList, OrderedSet
from django.utils.deprecation import RemovedInDjango51Warning
from django.utils.functional import cached_property
from django.utils.module_loading import import_string
from django.utils.text import camel_case_to_spaces, format_lazy
from django.utils.translation import override
PROXY_PARENTS = object()
EMPTY_RELATION_TREE = ()
IMMUTABLE_WARNING = (
"The return type of '%s' should never be mutated. If you want to manipulate this "
"list for your own use, make a copy first."
)
DEFAULT_NAMES = (
"verbose_name",
"verbose_name_plural",
"db_table",
"db_table_comment",
"ordering",
"unique_together",
"permissions",
"get_latest_by",
"order_with_respect_to",
"app_label",
"db_tablespace",
"abstract",
"managed",
"proxy",
"swappable",
"auto_created",
"index_together", # RemovedInDjango51Warning.
"apps",
"default_permissions",
"select_on_save",
"default_related_name",
"required_db_features",
"required_db_vendor",
"base_manager_name",
"default_manager_name",
"indexes",
"constraints",
)
def normalize_together(option_together):
"""
option_together can be either a tuple of tuples, or a single
tuple of two strings. Normalize it to a tuple of tuples, so that
calling code can uniformly expect that.
"""
try:
if not option_together:
return ()
if not isinstance(option_together, (tuple, list)):
raise TypeError
first_element = option_together[0]
if not isinstance(first_element, (tuple, list)):
option_together = (option_together,)
# Normalize everything to tuples
return tuple(tuple(ot) for ot in option_together)
except TypeError:
# If the value of option_together isn't valid, return it
# verbatim; this will be picked up by the check framework later.
return option_together
def make_immutable_fields_list(name, data):
return ImmutableList(data, warning=IMMUTABLE_WARNING % name)
class Options:
FORWARD_PROPERTIES = {
"fields",
"many_to_many",
"concrete_fields",
"local_concrete_fields",
"_non_pk_concrete_field_names",
"_forward_fields_map",
"managers",
"managers_map",
"base_manager",
"default_manager",
}
REVERSE_PROPERTIES = {"related_objects", "fields_map", "_relation_tree"}
default_apps = apps
def __init__(self, meta, app_label=None):
self._get_fields_cache = {}
self.local_fields = []
self.local_many_to_many = []
self.private_fields = []
self.local_managers = []
self.base_manager_name = None
self.default_manager_name = None
self.model_name = None
self.verbose_name = None
self.verbose_name_plural = None
self.db_table = ""
self.db_table_comment = ""
self.ordering = []
self._ordering_clash = False
self.indexes = []
self.constraints = []
self.unique_together = []
self.index_together = [] # RemovedInDjango51Warning.
self.select_on_save = False
self.default_permissions = ("add", "change", "delete", "view")
self.permissions = []
self.object_name = None
self.app_label = app_label
self.get_latest_by = None
self.order_with_respect_to = None
self.db_tablespace = settings.DEFAULT_TABLESPACE
self.required_db_features = []
self.required_db_vendor = None
self.meta = meta
self.pk = None
self.auto_field = None
self.abstract = False
self.managed = True
self.proxy = False
# For any class that is a proxy (including automatically created
# classes for deferred object loading), proxy_for_model tells us
# which class this model is proxying. Note that proxy_for_model
# can create a chain of proxy models. For non-proxy models, the
# variable is always None.
self.proxy_for_model = None
# For any non-abstract class, the concrete class is the model
# in the end of the proxy_for_model chain. In particular, for
# concrete models, the concrete_model is always the class itself.
self.concrete_model = None
self.swappable = None
self.parents = {}
self.auto_created = False
# List of all lookups defined in ForeignKey 'limit_choices_to' options
# from *other* models. Needed for some admin checks. Internal use only.
self.related_fkey_lookups = []
# A custom app registry to use, if you're making a separate model set.
self.apps = self.default_apps
self.default_related_name = None
@property
def label(self):
return "%s.%s" % (self.app_label, self.object_name)
@property
def label_lower(self):
return "%s.%s" % (self.app_label, self.model_name)
@property
def app_config(self):
# Don't go through get_app_config to avoid triggering imports.
return self.apps.app_configs.get(self.app_label)
def contribute_to_class(self, cls, name):
from django.db import connection
from django.db.backends.utils import truncate_name
cls._meta = self
self.model = cls
# First, construct the default values for these options.
self.object_name = cls.__name__
self.model_name = self.object_name.lower()
self.verbose_name = camel_case_to_spaces(self.object_name)
# Store the original user-defined values for each option,
# for use when serializing the model definition
self.original_attrs = {}
# Next, apply any overridden values from 'class Meta'.
if self.meta:
meta_attrs = self.meta.__dict__.copy()
for name in self.meta.__dict__:
# Ignore any private attributes that Django doesn't care about.
# NOTE: We can't modify a dictionary's contents while looping
# over it, so we loop over the *original* dictionary instead.
if name.startswith("_"):
del meta_attrs[name]
for attr_name in DEFAULT_NAMES:
if attr_name in meta_attrs:
setattr(self, attr_name, meta_attrs.pop(attr_name))
self.original_attrs[attr_name] = getattr(self, attr_name)
elif hasattr(self.meta, attr_name):
setattr(self, attr_name, getattr(self.meta, attr_name))
self.original_attrs[attr_name] = getattr(self, attr_name)
self.unique_together = normalize_together(self.unique_together)
self.index_together = normalize_together(self.index_together)
if self.index_together:
warnings.warn(
f"'index_together' is deprecated. Use 'Meta.indexes' in "
f"{self.label!r} instead.",
RemovedInDjango51Warning,
)
# App label/class name interpolation for names of constraints and
# indexes.
if not getattr(cls._meta, "abstract", False):
for attr_name in {"constraints", "indexes"}:
objs = getattr(self, attr_name, [])
setattr(self, attr_name, self._format_names_with_class(cls, objs))
# verbose_name_plural is a special case because it uses a 's'
# by default.
if self.verbose_name_plural is None:
self.verbose_name_plural = format_lazy("{}s", self.verbose_name)
            # order_with_respect_to and ordering are mutually exclusive.
self._ordering_clash = bool(self.ordering and self.order_with_respect_to)
# Any leftover attributes must be invalid.
if meta_attrs != {}:
raise TypeError(
"'class Meta' got invalid attribute(s): %s" % ",".join(meta_attrs)
)
else:
self.verbose_name_plural = format_lazy("{}s", self.verbose_name)
del self.meta
# If the db_table wasn't provided, use the app_label + model_name.
if not self.db_table:
self.db_table = "%s_%s" % (self.app_label, self.model_name)
self.db_table = truncate_name(
self.db_table, connection.ops.max_name_length()
)
def _format_names_with_class(self, cls, objs):
"""App label/class name interpolation for object names."""
new_objs = []
for obj in objs:
obj = obj.clone()
obj.name = obj.name % {
"app_label": cls._meta.app_label.lower(),
"class": cls.__name__.lower(),
}
new_objs.append(obj)
return new_objs
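    # Sketch of the interpolation performed above (illustrative; the
    # placeholders are the documented ones for constraint/index names):
    #
    #     UniqueConstraint(fields=["slug"], name="%(app_label)s_%(class)s_slug")
    #     # on app "shop", model "Item" -> name "shop_item_slug"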
def _get_default_pk_class(self):
pk_class_path = getattr(
self.app_config,
"default_auto_field",
settings.DEFAULT_AUTO_FIELD,
)
if self.app_config and self.app_config._is_default_auto_field_overridden:
app_config_class = type(self.app_config)
source = (
f"{app_config_class.__module__}."
f"{app_config_class.__qualname__}.default_auto_field"
)
else:
source = "DEFAULT_AUTO_FIELD"
if not pk_class_path:
raise ImproperlyConfigured(f"{source} must not be empty.")
try:
pk_class = import_string(pk_class_path)
except ImportError as e:
msg = (
f"{source} refers to the module '{pk_class_path}' that could "
f"not be imported."
)
raise ImproperlyConfigured(msg) from e
if not issubclass(pk_class, AutoField):
raise ValueError(
f"Primary key '{pk_class_path}' referred by {source} must "
f"subclass AutoField."
)
return pk_class
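    # Sketch of where the resolved path can come from (illustrative only):
    #
    #     # settings.py
    #     DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
    #
    #     # or per app, on the AppConfig subclass:
    #     class ShopConfig(AppConfig):
    #         default_auto_field = "django.db.models.BigAutoField"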
def _prepare(self, model):
if self.order_with_respect_to:
# The app registry will not be ready at this point, so we cannot
# use get_field().
query = self.order_with_respect_to
try:
self.order_with_respect_to = next(
f
for f in self._get_fields(reverse=False)
if f.name == query or f.attname == query
)
except StopIteration:
raise FieldDoesNotExist(
"%s has no field named '%s'" % (self.object_name, query)
)
self.ordering = ("_order",)
if not any(
isinstance(field, OrderWrt) for field in model._meta.local_fields
):
model.add_to_class("_order", OrderWrt())
else:
self.order_with_respect_to = None
if self.pk is None:
if self.parents:
# Promote the first parent link in lieu of adding yet another
# field.
field = next(iter(self.parents.values()))
# Look for a local field with the same name as the
# first parent link. If a local field has already been
# created, use it instead of promoting the parent
already_created = [
fld for fld in self.local_fields if fld.name == field.name
]
if already_created:
field = already_created[0]
field.primary_key = True
self.setup_pk(field)
else:
pk_class = self._get_default_pk_class()
auto = pk_class(verbose_name="ID", primary_key=True, auto_created=True)
model.add_to_class("id", auto)
def add_manager(self, manager):
self.local_managers.append(manager)
self._expire_cache()
def add_field(self, field, private=False):
# Insert the given field in the order in which it was created, using
# the "creation_counter" attribute of the field.
# Move many-to-many related fields from self.fields into
# self.many_to_many.
if private:
self.private_fields.append(field)
elif field.is_relation and field.many_to_many:
bisect.insort(self.local_many_to_many, field)
else:
bisect.insort(self.local_fields, field)
self.setup_pk(field)
# If the field being added is a relation to another known field,
# expire the cache on this field and the forward cache on the field
# being referenced, because there will be new relationships in the
# cache. Otherwise, expire the cache of references *to* this field.
# The mechanism for getting at the related model is slightly odd -
# ideally, we'd just ask for field.related_model. However, related_model
# is a cached property, and all the models haven't been loaded yet, so
# we need to make sure we don't cache a string reference.
if (
field.is_relation
and hasattr(field.remote_field, "model")
and field.remote_field.model
):
try:
field.remote_field.model._meta._expire_cache(forward=False)
except AttributeError:
pass
self._expire_cache()
else:
self._expire_cache(reverse=False)
def setup_pk(self, field):
if not self.pk and field.primary_key:
self.pk = field
field.serialize = False
def setup_proxy(self, target):
"""
Do the internal setup so that the current model is a proxy for
"target".
"""
self.pk = target._meta.pk
self.proxy_for_model = target
self.db_table = target._meta.db_table
def __repr__(self):
return "<Options for %s>" % self.object_name
def __str__(self):
return self.label_lower
def can_migrate(self, connection):
"""
Return True if the model can/should be migrated on the `connection`.
`connection` can be either a real connection or a connection alias.
"""
if self.proxy or self.swapped or not self.managed:
return False
if isinstance(connection, str):
connection = connections[connection]
if self.required_db_vendor:
return self.required_db_vendor == connection.vendor
if self.required_db_features:
return all(
getattr(connection.features, feat, False)
for feat in self.required_db_features
)
return True
@property
def verbose_name_raw(self):
"""Return the untranslated verbose name."""
with override(None):
return str(self.verbose_name)
@property
def swapped(self):
"""
Has this model been swapped out for another? If so, return the model
name of the replacement; otherwise, return None.
For historical reasons, model name lookups using get_model() are
case insensitive, so we make sure we are case insensitive here.
"""
if self.swappable:
swapped_for = getattr(settings, self.swappable, None)
if swapped_for:
try:
swapped_label, swapped_object = swapped_for.split(".")
except ValueError:
# setting not in the format app_label.model_name
# raising ImproperlyConfigured here causes problems with
# test cleanup code - instead it is raised in get_user_model
# or as part of validation.
return swapped_for
if (
"%s.%s" % (swapped_label, swapped_object.lower())
!= self.label_lower
):
return swapped_for
return None
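    # Sketch of the swappable mechanism (illustrative; AUTH_USER_MODEL is the
    # canonical swappable setting):
    #
    #     # With AUTH_USER_MODEL = "accounts.User" in settings, evaluating
    #     # auth.User._meta.swapped returns "accounts.User", while on the
    #     # replacement model itself it returns None.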
@cached_property
def managers(self):
managers = []
seen_managers = set()
bases = (b for b in self.model.mro() if hasattr(b, "_meta"))
for depth, base in enumerate(bases):
for manager in base._meta.local_managers:
if manager.name in seen_managers:
continue
manager = copy.copy(manager)
manager.model = self.model
seen_managers.add(manager.name)
managers.append((depth, manager.creation_counter, manager))
return make_immutable_fields_list(
"managers",
(m[2] for m in sorted(managers)),
)
@cached_property
def managers_map(self):
return {manager.name: manager for manager in self.managers}
@cached_property
def base_manager(self):
base_manager_name = self.base_manager_name
if not base_manager_name:
# Get the first parent's base_manager_name if there's one.
for parent in self.model.mro()[1:]:
if hasattr(parent, "_meta"):
if parent._base_manager.name != "_base_manager":
base_manager_name = parent._base_manager.name
break
if base_manager_name:
try:
return self.managers_map[base_manager_name]
except KeyError:
raise ValueError(
"%s has no manager named %r"
% (
self.object_name,
base_manager_name,
)
)
manager = Manager()
manager.name = "_base_manager"
manager.model = self.model
manager.auto_created = True
return manager
@cached_property
def default_manager(self):
default_manager_name = self.default_manager_name
if not default_manager_name and not self.local_managers:
# Get the first parent's default_manager_name if there's one.
for parent in self.model.mro()[1:]:
if hasattr(parent, "_meta"):
default_manager_name = parent._meta.default_manager_name
break
if default_manager_name:
try:
return self.managers_map[default_manager_name]
except KeyError:
raise ValueError(
"%s has no manager named %r"
% (
self.object_name,
default_manager_name,
)
)
if self.managers:
return self.managers[0]
@cached_property
def fields(self):
"""
Return a list of all forward fields on the model and its parents,
excluding ManyToManyFields.
Private API intended only to be used by Django itself; get_fields()
combined with filtering of field properties is the public API for
obtaining this field list.
"""
# For legacy reasons, the fields property should only contain forward
# fields that are not private or with a m2m cardinality. Therefore we
# pass these three filters as filters to the generator.
        # The third filter is a long-winded way of checking f.related_model - we don't
# use that property directly because related_model is a cached property,
# and all the models may not have been loaded yet; we don't want to cache
# the string reference to the related_model.
def is_not_an_m2m_field(f):
return not (f.is_relation and f.many_to_many)
def is_not_a_generic_relation(f):
return not (f.is_relation and f.one_to_many)
def is_not_a_generic_foreign_key(f):
return not (
f.is_relation
and f.many_to_one
and not (hasattr(f.remote_field, "model") and f.remote_field.model)
)
return make_immutable_fields_list(
"fields",
(
f
for f in self._get_fields(reverse=False)
if is_not_an_m2m_field(f)
and is_not_a_generic_relation(f)
and is_not_a_generic_foreign_key(f)
),
)
@cached_property
def concrete_fields(self):
"""
Return a list of all concrete fields on the model and its parents.
Private API intended only to be used by Django itself; get_fields()
combined with filtering of field properties is the public API for
obtaining this field list.
"""
return make_immutable_fields_list(
"concrete_fields", (f for f in self.fields if f.concrete)
)
@cached_property
def local_concrete_fields(self):
"""
Return a list of all concrete fields on the model.
Private API intended only to be used by Django itself; get_fields()
combined with filtering of field properties is the public API for
obtaining this field list.
"""
return make_immutable_fields_list(
"local_concrete_fields", (f for f in self.local_fields if f.concrete)
)
@cached_property
def many_to_many(self):
"""
Return a list of all many to many fields on the model and its parents.
Private API intended only to be used by Django itself; get_fields()
combined with filtering of field properties is the public API for
obtaining this list.
"""
return make_immutable_fields_list(
"many_to_many",
(
f
for f in self._get_fields(reverse=False)
if f.is_relation and f.many_to_many
),
)
@cached_property
def related_objects(self):
"""
Return all related objects pointing to the current model. The related
objects can come from a one-to-one, one-to-many, or many-to-many field
relation type.
Private API intended only to be used by Django itself; get_fields()
combined with filtering of field properties is the public API for
obtaining this field list.
"""
all_related_fields = self._get_fields(
forward=False, reverse=True, include_hidden=True
)
return make_immutable_fields_list(
"related_objects",
(
obj
for obj in all_related_fields
if not obj.hidden or obj.field.many_to_many
),
)
@cached_property
def _forward_fields_map(self):
res = {}
fields = self._get_fields(reverse=False)
for field in fields:
res[field.name] = field
            # Due to the way Django's internals work, get_field() should also
            # be able to fetch a field by attname. In the case of a concrete
            # relational field, this includes the *_id name too.
try:
res[field.attname] = field
except AttributeError:
pass
return res
@cached_property
def fields_map(self):
res = {}
fields = self._get_fields(forward=False, include_hidden=True)
for field in fields:
res[field.name] = field
            # Due to the way Django's internals work, get_field() should also
            # be able to fetch a field by attname. In the case of a concrete
            # relational field, this includes the *_id name too.
try:
res[field.attname] = field
except AttributeError:
pass
return res
def get_field(self, field_name):
"""
Return a field instance given the name of a forward or reverse field.
"""
try:
# In order to avoid premature loading of the relation tree
# (expensive) we prefer checking if the field is a forward field.
return self._forward_fields_map[field_name]
except KeyError:
# If the app registry is not ready, reverse fields are
# unavailable, therefore we throw a FieldDoesNotExist exception.
if not self.apps.models_ready:
raise FieldDoesNotExist(
"%s has no field named '%s'. The app cache isn't ready yet, "
"so if this is an auto-created related field, it won't "
"be available yet." % (self.object_name, field_name)
)
try:
# Retrieve field instance by name from cached or just-computed
# field map.
return self.fields_map[field_name]
except KeyError:
raise FieldDoesNotExist(
"%s has no field named '%s'" % (self.object_name, field_name)
)
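    # Usage sketch (illustrative; "Book" is a hypothetical model with an
    # "author" ForeignKey):
    #
    #     Book._meta.get_field("author")     # forward field, by name
    #     Book._meta.get_field("author_id")  # same field, by attname
    #     Author._meta.get_field("book")     # reverse relation, via fields_map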
def get_base_chain(self, model):
"""
Return a list of parent classes leading to `model` (ordered from
closest to most distant ancestor). This has to handle the case where
`model` is a grandparent or even more distant relation.
"""
if not self.parents:
return []
if model in self.parents:
return [model]
for parent in self.parents:
res = parent._meta.get_base_chain(model)
if res:
res.insert(0, parent)
return res
return []
def get_parent_list(self):
"""
Return all the ancestors of this model as a list ordered by MRO.
Useful for determining if something is an ancestor, regardless of lineage.
"""
result = OrderedSet(self.parents)
for parent in self.parents:
for ancestor in parent._meta.get_parent_list():
result.add(ancestor)
return list(result)
def get_ancestor_link(self, ancestor):
"""
Return the field on the current model which points to the given
"ancestor". This is possible an indirect link (a pointer to a parent
model, which points, eventually, to the ancestor). Used when
constructing table joins for model inheritance.
Return None if the model isn't an ancestor of this one.
"""
if ancestor in self.parents:
return self.parents[ancestor]
for parent in self.parents:
# Tries to get a link field from the immediate parent
parent_link = parent._meta.get_ancestor_link(ancestor)
if parent_link:
                # In the case of a proxied model, the first link of the chain
                # to the ancestor is the link to that parent, when one exists.
return self.parents[parent] or parent_link
def get_path_to_parent(self, parent):
"""
Return a list of PathInfos containing the path from the current
model to the parent model, or an empty list if parent is not a
parent of the current model.
"""
if self.model is parent:
return []
# Skip the chain of proxy to the concrete proxied model.
proxied_model = self.concrete_model
path = []
opts = self
for int_model in self.get_base_chain(parent):
if int_model is proxied_model:
opts = int_model._meta
else:
final_field = opts.parents[int_model]
targets = (final_field.remote_field.get_related_field(),)
opts = int_model._meta
path.append(
PathInfo(
from_opts=final_field.model._meta,
to_opts=opts,
target_fields=targets,
join_field=final_field,
m2m=False,
direct=True,
filtered_relation=None,
)
)
return path
def get_path_from_parent(self, parent):
"""
Return a list of PathInfos containing the path from the parent
model to the current model, or an empty list if parent is not a
parent of the current model.
"""
if self.model is parent:
return []
model = self.concrete_model
# Get a reversed base chain including both the current and parent
# models.
chain = model._meta.get_base_chain(parent)
chain.reverse()
chain.append(model)
# Construct a list of the PathInfos between models in chain.
path = []
for i, ancestor in enumerate(chain[:-1]):
child = chain[i + 1]
link = child._meta.get_ancestor_link(ancestor)
path.extend(link.reverse_path_infos)
return path
def _populate_directed_relation_graph(self):
"""
This method is used by each model to find its reverse objects. As this
method is very expensive and is accessed frequently (it looks up every
field in a model, in every app), it is computed on first access and then
is set as a property on every model.
"""
related_objects_graph = defaultdict(list)
all_models = self.apps.get_models(include_auto_created=True)
for model in all_models:
opts = model._meta
# Abstract model's fields are copied to child models, hence we will
# see the fields from the child models.
if opts.abstract:
continue
fields_with_relations = (
f
for f in opts._get_fields(reverse=False, include_parents=False)
if f.is_relation and f.related_model is not None
)
for f in fields_with_relations:
if not isinstance(f.remote_field.model, str):
remote_label = f.remote_field.model._meta.concrete_model._meta.label
related_objects_graph[remote_label].append(f)
for model in all_models:
# Set the relation_tree using the internal __dict__. In this way
# we avoid calling the cached property. In attribute lookup,
# __dict__ takes precedence over a data descriptor (such as
# @cached_property). This means that the _meta._relation_tree is
# only called if related_objects is not in __dict__.
related_objects = related_objects_graph[
model._meta.concrete_model._meta.label
]
model._meta.__dict__["_relation_tree"] = related_objects
        # It is possible that self is not in all_models, so guard against
        # that with a default for get().
return self.__dict__.get("_relation_tree", EMPTY_RELATION_TREE)
@cached_property
def _relation_tree(self):
return self._populate_directed_relation_graph()
def _expire_cache(self, forward=True, reverse=True):
# This method is usually called by apps.cache_clear(), when the
# registry is finalized, or when a new field is added.
if forward:
for cache_key in self.FORWARD_PROPERTIES:
if cache_key in self.__dict__:
delattr(self, cache_key)
if reverse and not self.abstract:
for cache_key in self.REVERSE_PROPERTIES:
if cache_key in self.__dict__:
delattr(self, cache_key)
self._get_fields_cache = {}
def get_fields(self, include_parents=True, include_hidden=False):
"""
Return a list of fields associated to the model. By default, include
forward and reverse fields, fields derived from inheritance, but not
hidden fields. The returned fields can be changed using the parameters:
- include_parents: include fields derived from inheritance
- include_hidden: include fields that have a related_name that
starts with a "+"
"""
if include_parents is False:
include_parents = PROXY_PARENTS
return self._get_fields(
include_parents=include_parents, include_hidden=include_hidden
)
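    # Usage sketch (illustrative):
    #
    #     Model._meta.get_fields()                       # forward + reverse
    #     Model._meta.get_fields(include_hidden=True)    # also "+"-named rels
    #     Model._meta.get_fields(include_parents=False)  # local + proxy chain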
def _get_fields(
self,
forward=True,
reverse=True,
include_parents=True,
include_hidden=False,
topmost_call=True,
):
"""
Internal helper function to return fields of the model.
* If forward=True, then fields defined on this model are returned.
* If reverse=True, then relations pointing to this model are returned.
* If include_hidden=True, then fields with is_hidden=True are returned.
* The include_parents argument toggles if fields from parent models
should be included. It has three values: True, False, and
PROXY_PARENTS. When set to PROXY_PARENTS, the call will return all
fields defined for the current model or any of its parents in the
parent chain to the model's concrete model.
"""
if include_parents not in (True, False, PROXY_PARENTS):
raise TypeError(
"Invalid argument for include_parents: %s" % (include_parents,)
)
# This helper function is used to allow recursion in ``get_fields()``
# implementation and to provide a fast way for Django's internals to
# access specific subsets of fields.
# Creates a cache key composed of all arguments
cache_key = (forward, reverse, include_parents, include_hidden, topmost_call)
try:
            # In order to avoid list manipulation, always return a shallow
            # copy of the results.
return self._get_fields_cache[cache_key]
except KeyError:
pass
fields = []
# Recursively call _get_fields() on each parent, with the same
# options provided in this call.
if include_parents is not False:
# In diamond inheritance it is possible that we see the same model
# from two different routes. In that case, avoid adding fields from
# the same parent again.
parent_fields = set()
for parent in self.parents:
if (
parent._meta.concrete_model != self.concrete_model
and include_parents == PROXY_PARENTS
):
continue
for obj in parent._meta._get_fields(
forward=forward,
reverse=reverse,
include_parents=include_parents,
include_hidden=include_hidden,
topmost_call=False,
):
if (
not getattr(obj, "parent_link", False)
or obj.model == self.concrete_model
) and obj not in parent_fields:
fields.append(obj)
parent_fields.add(obj)
if reverse and not self.proxy:
# Tree is computed once and cached until the app cache is expired.
# It is composed of a list of fields pointing to the current model
# from other models.
all_fields = self._relation_tree
for field in all_fields:
# If hidden fields should be included or the relation is not
# intentionally hidden, add to the fields dict.
if include_hidden or not field.remote_field.hidden:
fields.append(field.remote_field)
if forward:
fields += self.local_fields
fields += self.local_many_to_many
# Private fields are recopied to each child model, and they get a
# different model as field.model in each child. Hence we have to
# add the private fields separately from the topmost call. If we
# did this recursively similar to local_fields, we would get field
# instances with field.model != self.model.
if topmost_call:
fields += self.private_fields
        # In order to avoid list manipulation, always return a shallow copy
        # of the results.
fields = make_immutable_fields_list("get_fields()", fields)
# Store result into cache for later access
self._get_fields_cache[cache_key] = fields
return fields
@cached_property
def total_unique_constraints(self):
"""
Return a list of total unique constraints. Useful for determining set
of fields guaranteed to be unique for all rows.
"""
return [
constraint
for constraint in self.constraints
if (
isinstance(constraint, UniqueConstraint)
and constraint.condition is None
and not constraint.contains_expressions
)
]
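    # Sketch of what qualifies as "total" (illustrative; conditional or
    # expression-based constraints are excluded):
    #
    #     UniqueConstraint(fields=["email"], name="uniq_email")  # included
    #     UniqueConstraint(
    #         fields=["email"], condition=Q(active=True), name="uniq_active"
    #     )                                                      # excluded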
@cached_property
def _property_names(self):
"""Return a set of the names of the properties defined on the model."""
names = []
for name in dir(self.model):
attr = inspect.getattr_static(self.model, name)
if isinstance(attr, property):
names.append(name)
return frozenset(names)
@cached_property
def _non_pk_concrete_field_names(self):
"""
Return a set of the non-pk concrete field names defined on the model.
"""
names = []
for field in self.concrete_fields:
if not field.primary_key:
names.append(field.name)
if field.name != field.attname:
names.append(field.attname)
return frozenset(names)
@cached_property
def db_returning_fields(self):
"""
Private API intended only to be used by Django itself.
Fields to be returned after a database insert.
"""
return [
field
for field in self._get_fields(
forward=True, reverse=False, include_parents=PROXY_PARENTS
)
if getattr(field, "db_returning", False)
]
import copy
import inspect
import warnings
from functools import partialmethod
from itertools import chain
from asgiref.sync import sync_to_async
import django
from django.apps import apps
from django.conf import settings
from django.core import checks
from django.core.exceptions import (
NON_FIELD_ERRORS,
FieldDoesNotExist,
FieldError,
MultipleObjectsReturned,
ObjectDoesNotExist,
ValidationError,
)
from django.db import (
DJANGO_VERSION_PICKLE_KEY,
DatabaseError,
connection,
connections,
router,
transaction,
)
from django.db.models import NOT_PROVIDED, ExpressionWrapper, IntegerField, Max, Value
from django.db.models.constants import LOOKUP_SEP
from django.db.models.constraints import CheckConstraint, UniqueConstraint
from django.db.models.deletion import CASCADE, Collector
from django.db.models.expressions import RawSQL
from django.db.models.fields.related import (
ForeignObjectRel,
OneToOneField,
lazy_related_operation,
resolve_relation,
)
from django.db.models.functions import Coalesce
from django.db.models.manager import Manager
from django.db.models.options import Options
from django.db.models.query import F, Q
from django.db.models.signals import (
class_prepared,
post_init,
post_save,
pre_init,
pre_save,
)
from django.db.models.utils import AltersData, make_model_tuple
from django.utils.encoding import force_str
from django.utils.hashable import make_hashable
from django.utils.text import capfirst, get_text_list
from django.utils.translation import gettext_lazy as _
class Deferred:
def __repr__(self):
return "<Deferred field>"
def __str__(self):
return "<Deferred field>"
DEFERRED = Deferred()
def subclass_exception(name, bases, module, attached_to):
"""
Create exception subclass. Used by ModelBase below.
The exception is created in a way that allows it to be pickled, assuming
that the returned exception class will be added as an attribute to the
'attached_to' class.
"""
return type(
name,
bases,
{
"__module__": module,
"__qualname__": "%s.%s" % (attached_to.__qualname__, name),
},
)
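# Sketch of the resulting exception classes (illustrative; "Book" is a
# hypothetical model):
#
#     Book.DoesNotExist.__qualname__                     # "Book.DoesNotExist"
#     issubclass(Book.DoesNotExist, ObjectDoesNotExist)  # True
#     # Anchoring __qualname__ to the model is what keeps it picklable.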
def _has_contribute_to_class(value):
# Only call contribute_to_class() if it's bound.
return not inspect.isclass(value) and hasattr(value, "contribute_to_class")
class ModelBase(type):
"""Metaclass for all models."""
def __new__(cls, name, bases, attrs, **kwargs):
super_new = super().__new__
# Also ensure initialization is only performed for subclasses of Model
# (excluding Model class itself).
parents = [b for b in bases if isinstance(b, ModelBase)]
if not parents:
return super_new(cls, name, bases, attrs)
# Create the class.
module = attrs.pop("__module__")
new_attrs = {"__module__": module}
classcell = attrs.pop("__classcell__", None)
if classcell is not None:
new_attrs["__classcell__"] = classcell
attr_meta = attrs.pop("Meta", None)
# Pass all attrs without a (Django-specific) contribute_to_class()
# method to type.__new__() so that they're properly initialized
# (i.e. __set_name__()).
contributable_attrs = {}
for obj_name, obj in attrs.items():
if _has_contribute_to_class(obj):
contributable_attrs[obj_name] = obj
else:
new_attrs[obj_name] = obj
new_class = super_new(cls, name, bases, new_attrs, **kwargs)
abstract = getattr(attr_meta, "abstract", False)
meta = attr_meta or getattr(new_class, "Meta", None)
base_meta = getattr(new_class, "_meta", None)
app_label = None
# Look for an application configuration to attach the model to.
app_config = apps.get_containing_app_config(module)
if getattr(meta, "app_label", None) is None:
if app_config is None:
if not abstract:
raise RuntimeError(
"Model class %s.%s doesn't declare an explicit "
"app_label and isn't in an application in "
"INSTALLED_APPS." % (module, name)
)
else:
app_label = app_config.label
new_class.add_to_class("_meta", Options(meta, app_label))
if not abstract:
new_class.add_to_class(
"DoesNotExist",
subclass_exception(
"DoesNotExist",
tuple(
x.DoesNotExist
for x in parents
if hasattr(x, "_meta") and not x._meta.abstract
)
or (ObjectDoesNotExist,),
module,
attached_to=new_class,
),
)
new_class.add_to_class(
"MultipleObjectsReturned",
subclass_exception(
"MultipleObjectsReturned",
tuple(
x.MultipleObjectsReturned
for x in parents
if hasattr(x, "_meta") and not x._meta.abstract
)
or (MultipleObjectsReturned,),
module,
attached_to=new_class,
),
)
if base_meta and not base_meta.abstract:
# Non-abstract child classes inherit some attributes from their
# non-abstract parent (unless an ABC comes before it in the
# method resolution order).
if not hasattr(meta, "ordering"):
new_class._meta.ordering = base_meta.ordering
if not hasattr(meta, "get_latest_by"):
new_class._meta.get_latest_by = base_meta.get_latest_by
is_proxy = new_class._meta.proxy
# If the model is a proxy, ensure that the base class
# hasn't been swapped out.
if is_proxy and base_meta and base_meta.swapped:
raise TypeError(
"%s cannot proxy the swapped model '%s'." % (name, base_meta.swapped)
)
# Add remaining attributes (those with a contribute_to_class() method)
# to the class.
for obj_name, obj in contributable_attrs.items():
new_class.add_to_class(obj_name, obj)
# All the fields of any type declared on this model
new_fields = chain(
new_class._meta.local_fields,
new_class._meta.local_many_to_many,
new_class._meta.private_fields,
)
field_names = {f.name for f in new_fields}
# Basic setup for proxy models.
if is_proxy:
base = None
for parent in [kls for kls in parents if hasattr(kls, "_meta")]:
if parent._meta.abstract:
if parent._meta.fields:
raise TypeError(
"Abstract base class containing model fields not "
"permitted for proxy model '%s'." % name
)
else:
continue
if base is None:
base = parent
elif parent._meta.concrete_model is not base._meta.concrete_model:
raise TypeError(
"Proxy model '%s' has more than one non-abstract model base "
"class." % name
)
if base is None:
raise TypeError(
"Proxy model '%s' has no non-abstract model base class." % name
)
new_class._meta.setup_proxy(base)
new_class._meta.concrete_model = base._meta.concrete_model
else:
new_class._meta.concrete_model = new_class
# Collect the parent links for multi-table inheritance.
parent_links = {}
for base in reversed([new_class] + parents):
# Conceptually equivalent to `if base is Model`.
if not hasattr(base, "_meta"):
continue
# Skip concrete parent classes.
if base != new_class and not base._meta.abstract:
continue
# Locate OneToOneField instances.
for field in base._meta.local_fields:
if isinstance(field, OneToOneField) and field.remote_field.parent_link:
related = resolve_relation(new_class, field.remote_field.model)
parent_links[make_model_tuple(related)] = field
# Track fields inherited from base models.
inherited_attributes = set()
# Do the appropriate setup for any model parents.
for base in new_class.mro():
if base not in parents or not hasattr(base, "_meta"):
# Things without _meta aren't functional models, so they're
# uninteresting parents.
inherited_attributes.update(base.__dict__)
continue
parent_fields = base._meta.local_fields + base._meta.local_many_to_many
if not base._meta.abstract:
# Check for clashes between locally declared fields and those
# on the base classes.
for field in parent_fields:
if field.name in field_names:
raise FieldError(
"Local field %r in class %r clashes with field of "
"the same name from base class %r."
% (
field.name,
name,
base.__name__,
)
)
else:
inherited_attributes.add(field.name)
# Concrete classes...
base = base._meta.concrete_model
base_key = make_model_tuple(base)
if base_key in parent_links:
field = parent_links[base_key]
elif not is_proxy:
attr_name = "%s_ptr" % base._meta.model_name
field = OneToOneField(
base,
on_delete=CASCADE,
name=attr_name,
auto_created=True,
parent_link=True,
)
if attr_name in field_names:
raise FieldError(
"Auto-generated field '%s' in class %r for "
"parent_link to base class %r clashes with "
"declared field of the same name."
% (
attr_name,
name,
base.__name__,
)
)
# Only add the ptr field if it's not already present;
# e.g. migrations will already have it specified
if not hasattr(new_class, attr_name):
new_class.add_to_class(attr_name, field)
else:
field = None
new_class._meta.parents[base] = field
else:
base_parents = base._meta.parents.copy()
# Add fields from abstract base class if it wasn't overridden.
for field in parent_fields:
if (
field.name not in field_names
and field.name not in new_class.__dict__
and field.name not in inherited_attributes
):
new_field = copy.deepcopy(field)
new_class.add_to_class(field.name, new_field)
# Replace parent links defined on this base by the new
# field. It will be appropriately resolved if required.
if field.one_to_one:
for parent, parent_link in base_parents.items():
if field == parent_link:
base_parents[parent] = new_field
# Pass any non-abstract parent classes onto child.
new_class._meta.parents.update(base_parents)
# Inherit private fields (like GenericForeignKey) from the parent
# class
for field in base._meta.private_fields:
if field.name in field_names:
if not base._meta.abstract:
raise FieldError(
"Local field %r in class %r clashes with field of "
"the same name from base class %r."
% (
field.name,
name,
base.__name__,
)
)
else:
field = copy.deepcopy(field)
if not base._meta.abstract:
field.mti_inherited = True
new_class.add_to_class(field.name, field)
# Copy indexes so that index names are unique when models extend an
# abstract model.
new_class._meta.indexes = [
copy.deepcopy(idx) for idx in new_class._meta.indexes
]
if abstract:
# Abstract base models can't be instantiated and don't appear in
# the list of models for an app. We do the final setup for them a
# little differently from normal models.
attr_meta.abstract = False
new_class.Meta = attr_meta
return new_class
new_class._prepare()
new_class._meta.apps.register_model(new_class._meta.app_label, new_class)
return new_class
def add_to_class(cls, name, value):
if _has_contribute_to_class(value):
value.contribute_to_class(cls, name)
else:
setattr(cls, name, value)
def _prepare(cls):
"""Create some methods once self._meta has been populated."""
opts = cls._meta
opts._prepare(cls)
if opts.order_with_respect_to:
cls.get_next_in_order = partialmethod(
cls._get_next_or_previous_in_order, is_next=True
)
cls.get_previous_in_order = partialmethod(
cls._get_next_or_previous_in_order, is_next=False
)
# Defer creating accessors on the foreign class until it has been
# created and registered. If remote_field is None, we're ordering
# with respect to a GenericForeignKey and don't know what the
# foreign class is - we'll add those accessors later in
# contribute_to_class().
if opts.order_with_respect_to.remote_field:
wrt = opts.order_with_respect_to
remote = wrt.remote_field.model
lazy_related_operation(make_foreign_order_accessors, cls, remote)
# Give the class a docstring -- its definition.
if cls.__doc__ is None:
cls.__doc__ = "%s(%s)" % (
cls.__name__,
", ".join(f.name for f in opts.fields),
)
get_absolute_url_override = settings.ABSOLUTE_URL_OVERRIDES.get(
opts.label_lower
)
if get_absolute_url_override:
setattr(cls, "get_absolute_url", get_absolute_url_override)
if not opts.managers:
if any(f.name == "objects" for f in opts.fields):
raise ValueError(
"Model %s must specify a custom Manager, because it has a "
"field named 'objects'." % cls.__name__
)
manager = Manager()
manager.auto_created = True
cls.add_to_class("objects", manager)
# Set the name of _meta.indexes. This can't be done in
# Options.contribute_to_class() because fields haven't been added to
# the model at that point.
for index in cls._meta.indexes:
if not index.name:
index.set_name_with_model(cls)
class_prepared.send(sender=cls)
@property
def _base_manager(cls):
return cls._meta.base_manager
@property
def _default_manager(cls):
return cls._meta.default_manager
class ModelStateFieldsCacheDescriptor:
def __get__(self, instance, cls=None):
if instance is None:
return self
res = instance.fields_cache = {}
return res
class ModelState:
"""Store model instance state."""
db = None
# If true, uniqueness validation checks will consider this a new, unsaved
# object. Necessary for correct validation of new instances of objects with
# explicit (non-auto) PKs. This impacts validation only; it has no effect
# on the actual save.
adding = True
fields_cache = ModelStateFieldsCacheDescriptor()
class Model(AltersData, metaclass=ModelBase):
def __init__(self, *args, **kwargs):
# Alias some things as locals to avoid repeat global lookups
cls = self.__class__
opts = self._meta
_setattr = setattr
_DEFERRED = DEFERRED
if opts.abstract:
raise TypeError("Abstract models cannot be instantiated.")
pre_init.send(sender=cls, args=args, kwargs=kwargs)
# Set up the storage for instance state
self._state = ModelState()
        # There is a rather weird disparity here; if kwargs, it's set, then
        # args overrides it. It should be one or the other; don't duplicate
        # the work. The reason for the kwargs check is that the standard
        # iterator passes in by args, and instantiation for iteration is 33%
        # faster.
if len(args) > len(opts.concrete_fields):
# Daft, but matches old exception sans the err msg.
raise IndexError("Number of args exceeds number of fields")
if not kwargs:
fields_iter = iter(opts.concrete_fields)
            # The ordering of the zip calls matters - zip throws StopIteration
# when an iter throws it. So if the first iter throws it, the second
# is *not* consumed. We rely on this, so don't change the order
# without changing the logic.
for val, field in zip(args, fields_iter):
if val is _DEFERRED:
continue
_setattr(self, field.attname, val)
else:
# Slower, kwargs-ready version.
fields_iter = iter(opts.fields)
for val, field in zip(args, fields_iter):
if val is _DEFERRED:
continue
_setattr(self, field.attname, val)
if kwargs.pop(field.name, NOT_PROVIDED) is not NOT_PROVIDED:
raise TypeError(
f"{cls.__qualname__}() got both positional and "
f"keyword arguments for field '{field.name}'."
)
# Now we're left with the unprocessed fields that *must* come from
# keywords, or default.
for field in fields_iter:
is_related_object = False
# Virtual field
if field.attname not in kwargs and field.column is None:
continue
if kwargs:
if isinstance(field.remote_field, ForeignObjectRel):
try:
# Assume object instance was passed in.
rel_obj = kwargs.pop(field.name)
is_related_object = True
except KeyError:
try:
# Object instance wasn't passed in -- must be an ID.
val = kwargs.pop(field.attname)
except KeyError:
val = field.get_default()
else:
try:
val = kwargs.pop(field.attname)
except KeyError:
# This is done with an exception rather than the
# default argument on pop because we don't want
# get_default() to be evaluated, and then not used.
# Refs #12057.
val = field.get_default()
else:
val = field.get_default()
if is_related_object:
# If we are passed a related instance, set it using the
# field.name instead of field.attname (e.g. "user" instead of
# "user_id") so that the object gets properly cached (and type
# checked) by the RelatedObjectDescriptor.
if rel_obj is not _DEFERRED:
_setattr(self, field.name, rel_obj)
else:
if val is not _DEFERRED:
_setattr(self, field.attname, val)
if kwargs:
property_names = opts._property_names
unexpected = ()
for prop, value in kwargs.items():
# Any remaining kwargs must correspond to properties or virtual
# fields.
if prop in property_names:
if value is not _DEFERRED:
_setattr(self, prop, value)
else:
try:
opts.get_field(prop)
except FieldDoesNotExist:
unexpected += (prop,)
else:
if value is not _DEFERRED:
_setattr(self, prop, value)
if unexpected:
unexpected_names = ", ".join(repr(n) for n in unexpected)
raise TypeError(
f"{cls.__name__}() got unexpected keyword arguments: "
f"{unexpected_names}"
)
super().__init__()
post_init.send(sender=cls, instance=self)
@classmethod
def from_db(cls, db, field_names, values):
if len(values) != len(cls._meta.concrete_fields):
values_iter = iter(values)
values = [
next(values_iter) if f.attname in field_names else DEFERRED
for f in cls._meta.concrete_fields
]
new = cls(*values)
new._state.adding = False
new._state.db = db
return new
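    # Sketch of the deferred-loading path above (illustrative; "Book" is a
    # hypothetical model):
    #
    #     # Book.objects.only("title") selects a subset of columns; from_db()
    #     # then pads the missing attnames with DEFERRED, so later attribute
    #     # access triggers refresh_from_db() for just those fields.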
def __repr__(self):
return "<%s: %s>" % (self.__class__.__name__, self)
def __str__(self):
return "%s object (%s)" % (self.__class__.__name__, self.pk)
def __eq__(self, other):
if not isinstance(other, Model):
return NotImplemented
if self._meta.concrete_model != other._meta.concrete_model:
return False
my_pk = self.pk
if my_pk is None:
return self is other
return my_pk == other.pk
def __hash__(self):
if self.pk is None:
raise TypeError("Model instances without primary key value are unhashable")
return hash(self.pk)
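    # Sketch of the identity rules above (illustrative; "Book" hypothetical):
    #
    #     Book(pk=1) == Book(pk=1)   # True: same concrete model and pk
    #     Book() == Book()           # False: unsaved, not the same instance
    #     hash(Book())               # TypeError: no primary key value yet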
def __reduce__(self):
data = self.__getstate__()
data[DJANGO_VERSION_PICKLE_KEY] = django.__version__
class_id = self._meta.app_label, self._meta.object_name
return model_unpickle, (class_id,), data
def __getstate__(self):
"""Hook to allow choosing the attributes to pickle."""
state = self.__dict__.copy()
state["_state"] = copy.copy(state["_state"])
state["_state"].fields_cache = state["_state"].fields_cache.copy()
# memoryview cannot be pickled, so cast it to bytes and store
# separately.
_memoryview_attrs = []
for attr, value in state.items():
if isinstance(value, memoryview):
_memoryview_attrs.append((attr, bytes(value)))
if _memoryview_attrs:
state["_memoryview_attrs"] = _memoryview_attrs
for attr, value in _memoryview_attrs:
state.pop(attr)
return state
def __setstate__(self, state):
pickled_version = state.get(DJANGO_VERSION_PICKLE_KEY)
if pickled_version:
if pickled_version != django.__version__:
warnings.warn(
"Pickled model instance's Django version %s does not "
"match the current version %s."
% (pickled_version, django.__version__),
RuntimeWarning,
stacklevel=2,
)
else:
warnings.warn(
"Pickled model instance's Django version is not specified.",
RuntimeWarning,
stacklevel=2,
)
if "_memoryview_attrs" in state:
for attr, value in state.pop("_memoryview_attrs"):
state[attr] = memoryview(value)
self.__dict__.update(state)
def _get_pk_val(self, meta=None):
meta = meta or self._meta
return getattr(self, meta.pk.attname)
def _set_pk_val(self, value):
for parent_link in self._meta.parents.values():
if parent_link and parent_link != self._meta.pk:
setattr(self, parent_link.target_field.attname, value)
return setattr(self, self._meta.pk.attname, value)
pk = property(_get_pk_val, _set_pk_val)
def get_deferred_fields(self):
"""
Return a set containing names of deferred fields for this instance.
"""
return {
f.attname
for f in self._meta.concrete_fields
if f.attname not in self.__dict__
}
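    # Usage sketch (illustrative; "Book" is a hypothetical model):
    #
    #     book = Book.objects.only("title").first()
    #     book.get_deferred_fields()  # e.g. {"author_id", "published"}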
def refresh_from_db(self, using=None, fields=None):
"""
Reload field values from the database.
By default, the reloading happens from the database this instance was
loaded from, or by the read router if this instance wasn't loaded from
any database. The using parameter will override the default.
Fields can be used to specify which fields to reload. The fields
should be an iterable of field attnames. If fields is None, then
all non-deferred fields are reloaded.
When accessing deferred fields of an instance, the deferred loading
of the field will call this method.
"""
if fields is None:
self._prefetched_objects_cache = {}
else:
prefetched_objects_cache = getattr(self, "_prefetched_objects_cache", ())
for field in fields:
if field in prefetched_objects_cache:
del prefetched_objects_cache[field]
fields.remove(field)
if not fields:
return
if any(LOOKUP_SEP in f for f in fields):
raise ValueError(
'Found "%s" in fields argument. Relations and transforms '
"are not allowed in fields." % LOOKUP_SEP
)
hints = {"instance": self}
db_instance_qs = self.__class__._base_manager.db_manager(
using, hints=hints
).filter(pk=self.pk)
# Use provided fields, if not set then reload all non-deferred fields.
deferred_fields = self.get_deferred_fields()
if fields is not None:
fields = list(fields)
db_instance_qs = db_instance_qs.only(*fields)
elif deferred_fields:
fields = [
f.attname
for f in self._meta.concrete_fields
if f.attname not in deferred_fields
]
db_instance_qs = db_instance_qs.only(*fields)
db_instance = db_instance_qs.get()
non_loaded_fields = db_instance.get_deferred_fields()
for field in self._meta.concrete_fields:
if field.attname in non_loaded_fields:
# This field wasn't refreshed - skip ahead.
continue
setattr(self, field.attname, getattr(db_instance, field.attname))
# Clear cached foreign keys.
if field.is_relation and field.is_cached(self):
field.delete_cached_value(self)
# Clear cached relations.
for field in self._meta.related_objects:
if field.is_cached(self):
field.delete_cached_value(self)
# Clear cached private relations.
for field in self._meta.private_fields:
if field.is_relation and field.is_cached(self):
field.delete_cached_value(self)
self._state.db = db_instance._state.db
async def arefresh_from_db(self, using=None, fields=None):
return await sync_to_async(self.refresh_from_db)(using=using, fields=fields)
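    # Usage sketch (hypothetical ``article`` instance): reload all loaded
    # fields, or only a named subset; relations and transforms are rejected
    # in ``fields``.
    #
    #   article.refresh_from_db()                   # all non-deferred fields
    #   article.refresh_from_db(fields=["title"])   # just "title"
    #   await article.arefresh_from_db()            # async wrapper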
def serializable_value(self, field_name):
"""
Return the value of the field name for this instance. If the field is
a foreign key, return the id value instead of the object. If there's
no Field object with this name on the model, return the model
attribute's value.
Used to serialize a field's value (in the serializer, or form output,
for example). Normally, you would just access the attribute directly
and not use this method.
"""
try:
field = self._meta.get_field(field_name)
except FieldDoesNotExist:
return getattr(self, field_name)
return getattr(self, field.attname)
def save(
self, force_insert=False, force_update=False, using=None, update_fields=None
):
"""
Save the current instance. Override this in a subclass if you want to
control the saving process.
The 'force_insert' and 'force_update' parameters can be used to insist
that the "save" must be an SQL insert or update (or equivalent for
non-SQL backends), respectively. Normally, they should not be set.
"""
self._prepare_related_fields_for_save(operation_name="save")
using = using or router.db_for_write(self.__class__, instance=self)
if force_insert and (force_update or update_fields):
raise ValueError("Cannot force both insert and updating in model saving.")
deferred_fields = self.get_deferred_fields()
if update_fields is not None:
            # If update_fields is empty, skip the save. We also check for
            # no-op saves later on for inheritance cases. This bailout is
# still needed for skipping signal sending.
if not update_fields:
return
update_fields = frozenset(update_fields)
field_names = self._meta._non_pk_concrete_field_names
non_model_fields = update_fields.difference(field_names)
if non_model_fields:
raise ValueError(
"The following fields do not exist in this model, are m2m "
"fields, or are non-concrete fields: %s"
% ", ".join(non_model_fields)
)
# If saving to the same database, and this model is deferred, then
# automatically do an "update_fields" save on the loaded fields.
elif not force_insert and deferred_fields and using == self._state.db:
field_names = set()
for field in self._meta.concrete_fields:
if not field.primary_key and not hasattr(field, "through"):
field_names.add(field.attname)
loaded_fields = field_names.difference(deferred_fields)
if loaded_fields:
update_fields = frozenset(loaded_fields)
self.save_base(
using=using,
force_insert=force_insert,
force_update=force_update,
update_fields=update_fields,
)
save.alters_data = True
async def asave(
self, force_insert=False, force_update=False, using=None, update_fields=None
):
return await sync_to_async(self.save)(
force_insert=force_insert,
force_update=force_update,
using=using,
update_fields=update_fields,
)
asave.alters_data = True
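    # Usage sketch: ``update_fields`` limits the UPDATE to the named columns
    # and raises ValueError for names that aren't concrete local fields.
    # Hypothetical ``article`` instance:
    #
    #   article.title = "New title"
    #   article.save(update_fields=["title"])   # UPDATE ... SET title = ...
    #   article.save(update_fields=[])          # no-op; signals are skipped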
@classmethod
def _validate_force_insert(cls, force_insert):
if force_insert is False:
return ()
if force_insert is True:
return (cls,)
if not isinstance(force_insert, tuple):
raise TypeError("force_insert must be a bool or tuple.")
for member in force_insert:
if not isinstance(member, ModelBase):
raise TypeError(
f"Invalid force_insert member. {member!r} must be a model subclass."
)
if not issubclass(cls, member):
raise TypeError(
f"Invalid force_insert member. {member.__qualname__} must be a "
f"base of {cls.__qualname__}."
)
return force_insert
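    # Validation sketch: ``force_insert`` may be a bool or a tuple of base
    # model classes; tuple members additionally force the matching parent
    # rows to be inserted. With hypothetical MTI models ``Restaurant(Place)``:
    #
    #   restaurant.save(force_insert=True)             # force the Restaurant row
    #   restaurant.save(force_insert=(Place,))         # also force the Place row
    #   restaurant.save(force_insert=(models.Model,))  # force every parent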
def save_base(
self,
raw=False,
force_insert=False,
force_update=False,
using=None,
update_fields=None,
):
"""
Handle the parts of saving which should be done only once per save,
yet need to be done in raw saves, too. This includes some sanity
checks and signal sending.
        The 'raw' argument tells save_base not to save any parent
models and not to do any changes to the values before save. This
is used by fixture loading.
"""
using = using or router.db_for_write(self.__class__, instance=self)
assert not (force_insert and (force_update or update_fields))
assert update_fields is None or update_fields
cls = origin = self.__class__
# Skip proxies, but keep the origin as the proxy model.
if cls._meta.proxy:
cls = cls._meta.concrete_model
meta = cls._meta
if not meta.auto_created:
pre_save.send(
sender=origin,
instance=self,
raw=raw,
using=using,
update_fields=update_fields,
)
# A transaction isn't needed if one query is issued.
if meta.parents:
context_manager = transaction.atomic(using=using, savepoint=False)
else:
context_manager = transaction.mark_for_rollback_on_error(using=using)
with context_manager:
parent_inserted = False
if not raw:
# Validate force insert only when parents are inserted.
force_insert = self._validate_force_insert(force_insert)
parent_inserted = self._save_parents(
cls, using, update_fields, force_insert
)
updated = self._save_table(
raw,
cls,
force_insert or parent_inserted,
force_update,
using,
update_fields,
)
# Store the database on which the object was saved
self._state.db = using
# Once saved, this is no longer a to-be-added instance.
self._state.adding = False
# Signal that the save is complete
if not meta.auto_created:
post_save.send(
sender=origin,
instance=self,
created=(not updated),
update_fields=update_fields,
raw=raw,
using=using,
)
save_base.alters_data = True
def _save_parents(
self, cls, using, update_fields, force_insert, updated_parents=None
):
"""Save all the parents of cls using values from self."""
meta = cls._meta
inserted = False
if updated_parents is None:
updated_parents = {}
for parent, field in meta.parents.items():
# Make sure the link fields are synced between parent and self.
if (
field
and getattr(self, parent._meta.pk.attname) is None
and getattr(self, field.attname) is not None
):
setattr(self, parent._meta.pk.attname, getattr(self, field.attname))
if (parent_updated := updated_parents.get(parent)) is None:
parent_inserted = self._save_parents(
cls=parent,
using=using,
update_fields=update_fields,
force_insert=force_insert,
updated_parents=updated_parents,
)
updated = self._save_table(
cls=parent,
using=using,
update_fields=update_fields,
force_insert=parent_inserted or issubclass(parent, force_insert),
)
if not updated:
inserted = True
updated_parents[parent] = updated
elif not parent_updated:
inserted = True
# Set the parent's PK value to self.
if field:
setattr(self, field.attname, self._get_pk_val(parent._meta))
                # Since we didn't have an instance of the parent handy, set
                # the attname directly, bypassing the descriptor. Invalidate
# the related object cache, in case it's been accidentally
# populated. A fresh instance will be re-built from the
# database if necessary.
if field.is_cached(self):
field.delete_cached_value(self)
return inserted
def _save_table(
self,
raw=False,
cls=None,
force_insert=False,
force_update=False,
using=None,
update_fields=None,
):
"""
Do the heavy-lifting involved in saving. Update or insert the data
for a single table.
"""
meta = cls._meta
non_pks = [f for f in meta.local_concrete_fields if not f.primary_key]
if update_fields:
non_pks = [
f
for f in non_pks
if f.name in update_fields or f.attname in update_fields
]
pk_val = self._get_pk_val(meta)
if pk_val is None:
pk_val = meta.pk.get_pk_value_on_save(self)
setattr(self, meta.pk.attname, pk_val)
pk_set = pk_val is not None
if not pk_set and (force_update or update_fields):
raise ValueError("Cannot force an update in save() with no primary key.")
updated = False
# Skip an UPDATE when adding an instance and primary key has a default.
if (
not raw
and not force_insert
and self._state.adding
and (
(meta.pk.default and meta.pk.default is not NOT_PROVIDED)
or (meta.pk.db_default and meta.pk.db_default is not NOT_PROVIDED)
)
):
force_insert = True
# If possible, try an UPDATE. If that doesn't update anything, do an INSERT.
if pk_set and not force_insert:
base_qs = cls._base_manager.using(using)
values = [
(
f,
None,
(getattr(self, f.attname) if raw else f.pre_save(self, False)),
)
for f in non_pks
]
forced_update = update_fields or force_update
updated = self._do_update(
base_qs, using, pk_val, values, update_fields, forced_update
)
if force_update and not updated:
raise DatabaseError("Forced update did not affect any rows.")
if update_fields and not updated:
raise DatabaseError("Save with update_fields did not affect any rows.")
if not updated:
if meta.order_with_respect_to:
# If this is a model with an order_with_respect_to
# autopopulate the _order field
field = meta.order_with_respect_to
filter_args = field.get_filter_kwargs_for_object(self)
self._order = (
cls._base_manager.using(using)
.filter(**filter_args)
.aggregate(
_order__max=Coalesce(
ExpressionWrapper(
Max("_order") + Value(1), output_field=IntegerField()
),
Value(0),
),
)["_order__max"]
)
fields = meta.local_concrete_fields
if not pk_set:
fields = [f for f in fields if f is not meta.auto_field]
returning_fields = meta.db_returning_fields
results = self._do_insert(
cls._base_manager, using, fields, returning_fields, raw
)
if results:
for value, field in zip(results[0], returning_fields):
setattr(self, field.attname, value)
return updated
def _do_update(self, base_qs, using, pk_val, values, update_fields, forced_update):
"""
Try to update the model. Return True if the model was updated (if an
update query was done and a matching row was found in the DB).
"""
filtered = base_qs.filter(pk=pk_val)
if not values:
# We can end up here when saving a model in inheritance chain where
# update_fields doesn't target any field in current model. In that
# case we just say the update succeeded. Another case ending up here
# is a model with just PK - in that case check that the PK still
# exists.
return update_fields is not None or filtered.exists()
if self._meta.select_on_save and not forced_update:
return (
filtered.exists()
and
# It may happen that the object is deleted from the DB right after
# this check, causing the subsequent UPDATE to return zero matching
# rows. The same result can occur in some rare cases when the
# database returns zero despite the UPDATE being executed
# successfully (a row is matched and updated). In order to
# distinguish these two cases, the object's existence in the
# database is again checked for if the UPDATE query returns 0.
(filtered._update(values) > 0 or filtered.exists())
)
return filtered._update(values) > 0
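    # Behavior note: with ``Meta.select_on_save = True`` (and no forced
    # update), an extra SELECT runs before the UPDATE, matching the legacy
    # pre-1.6 save algorithm for backends that may report zero updated rows
    # even when a row was matched.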
def _do_insert(self, manager, using, fields, returning_fields, raw):
"""
Do an INSERT. If returning_fields is defined then this method should
return the newly created data for the model.
"""
return manager._insert(
[self],
fields=fields,
returning_fields=returning_fields,
using=using,
raw=raw,
)
def _prepare_related_fields_for_save(self, operation_name, fields=None):
# Ensure that a model instance without a PK hasn't been assigned to
# a ForeignKey, GenericForeignKey or OneToOneField on this model. If
# the field is nullable, allowing the save would result in silent data
# loss.
for field in self._meta.concrete_fields:
if fields and field not in fields:
continue
# If the related field isn't cached, then an instance hasn't been
# assigned and there's no need to worry about this check.
if field.is_relation and field.is_cached(self):
obj = getattr(self, field.name, None)
if not obj:
continue
# A pk may have been assigned manually to a model instance not
# saved to the database (or auto-generated in a case like
# UUIDField), but we allow the save to proceed and rely on the
# database to raise an IntegrityError if applicable. If
# constraints aren't supported by the database, there's the
# unavoidable risk of data corruption.
if obj.pk is None:
# Remove the object from a related instance cache.
if not field.remote_field.multiple:
field.remote_field.delete_cached_value(obj)
raise ValueError(
"%s() prohibited to prevent data loss due to unsaved "
"related object '%s'." % (operation_name, field.name)
)
elif getattr(self, field.attname) in field.empty_values:
# Set related object if it has been saved after an
# assignment.
setattr(self, field.name, obj)
# If the relationship's pk/to_field was changed, clear the
# cached relationship.
if getattr(obj, field.target_field.attname) != getattr(
self, field.attname
):
field.delete_cached_value(self)
# GenericForeignKeys are private.
for field in self._meta.private_fields:
if fields and field not in fields:
continue
if (
field.is_relation
and field.is_cached(self)
and hasattr(field, "fk_field")
):
obj = field.get_cached_value(self, default=None)
if obj and obj.pk is None:
raise ValueError(
f"{operation_name}() prohibited to prevent data loss due to "
f"unsaved related object '{field.name}'."
)
def delete(self, using=None, keep_parents=False):
if self.pk is None:
raise ValueError(
"%s object can't be deleted because its %s attribute is set "
"to None." % (self._meta.object_name, self._meta.pk.attname)
)
using = using or router.db_for_write(self.__class__, instance=self)
collector = Collector(using=using, origin=self)
collector.collect([self], keep_parents=keep_parents)
return collector.delete()
delete.alters_data = True
async def adelete(self, using=None, keep_parents=False):
return await sync_to_async(self.delete)(
using=using,
keep_parents=keep_parents,
)
adelete.alters_data = True
def _get_FIELD_display(self, field):
value = getattr(self, field.attname)
choices_dict = dict(make_hashable(field.flatchoices))
# force_str() to coerce lazy strings.
return force_str(
choices_dict.get(make_hashable(value), value), strings_only=True
)
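    # Usage sketch: the metaclass exposes this as ``get_<field>_display()``
    # for every field defining ``choices``. Hypothetical model:
    #
    #   class Article(models.Model):
    #       status = models.CharField(
    #           max_length=1, choices=[("d", "Draft"), ("p", "Published")]
    #       )
    #
    #   article.get_status_display()   # "Draft"; falls back to the raw value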
def _get_next_or_previous_by_FIELD(self, field, is_next, **kwargs):
if not self.pk:
raise ValueError("get_next/get_previous cannot be used on unsaved objects.")
op = "gt" if is_next else "lt"
order = "" if is_next else "-"
param = getattr(self, field.attname)
q = Q.create([(field.name, param), (f"pk__{op}", self.pk)], connector=Q.AND)
q = Q.create([q, (f"{field.name}__{op}", param)], connector=Q.OR)
qs = (
self.__class__._default_manager.using(self._state.db)
.filter(**kwargs)
.filter(q)
.order_by("%s%s" % (order, field.name), "%spk" % order)
)
try:
return qs[0]
except IndexError:
raise self.DoesNotExist(
"%s matching query does not exist." % self.__class__._meta.object_name
)
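    # Usage sketch: date and datetime fields expose this as
    # ``get_next_by_<field>()`` / ``get_previous_by_<field>()``, ordering by
    # (field, pk) and raising DoesNotExist past either end. Hypothetical
    # ``pub_date`` field:
    #
    #   article.get_next_by_pub_date()                # next article
    #   article.get_previous_by_pub_date(status="p")  # kwargs become filters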
def _get_next_or_previous_in_order(self, is_next):
cachename = "__%s_order_cache" % is_next
if not hasattr(self, cachename):
op = "gt" if is_next else "lt"
order = "_order" if is_next else "-_order"
order_field = self._meta.order_with_respect_to
filter_args = order_field.get_filter_kwargs_for_object(self)
obj = (
self.__class__._default_manager.filter(**filter_args)
.filter(
**{
"_order__%s"
% op: self.__class__._default_manager.values("_order").filter(
**{self._meta.pk.name: self.pk}
)
}
)
.order_by(order)[:1]
.get()
)
setattr(self, cachename, obj)
return getattr(self, cachename)
def _get_field_value_map(self, meta, exclude=None):
if exclude is None:
exclude = set()
meta = meta or self._meta
return {
field.name: Value(getattr(self, field.attname), field)
for field in meta.local_concrete_fields
if field.name not in exclude
}
def prepare_database_save(self, field):
if self.pk is None:
raise ValueError(
"Unsaved model instance %r cannot be used in an ORM query." % self
)
return getattr(self, field.remote_field.get_related_field().attname)
def clean(self):
"""
Hook for doing any extra model-wide validation after clean() has been
called on every field by self.clean_fields. Any ValidationError raised
by this method will not be associated with a particular field; it will
have a special-case association with the field defined by NON_FIELD_ERRORS.
"""
pass
def validate_unique(self, exclude=None):
"""
Check unique constraints on the model and raise ValidationError if any
failed.
"""
unique_checks, date_checks = self._get_unique_checks(exclude=exclude)
errors = self._perform_unique_checks(unique_checks)
date_errors = self._perform_date_checks(date_checks)
for k, v in date_errors.items():
errors.setdefault(k, []).extend(v)
if errors:
raise ValidationError(errors)
def _get_unique_checks(self, exclude=None, include_meta_constraints=False):
"""
Return a list of checks to perform. Since validate_unique() could be
called from a ModelForm, some fields may have been excluded; we can't
perform a unique check on a model that is missing fields involved
in that check. Fields that did not validate should also be excluded,
but they need to be passed in via the exclude argument.
"""
if exclude is None:
exclude = set()
unique_checks = []
unique_togethers = [(self.__class__, self._meta.unique_together)]
constraints = []
if include_meta_constraints:
constraints = [(self.__class__, self._meta.total_unique_constraints)]
for parent_class in self._meta.get_parent_list():
if parent_class._meta.unique_together:
unique_togethers.append(
(parent_class, parent_class._meta.unique_together)
)
if include_meta_constraints and parent_class._meta.total_unique_constraints:
constraints.append(
(parent_class, parent_class._meta.total_unique_constraints)
)
for model_class, unique_together in unique_togethers:
for check in unique_together:
if not any(name in exclude for name in check):
# Add the check if the field isn't excluded.
unique_checks.append((model_class, tuple(check)))
if include_meta_constraints:
for model_class, model_constraints in constraints:
for constraint in model_constraints:
if not any(name in exclude for name in constraint.fields):
unique_checks.append((model_class, constraint.fields))
# These are checks for the unique_for_<date/year/month>.
date_checks = []
# Gather a list of checks for fields declared as unique and add them to
# the list of checks.
fields_with_class = [(self.__class__, self._meta.local_fields)]
for parent_class in self._meta.get_parent_list():
fields_with_class.append((parent_class, parent_class._meta.local_fields))
for model_class, fields in fields_with_class:
for f in fields:
name = f.name
if name in exclude:
continue
if f.unique:
unique_checks.append((model_class, (name,)))
if f.unique_for_date and f.unique_for_date not in exclude:
date_checks.append((model_class, "date", name, f.unique_for_date))
if f.unique_for_year and f.unique_for_year not in exclude:
date_checks.append((model_class, "year", name, f.unique_for_year))
if f.unique_for_month and f.unique_for_month not in exclude:
date_checks.append((model_class, "month", name, f.unique_for_month))
return unique_checks, date_checks
def _perform_unique_checks(self, unique_checks):
errors = {}
for model_class, unique_check in unique_checks:
# Try to look up an existing object with the same values as this
            # object's values for all the unique fields.
lookup_kwargs = {}
for field_name in unique_check:
f = self._meta.get_field(field_name)
lookup_value = getattr(self, f.attname)
# TODO: Handle multiple backends with different feature flags.
if lookup_value is None or (
lookup_value == ""
and connection.features.interprets_empty_strings_as_nulls
):
# no value, skip the lookup
continue
if f.primary_key and not self._state.adding:
# no need to check for unique primary key when editing
continue
lookup_kwargs[str(field_name)] = lookup_value
# some fields were skipped, no reason to do the check
if len(unique_check) != len(lookup_kwargs):
continue
qs = model_class._default_manager.filter(**lookup_kwargs)
# Exclude the current object from the query if we are editing an
# instance (as opposed to creating a new one)
# Note that we need to use the pk as defined by model_class, not
# self.pk. These can be different fields because model inheritance
            # allows a single model to have effectively multiple primary keys.
# Refs #17615.
model_class_pk = self._get_pk_val(model_class._meta)
if not self._state.adding and model_class_pk is not None:
qs = qs.exclude(pk=model_class_pk)
if qs.exists():
if len(unique_check) == 1:
key = unique_check[0]
else:
key = NON_FIELD_ERRORS
errors.setdefault(key, []).append(
self.unique_error_message(model_class, unique_check)
)
return errors
def _perform_date_checks(self, date_checks):
errors = {}
for model_class, lookup_type, field, unique_for in date_checks:
lookup_kwargs = {}
            # There's a ticket to add a date lookup; we can remove this
            # special case if that makes its way in.
date = getattr(self, unique_for)
if date is None:
continue
if lookup_type == "date":
lookup_kwargs["%s__day" % unique_for] = date.day
lookup_kwargs["%s__month" % unique_for] = date.month
lookup_kwargs["%s__year" % unique_for] = date.year
else:
lookup_kwargs["%s__%s" % (unique_for, lookup_type)] = getattr(
date, lookup_type
)
lookup_kwargs[field] = getattr(self, field)
qs = model_class._default_manager.filter(**lookup_kwargs)
# Exclude the current object from the query if we are editing an
# instance (as opposed to creating a new one)
if not self._state.adding and self.pk is not None:
qs = qs.exclude(pk=self.pk)
if qs.exists():
errors.setdefault(field, []).append(
self.date_error_message(lookup_type, field, unique_for)
)
return errors
def date_error_message(self, lookup_type, field_name, unique_for):
opts = self._meta
field = opts.get_field(field_name)
return ValidationError(
message=field.error_messages["unique_for_date"],
code="unique_for_date",
params={
"model": self,
"model_name": capfirst(opts.verbose_name),
"lookup_type": lookup_type,
"field": field_name,
"field_label": capfirst(field.verbose_name),
"date_field": unique_for,
"date_field_label": capfirst(opts.get_field(unique_for).verbose_name),
},
)
def unique_error_message(self, model_class, unique_check):
opts = model_class._meta
params = {
"model": self,
"model_class": model_class,
"model_name": capfirst(opts.verbose_name),
"unique_check": unique_check,
}
# A unique field
if len(unique_check) == 1:
field = opts.get_field(unique_check[0])
params["field_label"] = capfirst(field.verbose_name)
return ValidationError(
message=field.error_messages["unique"],
code="unique",
params=params,
)
# unique_together
else:
field_labels = [
capfirst(opts.get_field(f).verbose_name) for f in unique_check
]
params["field_labels"] = get_text_list(field_labels, _("and"))
return ValidationError(
message=_("%(model_name)s with this %(field_labels)s already exists."),
code="unique_together",
params=params,
)
def get_constraints(self):
constraints = [(self.__class__, self._meta.constraints)]
for parent_class in self._meta.get_parent_list():
if parent_class._meta.constraints:
constraints.append((parent_class, parent_class._meta.constraints))
return constraints
def validate_constraints(self, exclude=None):
constraints = self.get_constraints()
using = router.db_for_write(self.__class__, instance=self)
errors = {}
for model_class, model_constraints in constraints:
for constraint in model_constraints:
try:
constraint.validate(model_class, self, exclude=exclude, using=using)
except ValidationError as e:
if (
getattr(e, "code", None) == "unique"
and len(constraint.fields) == 1
):
errors.setdefault(constraint.fields[0], []).append(e)
else:
errors = e.update_error_dict(errors)
if errors:
raise ValidationError(errors)
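    # Usage sketch: validates Meta.constraints against the database chosen by
    # the write router. Single-field UniqueConstraint violations are keyed by
    # that field name; other violations are merged into the error dict
    # (typically under NON_FIELD_ERRORS).
    #
    #   try:
    #       article.validate_constraints()
    #   except ValidationError as e:
    #       print(e.message_dict)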
def full_clean(self, exclude=None, validate_unique=True, validate_constraints=True):
"""
Call clean_fields(), clean(), validate_unique(), and
validate_constraints() on the model. Raise a ValidationError for any
errors that occur.
"""
errors = {}
if exclude is None:
exclude = set()
else:
exclude = set(exclude)
try:
self.clean_fields(exclude=exclude)
except ValidationError as e:
errors = e.update_error_dict(errors)
# Form.clean() is run even if other validation fails, so do the
# same with Model.clean() for consistency.
try:
self.clean()
except ValidationError as e:
errors = e.update_error_dict(errors)
# Run unique checks, but only for fields that passed validation.
if validate_unique:
for name in errors:
if name != NON_FIELD_ERRORS and name not in exclude:
exclude.add(name)
try:
self.validate_unique(exclude=exclude)
except ValidationError as e:
errors = e.update_error_dict(errors)
# Run constraints checks, but only for fields that passed validation.
if validate_constraints:
for name in errors:
if name != NON_FIELD_ERRORS and name not in exclude:
exclude.add(name)
try:
self.validate_constraints(exclude=exclude)
except ValidationError as e:
errors = e.update_error_dict(errors)
if errors:
raise ValidationError(errors)
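    # Usage sketch: full_clean() chains the four validation steps and
    # aggregates errors per field; names that already failed clean_fields()
    # are excluded from the unique and constraint checks.
    #
    #   try:
    #       article.full_clean(exclude={"slug"})
    #   except ValidationError as e:
    #       errors = e.message_dict   # {"title": [...], "__all__": [...]}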
def clean_fields(self, exclude=None):
"""
Clean all fields and raise a ValidationError containing a dict
of all validation errors if any occur.
"""
if exclude is None:
exclude = set()
errors = {}
for f in self._meta.fields:
if f.name in exclude:
continue
# Skip validation for empty fields with blank=True. The developer
# is responsible for making sure they have a valid value.
raw_value = getattr(self, f.attname)
if f.blank and raw_value in f.empty_values:
continue
try:
setattr(self, f.attname, f.clean(raw_value, self))
except ValidationError as e:
errors[f.name] = e.error_list
if errors:
raise ValidationError(errors)
@classmethod
def check(cls, **kwargs):
errors = [
*cls._check_swappable(),
*cls._check_model(),
*cls._check_managers(**kwargs),
]
if not cls._meta.swapped:
databases = kwargs.get("databases") or []
errors += [
*cls._check_fields(**kwargs),
*cls._check_m2m_through_same_relationship(),
*cls._check_long_column_names(databases),
]
clash_errors = (
*cls._check_id_field(),
*cls._check_field_name_clashes(),
*cls._check_model_name_db_lookup_clashes(),
*cls._check_property_name_related_field_accessor_clashes(),
*cls._check_single_primary_key(),
)
errors.extend(clash_errors)
# If there are field name clashes, hide consequent column name
# clashes.
if not clash_errors:
errors.extend(cls._check_column_name_clashes())
errors += [
*cls._check_index_together(),
*cls._check_unique_together(),
*cls._check_indexes(databases),
*cls._check_ordering(),
*cls._check_constraints(databases),
*cls._check_default_pk(),
*cls._check_db_table_comment(databases),
]
return errors
@classmethod
def _check_default_pk(cls):
if (
not cls._meta.abstract
and cls._meta.pk.auto_created
and
            # Inherited PKs are checked in parent models.
not (
isinstance(cls._meta.pk, OneToOneField)
and cls._meta.pk.remote_field.parent_link
)
and not settings.is_overridden("DEFAULT_AUTO_FIELD")
and cls._meta.app_config
and not cls._meta.app_config._is_default_auto_field_overridden
):
return [
checks.Warning(
f"Auto-created primary key used when not defining a "
f"primary key type, by default "
f"'{settings.DEFAULT_AUTO_FIELD}'.",
hint=(
f"Configure the DEFAULT_AUTO_FIELD setting or the "
f"{cls._meta.app_config.__class__.__qualname__}."
f"default_auto_field attribute to point to a subclass "
f"of AutoField, e.g. 'django.db.models.BigAutoField'."
),
obj=cls,
id="models.W042",
),
]
return []
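    # Configuration sketch for silencing models.W042, either project-wide in
    # settings:
    #
    #   DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
    #
    # or per app, on the AppConfig subclass:
    #
    #   default_auto_field = "django.db.models.BigAutoField"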
@classmethod
def _check_db_table_comment(cls, databases):
if not cls._meta.db_table_comment:
return []
errors = []
for db in databases:
if not router.allow_migrate_model(db, cls):
continue
connection = connections[db]
if not (
connection.features.supports_comments
or "supports_comments" in cls._meta.required_db_features
):
errors.append(
checks.Warning(
f"{connection.display_name} does not support comments on "
f"tables (db_table_comment).",
obj=cls,
id="models.W046",
)
)
return errors
@classmethod
def _check_swappable(cls):
"""Check if the swapped model exists."""
errors = []
if cls._meta.swapped:
try:
apps.get_model(cls._meta.swapped)
except ValueError:
errors.append(
checks.Error(
"'%s' is not of the form 'app_label.app_name'."
% cls._meta.swappable,
id="models.E001",
)
)
except LookupError:
app_label, model_name = cls._meta.swapped.split(".")
errors.append(
checks.Error(
"'%s' references '%s.%s', which has not been "
"installed, or is abstract."
% (cls._meta.swappable, app_label, model_name),
id="models.E002",
)
)
return errors
@classmethod
def _check_model(cls):
errors = []
if cls._meta.proxy:
if cls._meta.local_fields or cls._meta.local_many_to_many:
errors.append(
checks.Error(
"Proxy model '%s' contains model fields." % cls.__name__,
id="models.E017",
)
)
return errors
@classmethod
def _check_managers(cls, **kwargs):
"""Perform all manager checks."""
errors = []
for manager in cls._meta.managers:
errors.extend(manager.check(**kwargs))
return errors
@classmethod
def _check_fields(cls, **kwargs):
"""Perform all field checks."""
errors = []
for field in cls._meta.local_fields:
errors.extend(field.check(**kwargs))
for field in cls._meta.local_many_to_many:
errors.extend(field.check(from_model=cls, **kwargs))
return errors
@classmethod
def _check_m2m_through_same_relationship(cls):
"""Check if no relationship model is used by more than one m2m field."""
errors = []
seen_intermediary_signatures = []
fields = cls._meta.local_many_to_many
# Skip when the target model wasn't found.
fields = (f for f in fields if isinstance(f.remote_field.model, ModelBase))
# Skip when the relationship model wasn't found.
fields = (f for f in fields if isinstance(f.remote_field.through, ModelBase))
for f in fields:
signature = (
f.remote_field.model,
cls,
f.remote_field.through,
f.remote_field.through_fields,
)
if signature in seen_intermediary_signatures:
errors.append(
checks.Error(
"The model has two identical many-to-many relations "
"through the intermediate model '%s'."
% f.remote_field.through._meta.label,
obj=cls,
id="models.E003",
)
)
else:
seen_intermediary_signatures.append(signature)
return errors
@classmethod
def _check_id_field(cls):
"""Check if `id` field is a primary key."""
fields = [
f for f in cls._meta.local_fields if f.name == "id" and f != cls._meta.pk
]
# fields is empty or consists of the invalid "id" field
if fields and not fields[0].primary_key and cls._meta.pk.name == "id":
return [
checks.Error(
"'id' can only be used as a field name if the field also "
"sets 'primary_key=True'.",
obj=cls,
id="models.E004",
)
]
else:
return []
@classmethod
def _check_field_name_clashes(cls):
"""Forbid field shadowing in multi-table inheritance."""
errors = []
used_fields = {} # name or attname -> field
# Check that multi-inheritance doesn't cause field name shadowing.
for parent in cls._meta.get_parent_list():
for f in parent._meta.local_fields:
clash = used_fields.get(f.name) or used_fields.get(f.attname) or None
if clash:
errors.append(
checks.Error(
"The field '%s' from parent model "
"'%s' clashes with the field '%s' "
"from parent model '%s'."
% (clash.name, clash.model._meta, f.name, f.model._meta),
obj=cls,
id="models.E005",
)
)
used_fields[f.name] = f
used_fields[f.attname] = f
# Check that fields defined in the model don't clash with fields from
# parents, including auto-generated fields like multi-table inheritance
# child accessors.
for parent in cls._meta.get_parent_list():
for f in parent._meta.get_fields():
if f not in used_fields:
used_fields[f.name] = f
# Check that parent links in diamond-shaped MTI models don't clash.
for parent_link in cls._meta.parents.values():
if not parent_link:
continue
clash = used_fields.get(parent_link.name) or None
if clash:
errors.append(
checks.Error(
f"The field '{parent_link.name}' clashes with the field "
f"'{clash.name}' from model '{clash.model._meta}'.",
obj=cls,
id="models.E006",
)
)
for f in cls._meta.local_fields:
clash = used_fields.get(f.name) or used_fields.get(f.attname) or None
            # Note that we may detect a clash between a user-defined
            # non-unique field "id" and the automatically added unique field
            # "id", both defined on the same model. This special case is
            # considered in _check_id_field and here we ignore it.
id_conflict = (
f.name == "id" and clash and clash.name == "id" and clash.model == cls
)
if clash and not id_conflict:
errors.append(
checks.Error(
"The field '%s' clashes with the field '%s' "
"from model '%s'." % (f.name, clash.name, clash.model._meta),
obj=f,
id="models.E006",
)
)
used_fields[f.name] = f
used_fields[f.attname] = f
return errors
@classmethod
def _check_column_name_clashes(cls):
# Store a list of column names which have already been used by other fields.
used_column_names = []
errors = []
for f in cls._meta.local_fields:
_, column_name = f.get_attname_column()
# Ensure the column name is not already in use.
if column_name and column_name in used_column_names:
errors.append(
checks.Error(
"Field '%s' has column name '%s' that is used by "
"another field." % (f.name, column_name),
hint="Specify a 'db_column' for the field.",
obj=cls,
id="models.E007",
)
)
else:
used_column_names.append(column_name)
return errors
@classmethod
def _check_model_name_db_lookup_clashes(cls):
errors = []
model_name = cls.__name__
if model_name.startswith("_") or model_name.endswith("_"):
errors.append(
checks.Error(
"The model name '%s' cannot start or end with an underscore "
"as it collides with the query lookup syntax." % model_name,
obj=cls,
id="models.E023",
)
)
elif LOOKUP_SEP in model_name:
errors.append(
checks.Error(
"The model name '%s' cannot contain double underscores as "
"it collides with the query lookup syntax." % model_name,
obj=cls,
id="models.E024",
)
)
return errors
@classmethod
def _check_property_name_related_field_accessor_clashes(cls):
errors = []
property_names = cls._meta._property_names
related_field_accessors = (
f.get_attname()
for f in cls._meta._get_fields(reverse=False)
if f.is_relation and f.related_model is not None
)
for accessor in related_field_accessors:
if accessor in property_names:
errors.append(
checks.Error(
"The property '%s' clashes with a related field "
"accessor." % accessor,
obj=cls,
id="models.E025",
)
)
return errors
@classmethod
def _check_single_primary_key(cls):
errors = []
if sum(1 for f in cls._meta.local_fields if f.primary_key) > 1:
errors.append(
checks.Error(
"The model cannot have more than one field with "
"'primary_key=True'.",
obj=cls,
id="models.E026",
)
)
return errors
# RemovedInDjango51Warning.
@classmethod
def _check_index_together(cls):
"""Check the value of "index_together" option."""
if not isinstance(cls._meta.index_together, (tuple, list)):
return [
checks.Error(
"'index_together' must be a list or tuple.",
obj=cls,
id="models.E008",
)
]
elif any(
not isinstance(fields, (tuple, list)) for fields in cls._meta.index_together
):
return [
checks.Error(
"All 'index_together' elements must be lists or tuples.",
obj=cls,
id="models.E009",
)
]
else:
errors = []
for fields in cls._meta.index_together:
errors.extend(cls._check_local_fields(fields, "index_together"))
return errors
@classmethod
def _check_unique_together(cls):
"""Check the value of "unique_together" option."""
if not isinstance(cls._meta.unique_together, (tuple, list)):
return [
checks.Error(
"'unique_together' must be a list or tuple.",
obj=cls,
id="models.E010",
)
]
elif any(
not isinstance(fields, (tuple, list))
for fields in cls._meta.unique_together
):
return [
checks.Error(
"All 'unique_together' elements must be lists or tuples.",
obj=cls,
id="models.E011",
)
]
else:
errors = []
for fields in cls._meta.unique_together:
errors.extend(cls._check_local_fields(fields, "unique_together"))
return errors
@classmethod
def _check_indexes(cls, databases):
"""Check fields, names, and conditions of indexes."""
errors = []
references = set()
for index in cls._meta.indexes:
# Index name can't start with an underscore or a number, restricted
# for cross-database compatibility with Oracle.
if index.name[0] == "_" or index.name[0].isdigit():
errors.append(
checks.Error(
"The index name '%s' cannot start with an underscore "
"or a number." % index.name,
obj=cls,
id="models.E033",
),
)
if len(index.name) > index.max_name_length:
errors.append(
checks.Error(
"The index name '%s' cannot be longer than %d "
"characters." % (index.name, index.max_name_length),
obj=cls,
id="models.E034",
),
)
if index.contains_expressions:
for expression in index.expressions:
references.update(
ref[0] for ref in cls._get_expr_references(expression)
)
for db in databases:
if not router.allow_migrate_model(db, cls):
continue
connection = connections[db]
if not (
connection.features.supports_partial_indexes
or "supports_partial_indexes" in cls._meta.required_db_features
) and any(index.condition is not None for index in cls._meta.indexes):
errors.append(
checks.Warning(
"%s does not support indexes with conditions."
% connection.display_name,
hint=(
"Conditions will be ignored. Silence this warning "
"if you don't care about it."
),
obj=cls,
id="models.W037",
)
)
if not (
connection.features.supports_covering_indexes
or "supports_covering_indexes" in cls._meta.required_db_features
) and any(index.include for index in cls._meta.indexes):
errors.append(
checks.Warning(
"%s does not support indexes with non-key columns."
% connection.display_name,
hint=(
"Non-key columns will be ignored. Silence this "
"warning if you don't care about it."
),
obj=cls,
id="models.W040",
)
)
if not (
connection.features.supports_expression_indexes
or "supports_expression_indexes" in cls._meta.required_db_features
) and any(index.contains_expressions for index in cls._meta.indexes):
errors.append(
checks.Warning(
"%s does not support indexes on expressions."
% connection.display_name,
hint=(
"An index won't be created. Silence this warning "
"if you don't care about it."
),
obj=cls,
id="models.W043",
)
)
fields = [
field for index in cls._meta.indexes for field, _ in index.fields_orders
]
fields += [include for index in cls._meta.indexes for include in index.include]
fields += references
errors.extend(cls._check_local_fields(fields, "indexes"))
return errors
@classmethod
def _check_local_fields(cls, fields, option):
from django.db import models
# In order to avoid hitting the relation tree prematurely, we use our
# own fields_map instead of using get_field()
forward_fields_map = {}
for field in cls._meta._get_fields(reverse=False):
forward_fields_map[field.name] = field
if hasattr(field, "attname"):
forward_fields_map[field.attname] = field
errors = []
for field_name in fields:
try:
field = forward_fields_map[field_name]
except KeyError:
errors.append(
checks.Error(
"'%s' refers to the nonexistent field '%s'."
% (
option,
field_name,
),
obj=cls,
id="models.E012",
)
)
else:
if isinstance(field.remote_field, models.ManyToManyRel):
errors.append(
checks.Error(
"'%s' refers to a ManyToManyField '%s', but "
"ManyToManyFields are not permitted in '%s'."
% (
option,
field_name,
option,
),
obj=cls,
id="models.E013",
)
)
elif field not in cls._meta.local_fields:
errors.append(
checks.Error(
"'%s' refers to field '%s' which is not local to model "
"'%s'." % (option, field_name, cls._meta.object_name),
hint="This issue may be caused by multi-table inheritance.",
obj=cls,
id="models.E016",
)
)
return errors
@classmethod
def _check_ordering(cls):
"""
Check "ordering" option -- is it a list of strings and do all fields
exist?
"""
if cls._meta._ordering_clash:
return [
checks.Error(
"'ordering' and 'order_with_respect_to' cannot be used together.",
obj=cls,
id="models.E021",
),
]
if cls._meta.order_with_respect_to or not cls._meta.ordering:
return []
if not isinstance(cls._meta.ordering, (list, tuple)):
return [
checks.Error(
"'ordering' must be a tuple or list (even if you want to order by "
"only one field).",
obj=cls,
id="models.E014",
)
]
errors = []
fields = cls._meta.ordering
# Skip expressions and '?' fields.
fields = (f for f in fields if isinstance(f, str) and f != "?")
# Convert "-field" to "field".
fields = (f.removeprefix("-") for f in fields)
# Separate related fields and non-related fields.
_fields = []
related_fields = []
for f in fields:
if LOOKUP_SEP in f:
related_fields.append(f)
else:
_fields.append(f)
fields = _fields
# Check related fields.
for field in related_fields:
_cls = cls
fld = None
for part in field.split(LOOKUP_SEP):
try:
# pk is an alias that won't be found by opts.get_field.
if part == "pk":
fld = _cls._meta.pk
else:
fld = _cls._meta.get_field(part)
if fld.is_relation:
_cls = fld.path_infos[-1].to_opts.model
else:
_cls = None
except (FieldDoesNotExist, AttributeError):
if fld is None or (
fld.get_transform(part) is None and fld.get_lookup(part) is None
):
errors.append(
checks.Error(
"'ordering' refers to the nonexistent field, "
"related field, or lookup '%s'." % field,
obj=cls,
id="models.E015",
)
)
# Skip ordering on pk. This is always a valid order_by field
# but is an alias and therefore won't be found by opts.get_field.
fields = {f for f in fields if f != "pk"}
# Check for invalid or nonexistent fields in ordering.
invalid_fields = []
        # Any field name that is not present in valid_fields does not exist.
# Also, ordering by m2m fields is not allowed.
opts = cls._meta
valid_fields = set(
chain.from_iterable(
(f.name, f.attname)
if not (f.auto_created and not f.concrete)
else (f.field.related_query_name(),)
for f in chain(opts.fields, opts.related_objects)
)
)
invalid_fields.extend(fields - valid_fields)
for invalid_field in invalid_fields:
errors.append(
checks.Error(
"'ordering' refers to the nonexistent field, related "
"field, or lookup '%s'." % invalid_field,
obj=cls,
id="models.E015",
)
)
return errors
@classmethod
def _check_long_column_names(cls, databases):
"""
Check that any auto-generated column names are shorter than the limits
for each database in which the model will be created.
"""
if not databases:
return []
errors = []
allowed_len = None
db_alias = None
# Find the minimum max allowed length among all specified db_aliases.
for db in databases:
# skip databases where the model won't be created
if not router.allow_migrate_model(db, cls):
continue
connection = connections[db]
max_name_length = connection.ops.max_name_length()
if max_name_length is None or connection.features.truncates_names:
continue
else:
if allowed_len is None:
allowed_len = max_name_length
db_alias = db
elif max_name_length < allowed_len:
allowed_len = max_name_length
db_alias = db
if allowed_len is None:
return errors
for f in cls._meta.local_fields:
_, column_name = f.get_attname_column()
# Check if auto-generated name for the field is too long
# for the database.
if (
f.db_column is None
and column_name is not None
and len(column_name) > allowed_len
):
errors.append(
checks.Error(
'Autogenerated column name too long for field "%s". '
'Maximum length is "%s" for database "%s".'
% (column_name, allowed_len, db_alias),
hint="Set the column name manually using 'db_column'.",
obj=cls,
id="models.E018",
)
)
for f in cls._meta.local_many_to_many:
# Skip nonexistent models.
if isinstance(f.remote_field.through, str):
continue
# Check if auto-generated name for the M2M field is too long
# for the database.
for m2m in f.remote_field.through._meta.local_fields:
_, rel_name = m2m.get_attname_column()
if (
m2m.db_column is None
and rel_name is not None
and len(rel_name) > allowed_len
):
errors.append(
checks.Error(
"Autogenerated column name too long for M2M field "
'"%s". Maximum length is "%s" for database "%s".'
% (rel_name, allowed_len, db_alias),
hint=(
"Use 'through' to create a separate model for "
"M2M and then set column_name using 'db_column'."
),
obj=cls,
id="models.E019",
)
)
return errors
@classmethod
def _get_expr_references(cls, expr):
if isinstance(expr, Q):
for child in expr.children:
if isinstance(child, tuple):
lookup, value = child
yield tuple(lookup.split(LOOKUP_SEP))
yield from cls._get_expr_references(value)
else:
yield from cls._get_expr_references(child)
elif isinstance(expr, F):
yield tuple(expr.name.split(LOOKUP_SEP))
elif hasattr(expr, "get_source_expressions"):
for src_expr in expr.get_source_expressions():
yield from cls._get_expr_references(src_expr)
@classmethod
def _check_constraints(cls, databases):
errors = []
for db in databases:
if not router.allow_migrate_model(db, cls):
continue
connection = connections[db]
if not (
connection.features.supports_table_check_constraints
or "supports_table_check_constraints" in cls._meta.required_db_features
) and any(
isinstance(constraint, CheckConstraint)
for constraint in cls._meta.constraints
):
errors.append(
checks.Warning(
"%s does not support check constraints."
% connection.display_name,
hint=(
"A constraint won't be created. Silence this "
"warning if you don't care about it."
),
obj=cls,
id="models.W027",
)
)
if not (
connection.features.supports_partial_indexes
or "supports_partial_indexes" in cls._meta.required_db_features
) and any(
isinstance(constraint, UniqueConstraint)
and constraint.condition is not None
for constraint in cls._meta.constraints
):
errors.append(
checks.Warning(
"%s does not support unique constraints with "
"conditions." % connection.display_name,
hint=(
"A constraint won't be created. Silence this "
"warning if you don't care about it."
),
obj=cls,
id="models.W036",
)
)
if not (
connection.features.supports_deferrable_unique_constraints
or "supports_deferrable_unique_constraints"
in cls._meta.required_db_features
) and any(
isinstance(constraint, UniqueConstraint)
and constraint.deferrable is not None
for constraint in cls._meta.constraints
):
errors.append(
checks.Warning(
"%s does not support deferrable unique constraints."
% connection.display_name,
hint=(
"A constraint won't be created. Silence this "
"warning if you don't care about it."
),
obj=cls,
id="models.W038",
)
)
if not (
connection.features.supports_covering_indexes
or "supports_covering_indexes" in cls._meta.required_db_features
) and any(
isinstance(constraint, UniqueConstraint) and constraint.include
for constraint in cls._meta.constraints
):
errors.append(
checks.Warning(
"%s does not support unique constraints with non-key "
"columns." % connection.display_name,
hint=(
"A constraint won't be created. Silence this "
"warning if you don't care about it."
),
obj=cls,
id="models.W039",
)
)
if not (
connection.features.supports_expression_indexes
or "supports_expression_indexes" in cls._meta.required_db_features
) and any(
isinstance(constraint, UniqueConstraint)
and constraint.contains_expressions
for constraint in cls._meta.constraints
):
errors.append(
checks.Warning(
"%s does not support unique constraints on "
"expressions." % connection.display_name,
hint=(
"A constraint won't be created. Silence this "
"warning if you don't care about it."
),
obj=cls,
id="models.W044",
)
)
fields = set(
chain.from_iterable(
(*constraint.fields, *constraint.include)
for constraint in cls._meta.constraints
if isinstance(constraint, UniqueConstraint)
)
)
references = set()
for constraint in cls._meta.constraints:
if isinstance(constraint, UniqueConstraint):
if (
connection.features.supports_partial_indexes
or "supports_partial_indexes"
not in cls._meta.required_db_features
) and isinstance(constraint.condition, Q):
references.update(
cls._get_expr_references(constraint.condition)
)
if (
connection.features.supports_expression_indexes
or "supports_expression_indexes"
not in cls._meta.required_db_features
) and constraint.contains_expressions:
for expression in constraint.expressions:
references.update(cls._get_expr_references(expression))
elif isinstance(constraint, CheckConstraint):
if (
connection.features.supports_table_check_constraints
or "supports_table_check_constraints"
not in cls._meta.required_db_features
):
if isinstance(constraint.check, Q):
references.update(
cls._get_expr_references(constraint.check)
)
if any(
isinstance(expr, RawSQL)
for expr in constraint.check.flatten()
):
errors.append(
checks.Warning(
f"Check constraint {constraint.name!r} contains "
f"RawSQL() expression and won't be validated "
f"during the model full_clean().",
hint=(
"Silence this warning if you don't care about "
"it."
),
obj=cls,
id="models.W045",
),
)
for field_name, *lookups in references:
# pk is an alias that won't be found by opts.get_field.
if field_name != "pk":
fields.add(field_name)
if not lookups:
# If it has no lookups it cannot result in a JOIN.
continue
try:
if field_name == "pk":
field = cls._meta.pk
else:
field = cls._meta.get_field(field_name)
if not field.is_relation or field.many_to_many or field.one_to_many:
continue
except FieldDoesNotExist:
continue
# JOIN must happen at the first lookup.
first_lookup = lookups[0]
if (
hasattr(field, "get_transform")
and hasattr(field, "get_lookup")
and field.get_transform(first_lookup) is None
and field.get_lookup(first_lookup) is None
):
errors.append(
checks.Error(
"'constraints' refers to the joined field '%s'."
% LOOKUP_SEP.join([field_name] + lookups),
obj=cls,
id="models.E041",
)
)
errors.extend(cls._check_local_fields(fields, "constraints"))
return errors
############################################
# HELPER FUNCTIONS (CURRIED MODEL METHODS) #
############################################
# ORDERING METHODS #########################
def method_set_order(self, ordered_obj, id_list, using=None):
order_wrt = ordered_obj._meta.order_with_respect_to
filter_args = order_wrt.get_forward_related_filter(self)
ordered_obj.objects.db_manager(using).filter(**filter_args).bulk_update(
[ordered_obj(pk=pk, _order=order) for order, pk in enumerate(id_list)],
["_order"],
)
def method_get_order(self, ordered_obj):
order_wrt = ordered_obj._meta.order_with_respect_to
filter_args = order_wrt.get_forward_related_filter(self)
pk_name = ordered_obj._meta.pk.name
return ordered_obj.objects.filter(**filter_args).values_list(pk_name, flat=True)
def make_foreign_order_accessors(model, related_model):
setattr(
related_model,
"get_%s_order" % model.__name__.lower(),
partialmethod(method_get_order, model),
)
setattr(
related_model,
"set_%s_order" % model.__name__.lower(),
partialmethod(method_set_order, model),
)
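# Usage sketch: with ``Meta.order_with_respect_to = "question"`` on a
# hypothetical ``Answer`` model, related ``Question`` instances gain the
# accessors wired up above:
#
#   question.get_answer_order()           # [answer_pk, answer_pk, ...]
#   question.set_answer_order([3, 1, 2])  # rewrites the _order column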
########
# MISC #
########
def model_unpickle(model_id):
"""Used to unpickle Model subclasses with deferred fields."""
if isinstance(model_id, tuple):
model = apps.get_model(*model_id)
else:
# Backwards compat - the model was cached directly in earlier versions.
model = model_id
return model.__new__(model)
model_unpickle.__safe_for_unpickle__ = True