hash | content
---|---|
a0d219e1eb26a31c01cc4cf9399fa98d444024c05c4d855373f6eeebcdad0042 | from decimal import Decimal
from django.contrib.gis.db.models.fields import BaseSpatialField, GeometryField
from django.contrib.gis.db.models.sql import AreaField, DistanceField
from django.contrib.gis.geos import GEOSGeometry
from django.core.exceptions import FieldError
from django.db.models import (
BooleanField, FloatField, IntegerField, TextField, Transform,
)
from django.db.models.expressions import Func, Value
from django.db.models.functions import Cast
from django.db.utils import NotSupportedError
from django.utils.functional import cached_property
NUMERIC_TYPES = (int, float, Decimal)
class GeoFuncMixin:
function = None
geom_param_pos = (0,)
def __init__(self, *expressions, **extra):
super().__init__(*expressions, **extra)
# Ensure that value expressions are geometric.
for pos in self.geom_param_pos:
expr = self.source_expressions[pos]
if not isinstance(expr, Value):
continue
try:
output_field = expr.output_field
except FieldError:
output_field = None
geom = expr.value
if not isinstance(geom, GEOSGeometry) or output_field and not isinstance(output_field, GeometryField):
raise TypeError("%s function requires a geometric argument in position %d." % (self.name, pos + 1))
if not geom.srid and not output_field:
raise ValueError("SRID is required for all geometries.")
if not output_field:
self.source_expressions[pos] = Value(geom, output_field=GeometryField(srid=geom.srid))
@property
def name(self):
return self.__class__.__name__
@cached_property
def geo_field(self):
return self.source_expressions[self.geom_param_pos[0]].field
def as_sql(self, compiler, connection, function=None, **extra_context):
if not self.function and not function:
function = connection.ops.spatial_function_name(self.name)
return super().as_sql(compiler, connection, function=function, **extra_context)
def resolve_expression(self, *args, **kwargs):
res = super().resolve_expression(*args, **kwargs)
# Ensure that expressions are geometric.
source_fields = res.get_source_fields()
for pos in self.geom_param_pos:
field = source_fields[pos]
if not isinstance(field, GeometryField):
raise TypeError(
"%s function requires a GeometryField in position %s, got %s." % (
self.name, pos + 1, type(field).__name__,
)
)
base_srid = res.geo_field.srid
for pos in self.geom_param_pos[1:]:
expr = res.source_expressions[pos]
expr_srid = expr.output_field.srid
if expr_srid != base_srid:
# Automatic SRID conversion so objects are comparable.
res.source_expressions[pos] = Transform(expr, base_srid).resolve_expression(*args, **kwargs)
return res
def _handle_param(self, value, param_name='', check_types=None):
if not hasattr(value, 'resolve_expression'):
if check_types and not isinstance(value, check_types):
raise TypeError(
"The %s parameter has the wrong type: should be %s." % (
param_name, check_types)
)
return value
class GeoFunc(GeoFuncMixin, Func):
pass
class GeomOutputGeoFunc(GeoFunc):
@cached_property
def output_field(self):
return GeometryField(srid=self.geo_field.srid)
class SQLiteDecimalToFloatMixin:
"""
By default, Decimal values are converted to str by the SQLite backend, which
is not accepted by the GIS functions expecting numeric values.
"""
def as_sqlite(self, compiler, connection, **extra_context):
for expr in self.get_source_expressions():
if hasattr(expr, 'value') and isinstance(expr.value, Decimal):
expr.value = float(expr.value)
return super().as_sql(compiler, connection, **extra_context)
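# Illustrative usage sketch, not part of the original module: a function that
# mixes this in, e.g. Scale('poly', Decimal('2'), Decimal('3')), has its
# Decimal parameters (wrapped in Value() by Func) coerced to float before the
# SQL is compiled, so SpatiaLite receives numeric literals instead of strings.
# The 'poly' field name is hypothetical.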
class OracleToleranceMixin:
tolerance = 0.05
def as_oracle(self, compiler, connection, **extra_context):
tol = self.extra.get('tolerance', self.tolerance)
return self.as_sql(
compiler, connection,
template="%%(function)s(%%(expressions)s, %s)" % tol,
**extra_context
)
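# Illustrative usage sketch, not part of the original module: on Oracle the
# tolerance can be overridden per call via the extra keyword arguments that
# Func stores in self.extra, e.g. Area('poly', tolerance=0.0005); the value is
# read back by as_oracle() above. The 'poly' field name and tolerance value
# are hypothetical.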
class Area(OracleToleranceMixin, GeoFunc):
arity = 1
@cached_property
def output_field(self):
return AreaField(self.geo_field)
def as_sql(self, compiler, connection, **extra_context):
if not connection.features.supports_area_geodetic and self.geo_field.geodetic(connection):
raise NotSupportedError('Area on geodetic coordinate systems not supported.')
return super().as_sql(compiler, connection, **extra_context)
def as_sqlite(self, compiler, connection, **extra_context):
if self.geo_field.geodetic(connection):
extra_context['template'] = '%(function)s(%(expressions)s, %(spheroid)d)'
extra_context['spheroid'] = True
return self.as_sql(compiler, connection, **extra_context)
class Azimuth(GeoFunc):
output_field = FloatField()
arity = 2
geom_param_pos = (0, 1)
class AsGeoJSON(GeoFunc):
output_field = TextField()
def __init__(self, expression, bbox=False, crs=False, precision=8, **extra):
expressions = [expression]
if precision is not None:
expressions.append(self._handle_param(precision, 'precision', int))
options = 0
if crs and bbox:
options = 3
elif bbox:
options = 1
elif crs:
options = 2
if options:
expressions.append(options)
super().__init__(*expressions, **extra)
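# Illustrative usage sketch, not part of the original module:
# AsGeoJSON('poly', crs=True) appends options=2 and
# AsGeoJSON('poly', bbox=True, crs=True) appends options=3, matching the
# option bitmask of backends such as PostGIS (1 = bbox, 2 = short CRS).
# The 'poly' field name is hypothetical.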
class AsGML(GeoFunc):
geom_param_pos = (1,)
output_field = TextField()
def __init__(self, expression, version=2, precision=8, **extra):
expressions = [version, expression]
if precision is not None:
expressions.append(self._handle_param(precision, 'precision', int))
super().__init__(*expressions, **extra)
def as_oracle(self, compiler, connection, **extra_context):
source_expressions = self.get_source_expressions()
version = source_expressions[0]
clone = self.copy()
clone.set_source_expressions([source_expressions[1]])
extra_context['function'] = 'SDO_UTIL.TO_GML311GEOMETRY' if version.value == 3 else 'SDO_UTIL.TO_GMLGEOMETRY'
return super(AsGML, clone).as_sql(compiler, connection, **extra_context)
class AsKML(AsGML):
def as_sqlite(self, compiler, connection, **extra_context):
# No version parameter
clone = self.copy()
clone.set_source_expressions(self.get_source_expressions()[1:])
return clone.as_sql(compiler, connection, **extra_context)
class AsSVG(GeoFunc):
output_field = TextField()
def __init__(self, expression, relative=False, precision=8, **extra):
relative = relative if hasattr(relative, 'resolve_expression') else int(relative)
expressions = [
expression,
relative,
self._handle_param(precision, 'precision', int),
]
super().__init__(*expressions, **extra)
class BoundingCircle(OracleToleranceMixin, GeoFunc):
def __init__(self, expression, num_seg=48, **extra):
super().__init__(expression, num_seg, **extra)
def as_oracle(self, compiler, connection, **extra_context):
clone = self.copy()
clone.set_source_expressions([self.get_source_expressions()[0]])
return super(BoundingCircle, clone).as_oracle(compiler, connection, **extra_context)
class Centroid(OracleToleranceMixin, GeomOutputGeoFunc):
arity = 1
class Difference(OracleToleranceMixin, GeomOutputGeoFunc):
arity = 2
geom_param_pos = (0, 1)
class DistanceResultMixin:
@cached_property
def output_field(self):
return DistanceField(self.geo_field)
def source_is_geography(self):
return self.geo_field.geography and self.geo_field.srid == 4326
class Distance(DistanceResultMixin, OracleToleranceMixin, GeoFunc):
geom_param_pos = (0, 1)
spheroid = None
def __init__(self, expr1, expr2, spheroid=None, **extra):
expressions = [expr1, expr2]
if spheroid is not None:
self.spheroid = self._handle_param(spheroid, 'spheroid', bool)
super().__init__(*expressions, **extra)
def as_postgresql(self, compiler, connection, **extra_context):
clone = self.copy()
function = None
expr2 = clone.source_expressions[1]
geography = self.source_is_geography()
if expr2.output_field.geography != geography:
if isinstance(expr2, Value):
expr2.output_field.geography = geography
else:
clone.source_expressions[1] = Cast(
expr2,
GeometryField(srid=expr2.output_field.srid, geography=geography),
)
if not geography and self.geo_field.geodetic(connection):
# Geometry fields with geodetic (lon/lat) coordinates need special distance functions
if self.spheroid:
# DistanceSpheroid is more accurate and resource-intensive than DistanceSphere
function = connection.ops.spatial_function_name('DistanceSpheroid')
# Replace boolean param by the real spheroid of the base field
clone.source_expressions.append(Value(self.geo_field.spheroid(connection)))
else:
function = connection.ops.spatial_function_name('DistanceSphere')
return super(Distance, clone).as_sql(compiler, connection, function=function, **extra_context)
def as_sqlite(self, compiler, connection, **extra_context):
if self.geo_field.geodetic(connection):
# SpatiaLite returns NULL instead of zero on geodetic coordinates
extra_context['template'] = 'COALESCE(%(function)s(%(expressions)s, %(spheroid)s), 0)'
extra_context['spheroid'] = int(bool(self.spheroid))
return super().as_sql(compiler, connection, **extra_context)
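# Illustrative usage sketch, not part of the original module: on PostGIS, for
# a geodetic (lon/lat) geometry field, Distance('point', pnt) compiles to
# ST_DistanceSphere, while Distance('point', pnt, spheroid=True) compiles to
# ST_DistanceSpheroid with the field's spheroid string appended as an extra
# argument. The 'point' field name and pnt value are hypothetical.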
class Envelope(GeomOutputGeoFunc):
arity = 1
class ForcePolygonCW(GeomOutputGeoFunc):
arity = 1
class GeoHash(GeoFunc):
output_field = TextField()
def __init__(self, expression, precision=None, **extra):
expressions = [expression]
if precision is not None:
expressions.append(self._handle_param(precision, 'precision', int))
super().__init__(*expressions, **extra)
def as_mysql(self, compiler, connection, **extra_context):
clone = self.copy()
# If no precision is provided, set it to the maximum.
if len(clone.source_expressions) < 2:
clone.source_expressions.append(Value(100))
return clone.as_sql(compiler, connection, **extra_context)
class Intersection(OracleToleranceMixin, GeomOutputGeoFunc):
arity = 2
geom_param_pos = (0, 1)
@BaseSpatialField.register_lookup
class IsValid(OracleToleranceMixin, GeoFuncMixin, Transform):
lookup_name = 'isvalid'
output_field = BooleanField()
def as_oracle(self, compiler, connection, **extra_context):
sql, params = super().as_oracle(compiler, connection, **extra_context)
return "CASE %s WHEN 'TRUE' THEN 1 ELSE 0 END" % sql, params
class Length(DistanceResultMixin, OracleToleranceMixin, GeoFunc):
def __init__(self, expr1, spheroid=True, **extra):
self.spheroid = spheroid
super().__init__(expr1, **extra)
def as_sql(self, compiler, connection, **extra_context):
if self.geo_field.geodetic(connection) and not connection.features.supports_length_geodetic:
raise NotSupportedError("This backend doesn't support Length on geodetic fields")
return super().as_sql(compiler, connection, **extra_context)
def as_postgresql(self, compiler, connection, **extra_context):
clone = self.copy()
function = None
if self.source_is_geography():
clone.source_expressions.append(Value(self.spheroid))
elif self.geo_field.geodetic(connection):
# Geometry fields with geodetic (lon/lat) coordinates need length_spheroid
function = connection.ops.spatial_function_name('LengthSpheroid')
clone.source_expressions.append(Value(self.geo_field.spheroid(connection)))
else:
dim = min(f.dim for f in self.get_source_fields() if f)
if dim > 2:
function = connection.ops.length3d
return super(Length, clone).as_sql(compiler, connection, function=function, **extra_context)
def as_sqlite(self, compiler, connection, **extra_context):
function = None
if self.geo_field.geodetic(connection):
function = 'GeodesicLength' if self.spheroid else 'GreatCircleLength'
return super().as_sql(compiler, connection, function=function, **extra_context)
class LineLocatePoint(GeoFunc):
output_field = FloatField()
arity = 2
geom_param_pos = (0, 1)
class MakeValid(GeoFunc):
pass
class MemSize(GeoFunc):
output_field = IntegerField()
arity = 1
class NumGeometries(GeoFunc):
output_field = IntegerField()
arity = 1
class NumPoints(GeoFunc):
output_field = IntegerField()
arity = 1
class Perimeter(DistanceResultMixin, OracleToleranceMixin, GeoFunc):
arity = 1
def as_postgresql(self, compiler, connection, **extra_context):
function = None
if self.geo_field.geodetic(connection) and not self.source_is_geography():
raise NotSupportedError("ST_Perimeter cannot use a non-projected non-geography field.")
dim = min(f.dim for f in self.get_source_fields())
if dim > 2:
function = connection.ops.perimeter3d
return super().as_sql(compiler, connection, function=function, **extra_context)
def as_sqlite(self, compiler, connection, **extra_context):
if self.geo_field.geodetic(connection):
raise NotSupportedError("Perimeter cannot use a non-projected field.")
return super().as_sql(compiler, connection, **extra_context)
class PointOnSurface(OracleToleranceMixin, GeomOutputGeoFunc):
arity = 1
class Reverse(GeoFunc):
arity = 1
class Scale(SQLiteDecimalToFloatMixin, GeomOutputGeoFunc):
def __init__(self, expression, x, y, z=0.0, **extra):
expressions = [
expression,
self._handle_param(x, 'x', NUMERIC_TYPES),
self._handle_param(y, 'y', NUMERIC_TYPES),
]
if z != 0.0:
expressions.append(self._handle_param(z, 'z', NUMERIC_TYPES))
super().__init__(*expressions, **extra)
class SnapToGrid(SQLiteDecimalToFloatMixin, GeomOutputGeoFunc):
def __init__(self, expression, *args, **extra):
nargs = len(args)
expressions = [expression]
if nargs in (1, 2):
expressions.extend(
[self._handle_param(arg, '', NUMERIC_TYPES) for arg in args]
)
elif nargs == 4:
# Reverse origin and size param ordering
expressions += [
*(self._handle_param(arg, '', NUMERIC_TYPES) for arg in args[2:]),
*(self._handle_param(arg, '', NUMERIC_TYPES) for arg in args[0:2]),
]
else:
raise ValueError('Must provide 1, 2, or 4 arguments to `SnapToGrid`.')
super().__init__(*expressions, **extra)
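# Illustrative usage sketch, not part of the original module:
# SnapToGrid('poly', 0.1) snaps to a single grid size,
# SnapToGrid('poly', 0.1, 0.2) uses separate x/y sizes, and the four-argument
# form SnapToGrid('poly', xsize, ysize, xorigin, yorigin) is reordered above
# so the backend receives the origin pair before the size pair.
# The 'poly' field name and grid values are hypothetical.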
class SymDifference(OracleToleranceMixin, GeomOutputGeoFunc):
arity = 2
geom_param_pos = (0, 1)
class Transform(GeomOutputGeoFunc):
def __init__(self, expression, srid, **extra):
expressions = [
expression,
self._handle_param(srid, 'srid', int),
]
if 'output_field' not in extra:
extra['output_field'] = GeometryField(srid=srid)
super().__init__(*expressions, **extra)
class Translate(Scale):
def as_sqlite(self, compiler, connection, **extra_context):
clone = self.copy()
if len(self.source_expressions) < 4:
# Always provide the z parameter for ST_Translate
clone.source_expressions.append(Value(0))
return super(Translate, clone).as_sqlite(compiler, connection, **extra_context)
class Union(OracleToleranceMixin, GeomOutputGeoFunc):
arity = 2
geom_param_pos = (0, 1)
|
a24a634122c2d56477e0895aa1805c6928aee9f97d9162ce14575105ebee2c00 | """
This module contains the spatial lookup types, and the `get_geo_where_clause`
routine for Oracle Spatial.
Please note that WKT support is broken on the XE version, and thus
this backend will not work on such platforms. Specifically, XE lacks
support for an internal JVM, and Java libraries are required to use
the WKT constructors.
"""
import re
from django.contrib.gis.db.backends.base.operations import (
BaseSpatialOperations,
)
from django.contrib.gis.db.backends.oracle.adapter import OracleSpatialAdapter
from django.contrib.gis.db.backends.utils import SpatialOperator
from django.contrib.gis.db.models import aggregates
from django.contrib.gis.geos.geometry import GEOSGeometry, GEOSGeometryBase
from django.contrib.gis.geos.prototypes.io import wkb_r
from django.contrib.gis.measure import Distance
from django.db.backends.oracle.operations import DatabaseOperations
DEFAULT_TOLERANCE = '0.05'
class SDOOperator(SpatialOperator):
sql_template = "%(func)s(%(lhs)s, %(rhs)s) = 'TRUE'"
class SDODWithin(SpatialOperator):
sql_template = "SDO_WITHIN_DISTANCE(%(lhs)s, %(rhs)s, %%s) = 'TRUE'"
class SDODisjoint(SpatialOperator):
sql_template = "SDO_GEOM.RELATE(%%(lhs)s, 'DISJOINT', %%(rhs)s, %s) = 'DISJOINT'" % DEFAULT_TOLERANCE
class SDORelate(SpatialOperator):
sql_template = "SDO_RELATE(%(lhs)s, %(rhs)s, 'mask=%(mask)s') = 'TRUE'"
def check_relate_argument(self, arg):
masks = 'TOUCH|OVERLAPBDYDISJOINT|OVERLAPBDYINTERSECT|EQUAL|INSIDE|COVEREDBY|CONTAINS|COVERS|ANYINTERACT|ON'
mask_regex = re.compile(r'^(%s)(\+(%s))*$' % (masks, masks), re.I)
if not isinstance(arg, str) or not mask_regex.match(arg):
raise ValueError('Invalid SDO_RELATE mask: "%s"' % arg)
def as_sql(self, connection, lookup, template_params, sql_params):
template_params['mask'] = sql_params.pop()
return super().as_sql(connection, lookup, template_params, sql_params)
class OracleOperations(BaseSpatialOperations, DatabaseOperations):
name = 'oracle'
oracle = True
disallowed_aggregates = (aggregates.Collect, aggregates.Extent3D, aggregates.MakeLine)
Adapter = OracleSpatialAdapter
extent = 'SDO_AGGR_MBR'
unionagg = 'SDO_AGGR_UNION'
from_text = 'SDO_GEOMETRY'
function_names = {
'Area': 'SDO_GEOM.SDO_AREA',
'BoundingCircle': 'SDO_GEOM.SDO_MBC',
'Centroid': 'SDO_GEOM.SDO_CENTROID',
'Difference': 'SDO_GEOM.SDO_DIFFERENCE',
'Distance': 'SDO_GEOM.SDO_DISTANCE',
'Envelope': 'SDO_GEOM_MBR',
'Intersection': 'SDO_GEOM.SDO_INTERSECTION',
'IsValid': 'SDO_GEOM.VALIDATE_GEOMETRY_WITH_CONTEXT',
'Length': 'SDO_GEOM.SDO_LENGTH',
'NumGeometries': 'SDO_UTIL.GETNUMELEM',
'NumPoints': 'SDO_UTIL.GETNUMVERTICES',
'Perimeter': 'SDO_GEOM.SDO_LENGTH',
'PointOnSurface': 'SDO_GEOM.SDO_POINTONSURFACE',
'Reverse': 'SDO_UTIL.REVERSE_LINESTRING',
'SymDifference': 'SDO_GEOM.SDO_XOR',
'Transform': 'SDO_CS.TRANSFORM',
'Union': 'SDO_GEOM.SDO_UNION',
}
# We want to get SDO Geometries as WKT because it is much easier to
# instantiate GEOS proxies from WKT than SDO_GEOMETRY(...) strings.
# However, this adversely affects performance (i.e., Java is called
# to convert to WKT on every query). If someone wishes to write a
# SDO_GEOMETRY(...) parser in Python, let me know =)
select = 'SDO_UTIL.TO_WKBGEOMETRY(%s)'
gis_operators = {
'contains': SDOOperator(func='SDO_CONTAINS'),
'coveredby': SDOOperator(func='SDO_COVEREDBY'),
'covers': SDOOperator(func='SDO_COVERS'),
'disjoint': SDODisjoint(),
'intersects': SDOOperator(func='SDO_OVERLAPBDYINTERSECT'), # TODO: Is this really the same as ST_Intersects()?
'equals': SDOOperator(func='SDO_EQUAL'),
'exact': SDOOperator(func='SDO_EQUAL'),
'overlaps': SDOOperator(func='SDO_OVERLAPS'),
'same_as': SDOOperator(func='SDO_EQUAL'),
'relate': SDORelate(), # Oracle uses a different syntax, e.g., 'mask=inside+touch'
'touches': SDOOperator(func='SDO_TOUCH'),
'within': SDOOperator(func='SDO_INSIDE'),
'dwithin': SDODWithin(),
}
unsupported_functions = {
'AsGeoJSON', 'AsKML', 'AsSVG', 'Azimuth', 'ForcePolygonCW', 'GeoHash',
'LineLocatePoint', 'MakeValid', 'MemSize', 'Scale', 'SnapToGrid',
'Translate',
}
def geo_quote_name(self, name):
return super().geo_quote_name(name).upper()
def convert_extent(self, clob):
if clob:
# Generally, Oracle returns a polygon for the extent -- however,
# it can return a single point if there's only one Point in the
# table.
ext_geom = GEOSGeometry(memoryview(clob.read()))
gtype = str(ext_geom.geom_type)
if gtype == 'Polygon':
# Construct the 4-tuple from the coordinates in the polygon.
shell = ext_geom.shell
ll, ur = shell[0][:2], shell[2][:2]
elif gtype == 'Point':
ll = ext_geom.coords[:2]
ur = ll
else:
raise Exception('Unexpected geometry type returned for extent: %s' % gtype)
xmin, ymin = ll
xmax, ymax = ur
return (xmin, ymin, xmax, ymax)
else:
return None
def geo_db_type(self, f):
"""
Return the geometry database type for Oracle. Unlike other spatial
backends, no stored procedure is necessary and it's the same for all
geometry types.
"""
return 'MDSYS.SDO_GEOMETRY'
def get_distance(self, f, value, lookup_type):
"""
Return the distance parameters given the value and the lookup type.
On Oracle, geometry columns with a geodetic coordinate system behave
implicitly like a geography column, and thus meters will be used as
the distance parameter on them.
"""
if not value:
return []
value = value[0]
if isinstance(value, Distance):
if f.geodetic(self.connection):
dist_param = value.m
else:
dist_param = getattr(value, Distance.unit_attname(f.units_name(self.connection)))
else:
dist_param = value
# dwithin lookups on Oracle require a special string parameter
# that starts with "distance=".
if lookup_type == 'dwithin':
dist_param = 'distance=%s' % dist_param
return [dist_param]
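# Illustrative sketch, not part of the original module: for a geodetic field,
# get_distance(f, [Distance(km=1)], 'distance_lte') returns [1000.0] (meters),
# while lookup_type 'dwithin' turns the same value into ['distance=1000.0'],
# the string parameter form used by SDO_WITHIN_DISTANCE. The values are
# hypothetical.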
def get_geom_placeholder(self, f, value, compiler):
if value is None:
return 'NULL'
return super().get_geom_placeholder(f, value, compiler)
def spatial_aggregate_name(self, agg_name):
"""
Return the spatial aggregate SQL name.
"""
agg_name = 'unionagg' if agg_name.lower() == 'union' else agg_name.lower()
return getattr(self, agg_name)
# Routines for getting the OGC-compliant models.
def geometry_columns(self):
from django.contrib.gis.db.backends.oracle.models import OracleGeometryColumns
return OracleGeometryColumns
def spatial_ref_sys(self):
from django.contrib.gis.db.backends.oracle.models import OracleSpatialRefSys
return OracleSpatialRefSys
def modify_insert_params(self, placeholder, params):
"""Drop out insert parameters for NULL placeholder. Needed for Oracle Spatial
backend due to #10888.
"""
if placeholder == 'NULL':
return []
return super().modify_insert_params(placeholder, params)
def get_geometry_converter(self, expression):
read = wkb_r().read
srid = expression.output_field.srid
if srid == -1:
srid = None
geom_class = expression.output_field.geom_class
def converter(value, expression, connection):
if value is not None:
geom = GEOSGeometryBase(read(memoryview(value.read())), geom_class)
if srid:
geom.srid = srid
return geom
return converter
def get_area_att_for_field(self, field):
return 'sq_m'
|
873f61fc3e410f7f4cd7eb86be23884ab3dfb021fb10b24bd4c60cd078911324 | import re
from django.contrib.gis.db.models import aggregates
class BaseSpatialFeatures:
gis_enabled = True
# Does the database contain a SpatialRefSys model to store SRID information?
has_spatialrefsys_table = True
# Does the backend support the django.contrib.gis.utils.add_srs_entry() utility?
supports_add_srs_entry = True
# Does the backend introspect GeometryField to its subtypes?
supports_geometry_field_introspection = True
# Does the backend support storing 3D geometries?
supports_3d_storage = False
# Reference implementation of 3D functions is:
# https://postgis.net/docs/PostGIS_Special_Functions_Index.html#PostGIS_3D_Functions
supports_3d_functions = False
# Does the database support SRID transform operations?
supports_transform = True
# Do geometric relationship operations operate on real shapes (or only on bounding boxes)?
supports_real_shape_operations = True
# Can geometry fields be null?
supports_null_geometries = True
# Are empty geometries supported?
supports_empty_geometries = False
# Can the function be applied on geodetic coordinate systems?
supports_distance_geodetic = True
supports_length_geodetic = True
supports_perimeter_geodetic = False
supports_area_geodetic = True
# Is the database able to count vertices on polygons (with `num_points`)?
supports_num_points_poly = True
# The following properties indicate whether the database backend supports
# certain lookups (dwithin, left and right, relate, ...)
supports_left_right_lookups = False
# Does the database have raster support?
supports_raster = False
# Does the database support a unique index on geometry fields?
supports_geometry_field_unique_index = True
@property
def supports_bbcontains_lookup(self):
return 'bbcontains' in self.connection.ops.gis_operators
@property
def supports_contained_lookup(self):
return 'contained' in self.connection.ops.gis_operators
@property
def supports_crosses_lookup(self):
return 'crosses' in self.connection.ops.gis_operators
@property
def supports_distances_lookups(self):
return self.has_Distance_function
@property
def supports_dwithin_lookup(self):
return 'dwithin' in self.connection.ops.gis_operators
@property
def supports_relate_lookup(self):
return 'relate' in self.connection.ops.gis_operators
@property
def supports_isvalid_lookup(self):
return self.has_IsValid_function
# Is the aggregate supported by the database?
@property
def supports_collect_aggr(self):
return aggregates.Collect not in self.connection.ops.disallowed_aggregates
@property
def supports_extent_aggr(self):
return aggregates.Extent not in self.connection.ops.disallowed_aggregates
@property
def supports_make_line_aggr(self):
return aggregates.MakeLine not in self.connection.ops.disallowed_aggregates
@property
def supports_union_aggr(self):
return aggregates.Union not in self.connection.ops.disallowed_aggregates
def __getattr__(self, name):
m = re.match(r'has_(\w*)_function$', name)
if m:
func_name = m.group(1)
return func_name not in self.connection.ops.unsupported_functions
raise AttributeError
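# Illustrative sketch, not part of the original module: the __getattr__ hook
# above turns names like has_MakeValid_function into dynamic feature flags,
# e.g. connection.features.has_MakeValid_function is True exactly when
# 'MakeValid' is absent from connection.ops.unsupported_functions.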
|
aeaa4c0514685dc62812427abee308dcdab27fc57b9c883f93d79fb0cfd2b299 | from django.contrib.gis.db.models import GeometryField
from django.contrib.gis.db.models.functions import Distance
from django.contrib.gis.measure import (
Area as AreaMeasure, Distance as DistanceMeasure,
)
from django.db.utils import NotSupportedError
from django.utils.functional import cached_property
class BaseSpatialOperations:
# Quick booleans for the type of this spatial backend, and
# an attribute for the spatial database version tuple (if applicable)
postgis = False
spatialite = False
mysql = False
oracle = False
spatial_version = None
# How the geometry column should be selected.
select = '%s'
@cached_property
def select_extent(self):
return self.select
# Does the spatial database have a geometry or geography type?
geography = False
geometry = False
# Aggregates
disallowed_aggregates = ()
geom_func_prefix = ''
# Mapping between Django function names and backend names, when names do not
# match; used in spatial_function_name().
function_names = {}
# Blacklist/set of known unsupported functions of the backend
unsupported_functions = {
'Area', 'AsGeoJSON', 'AsGML', 'AsKML', 'AsSVG', 'Azimuth',
'BoundingCircle', 'Centroid', 'Difference', 'Distance', 'Envelope',
'GeoHash', 'Intersection', 'IsValid', 'Length', 'LineLocatePoint',
'MakeValid', 'MemSize', 'NumGeometries', 'NumPoints', 'Perimeter',
'PointOnSurface', 'Reverse', 'Scale', 'SnapToGrid', 'SymDifference',
'Transform', 'Translate', 'Union',
}
# Constructors
from_text = False
# Default conversion functions for aggregates; will be overridden if implemented
# for the spatial backend.
def convert_extent(self, box, srid):
raise NotImplementedError('Aggregate extent not implemented for this spatial backend.')
def convert_extent3d(self, box, srid):
raise NotImplementedError('Aggregate 3D extent not implemented for this spatial backend.')
# For quoting column values, rather than columns.
def geo_quote_name(self, name):
return "'%s'" % name
# GeometryField operations
def geo_db_type(self, f):
"""
Return the database column type for the geometry field on
the spatial backend.
"""
raise NotImplementedError('subclasses of BaseSpatialOperations must provide a geo_db_type() method')
def get_distance(self, f, value, lookup_type):
"""
Return the distance parameters for the given geometry field,
lookup value, and lookup type.
"""
raise NotImplementedError('Distance operations not available on this spatial backend.')
def get_geom_placeholder(self, f, value, compiler):
"""
Return the placeholder for the given geometry field with the given
value. Depending on the spatial backend, the placeholder may contain a
stored procedure call to the transformation function of the spatial
backend.
"""
def transform_value(value, field):
return value is not None and value.srid != field.srid
if hasattr(value, 'as_sql'):
return (
'%s(%%s, %s)' % (self.spatial_function_name('Transform'), f.srid)
if transform_value(value.output_field, f)
else '%s'
)
if transform_value(value, f):
# Add Transform() to the SQL placeholder.
return '%s(%s(%%s,%s), %s)' % (
self.spatial_function_name('Transform'),
self.from_text, value.srid, f.srid,
)
elif self.connection.features.has_spatialrefsys_table:
return '%s(%%s,%s)' % (self.from_text, f.srid)
else:
# For backwards compatibility on MySQL (#27464).
return '%s(%%s)' % self.from_text
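# Illustrative sketch, not part of the original module: on a backend like
# SpatiaLite where from_text is 'GeomFromText', a geometry value with SRID
# 4326 saved into a field with SRID 3857 would get the placeholder
# 'Transform(GeomFromText(%s,4326), 3857)', while matching SRIDs collapse to
# 'GeomFromText(%s,3857)'. The SRID values are hypothetical.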
def check_expression_support(self, expression):
if isinstance(expression, self.disallowed_aggregates):
raise NotSupportedError(
"%s spatial aggregation is not supported by this database backend." % expression.name
)
super().check_expression_support(expression)
def spatial_aggregate_name(self, agg_name):
raise NotImplementedError('Aggregate support not implemented for this spatial backend.')
def spatial_function_name(self, func_name):
if func_name in self.unsupported_functions:
raise NotSupportedError("This backend doesn't support the %s function." % func_name)
return self.function_names.get(func_name, self.geom_func_prefix + func_name)
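# Illustrative sketch, not part of the original module: on a backend with
# geom_func_prefix = 'ST_' and function_names = {'NumPoints': 'ST_NPoints'},
# spatial_function_name('NumPoints') returns 'ST_NPoints', while
# spatial_function_name('Centroid') falls back to the prefixed 'ST_Centroid';
# a name listed in unsupported_functions raises NotSupportedError instead.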
# Routines for getting the OGC-compliant models.
def geometry_columns(self):
raise NotImplementedError('Subclasses of BaseSpatialOperations must provide a geometry_columns() method.')
def spatial_ref_sys(self):
raise NotImplementedError('subclasses of BaseSpatialOperations must provide a spatial_ref_sys() method')
distance_expr_for_lookup = staticmethod(Distance)
def get_db_converters(self, expression):
converters = super().get_db_converters(expression)
if isinstance(expression.output_field, GeometryField):
converters.append(self.get_geometry_converter(expression))
return converters
def get_geometry_converter(self, expression):
raise NotImplementedError(
'Subclasses of BaseSpatialOperations must provide a '
'get_geometry_converter() method.'
)
def get_area_att_for_field(self, field):
if field.geodetic(self.connection):
if self.connection.features.supports_area_geodetic:
return 'sq_m'
raise NotImplementedError('Area on geodetic coordinate systems not supported.')
else:
units_name = field.units_name(self.connection)
if units_name:
return AreaMeasure.unit_attname(units_name)
def get_distance_att_for_field(self, field):
dist_att = None
if field.geodetic(self.connection):
if self.connection.features.supports_distance_geodetic:
dist_att = 'm'
else:
units = field.units_name(self.connection)
if units:
dist_att = DistanceMeasure.unit_attname(units)
return dist_att
|
de3b725beece59058220082bb748ee9aad9c78846e157971758b471d84a4abdc | from django.contrib.gis.db.backends.base.features import BaseSpatialFeatures
from django.db.backends.mysql.features import (
DatabaseFeatures as MySQLDatabaseFeatures,
)
from django.utils.functional import cached_property
class DatabaseFeatures(BaseSpatialFeatures, MySQLDatabaseFeatures):
has_spatialrefsys_table = False
supports_add_srs_entry = False
supports_distance_geodetic = False
supports_length_geodetic = False
supports_area_geodetic = False
supports_transform = False
supports_real_shape_operations = False
supports_null_geometries = False
supports_num_points_poly = False
@cached_property
def supports_empty_geometry_collection(self):
return self.connection.mysql_version >= (5, 7, 5)
@cached_property
def supports_geometry_field_unique_index(self):
# Not supported in MySQL since https://dev.mysql.com/worklog/task/?id=11808
return self.connection.mysql_is_mariadb
|
896360b7da69b667f47d259318a73827bc5d9a98ef8d32df342b606501667fc1 | from MySQLdb.constants import FIELD_TYPE
from django.contrib.gis.gdal import OGRGeomType
from django.db.backends.mysql.introspection import DatabaseIntrospection
class MySQLIntrospection(DatabaseIntrospection):
# Updating the data_types_reverse dictionary with the appropriate
# type for Geometry fields.
data_types_reverse = DatabaseIntrospection.data_types_reverse.copy()
data_types_reverse[FIELD_TYPE.GEOMETRY] = 'GeometryField'
def get_geometry_type(self, table_name, geo_col):
with self.connection.cursor() as cursor:
# In order to get the specific geometry type of the field,
# we introspect on the table definition using `DESCRIBE`.
cursor.execute('DESCRIBE %s' %
self.connection.ops.quote_name(table_name))
# Iterate over the description info until we get to the geometry
# column.
for column, typ, null, key, default, extra in cursor.fetchall():
if column == geo_col:
# Using OGRGeomType to convert from OGC name to Django field.
# MySQL does not support 3D or SRIDs, so the field params
# are empty.
field_type = OGRGeomType(typ).django
field_params = {}
break
return field_type, field_params
def supports_spatial_index(self, cursor, table_name):
# Supported with MyISAM/Aria, or InnoDB on MySQL 5.7.5+/MariaDB 10.2.2+
storage_engine = self.get_storage_engine(cursor, table_name)
if storage_engine == 'InnoDB':
return self.connection.mysql_version >= (
(10, 2, 2) if self.connection.mysql_is_mariadb else (5, 7, 5)
)
return storage_engine in ('MyISAM', 'Aria')
|
d9e2c5c2f418760f2e87ed5bf7e7700a170c7320287ba005ec68b9bb289fc556 | from django.contrib.gis.db.backends.base.adapter import WKTAdapter
from django.contrib.gis.db.backends.base.operations import (
BaseSpatialOperations,
)
from django.contrib.gis.db.backends.utils import SpatialOperator
from django.contrib.gis.db.models import aggregates
from django.contrib.gis.geos.geometry import GEOSGeometryBase
from django.contrib.gis.geos.prototypes.io import wkb_r
from django.contrib.gis.measure import Distance
from django.db.backends.mysql.operations import DatabaseOperations
from django.utils.functional import cached_property
class MySQLOperations(BaseSpatialOperations, DatabaseOperations):
mysql = True
name = 'mysql'
geom_func_prefix = 'ST_'
Adapter = WKTAdapter
@cached_property
def select(self):
return self.geom_func_prefix + 'AsBinary(%s)'
@cached_property
def from_text(self):
return self.geom_func_prefix + 'GeomFromText'
@cached_property
def gis_operators(self):
MBREquals = 'MBREqual' if (
self.connection.mysql_is_mariadb or self.connection.mysql_version < (5, 7, 6)
) else 'MBREquals'
return {
'bbcontains': SpatialOperator(func='MBRContains'), # For consistency w/PostGIS API
'bboverlaps': SpatialOperator(func='MBROverlaps'), # ...
'contained': SpatialOperator(func='MBRWithin'), # ...
'contains': SpatialOperator(func='MBRContains'),
'disjoint': SpatialOperator(func='MBRDisjoint'),
'equals': SpatialOperator(func=MBREquals),
'exact': SpatialOperator(func=MBREquals),
'intersects': SpatialOperator(func='MBRIntersects'),
'overlaps': SpatialOperator(func='MBROverlaps'),
'same_as': SpatialOperator(func=MBREquals),
'touches': SpatialOperator(func='MBRTouches'),
'within': SpatialOperator(func='MBRWithin'),
}
disallowed_aggregates = (
aggregates.Collect, aggregates.Extent, aggregates.Extent3D,
aggregates.MakeLine, aggregates.Union,
)
@cached_property
def unsupported_functions(self):
unsupported = {
'AsGML', 'AsKML', 'AsSVG', 'Azimuth', 'BoundingCircle',
'ForcePolygonCW', 'LineLocatePoint', 'MakeValid', 'MemSize',
'Perimeter', 'PointOnSurface', 'Reverse', 'Scale', 'SnapToGrid',
'Transform', 'Translate',
}
if self.connection.mysql_is_mariadb:
unsupported.update({'GeoHash', 'IsValid'})
if self.connection.mysql_version < (10, 2, 4):
unsupported.add('AsGeoJSON')
elif self.connection.mysql_version < (5, 7, 5):
unsupported.update({'AsGeoJSON', 'GeoHash', 'IsValid'})
return unsupported
def geo_db_type(self, f):
return f.geom_type
def get_distance(self, f, value, lookup_type):
value = value[0]
if isinstance(value, Distance):
if f.geodetic(self.connection):
raise ValueError(
'Only numeric values of degree units are allowed on '
'geodetic distance queries.'
)
dist_param = getattr(value, Distance.unit_attname(f.units_name(self.connection)))
else:
dist_param = value
return [dist_param]
def get_geometry_converter(self, expression):
read = wkb_r().read
srid = expression.output_field.srid
if srid == -1:
srid = None
geom_class = expression.output_field.geom_class
def converter(value, expression, connection):
if value is not None:
geom = GEOSGeometryBase(read(memoryview(value)), geom_class)
if srid:
geom.srid = srid
return geom
return converter
|
0eda5113a6219b78915805356727dd518964115d73a5029ee50c3315d8d150ca | """
SQL functions reference lists:
https://www.gaia-gis.it/gaia-sins/spatialite-sql-4.2.1.html
"""
from django.contrib.gis.db.backends.base.operations import (
BaseSpatialOperations,
)
from django.contrib.gis.db.backends.spatialite.adapter import SpatiaLiteAdapter
from django.contrib.gis.db.backends.utils import SpatialOperator
from django.contrib.gis.db.models import aggregates
from django.contrib.gis.geos.geometry import GEOSGeometry, GEOSGeometryBase
from django.contrib.gis.geos.prototypes.io import wkb_r, wkt_r
from django.contrib.gis.measure import Distance
from django.core.exceptions import ImproperlyConfigured
from django.db.backends.sqlite3.operations import DatabaseOperations
from django.utils.functional import cached_property
from django.utils.version import get_version_tuple
class SpatialiteNullCheckOperator(SpatialOperator):
def as_sql(self, connection, lookup, template_params, sql_params):
sql, params = super().as_sql(connection, lookup, template_params, sql_params)
return '%s > 0' % sql, params
class SpatiaLiteOperations(BaseSpatialOperations, DatabaseOperations):
name = 'spatialite'
spatialite = True
Adapter = SpatiaLiteAdapter
collect = 'Collect'
extent = 'Extent'
makeline = 'MakeLine'
unionagg = 'GUnion'
from_text = 'GeomFromText'
gis_operators = {
# Binary predicates
'equals': SpatialiteNullCheckOperator(func='Equals'),
'disjoint': SpatialiteNullCheckOperator(func='Disjoint'),
'touches': SpatialiteNullCheckOperator(func='Touches'),
'crosses': SpatialiteNullCheckOperator(func='Crosses'),
'within': SpatialiteNullCheckOperator(func='Within'),
'overlaps': SpatialiteNullCheckOperator(func='Overlaps'),
'contains': SpatialiteNullCheckOperator(func='Contains'),
'intersects': SpatialiteNullCheckOperator(func='Intersects'),
'relate': SpatialiteNullCheckOperator(func='Relate'),
'coveredby': SpatialiteNullCheckOperator(func='CoveredBy'),
'covers': SpatialiteNullCheckOperator(func='Covers'),
# Returns true if B's bounding box completely contains A's bounding box.
'contained': SpatialOperator(func='MbrWithin'),
# Returns true if A's bounding box completely contains B's bounding box.
'bbcontains': SpatialOperator(func='MbrContains'),
# Returns true if A's bounding box overlaps B's bounding box.
'bboverlaps': SpatialOperator(func='MbrOverlaps'),
# These are implemented here as synonyms for Equals
'same_as': SpatialiteNullCheckOperator(func='Equals'),
'exact': SpatialiteNullCheckOperator(func='Equals'),
# Distance predicates
'dwithin': SpatialOperator(func='PtDistWithin'),
}
disallowed_aggregates = (aggregates.Extent3D,)
@cached_property
def select(self):
return 'CAST (AsEWKB(%s) AS BLOB)' if self.spatial_version >= (4, 3, 0) else 'AsText(%s)'
function_names = {
'ForcePolygonCW': 'ST_ForceLHR',
'Length': 'ST_Length',
'LineLocatePoint': 'ST_Line_Locate_Point',
'NumPoints': 'ST_NPoints',
'Reverse': 'ST_Reverse',
'Scale': 'ScaleCoords',
'Translate': 'ST_Translate',
'Union': 'ST_Union',
}
@cached_property
def unsupported_functions(self):
unsupported = {'BoundingCircle', 'MemSize'}
if not self.lwgeom_version():
unsupported |= {'Azimuth', 'GeoHash', 'IsValid', 'MakeValid'}
return unsupported
@cached_property
def spatial_version(self):
"""Determine the version of the SpatiaLite library."""
try:
version = self.spatialite_version_tuple()[1:]
except Exception as exc:
raise ImproperlyConfigured(
'Cannot determine the SpatiaLite version for the "%s" database. '
'Was the SpatiaLite initialization SQL loaded on this database?' % (
self.connection.settings_dict['NAME'],
)
) from exc
if version < (4, 1, 0):
raise ImproperlyConfigured('GeoDjango only supports SpatiaLite versions 4.1.0 and above.')
return version
def convert_extent(self, box):
"""
Convert the polygon data received from SpatiaLite to min/max values.
"""
if box is None:
return None
shell = GEOSGeometry(box).shell
xmin, ymin = shell[0][:2]
xmax, ymax = shell[2][:2]
return (xmin, ymin, xmax, ymax)
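# Illustrative sketch, not part of the original module: the conversion above
# assumes the extent polygon's exterior ring starts at the lower-left corner,
# so shell[0] and shell[2] are opposite corners; a box spanning (-90, 30) to
# (-85, 40) converts to (-90.0, 30.0, -85.0, 40.0). The coordinates are
# hypothetical.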
def geo_db_type(self, f):
"""
Return None because geometry columns are added via the
`AddGeometryColumn` stored procedure on SpatiaLite.
"""
return None
def get_distance(self, f, value, lookup_type):
"""
Return the distance parameters for the given geometry field,
lookup value, and lookup type.
"""
if not value:
return []
value = value[0]
if isinstance(value, Distance):
if f.geodetic(self.connection):
if lookup_type == 'dwithin':
raise ValueError(
'Only numeric values of degree units are allowed on '
'geographic DWithin queries.'
)
dist_param = value.m
else:
dist_param = getattr(value, Distance.unit_attname(f.units_name(self.connection)))
else:
dist_param = value
return [dist_param]
def _get_spatialite_func(self, func):
"""
Helper routine for calling SpatiaLite functions and returning
their result.
Any error occurring in this method should be handled by the caller.
"""
cursor = self.connection._cursor()
try:
cursor.execute('SELECT %s' % func)
row = cursor.fetchone()
finally:
cursor.close()
return row[0]
def geos_version(self):
"Return the version of GEOS used by SpatiaLite as a string."
return self._get_spatialite_func('geos_version()')
def proj4_version(self):
"Return the version of the PROJ.4 library used by SpatiaLite."
return self._get_spatialite_func('proj4_version()')
def lwgeom_version(self):
"""Return the version of LWGEOM library used by SpatiaLite."""
return self._get_spatialite_func('lwgeom_version()')
def spatialite_version(self):
"Return the SpatiaLite library version as a string."
return self._get_spatialite_func('spatialite_version()')
def spatialite_version_tuple(self):
"""
Return the SpatiaLite version as a tuple (version string, major,
minor, subminor).
"""
version = self.spatialite_version()
return (version,) + get_version_tuple(version)
def spatial_aggregate_name(self, agg_name):
"""
Return the spatial aggregate SQL template and function for the
given Aggregate instance.
"""
agg_name = 'unionagg' if agg_name.lower() == 'union' else agg_name.lower()
return getattr(self, agg_name)
# Routines for getting the OGC-compliant models.
def geometry_columns(self):
from django.contrib.gis.db.backends.spatialite.models import SpatialiteGeometryColumns
return SpatialiteGeometryColumns
def spatial_ref_sys(self):
from django.contrib.gis.db.backends.spatialite.models import SpatialiteSpatialRefSys
return SpatialiteSpatialRefSys
def get_geometry_converter(self, expression):
geom_class = expression.output_field.geom_class
if self.spatial_version >= (4, 3, 0):
read = wkb_r().read
def converter(value, expression, connection):
return None if value is None else GEOSGeometryBase(read(value), geom_class)
else:
read = wkt_r().read
srid = expression.output_field.srid
if srid == -1:
srid = None
def converter(value, expression, connection):
if value is not None:
geom = GEOSGeometryBase(read(value), geom_class)
if srid:
geom.srid = srid
return geom
return converter
|
ff42a1cf2f436a9ede421d8dafb33a14f24909d680c2fbfc0b6bd9ec2f768ba4 | import re
from django.conf import settings
from django.contrib.gis.db.backends.base.operations import (
BaseSpatialOperations,
)
from django.contrib.gis.db.backends.utils import SpatialOperator
from django.contrib.gis.db.models import GeometryField, RasterField
from django.contrib.gis.gdal import GDALRaster
from django.contrib.gis.geos.geometry import GEOSGeometryBase
from django.contrib.gis.geos.prototypes.io import wkb_r
from django.contrib.gis.measure import Distance
from django.core.exceptions import ImproperlyConfigured
from django.db.backends.postgresql.operations import DatabaseOperations
from django.db.models import Func, Value
from django.db.utils import NotSupportedError, ProgrammingError
from django.utils.functional import cached_property
from django.utils.version import get_version_tuple
from .adapter import PostGISAdapter
from .models import PostGISGeometryColumns, PostGISSpatialRefSys
from .pgraster import from_pgraster
# Identifier to mark raster lookups as bilateral.
BILATERAL = 'bilateral'
class PostGISOperator(SpatialOperator):
def __init__(self, geography=False, raster=False, **kwargs):
# Only a subset of the operators and functions are available for the
# geography type.
self.geography = geography
# Only a subset of the operators and functions are available for the
# raster type. Lookups that don't support raster will be converted to
# polygons. If the raster argument is set to BILATERAL, then the
# operator cannot handle mixed geom-raster lookups.
self.raster = raster
super().__init__(**kwargs)
def as_sql(self, connection, lookup, template_params, *args):
if lookup.lhs.output_field.geography and not self.geography:
raise ValueError('PostGIS geography does not support the "%s" '
'function/operator.' % (self.func or self.op,))
template_params = self.check_raster(lookup, template_params)
return super().as_sql(connection, lookup, template_params, *args)
def check_raster(self, lookup, template_params):
spheroid = lookup.rhs_params and lookup.rhs_params[-1] == 'spheroid'
# Check which input is a raster.
lhs_is_raster = lookup.lhs.field.geom_type == 'RASTER'
rhs_is_raster = isinstance(lookup.rhs, GDALRaster)
# Look for band indices and inject them if provided.
if lookup.band_lhs is not None and lhs_is_raster:
if not self.func:
raise ValueError('Band indices are not allowed for this operator, it works on bbox only.')
template_params['lhs'] = '%s, %s' % (template_params['lhs'], lookup.band_lhs)
if lookup.band_rhs is not None and rhs_is_raster:
if not self.func:
raise ValueError('Band indices are not allowed for this operator, it works on bbox only.')
template_params['rhs'] = '%s, %s' % (template_params['rhs'], lookup.band_rhs)
# Convert rasters to polygons if necessary.
if not self.raster or spheroid:
# Operators without raster support.
if lhs_is_raster:
template_params['lhs'] = 'ST_Polygon(%s)' % template_params['lhs']
if rhs_is_raster:
template_params['rhs'] = 'ST_Polygon(%s)' % template_params['rhs']
elif self.raster == BILATERAL:
# Operators with raster support but don't support mixed (rast-geom)
# lookups.
if lhs_is_raster and not rhs_is_raster:
template_params['lhs'] = 'ST_Polygon(%s)' % template_params['lhs']
elif rhs_is_raster and not lhs_is_raster:
template_params['rhs'] = 'ST_Polygon(%s)' % template_params['rhs']
return template_params
class ST_Polygon(Func):
function = 'ST_Polygon'
def __init__(self, expr):
super().__init__(expr)
expr = self.source_expressions[0]
if isinstance(expr, Value) and not expr._output_field_or_none:
self.source_expressions[0] = Value(expr.value, output_field=RasterField(srid=expr.value.srid))
@cached_property
def output_field(self):
return GeometryField(srid=self.source_expressions[0].field.srid)
class PostGISOperations(BaseSpatialOperations, DatabaseOperations):
name = 'postgis'
postgis = True
geography = True
geom_func_prefix = 'ST_'
Adapter = PostGISAdapter
collect = geom_func_prefix + 'Collect'
extent = geom_func_prefix + 'Extent'
extent3d = geom_func_prefix + '3DExtent'
length3d = geom_func_prefix + '3DLength'
makeline = geom_func_prefix + 'MakeLine'
perimeter3d = geom_func_prefix + '3DPerimeter'
unionagg = geom_func_prefix + 'Union'
gis_operators = {
'bbcontains': PostGISOperator(op='~', raster=True),
'bboverlaps': PostGISOperator(op='&&', geography=True, raster=True),
'contained': PostGISOperator(op='@', raster=True),
'overlaps_left': PostGISOperator(op='&<', raster=BILATERAL),
'overlaps_right': PostGISOperator(op='&>', raster=BILATERAL),
'overlaps_below': PostGISOperator(op='&<|'),
'overlaps_above': PostGISOperator(op='|&>'),
'left': PostGISOperator(op='<<'),
'right': PostGISOperator(op='>>'),
'strictly_below': PostGISOperator(op='<<|'),
'strictly_above': PostGISOperator(op='|>>'),
'same_as': PostGISOperator(op='~=', raster=BILATERAL),
'exact': PostGISOperator(op='~=', raster=BILATERAL), # alias of same_as
'contains': PostGISOperator(func='ST_Contains', raster=BILATERAL),
'contains_properly': PostGISOperator(func='ST_ContainsProperly', raster=BILATERAL),
'coveredby': PostGISOperator(func='ST_CoveredBy', geography=True, raster=BILATERAL),
'covers': PostGISOperator(func='ST_Covers', geography=True, raster=BILATERAL),
'crosses': PostGISOperator(func='ST_Crosses'),
'disjoint': PostGISOperator(func='ST_Disjoint', raster=BILATERAL),
'equals': PostGISOperator(func='ST_Equals'),
'intersects': PostGISOperator(func='ST_Intersects', geography=True, raster=BILATERAL),
'overlaps': PostGISOperator(func='ST_Overlaps', raster=BILATERAL),
'relate': PostGISOperator(func='ST_Relate'),
'touches': PostGISOperator(func='ST_Touches', raster=BILATERAL),
'within': PostGISOperator(func='ST_Within', raster=BILATERAL),
'dwithin': PostGISOperator(func='ST_DWithin', geography=True, raster=BILATERAL),
}
unsupported_functions = set()
select = '%s::bytea'
select_extent = None
@cached_property
def function_names(self):
function_names = {
'BoundingCircle': 'ST_MinimumBoundingCircle',
'NumPoints': 'ST_NPoints',
}
if self.spatial_version < (2, 2, 0):
function_names.update({
'DistanceSphere': 'ST_distance_sphere',
'DistanceSpheroid': 'ST_distance_spheroid',
'LengthSpheroid': 'ST_length_spheroid',
'MemSize': 'ST_mem_size',
})
if self.spatial_version < (2, 4, 0):
function_names['ForcePolygonCW'] = 'ST_ForceRHR'
return function_names
@cached_property
def spatial_version(self):
"""Determine the version of the PostGIS library."""
# Trying to get the PostGIS version because the function
# signatures will depend on the version used. The cost
# here is a database query to determine the version, which
# can be mitigated by setting `POSTGIS_VERSION` with a 3-tuple
# comprising user-supplied values for the major, minor, and
# subminor revision of PostGIS.
if hasattr(settings, 'POSTGIS_VERSION'):
version = settings.POSTGIS_VERSION
else:
# Run a basic query to check the status of the connection so we're
# sure we only raise the error below if the problem comes from
# PostGIS and not from PostgreSQL itself (see #24862).
self._get_postgis_func('version')
try:
vtup = self.postgis_version_tuple()
except ProgrammingError:
raise ImproperlyConfigured(
'Cannot determine PostGIS version for database "%s" '
'using command "SELECT postgis_lib_version()". '
'GeoDjango requires at least PostGIS version 2.1. '
'Was the database created from a spatial database '
'template?' % self.connection.settings_dict['NAME']
)
version = vtup[1:]
return version
def convert_extent(self, box):
"""
Return a 4-tuple extent for the `Extent` aggregate by converting
the bounding box text returned by PostGIS (`box` argument), for
example: "BOX(-90.0 30.0, -85.0 40.0)".
"""
if box is None:
return None
ll, ur = box[4:-1].split(',')
xmin, ymin = map(float, ll.split())
xmax, ymax = map(float, ur.split())
return (xmin, ymin, xmax, ymax)
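# Illustrative sketch, not part of the original module: using the docstring's
# example, convert_extent('BOX(-90.0 30.0, -85.0 40.0)') returns
# (-90.0, 30.0, -85.0, 40.0), i.e. (xmin, ymin, xmax, ymax).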
def convert_extent3d(self, box3d):
"""
Return a 6-tuple extent for the `Extent3D` aggregate by converting
the 3d bounding-box text returned by PostGIS (`box3d` argument), for
example: "BOX3D(-90.0 30.0 1, -85.0 40.0 2)".
"""
if box3d is None:
return None
ll, ur = box3d[6:-1].split(',')
xmin, ymin, zmin = map(float, ll.split())
xmax, ymax, zmax = map(float, ur.split())
return (xmin, ymin, zmin, xmax, ymax, zmax)
def geo_db_type(self, f):
"""
Return the database field type for the given spatial field.
"""
if f.geom_type == 'RASTER':
return 'raster'
# Type-based geometries.
# TODO: Support 'M' extension.
if f.dim == 3:
geom_type = f.geom_type + 'Z'
else:
geom_type = f.geom_type
if f.geography:
if f.srid != 4326:
raise NotSupportedError('PostGIS only supports geography columns with an SRID of 4326.')
return 'geography(%s,%d)' % (geom_type, f.srid)
else:
return 'geometry(%s,%d)' % (geom_type, f.srid)
def get_distance(self, f, dist_val, lookup_type):
"""
Retrieve the distance parameters for the given geometry field,
distance lookup value, and the distance lookup type.
This is the most complex implementation of the spatial backends due to
what is supported on geodetic geometry columns vs. what's available on
projected geometry columns. In addition, it has to take into account
the geography column type.
"""
# Getting the distance parameter
value = dist_val[0]
# Shorthand boolean flags.
geodetic = f.geodetic(self.connection)
geography = f.geography
if isinstance(value, Distance):
if geography:
dist_param = value.m
elif geodetic:
if lookup_type == 'dwithin':
raise ValueError('Only numeric values of degree units are '
'allowed on geographic DWithin queries.')
dist_param = value.m
else:
dist_param = getattr(value, Distance.unit_attname(f.units_name(self.connection)))
else:
# Assuming the distance is in the units of the field.
dist_param = value
return [dist_param]
def get_geom_placeholder(self, f, value, compiler):
"""
Provide a proper substitution value for Geometries or rasters that are
not in the SRID of the field. Specifically, this routine will
substitute in the ST_Transform() function call.
"""
transform_func = self.spatial_function_name('Transform')
if hasattr(value, 'as_sql'):
if value.field.srid == f.srid:
placeholder = '%s'
else:
placeholder = '%s(%%s, %s)' % (transform_func, f.srid)
return placeholder
# Get the srid for this object
if value is None:
value_srid = None
else:
value_srid = value.srid
# Adding Transform() to the SQL placeholder if the value srid
# is not equal to the field srid.
if value_srid is None or value_srid == f.srid:
placeholder = '%s'
else:
placeholder = '%s(%%s, %s)' % (transform_func, f.srid)
return placeholder
def _get_postgis_func(self, func):
"""
Helper routine for calling PostGIS functions and returning their result.
"""
# Close out the connection. See #9437.
with self.connection.temporary_connection() as cursor:
cursor.execute('SELECT %s()' % func)
return cursor.fetchone()[0]
def postgis_geos_version(self):
"Return the version of the GEOS library used with PostGIS."
return self._get_postgis_func('postgis_geos_version')
def postgis_lib_version(self):
"Return the version number of the PostGIS library used with PostgreSQL."
return self._get_postgis_func('postgis_lib_version')
def postgis_proj_version(self):
"Return the version of the PROJ.4 library used with PostGIS."
return self._get_postgis_func('postgis_proj_version')
def postgis_version(self):
"Return PostGIS version number and compile-time options."
return self._get_postgis_func('postgis_version')
def postgis_full_version(self):
"Return PostGIS version number and compile-time options."
return self._get_postgis_func('postgis_full_version')
def postgis_version_tuple(self):
"""
Return the PostGIS version as a tuple (version string, major,
minor, subminor).
"""
version = self.postgis_lib_version()
return (version,) + get_version_tuple(version)
def proj_version_tuple(self):
"""
Return the version of PROJ.4 used by PostGIS as a tuple of the
major, minor, and subminor release numbers.
"""
proj_regex = re.compile(r'(\d+)\.(\d+)\.(\d+)')
proj_ver_str = self.postgis_proj_version()
m = proj_regex.search(proj_ver_str)
if m:
return tuple(map(int, [m.group(1), m.group(2), m.group(3)]))
else:
raise Exception('Could not determine PROJ.4 version from PostGIS.')
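# Illustrative sketch, not part of the original module: if
# postgis_proj_version() reports something like 'Rel. 4.9.3, 15 August 2016',
# the regex above extracts the first dotted triple and proj_version_tuple()
# returns (4, 9, 3). The version string is hypothetical.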
def spatial_aggregate_name(self, agg_name):
if agg_name == 'Extent3D':
return self.extent3d
else:
return self.geom_func_prefix + agg_name
# Routines for getting the OGC-compliant models.
def geometry_columns(self):
return PostGISGeometryColumns
def spatial_ref_sys(self):
return PostGISSpatialRefSys
def parse_raster(self, value):
"""Convert a PostGIS HEX String into a dict readable by GDALRaster."""
return from_pgraster(value)
def distance_expr_for_lookup(self, lhs, rhs, **kwargs):
return super().distance_expr_for_lookup(
self._normalize_distance_lookup_arg(lhs),
self._normalize_distance_lookup_arg(rhs),
**kwargs
)
@staticmethod
def _normalize_distance_lookup_arg(arg):
is_raster = (
arg.field.geom_type == 'RASTER'
if hasattr(arg, 'field') else
isinstance(arg, GDALRaster)
)
return ST_Polygon(arg) if is_raster else arg
def get_geometry_converter(self, expression):
read = wkb_r().read
geom_class = expression.output_field.geom_class
def converter(value, expression, connection):
return None if value is None else GEOSGeometryBase(read(value), geom_class)
return converter
def get_area_att_for_field(self, field):
return 'sq_m'
|
c9f1c95cffb3a640ed00cfed8b0f6df8a435ae9fd6ee3453ee65300d0eee508c | """
Tests for django.core.servers.
"""
import errno
import os
import socket
from http.client import HTTPConnection
from urllib.error import HTTPError
from urllib.parse import urlencode
from urllib.request import urlopen
from django.test import LiveServerTestCase, override_settings
from .models import Person
TEST_ROOT = os.path.dirname(__file__)
TEST_SETTINGS = {
'MEDIA_URL': '/media/',
'MEDIA_ROOT': os.path.join(TEST_ROOT, 'media'),
'STATIC_URL': '/static/',
'STATIC_ROOT': os.path.join(TEST_ROOT, 'static'),
}
@override_settings(ROOT_URLCONF='servers.urls', **TEST_SETTINGS)
class LiveServerBase(LiveServerTestCase):
available_apps = [
'servers',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
]
fixtures = ['testdata.json']
def urlopen(self, url):
return urlopen(self.live_server_url + url)
class LiveServerAddress(LiveServerBase):
@classmethod
def setUpClass(cls):
super().setUpClass()
# put it in a list to prevent descriptor lookups in test
cls.live_server_url_test = [cls.live_server_url]
def test_live_server_url_is_class_property(self):
self.assertIsInstance(self.live_server_url_test[0], str)
self.assertEqual(self.live_server_url_test[0], self.live_server_url)
class LiveServerViews(LiveServerBase):
def test_protocol(self):
"""Launched server serves with HTTP 1.1."""
with self.urlopen('/example_view/') as f:
self.assertEqual(f.version, 11)
def test_closes_connection_without_content_length(self):
"""
An HTTP 1.1 server is supposed to support keep-alive. Since our
development server is rather simple we support it only in cases where
we can detect a content length from the response. This should be doable
for all simple views and streaming responses where an iterable with
length of one is passed. The latter follows as a result of `set_content_length`
from https://github.com/python/cpython/blob/master/Lib/wsgiref/handlers.py.
If we cannot detect a content length we explicitly set the `Connection`
header to `close` to notify the client that we do not actually support
it.
"""
conn = HTTPConnection(LiveServerViews.server_thread.host, LiveServerViews.server_thread.port, timeout=1)
try:
conn.request('GET', '/streaming_example_view/', headers={'Connection': 'keep-alive'})
response = conn.getresponse()
self.assertTrue(response.will_close)
self.assertEqual(response.read(), b'Iamastream')
self.assertEqual(response.status, 200)
self.assertEqual(response.getheader('Connection'), 'close')
conn.request('GET', '/streaming_example_view/', headers={'Connection': 'close'})
response = conn.getresponse()
self.assertTrue(response.will_close)
self.assertEqual(response.read(), b'Iamastream')
self.assertEqual(response.status, 200)
self.assertEqual(response.getheader('Connection'), 'close')
finally:
conn.close()
def test_keep_alive_on_connection_with_content_length(self):
"""
See `test_closes_connection_without_content_length` for details. This
is a follow-up test, which ensures that we do not close the connection
when it isn't needed, allowing us to take advantage of keep-alive.
"""
conn = HTTPConnection(LiveServerViews.server_thread.host, LiveServerViews.server_thread.port)
try:
conn.request('GET', '/example_view/', headers={"Connection": "keep-alive"})
response = conn.getresponse()
self.assertFalse(response.will_close)
self.assertEqual(response.read(), b'example view')
self.assertEqual(response.status, 200)
self.assertIsNone(response.getheader('Connection'))
conn.request('GET', '/example_view/', headers={"Connection": "close"})
response = conn.getresponse()
self.assertFalse(response.will_close)
self.assertEqual(response.read(), b'example view')
self.assertEqual(response.status, 200)
self.assertIsNone(response.getheader('Connection'))
finally:
conn.close()
def test_keep_alive_connection_clears_previous_request_data(self):
conn = HTTPConnection(LiveServerViews.server_thread.host, LiveServerViews.server_thread.port)
try:
conn.request('POST', '/method_view/', b'{}', headers={"Connection": "keep-alive"})
response = conn.getresponse()
self.assertFalse(response.will_close)
self.assertEqual(response.status, 200)
self.assertEqual(response.read(), b'POST')
conn.request('POST', '/method_view/', b'{}', headers={"Connection": "close"})
response = conn.getresponse()
self.assertFalse(response.will_close)
self.assertEqual(response.status, 200)
self.assertEqual(response.read(), b'POST')
finally:
conn.close()
def test_404(self):
with self.assertRaises(HTTPError) as err:
self.urlopen('/')
err.exception.close()
self.assertEqual(err.exception.code, 404, 'Expected 404 response')
def test_view(self):
with self.urlopen('/example_view/') as f:
self.assertEqual(f.read(), b'example view')
def test_static_files(self):
with self.urlopen('/static/example_static_file.txt') as f:
self.assertEqual(f.read().rstrip(b'\r\n'), b'example static file')
def test_no_collectstatic_emulation(self):
"""
LiveServerTestCase reports a 404 status code when the HTTP client
tries to access a static file that isn't explicitly put under
STATIC_ROOT.
"""
with self.assertRaises(HTTPError) as err:
self.urlopen('/static/another_app/another_app_static_file.txt')
err.exception.close()
self.assertEqual(err.exception.code, 404, 'Expected 404 response')
def test_media_files(self):
with self.urlopen('/media/example_media_file.txt') as f:
self.assertEqual(f.read().rstrip(b'\r\n'), b'example media file')
def test_environ(self):
with self.urlopen('/environ_view/?%s' % urlencode({'q': 'тест'})) as f:
self.assertIn(b"QUERY_STRING: 'q=%D1%82%D0%B5%D1%81%D1%82'", f.read())
class LiveServerDatabase(LiveServerBase):
def test_fixtures_loaded(self):
"""
Fixtures are properly loaded and visible to the live server thread.
"""
with self.urlopen('/model_view/') as f:
self.assertEqual(f.read().splitlines(), [b'jane', b'robert'])
def test_database_writes(self):
"""
Data written to the database by a view can be read.
"""
with self.urlopen('/create_model_instance/'):
pass
self.assertQuerysetEqual(
Person.objects.all().order_by('pk'),
['jane', 'robert', 'emily'],
lambda b: b.name
)
class LiveServerPort(LiveServerBase):
def test_port_bind(self):
"""
Each LiveServerTestCase binds to a unique port or fails to start a
server thread when run concurrently (#26011).
"""
TestCase = type("TestCase", (LiveServerBase,), {})
try:
TestCase.setUpClass()
except socket.error as e:
if e.errno == errno.EADDRINUSE:
# We're out of ports, LiveServerTestCase correctly fails with
# a socket error.
return
# Unexpected error.
raise
try:
# We've acquired a port, ensure our server threads acquired
# different addresses.
self.assertNotEqual(
self.live_server_url, TestCase.live_server_url,
"Acquired duplicate server addresses for server threads: %s" % self.live_server_url
)
finally:
TestCase.tearDownClass()
def test_specified_port_bind(self):
"""LiveServerTestCase.port customizes the server's port."""
TestCase = type('TestCase', (LiveServerBase,), {})
# Find an open port and tell TestCase to use it.
s = socket.socket()
s.bind(('', 0))
TestCase.port = s.getsockname()[1]
s.close()
TestCase.setUpClass()
try:
self.assertEqual(
TestCase.port, TestCase.server_thread.port,
'Did not use specified port for LiveServerTestCase thread: %s' % TestCase.port
)
finally:
TestCase.tearDownClass()
class LiveServerThreadedTests(LiveServerBase):
"""If LiveServerTestCase isn't threaded, these tests will hang."""
def test_view_calls_subview(self):
url = '/subview_calling_view/?%s' % urlencode({'url': self.live_server_url})
with self.urlopen(url) as f:
self.assertEqual(f.read(), b'subview calling view: subview')
def test_check_model_instance_from_subview(self):
url = '/check_model_instance_from_subview/?%s' % urlencode({
'url': self.live_server_url,
})
with self.urlopen(url) as f:
self.assertIn(b'emily', f.read())
e36a7f1d1c0f3ce44bbf72cc371ffdbb3adbf54001300257c35196c93c47447f | from urllib.request import urlopen
from django.http import HttpResponse, StreamingHttpResponse
from django.views.decorators.csrf import csrf_exempt
from .models import Person
def example_view(request):
return HttpResponse('example view')
def streaming_example_view(request):
return StreamingHttpResponse((b'I', b'am', b'a', b'stream'))
def model_view(request):
people = Person.objects.all()
return HttpResponse('\n'.join(person.name for person in people))
def create_model_instance(request):
person = Person(name='emily')
person.save()
return HttpResponse('')
def environ_view(request):
return HttpResponse("\n".join("%s: %r" % (k, v) for k, v in request.environ.items()))
def subview(request):
return HttpResponse('subview')
def subview_calling_view(request):
with urlopen(request.GET['url'] + '/subview/') as response:
return HttpResponse('subview calling view: {}'.format(response.read().decode()))
def check_model_instance_from_subview(request):
with urlopen(request.GET['url'] + '/create_model_instance/'):
pass
with urlopen(request.GET['url'] + '/model_view/') as response:
return HttpResponse('subview calling view: {}'.format(response.read().decode()))
@csrf_exempt
def method_view(request):
return HttpResponse(request.method)
0d6a54d56f7b6738eb503f55bec43bbf609aab1ba94673f98305aaf48dbc3dd3 | from django.db import migrations, models
from django.db.migrations import operations
from django.db.migrations.optimizer import MigrationOptimizer
from django.test import SimpleTestCase
from .models import EmptyManager, UnicodeModel
class OptimizerTests(SimpleTestCase):
"""
Tests the migration optimizer.
"""
def optimize(self, operations, app_label):
"""
Handy shortcut for getting results + number of loops
"""
optimizer = MigrationOptimizer()
return optimizer.optimize(operations, app_label), optimizer._iterations
def assertOptimizesTo(self, operations, expected, exact=None, less_than=None, app_label=None):
result, iterations = self.optimize(operations, app_label)
result = [repr(f.deconstruct()) for f in result]
expected = [repr(f.deconstruct()) for f in expected]
self.assertEqual(expected, result)
if exact is not None and iterations != exact:
raise self.failureException(
"Optimization did not take exactly %s iterations (it took %s)" % (exact, iterations)
)
if less_than is not None and iterations >= less_than:
raise self.failureException(
"Optimization did not take less than %s iterations (it took %s)" % (less_than, iterations)
)
def assertDoesNotOptimize(self, operations, **kwargs):
self.assertOptimizesTo(operations, operations, **kwargs)
def test_single(self):
"""
The optimizer does nothing on a single operation,
and it does so in just one pass.
"""
self.assertOptimizesTo(
[migrations.DeleteModel("Foo")],
[migrations.DeleteModel("Foo")],
exact=1,
)
def test_create_delete_model(self):
"""
CreateModel and DeleteModel should collapse into nothing.
"""
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.DeleteModel("Foo"),
],
[],
)
def test_create_rename_model(self):
"""
CreateModel should absorb RenameModels.
"""
managers = [('objects', EmptyManager())]
self.assertOptimizesTo(
[
migrations.CreateModel(
name="Foo",
fields=[("name", models.CharField(max_length=255))],
options={'verbose_name': 'Foo'},
bases=(UnicodeModel,),
managers=managers,
),
migrations.RenameModel("Foo", "Bar"),
],
[
migrations.CreateModel(
"Bar",
[("name", models.CharField(max_length=255))],
options={'verbose_name': 'Foo'},
bases=(UnicodeModel,),
managers=managers,
)
],
)
def test_rename_model_self(self):
"""
RenameModels should absorb themselves.
"""
self.assertOptimizesTo(
[
migrations.RenameModel("Foo", "Baa"),
migrations.RenameModel("Baa", "Bar"),
],
[
migrations.RenameModel("Foo", "Bar"),
],
)
def test_create_alter_model_options(self):
self.assertOptimizesTo(
[
migrations.CreateModel('Foo', fields=[]),
migrations.AlterModelOptions(name='Foo', options={'verbose_name_plural': 'Foozes'}),
],
[
migrations.CreateModel('Foo', fields=[], options={'verbose_name_plural': 'Foozes'}),
]
)
def _test_create_alter_foo_delete_model(self, alter_foo):
"""
CreateModel, AlterModelTable, AlterUniqueTogether/AlterIndexTogether/
AlterOrderWithRespectTo, and DeleteModel should collapse into nothing.
"""
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.AlterModelTable("Foo", "woohoo"),
alter_foo,
migrations.DeleteModel("Foo"),
],
[],
)
def test_create_alter_unique_delete_model(self):
self._test_create_alter_foo_delete_model(migrations.AlterUniqueTogether("Foo", [["a", "b"]]))
def test_create_alter_index_delete_model(self):
self._test_create_alter_foo_delete_model(migrations.AlterIndexTogether("Foo", [["a", "b"]]))
def test_create_alter_owrt_delete_model(self):
self._test_create_alter_foo_delete_model(migrations.AlterOrderWithRespectTo("Foo", "a"))
def _test_alter_alter_model(self, alter_foo, alter_bar):
"""
Two AlterUniqueTogether/AlterIndexTogether/AlterOrderWithRespectTo
should collapse into the second.
"""
self.assertOptimizesTo(
[
alter_foo,
alter_bar,
],
[
alter_bar,
],
)
def test_alter_alter_table_model(self):
self._test_alter_alter_model(
migrations.AlterModelTable("Foo", "a"),
migrations.AlterModelTable("Foo", "b"),
)
def test_alter_alter_unique_model(self):
self._test_alter_alter_model(
migrations.AlterUniqueTogether("Foo", [["a", "b"]]),
migrations.AlterUniqueTogether("Foo", [["a", "c"]]),
)
def test_alter_alter_index_model(self):
self._test_alter_alter_model(
migrations.AlterIndexTogether("Foo", [["a", "b"]]),
migrations.AlterIndexTogether("Foo", [["a", "c"]]),
)
def test_alter_alter_owrt_model(self):
self._test_alter_alter_model(
migrations.AlterOrderWithRespectTo("Foo", "a"),
migrations.AlterOrderWithRespectTo("Foo", "b"),
)
def test_optimize_through_create(self):
"""
We should be able to optimize away create/delete through a create or delete
of a different model, but only if the create operation does not mention the model
at all.
"""
# These should work
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.CreateModel("Bar", [("size", models.IntegerField())]),
migrations.DeleteModel("Foo"),
],
[
migrations.CreateModel("Bar", [("size", models.IntegerField())]),
],
)
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.CreateModel("Bar", [("size", models.IntegerField())]),
migrations.DeleteModel("Bar"),
migrations.DeleteModel("Foo"),
],
[],
)
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.CreateModel("Bar", [("size", models.IntegerField())]),
migrations.DeleteModel("Foo"),
migrations.DeleteModel("Bar"),
],
[],
)
# This should not work - FK should block it
self.assertDoesNotOptimize(
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.CreateModel("Bar", [("other", models.ForeignKey("testapp.Foo", models.CASCADE))]),
migrations.DeleteModel("Foo"),
],
)
# The same operations should be optimized if app_label is specified and
# a FK references a model from the other app.
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.CreateModel("Bar", [("other", models.ForeignKey("testapp.Foo", models.CASCADE))]),
migrations.DeleteModel("Foo"),
],
[
migrations.CreateModel("Bar", [("other", models.ForeignKey("testapp.Foo", models.CASCADE))]),
],
app_label="otherapp",
)
# But it shouldn't work if a FK references a model with the same
# app_label.
self.assertDoesNotOptimize(
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.CreateModel("Bar", [("other", models.ForeignKey("testapp.Foo", models.CASCADE))]),
migrations.DeleteModel("Foo"),
],
app_label="testapp",
)
# This should not work - bases should block it
self.assertDoesNotOptimize(
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.CreateModel("Bar", [("size", models.IntegerField())], bases=("testapp.Foo",)),
migrations.DeleteModel("Foo"),
],
)
# The same operations should be optimized if app_label and none of
# bases belong to that app.
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.CreateModel("Bar", [("size", models.IntegerField())], bases=("testapp.Foo",)),
migrations.DeleteModel("Foo"),
],
[
migrations.CreateModel("Bar", [("size", models.IntegerField())], bases=("testapp.Foo",)),
],
app_label="otherapp",
)
# But it shouldn't work if some of bases belongs to the specified app.
self.assertDoesNotOptimize(
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.CreateModel("Bar", [("size", models.IntegerField())], bases=("testapp.Foo",)),
migrations.DeleteModel("Foo"),
],
app_label="testapp",
)
self.assertOptimizesTo(
[
migrations.CreateModel('Book', [('name', models.CharField(max_length=255))]),
migrations.CreateModel('Person', [('name', models.CharField(max_length=255))]),
migrations.AddField('book', 'author', models.ForeignKey('test_app.Person', models.CASCADE)),
migrations.CreateModel('Review', [('book', models.ForeignKey('test_app.Book', models.CASCADE))]),
migrations.CreateModel('Reviewer', [('name', models.CharField(max_length=255))]),
migrations.AddField('review', 'reviewer', models.ForeignKey('test_app.Reviewer', models.CASCADE)),
migrations.RemoveField('book', 'author'),
migrations.DeleteModel('Person'),
],
[
migrations.CreateModel('Book', [('name', models.CharField(max_length=255))]),
migrations.CreateModel('Reviewer', [('name', models.CharField(max_length=255))]),
migrations.CreateModel('Review', [
('book', models.ForeignKey('test_app.Book', models.CASCADE)),
('reviewer', models.ForeignKey('test_app.Reviewer', models.CASCADE)),
]),
],
)
def test_create_model_add_field(self):
"""
AddField should optimize into CreateModel.
"""
managers = [('objects', EmptyManager())]
self.assertOptimizesTo(
[
migrations.CreateModel(
name="Foo",
fields=[("name", models.CharField(max_length=255))],
options={'verbose_name': 'Foo'},
bases=(UnicodeModel,),
managers=managers,
),
migrations.AddField("Foo", "age", models.IntegerField()),
],
[
migrations.CreateModel(
name="Foo",
fields=[
("name", models.CharField(max_length=255)),
("age", models.IntegerField()),
],
options={'verbose_name': 'Foo'},
bases=(UnicodeModel,),
managers=managers,
),
],
)
def test_create_model_reordering(self):
"""
AddField optimizes into CreateModel if it's a FK to a model that's
between them (and there's no FK in the other direction), by changing
the order of the CreateModel operations.
"""
self.assertOptimizesTo(
[
migrations.CreateModel('Foo', [('name', models.CharField(max_length=255))]),
migrations.CreateModel('Link', [('url', models.TextField())]),
migrations.AddField('Foo', 'link', models.ForeignKey('migrations.Link', models.CASCADE)),
],
[
migrations.CreateModel('Link', [('url', models.TextField())]),
migrations.CreateModel('Foo', [
('name', models.CharField(max_length=255)),
('link', models.ForeignKey('migrations.Link', models.CASCADE))
]),
],
)
def test_create_model_reordering_circular_fk(self):
"""
CreateModel reordering behavior doesn't result in an infinite loop if
there are FKs in both directions.
"""
self.assertOptimizesTo(
[
migrations.CreateModel('Bar', [('url', models.TextField())]),
migrations.CreateModel('Foo', [('name', models.CharField(max_length=255))]),
migrations.AddField('Bar', 'foo_fk', models.ForeignKey('migrations.Foo', models.CASCADE)),
migrations.AddField('Foo', 'bar_fk', models.ForeignKey('migrations.Bar', models.CASCADE)),
],
[
migrations.CreateModel('Foo', [('name', models.CharField(max_length=255))]),
migrations.CreateModel('Bar', [
('url', models.TextField()),
('foo_fk', models.ForeignKey('migrations.Foo', models.CASCADE)),
]),
migrations.AddField('Foo', 'bar_fk', models.ForeignKey('migrations.Bar', models.CASCADE)),
],
)
def test_create_model_no_reordering_for_unrelated_fk(self):
"""
CreateModel order remains unchanged if the later AddField operation
isn't a FK between them.
"""
self.assertDoesNotOptimize(
[
migrations.CreateModel('Foo', [('name', models.CharField(max_length=255))]),
migrations.CreateModel('Link', [('url', models.TextField())]),
migrations.AddField('Other', 'link', models.ForeignKey('migrations.Link', models.CASCADE)),
],
)
def test_create_model_no_reordering_of_inherited_model(self):
"""
A CreateModel that inherits from another isn't reordered to avoid
moving it earlier than its parent CreateModel operation.
"""
self.assertOptimizesTo(
[
migrations.CreateModel('Other', [('foo', models.CharField(max_length=255))]),
migrations.CreateModel('ParentModel', [('bar', models.CharField(max_length=255))]),
migrations.CreateModel(
'ChildModel',
[('baz', models.CharField(max_length=255))],
bases=('migrations.parentmodel',),
),
migrations.AddField('Other', 'fk', models.ForeignKey('migrations.ChildModel', models.CASCADE)),
],
[
migrations.CreateModel('ParentModel', [('bar', models.CharField(max_length=255))]),
migrations.CreateModel(
'ChildModel',
[('baz', models.CharField(max_length=255))],
bases=('migrations.parentmodel',),
),
migrations.CreateModel(
'Other', [
('foo', models.CharField(max_length=255)),
('fk', models.ForeignKey('migrations.ChildModel', models.CASCADE)),
]
),
],
)
def test_create_model_add_field_not_through_m2m_through(self):
"""
AddField should NOT optimize into CreateModel if it's an M2M using a
through that's created between them.
"""
self.assertDoesNotOptimize(
[
migrations.CreateModel('Employee', []),
migrations.CreateModel('Employer', []),
migrations.CreateModel('Employment', [
('employee', models.ForeignKey('migrations.Employee', models.CASCADE)),
('employment', models.ForeignKey('migrations.Employer', models.CASCADE)),
]),
migrations.AddField(
'Employer', 'employees', models.ManyToManyField(
'migrations.Employee', through='migrations.Employment',
)
),
],
)
def test_create_model_alter_field(self):
"""
AlterField should optimize into CreateModel.
"""
managers = [('objects', EmptyManager())]
self.assertOptimizesTo(
[
migrations.CreateModel(
name="Foo",
fields=[("name", models.CharField(max_length=255))],
options={'verbose_name': 'Foo'},
bases=(UnicodeModel,),
managers=managers,
),
migrations.AlterField("Foo", "name", models.IntegerField()),
],
[
migrations.CreateModel(
name="Foo",
fields=[
("name", models.IntegerField()),
],
options={'verbose_name': 'Foo'},
bases=(UnicodeModel,),
managers=managers,
),
],
)
def test_create_model_rename_field(self):
"""
RenameField should optimize into CreateModel.
"""
managers = [('objects', EmptyManager())]
self.assertOptimizesTo(
[
migrations.CreateModel(
name="Foo",
fields=[("name", models.CharField(max_length=255))],
options={'verbose_name': 'Foo'},
bases=(UnicodeModel,),
managers=managers,
),
migrations.RenameField("Foo", "name", "title"),
],
[
migrations.CreateModel(
name="Foo",
fields=[
("title", models.CharField(max_length=255)),
],
options={'verbose_name': 'Foo'},
bases=(UnicodeModel,),
managers=managers,
),
],
)
def test_add_field_rename_field(self):
"""
RenameField should optimize into AddField
"""
self.assertOptimizesTo(
[
migrations.AddField("Foo", "name", models.CharField(max_length=255)),
migrations.RenameField("Foo", "name", "title"),
],
[
migrations.AddField("Foo", "title", models.CharField(max_length=255)),
],
)
def test_alter_field_rename_field(self):
"""
RenameField should optimize to the other side of AlterField,
and into itself.
"""
self.assertOptimizesTo(
[
migrations.AlterField("Foo", "name", models.CharField(max_length=255)),
migrations.RenameField("Foo", "name", "title"),
migrations.RenameField("Foo", "title", "nom"),
],
[
migrations.RenameField("Foo", "name", "nom"),
migrations.AlterField("Foo", "nom", models.CharField(max_length=255)),
],
)
def test_create_model_remove_field(self):
"""
RemoveField should optimize into CreateModel.
"""
managers = [('objects', EmptyManager())]
self.assertOptimizesTo(
[
migrations.CreateModel(
name="Foo",
fields=[
("name", models.CharField(max_length=255)),
("age", models.IntegerField()),
],
options={'verbose_name': 'Foo'},
bases=(UnicodeModel,),
managers=managers,
),
migrations.RemoveField("Foo", "age"),
],
[
migrations.CreateModel(
name="Foo",
fields=[
("name", models.CharField(max_length=255)),
],
options={'verbose_name': 'Foo'},
bases=(UnicodeModel,),
managers=managers,
),
],
)
def test_add_field_alter_field(self):
"""
AlterField should optimize into AddField.
"""
self.assertOptimizesTo(
[
migrations.AddField("Foo", "age", models.IntegerField()),
migrations.AlterField("Foo", "age", models.FloatField(default=2.4)),
],
[
migrations.AddField("Foo", name="age", field=models.FloatField(default=2.4)),
],
)
def test_add_field_delete_field(self):
"""
RemoveField should cancel AddField
"""
self.assertOptimizesTo(
[
migrations.AddField("Foo", "age", models.IntegerField()),
migrations.RemoveField("Foo", "age"),
],
[],
)
def test_alter_field_delete_field(self):
"""
RemoveField should absorb AlterField
"""
self.assertOptimizesTo(
[
migrations.AlterField("Foo", "age", models.IntegerField()),
migrations.RemoveField("Foo", "age"),
],
[
migrations.RemoveField("Foo", "age"),
],
)
def _test_create_alter_foo_field(self, alter):
"""
CreateModel, AlterFooTogether/AlterOrderWithRespectTo followed by an
add/alter/rename field should optimize to CreateModel with options.
"""
option_value = getattr(alter, alter.option_name)
options = {alter.option_name: option_value}
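# For example, AlterUniqueTogether("Foo", [["a", "b"]]) has option_name
# 'unique_together' and option_value {('a', 'b')}, while
# AlterOrderWithRespectTo("Foo", "b") stores the plain string 'b'.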
# AddField
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [
("a", models.IntegerField()),
("b", models.IntegerField()),
]),
alter,
migrations.AddField("Foo", "c", models.IntegerField()),
],
[
migrations.CreateModel("Foo", [
("a", models.IntegerField()),
("b", models.IntegerField()),
("c", models.IntegerField()),
], options=options),
],
)
# AlterField
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [
("a", models.IntegerField()),
("b", models.IntegerField()),
]),
alter,
migrations.AlterField("Foo", "b", models.CharField(max_length=255)),
],
[
migrations.CreateModel("Foo", [
("a", models.IntegerField()),
("b", models.CharField(max_length=255)),
], options=options),
],
)
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [
("a", models.IntegerField()),
("b", models.IntegerField()),
("c", models.IntegerField()),
]),
alter,
migrations.AlterField("Foo", "c", models.CharField(max_length=255)),
],
[
migrations.CreateModel("Foo", [
("a", models.IntegerField()),
("b", models.IntegerField()),
("c", models.CharField(max_length=255)),
], options=options),
],
)
# RenameField
if isinstance(option_value, str):
renamed_options = {alter.option_name: 'c'}
else:
renamed_options = {
alter.option_name: {
tuple('c' if value == 'b' else value for value in item) for item in option_value
}
}
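# e.g. {('a', 'b')} becomes {('a', 'c')} once field 'b' is renamed to 'c';
# for the string-valued AlterOrderWithRespectTo the option simply becomes 'c'.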
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [
("a", models.IntegerField()),
("b", models.IntegerField()),
]),
alter,
migrations.RenameField("Foo", "b", "c"),
],
[
migrations.CreateModel("Foo", [
("a", models.IntegerField()),
("c", models.IntegerField()),
], options=renamed_options),
],
)
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [
("a", models.IntegerField()),
("b", models.IntegerField()),
]),
alter,
migrations.RenameField("Foo", "b", "x"),
migrations.RenameField("Foo", "x", "c"),
],
[
migrations.CreateModel("Foo", [
("a", models.IntegerField()),
("c", models.IntegerField()),
], options=renamed_options),
],
)
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [
("a", models.IntegerField()),
("b", models.IntegerField()),
("c", models.IntegerField()),
]),
alter,
migrations.RenameField("Foo", "c", "d"),
],
[
migrations.CreateModel("Foo", [
("a", models.IntegerField()),
("b", models.IntegerField()),
("d", models.IntegerField()),
], options=options),
],
)
# RemoveField
if isinstance(option_value, str):
removed_options = None
else:
removed_options = {
alter.option_name: {
tuple(value for value in item if value != 'b') for item in option_value
}
}
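# e.g. {('a', 'b')} becomes {('a',)} once field 'b' is removed; for the
# string-valued AlterOrderWithRespectTo case the whole option is dropped
# (removed_options is None).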
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [
("a", models.IntegerField()),
("b", models.IntegerField()),
]),
alter,
migrations.RemoveField("Foo", "b"),
],
[
migrations.CreateModel("Foo", [
("a", models.IntegerField()),
], options=removed_options),
]
)
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [
("a", models.IntegerField()),
("b", models.IntegerField()),
("c", models.IntegerField()),
]),
alter,
migrations.RemoveField("Foo", "c"),
],
[
migrations.CreateModel("Foo", [
("a", models.IntegerField()),
("b", models.IntegerField()),
], options=options),
],
)
def test_create_alter_unique_field(self):
self._test_create_alter_foo_field(migrations.AlterUniqueTogether("Foo", [["a", "b"]]))
def test_create_alter_index_field(self):
self._test_create_alter_foo_field(migrations.AlterIndexTogether("Foo", [["a", "b"]]))
def test_create_alter_owrt_field(self):
self._test_create_alter_foo_field(migrations.AlterOrderWithRespectTo("Foo", "b"))
def test_optimize_through_fields(self):
"""
Field-level through checking works. This should manage to collapse
model Foo to nonexistence, and model Bar to a single IntegerField
called "width".
"""
self.assertOptimizesTo(
[
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
migrations.CreateModel("Bar", [("size", models.IntegerField())]),
migrations.AddField("Foo", "age", models.IntegerField()),
migrations.AddField("Bar", "width", models.IntegerField()),
migrations.AlterField("Foo", "age", models.IntegerField()),
migrations.RenameField("Bar", "size", "dimensions"),
migrations.RemoveField("Foo", "age"),
migrations.RenameModel("Foo", "Phou"),
migrations.RemoveField("Bar", "dimensions"),
migrations.RenameModel("Phou", "Fou"),
migrations.DeleteModel("Fou"),
],
[
migrations.CreateModel("Bar", [("width", models.IntegerField())]),
],
)
def test_optimize_elidable_operation(self):
elidable_operation = operations.base.Operation()
elidable_operation.elidable = True
self.assertOptimizesTo(
[
elidable_operation,
migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]),
elidable_operation,
migrations.CreateModel("Bar", [("size", models.IntegerField())]),
elidable_operation,
migrations.RenameModel("Foo", "Phou"),
migrations.DeleteModel("Bar"),
elidable_operation,
],
[
migrations.CreateModel("Phou", [("name", models.CharField(max_length=255))]),
],
)
b7cdf96d92f1a9b7d5fad15a24de26f3c1165aad1a43abfaa3c874803cddb13f | from unittest import mock
from django.apps.registry import apps as global_apps
from django.db import connection
from django.db.migrations.exceptions import InvalidMigrationPlan
from django.db.migrations.executor import MigrationExecutor
from django.db.migrations.graph import MigrationGraph
from django.db.migrations.recorder import MigrationRecorder
from django.db.utils import DatabaseError
from django.test import (
SimpleTestCase, modify_settings, override_settings, skipUnlessDBFeature,
)
from .test_base import MigrationTestBase
@modify_settings(INSTALLED_APPS={'append': 'migrations2'})
class ExecutorTests(MigrationTestBase):
"""
Tests the migration executor (full end-to-end running).
Bear in mind that if these are failing you should fix the other
test failures first, as they may be propagating into here.
"""
available_apps = ["migrations", "migrations2", "django.contrib.auth", "django.contrib.contenttypes"]
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_run(self):
"""
Tests running a simple set of migrations.
"""
executor = MigrationExecutor(connection)
# Let's look at the plan first and make sure it's up to scratch
plan = executor.migration_plan([("migrations", "0002_second")])
self.assertEqual(
plan,
[
(executor.loader.graph.nodes["migrations", "0001_initial"], False),
(executor.loader.graph.nodes["migrations", "0002_second"], False),
],
)
# Were the tables there before?
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_book")
# Alright, let's try running it
executor.migrate([("migrations", "0002_second")])
# Are the tables there now?
self.assertTableExists("migrations_author")
self.assertTableExists("migrations_book")
# Rebuild the graph to reflect the new DB state
executor.loader.build_graph()
# Alright, let's undo what we did
plan = executor.migration_plan([("migrations", None)])
self.assertEqual(
plan,
[
(executor.loader.graph.nodes["migrations", "0002_second"], True),
(executor.loader.graph.nodes["migrations", "0001_initial"], True),
],
)
executor.migrate([("migrations", None)])
# Are the tables gone?
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_book")
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"})
def test_run_with_squashed(self):
"""
Tests running a squashed migration from zero (should ignore what it replaces)
"""
executor = MigrationExecutor(connection)
# Check our leaf node is the squashed one
leaves = [key for key in executor.loader.graph.leaf_nodes() if key[0] == "migrations"]
self.assertEqual(leaves, [("migrations", "0001_squashed_0002")])
# Check the plan
plan = executor.migration_plan([("migrations", "0001_squashed_0002")])
self.assertEqual(
plan,
[
(executor.loader.graph.nodes["migrations", "0001_squashed_0002"], False),
],
)
# Were the tables there before?
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_book")
# Alright, let's try running it
executor.migrate([("migrations", "0001_squashed_0002")])
# Are the tables there now?
self.assertTableExists("migrations_author")
self.assertTableExists("migrations_book")
# Rebuild the graph to reflect the new DB state
executor.loader.build_graph()
# Alright, let's undo what we did. Should also just use squashed.
plan = executor.migration_plan([("migrations", None)])
self.assertEqual(
plan,
[
(executor.loader.graph.nodes["migrations", "0001_squashed_0002"], True),
],
)
executor.migrate([("migrations", None)])
# Are the tables gone?
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_book")
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_non_atomic"})
def test_non_atomic_migration(self):
"""
Applying a non-atomic migration works as expected.
"""
executor = MigrationExecutor(connection)
with self.assertRaisesMessage(RuntimeError, "Abort migration"):
executor.migrate([("migrations", "0001_initial")])
self.assertTableExists("migrations_publisher")
migrations_apps = executor.loader.project_state(("migrations", "0001_initial")).apps
Publisher = migrations_apps.get_model("migrations", "Publisher")
self.assertTrue(Publisher.objects.exists())
self.assertTableNotExists("migrations_book")
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_atomic_operation"})
def test_atomic_operation_in_non_atomic_migration(self):
"""
An atomic operation is properly rolled back inside a non-atomic
migration.
"""
executor = MigrationExecutor(connection)
with self.assertRaisesMessage(RuntimeError, "Abort migration"):
executor.migrate([("migrations", "0001_initial")])
migrations_apps = executor.loader.project_state(("migrations", "0001_initial")).apps
Editor = migrations_apps.get_model("migrations", "Editor")
self.assertFalse(Editor.objects.exists())
# Record previous migration as successful.
executor.migrate([("migrations", "0001_initial")], fake=True)
# Rebuild the graph to reflect the new DB state.
executor.loader.build_graph()
# Migrating backwards is also atomic.
with self.assertRaisesMessage(RuntimeError, "Abort migration"):
executor.migrate([("migrations", None)])
self.assertFalse(Editor.objects.exists())
@override_settings(MIGRATION_MODULES={
"migrations": "migrations.test_migrations",
"migrations2": "migrations2.test_migrations_2",
})
def test_empty_plan(self):
"""
Re-planning a full migration of a fully-migrated set doesn't
perform spurious unmigrations and remigrations.
There was previously a bug where the executor just always performed the
backwards plan for applied migrations - which even for the most recent
migration in an app, might include other, dependent apps, and these
were being unmigrated.
"""
# Make the initial plan, check it
executor = MigrationExecutor(connection)
plan = executor.migration_plan([
("migrations", "0002_second"),
("migrations2", "0001_initial"),
])
self.assertEqual(
plan,
[
(executor.loader.graph.nodes["migrations", "0001_initial"], False),
(executor.loader.graph.nodes["migrations", "0002_second"], False),
(executor.loader.graph.nodes["migrations2", "0001_initial"], False),
],
)
# Fake-apply all migrations
executor.migrate([
("migrations", "0002_second"),
("migrations2", "0001_initial")
], fake=True)
# Rebuild the graph to reflect the new DB state
executor.loader.build_graph()
# Now plan a second time and make sure it's empty
plan = executor.migration_plan([
("migrations", "0002_second"),
("migrations2", "0001_initial"),
])
self.assertEqual(plan, [])
# The resulting state should include applied migrations.
state = executor.migrate([
("migrations", "0002_second"),
("migrations2", "0001_initial"),
])
self.assertIn(('migrations', 'book'), state.models)
self.assertIn(('migrations', 'author'), state.models)
self.assertIn(('migrations2', 'otherauthor'), state.models)
# Erase all the fake records
executor.recorder.record_unapplied("migrations2", "0001_initial")
executor.recorder.record_unapplied("migrations", "0002_second")
executor.recorder.record_unapplied("migrations", "0001_initial")
@override_settings(MIGRATION_MODULES={
"migrations": "migrations.test_migrations",
"migrations2": "migrations2.test_migrations_2_no_deps",
})
def test_mixed_plan_not_supported(self):
"""
Although the MigrationExecutor interface allows for mixed migration
plans (combined forwards and backwards migrations), this is not
supported.
"""
# Prepare for mixed plan
executor = MigrationExecutor(connection)
plan = executor.migration_plan([("migrations", "0002_second")])
self.assertEqual(
plan,
[
(executor.loader.graph.nodes["migrations", "0001_initial"], False),
(executor.loader.graph.nodes["migrations", "0002_second"], False),
],
)
executor.migrate(None, plan)
# Rebuild the graph to reflect the new DB state
executor.loader.build_graph()
self.assertIn(('migrations', '0001_initial'), executor.loader.applied_migrations)
self.assertIn(('migrations', '0002_second'), executor.loader.applied_migrations)
self.assertNotIn(('migrations2', '0001_initial'), executor.loader.applied_migrations)
# Generate mixed plan
plan = executor.migration_plan([
("migrations", None),
("migrations2", "0001_initial"),
])
msg = (
'Migration plans with both forwards and backwards migrations are '
'not supported. Please split your migration process into separate '
'plans of only forwards OR backwards migrations.'
)
with self.assertRaisesMessage(InvalidMigrationPlan, msg) as cm:
executor.migrate(None, plan)
self.assertEqual(
cm.exception.args[1],
[
(executor.loader.graph.nodes["migrations", "0002_second"], True),
(executor.loader.graph.nodes["migrations", "0001_initial"], True),
(executor.loader.graph.nodes["migrations2", "0001_initial"], False),
],
)
# Rebuild the graph to reflect the new DB state
executor.loader.build_graph()
executor.migrate([
("migrations", None),
("migrations2", None),
])
# Are the tables gone?
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_book")
self.assertTableNotExists("migrations2_otherauthor")
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_soft_apply(self):
"""
Tests detection of initial migrations already having been applied.
"""
state = {"faked": None}
def fake_storer(phase, migration=None, fake=None):
state["faked"] = fake
executor = MigrationExecutor(connection, progress_callback=fake_storer)
# Were the tables there before?
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
# Run it normally
self.assertEqual(
executor.migration_plan([("migrations", "0001_initial")]),
[
(executor.loader.graph.nodes["migrations", "0001_initial"], False),
],
)
executor.migrate([("migrations", "0001_initial")])
# Are the tables there now?
self.assertTableExists("migrations_author")
self.assertTableExists("migrations_tribble")
# We shouldn't have faked that one
self.assertIs(state["faked"], False)
# Rebuild the graph to reflect the new DB state
executor.loader.build_graph()
# Fake-reverse that
executor.migrate([("migrations", None)], fake=True)
# Are the tables still there?
self.assertTableExists("migrations_author")
self.assertTableExists("migrations_tribble")
# Make sure that was faked
self.assertIs(state["faked"], True)
# Finally, migrate forwards; this should fake-apply our initial migration
executor.loader.build_graph()
self.assertEqual(
executor.migration_plan([("migrations", "0001_initial")]),
[
(executor.loader.graph.nodes["migrations", "0001_initial"], False),
],
)
# Applying the migration should raise a database level error
# because we haven't given the --fake-initial option
with self.assertRaises(DatabaseError):
executor.migrate([("migrations", "0001_initial")])
# Reset the faked state
state = {"faked": None}
# Allow faking of initial CreateModel operations
executor.migrate([("migrations", "0001_initial")], fake_initial=True)
self.assertIs(state["faked"], True)
# And migrate back to clean up the database
executor.loader.build_graph()
executor.migrate([("migrations", None)])
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
@override_settings(
MIGRATION_MODULES={
"migrations": "migrations.test_migrations_custom_user",
"django.contrib.auth": "django.contrib.auth.migrations",
},
AUTH_USER_MODEL="migrations.Author",
)
def test_custom_user(self):
"""
Regression test for #22325 - references to a custom user model defined in the
same app are not resolved correctly.
"""
executor = MigrationExecutor(connection)
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
# Migrate forwards
executor.migrate([("migrations", "0001_initial")])
self.assertTableExists("migrations_author")
self.assertTableExists("migrations_tribble")
# Make sure the soft-application detection works (#23093)
# Change table_names to not return auth_user during this as
# it wouldn't be there in a normal run, and ensure migrations.Author
# exists in the global app registry temporarily.
old_table_names = connection.introspection.table_names
connection.introspection.table_names = lambda c: [x for x in old_table_names(c) if x != "auth_user"]
migrations_apps = executor.loader.project_state(("migrations", "0001_initial")).apps
global_apps.get_app_config("migrations").models["author"] = migrations_apps.get_model("migrations", "author")
try:
migration = executor.loader.get_migration("auth", "0001_initial")
self.assertIs(executor.detect_soft_applied(None, migration)[0], True)
finally:
connection.introspection.table_names = old_table_names
del global_apps.get_app_config("migrations").models["author"]
# And migrate back to clean up the database
executor.loader.build_graph()
executor.migrate([("migrations", None)])
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
@override_settings(
MIGRATION_MODULES={
"migrations": "migrations.test_add_many_to_many_field_initial",
},
)
def test_detect_soft_applied_add_field_manytomanyfield(self):
"""
executor.detect_soft_applied() detects ManyToManyField tables from an
AddField operation. This checks the case of AddField in a migration
with other operations (0001) and the case of AddField in its own
migration (0002).
"""
tables = [
# from 0001
"migrations_project",
"migrations_task",
"migrations_project_tasks",
# from 0002
"migrations_task_projects",
]
executor = MigrationExecutor(connection)
# Create the tables for 0001 but make it look like the migration hasn't
# been applied.
executor.migrate([("migrations", "0001_initial")])
executor.migrate([("migrations", None)], fake=True)
for table in tables[:3]:
self.assertTableExists(table)
# Table detection sees 0001 is applied but not 0002.
migration = executor.loader.get_migration("migrations", "0001_initial")
self.assertIs(executor.detect_soft_applied(None, migration)[0], True)
migration = executor.loader.get_migration("migrations", "0002_initial")
self.assertIs(executor.detect_soft_applied(None, migration)[0], False)
# Create the tables for both migrations but make it look like neither
# has been applied.
executor.loader.build_graph()
executor.migrate([("migrations", "0001_initial")], fake=True)
executor.migrate([("migrations", "0002_initial")])
executor.loader.build_graph()
executor.migrate([("migrations", None)], fake=True)
# Table detection sees 0002 is applied.
migration = executor.loader.get_migration("migrations", "0002_initial")
self.assertIs(executor.detect_soft_applied(None, migration)[0], True)
# Leave the tables for 0001 except the many-to-many table. That missing
# table should cause detect_soft_applied() to return False.
with connection.schema_editor() as editor:
for table in tables[2:]:
editor.execute(editor.sql_delete_table % {"table": table})
migration = executor.loader.get_migration("migrations", "0001_initial")
self.assertIs(executor.detect_soft_applied(None, migration)[0], False)
# Cleanup by removing the remaining tables.
with connection.schema_editor() as editor:
for table in tables[:2]:
editor.execute(editor.sql_delete_table % {"table": table})
for table in tables:
self.assertTableNotExists(table)
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.lookuperror_a",
"migrations.migrations_test_apps.lookuperror_b",
"migrations.migrations_test_apps.lookuperror_c"
]
)
def test_unrelated_model_lookups_forwards(self):
"""
#24123 - All models of apps already applied which are
unrelated to the first app being applied are part of the initial model
state.
"""
try:
executor = MigrationExecutor(connection)
self.assertTableNotExists("lookuperror_a_a1")
self.assertTableNotExists("lookuperror_b_b1")
self.assertTableNotExists("lookuperror_c_c1")
executor.migrate([("lookuperror_b", "0003_b3")])
self.assertTableExists("lookuperror_b_b3")
# Rebuild the graph to reflect the new DB state
executor.loader.build_graph()
# Migrate forwards -- this used to raise a LookupError because
# lookuperror_b.B2 is already applied
executor.migrate([
("lookuperror_a", "0004_a4"),
("lookuperror_c", "0003_c3"),
])
self.assertTableExists("lookuperror_a_a4")
self.assertTableExists("lookuperror_c_c3")
# Rebuild the graph to reflect the new DB state
executor.loader.build_graph()
finally:
# Cleanup
executor.migrate([
("lookuperror_a", None),
("lookuperror_b", None),
("lookuperror_c", None),
])
self.assertTableNotExists("lookuperror_a_a1")
self.assertTableNotExists("lookuperror_b_b1")
self.assertTableNotExists("lookuperror_c_c1")
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.lookuperror_a",
"migrations.migrations_test_apps.lookuperror_b",
"migrations.migrations_test_apps.lookuperror_c"
]
)
def test_unrelated_model_lookups_backwards(self):
"""
#24123 - All models of apps being unapplied that are unrelated to the
first app being unapplied are part of the initial model state.
"""
try:
executor = MigrationExecutor(connection)
self.assertTableNotExists("lookuperror_a_a1")
self.assertTableNotExists("lookuperror_b_b1")
self.assertTableNotExists("lookuperror_c_c1")
executor.migrate([
("lookuperror_a", "0004_a4"),
("lookuperror_b", "0003_b3"),
("lookuperror_c", "0003_c3"),
])
self.assertTableExists("lookuperror_b_b3")
self.assertTableExists("lookuperror_a_a4")
self.assertTableExists("lookuperror_c_c3")
# Rebuild the graph to reflect the new DB state
executor.loader.build_graph()
# Migrate backwards -- this used to raise a LookupError because
# lookuperror_b.B2 is not in the initial state (unrelated to app c)
executor.migrate([("lookuperror_a", None)])
# Rebuild the graph to reflect the new DB state
executor.loader.build_graph()
finally:
# Cleanup
executor.migrate([
("lookuperror_b", None),
("lookuperror_c", None)
])
self.assertTableNotExists("lookuperror_a_a1")
self.assertTableNotExists("lookuperror_b_b1")
self.assertTableNotExists("lookuperror_c_c1")
@override_settings(
INSTALLED_APPS=[
'migrations.migrations_test_apps.mutate_state_a',
'migrations.migrations_test_apps.mutate_state_b',
]
)
def test_unrelated_applied_migrations_mutate_state(self):
"""
#26647 - Unrelated applied migrations should be part of the final
state in both directions.
"""
executor = MigrationExecutor(connection)
executor.migrate([
('mutate_state_b', '0002_add_field'),
])
# Migrate forward.
executor.loader.build_graph()
state = executor.migrate([
('mutate_state_a', '0001_initial'),
])
self.assertIn('added', dict(state.models['mutate_state_b', 'b'].fields))
executor.loader.build_graph()
# Migrate backward.
state = executor.migrate([
('mutate_state_a', None),
])
self.assertIn('added', dict(state.models['mutate_state_b', 'b'].fields))
executor.migrate([
('mutate_state_b', None),
])
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_process_callback(self):
"""
#24129 - Tests the progress callback process.
"""
call_args_list = []
def callback(*args):
call_args_list.append(args)
executor = MigrationExecutor(connection, progress_callback=callback)
# Were the tables there before?
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
executor.migrate([
("migrations", "0001_initial"),
("migrations", "0002_second"),
])
# Rebuild the graph to reflect the new DB state
executor.loader.build_graph()
executor.migrate([
("migrations", None),
("migrations", None),
])
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
migrations = executor.loader.graph.nodes
expected = [
("render_start",),
("render_success",),
("apply_start", migrations['migrations', '0001_initial'], False),
("apply_success", migrations['migrations', '0001_initial'], False),
("apply_start", migrations['migrations', '0002_second'], False),
("apply_success", migrations['migrations', '0002_second'], False),
("render_start",),
("render_success",),
("unapply_start", migrations['migrations', '0002_second'], False),
("unapply_success", migrations['migrations', '0002_second'], False),
("unapply_start", migrations['migrations', '0001_initial'], False),
("unapply_success", migrations['migrations', '0001_initial'], False),
]
self.assertEqual(call_args_list, expected)
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.alter_fk.author_app",
"migrations.migrations_test_apps.alter_fk.book_app",
]
)
def test_alter_id_type_with_fk(self):
try:
executor = MigrationExecutor(connection)
self.assertTableNotExists("author_app_author")
self.assertTableNotExists("book_app_book")
# Apply initial migrations
executor.migrate([
("author_app", "0001_initial"),
("book_app", "0001_initial"),
])
self.assertTableExists("author_app_author")
self.assertTableExists("book_app_book")
# Rebuild the graph to reflect the new DB state
executor.loader.build_graph()
# Apply PK type alteration
executor.migrate([("author_app", "0002_alter_id")])
# Rebuild the graph to reflect the new DB state
executor.loader.build_graph()
finally:
# We can't simply unapply the migrations here because there is no
# implicit cast from VARCHAR to INT on the database level.
with connection.schema_editor() as editor:
editor.execute(editor.sql_delete_table % {"table": "book_app_book"})
editor.execute(editor.sql_delete_table % {"table": "author_app_author"})
self.assertTableNotExists("author_app_author")
self.assertTableNotExists("book_app_book")
executor.migrate([("author_app", None)], fake=True)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"})
def test_apply_all_replaced_marks_replacement_as_applied(self):
"""
Applying all replaced migrations marks replacement as applied (#24628).
"""
recorder = MigrationRecorder(connection)
# Place the database in a state where the replaced migrations are
# partially applied: 0001 is applied, 0002 is not.
recorder.record_applied("migrations", "0001_initial")
executor = MigrationExecutor(connection)
# Use fake because we don't actually have the first migration
# applied, so the second will fail. And there's no need to actually
# create/modify tables here, we're just testing the
# MigrationRecord, which works the same with or without fake.
executor.migrate([("migrations", "0002_second")], fake=True)
# Because we've now applied 0001 and 0002 both, their squashed
# replacement should be marked as applied.
self.assertIn(
("migrations", "0001_squashed_0002"),
recorder.applied_migrations(),
)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"})
def test_migrate_marks_replacement_applied_even_if_it_did_nothing(self):
"""
A new squash migration will be marked as applied even if all its
replaced migrations were previously already applied (#24628).
"""
recorder = MigrationRecorder(connection)
# Record all replaced migrations as applied
recorder.record_applied("migrations", "0001_initial")
recorder.record_applied("migrations", "0002_second")
executor = MigrationExecutor(connection)
executor.migrate([("migrations", "0001_squashed_0002")])
# Because 0001 and 0002 are both applied, even though this migrate run
# didn't apply anything new, their squashed replacement should be
# marked as applied.
self.assertIn(
("migrations", "0001_squashed_0002"),
recorder.applied_migrations(),
)
# When the feature is False, the operation and the record won't be
# performed in a transaction and the test will systematically pass.
@skipUnlessDBFeature('can_rollback_ddl')
@override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations'})
def test_migrations_applied_and_recorded_atomically(self):
"""Migrations are applied and recorded atomically."""
executor = MigrationExecutor(connection)
with mock.patch('django.db.migrations.executor.MigrationExecutor.record_migration') as record_migration:
record_migration.side_effect = RuntimeError('Recording migration failed.')
with self.assertRaisesMessage(RuntimeError, 'Recording migration failed.'):
executor.migrate([('migrations', '0001_initial')])
# The migration isn't recorded as applied since it failed.
migration_recorder = MigrationRecorder(connection)
self.assertFalse(migration_recorder.migration_qs.filter(app='migrations', name='0001_initial').exists())
self.assertTableNotExists('migrations_author')
class FakeLoader:
def __init__(self, graph, applied):
self.graph = graph
self.applied_migrations = applied
class FakeMigration:
"""Really all we need is any object with a debug-useful repr."""
def __init__(self, name):
self.name = name
def __repr__(self):
return 'M<%s>' % self.name
class ExecutorUnitTests(SimpleTestCase):
"""(More) isolated unit tests for executor methods."""
def test_minimize_rollbacks(self):
"""
Minimize unnecessary rollbacks in connected apps.
When you say "./manage.py migrate appA 0001", rather than migrating to
just after appA-0001 in the linearized migration plan (which could roll
back migrations in other apps that depend on appA 0001, but don't need
to be rolled back since we're not rolling back appA 0001), we migrate
to just before appA-0002.
"""
a1_impl = FakeMigration('a1')
a1 = ('a', '1')
a2_impl = FakeMigration('a2')
a2 = ('a', '2')
b1_impl = FakeMigration('b1')
b1 = ('b', '1')
graph = MigrationGraph()
graph.add_node(a1, a1_impl)
graph.add_node(a2, a2_impl)
graph.add_node(b1, b1_impl)
graph.add_dependency(None, b1, a1)
graph.add_dependency(None, a2, a1)
executor = MigrationExecutor(None)
executor.loader = FakeLoader(graph, {a1, b1, a2})
plan = executor.migration_plan({a1})
self.assertEqual(plan, [(a2_impl, True)])
def test_minimize_rollbacks_branchy(self):
r"""
Minimize rollbacks when target has multiple in-app children.
a: 1 <---- 3 <--\
\ \- 2 <--- 4
\ \
b: \- 1 <--- 2
"""
a1_impl = FakeMigration('a1')
a1 = ('a', '1')
a2_impl = FakeMigration('a2')
a2 = ('a', '2')
a3_impl = FakeMigration('a3')
a3 = ('a', '3')
a4_impl = FakeMigration('a4')
a4 = ('a', '4')
b1_impl = FakeMigration('b1')
b1 = ('b', '1')
b2_impl = FakeMigration('b2')
b2 = ('b', '2')
graph = MigrationGraph()
graph.add_node(a1, a1_impl)
graph.add_node(a2, a2_impl)
graph.add_node(a3, a3_impl)
graph.add_node(a4, a4_impl)
graph.add_node(b1, b1_impl)
graph.add_node(b2, b2_impl)
graph.add_dependency(None, a2, a1)
graph.add_dependency(None, a3, a1)
graph.add_dependency(None, a4, a2)
graph.add_dependency(None, a4, a3)
graph.add_dependency(None, b2, b1)
graph.add_dependency(None, b1, a1)
graph.add_dependency(None, b2, a2)
executor = MigrationExecutor(None)
executor.loader = FakeLoader(graph, {a1, b1, a2, b2, a3, a4})
plan = executor.migration_plan({a1})
should_be_rolled_back = [b2_impl, a4_impl, a2_impl, a3_impl]
exp = [(m, True) for m in should_be_rolled_back]
self.assertEqual(plan, exp)
def test_backwards_nothing_to_do(self):
r"""
If the current state satisfies the given target, do nothing.
a: 1 <--- 2
b: \- 1
c: \- 1
If a1 is applied already and a2 is not, and we're asked to migrate to
a1, don't apply or unapply b1 or c1, regardless of their current state.
"""
a1_impl = FakeMigration('a1')
a1 = ('a', '1')
a2_impl = FakeMigration('a2')
a2 = ('a', '2')
b1_impl = FakeMigration('b1')
b1 = ('b', '1')
c1_impl = FakeMigration('c1')
c1 = ('c', '1')
graph = MigrationGraph()
graph.add_node(a1, a1_impl)
graph.add_node(a2, a2_impl)
graph.add_node(b1, b1_impl)
graph.add_node(c1, c1_impl)
graph.add_dependency(None, a2, a1)
graph.add_dependency(None, b1, a1)
graph.add_dependency(None, c1, a1)
executor = MigrationExecutor(None)
executor.loader = FakeLoader(graph, {a1, b1})
plan = executor.migration_plan({a1})
self.assertEqual(plan, [])
22780acec8716ed149a8f96f326782d00d59619b1b7105ec4d56c7dbf2aedd66 | import datetime
import importlib
import io
import os
import sys
from unittest import mock
from django.apps import apps
from django.core.management import CommandError, call_command
from django.db import (
ConnectionHandler, DatabaseError, connection, connections, models,
)
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from django.db.backends.utils import truncate_name
from django.db.migrations.exceptions import InconsistentMigrationHistory
from django.db.migrations.recorder import MigrationRecorder
from django.test import TestCase, override_settings
from .models import UnicodeModel, UnserializableModel
from .routers import TestRouter
from .test_base import MigrationTestBase
class MigrateTests(MigrationTestBase):
"""
Tests running the migrate command.
"""
databases = {'default', 'other'}
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_migrate(self):
"""
Tests basic usage of the migrate command.
"""
# No tables are created
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableNotExists("migrations_book")
# Run the migrations to 0001 only
stdout = io.StringIO()
call_command('migrate', 'migrations', '0001', verbosity=1, stdout=stdout, no_color=True)
stdout = stdout.getvalue()
self.assertIn('Target specific migration: 0001_initial, from migrations', stdout)
self.assertIn('Applying migrations.0001_initial… OK', stdout)
# The correct tables exist
self.assertTableExists("migrations_author")
self.assertTableExists("migrations_tribble")
self.assertTableNotExists("migrations_book")
# Run migrations all the way
call_command("migrate", verbosity=0)
# The correct tables exist
self.assertTableExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableExists("migrations_book")
# Unmigrate everything
stdout = io.StringIO()
call_command('migrate', 'migrations', 'zero', verbosity=1, stdout=stdout, no_color=True)
stdout = stdout.getvalue()
self.assertIn('Unapply all migrations: migrations', stdout)
self.assertIn('Unapplying migrations.0002_second… OK', stdout)
# Tables are gone
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableNotExists("migrations_book")
@override_settings(INSTALLED_APPS=[
'django.contrib.auth',
'django.contrib.contenttypes',
'migrations.migrations_test_apps.migrated_app',
])
def test_migrate_with_system_checks(self):
out = io.StringIO()
call_command('migrate', skip_checks=False, no_color=True, stdout=out)
self.assertIn('Apply all migrations: migrated_app', out.getvalue())
@override_settings(INSTALLED_APPS=['migrations', 'migrations.migrations_test_apps.unmigrated_app_syncdb'])
def test_app_without_migrations(self):
msg = "App 'unmigrated_app_syncdb' does not have migrations."
with self.assertRaisesMessage(CommandError, msg):
call_command('migrate', app_label='unmigrated_app_syncdb')
@override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_clashing_prefix'})
    def test_ambiguous_prefix(self):
msg = (
"More than one migration matches 'a' in app 'migrations'. Please "
"be more specific."
)
with self.assertRaisesMessage(CommandError, msg):
call_command('migrate', app_label='migrations', migration_name='a')
@override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations'})
def test_unknown_prefix(self):
msg = "Cannot find a migration matching 'nonexistent' from app 'migrations'."
with self.assertRaisesMessage(CommandError, msg):
call_command('migrate', app_label='migrations', migration_name='nonexistent')
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_initial_false"})
def test_migrate_initial_false(self):
"""
`Migration.initial = False` skips fake-initial detection.
"""
# Make sure no tables are created
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
# Run the migrations to 0001 only
call_command("migrate", "migrations", "0001", verbosity=0)
# Fake rollback
call_command("migrate", "migrations", "zero", fake=True, verbosity=0)
# Make sure fake-initial detection does not run
with self.assertRaises(DatabaseError):
call_command("migrate", "migrations", "0001", fake_initial=True, verbosity=0)
call_command("migrate", "migrations", "0001", fake=True, verbosity=0)
# Real rollback
call_command("migrate", "migrations", "zero", verbosity=0)
# Make sure it's all gone
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableNotExists("migrations_book")
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations"},
DATABASE_ROUTERS=['migrations.routers.TestRouter'],
)
def test_migrate_fake_initial(self):
"""
--fake-initial only works if all tables created in the initial
        migration of an app exist. Database routers must be obeyed when doing
that check.
"""
# Make sure no tables are created
for db in connections:
self.assertTableNotExists("migrations_author", using=db)
self.assertTableNotExists("migrations_tribble", using=db)
# Run the migrations to 0001 only
call_command("migrate", "migrations", "0001", verbosity=0)
call_command("migrate", "migrations", "0001", verbosity=0, database="other")
# Make sure the right tables exist
self.assertTableExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
# Also check the "other" database
self.assertTableNotExists("migrations_author", using="other")
self.assertTableExists("migrations_tribble", using="other")
# Fake a roll-back
call_command("migrate", "migrations", "zero", fake=True, verbosity=0)
call_command("migrate", "migrations", "zero", fake=True, verbosity=0, database="other")
# Make sure the tables still exist
self.assertTableExists("migrations_author")
self.assertTableExists("migrations_tribble", using="other")
# Try to run initial migration
with self.assertRaises(DatabaseError):
call_command("migrate", "migrations", "0001", verbosity=0)
# Run initial migration with an explicit --fake-initial
out = io.StringIO()
with mock.patch('django.core.management.color.supports_color', lambda *args: False):
call_command("migrate", "migrations", "0001", fake_initial=True, stdout=out, verbosity=1)
call_command("migrate", "migrations", "0001", fake_initial=True, verbosity=0, database="other")
self.assertIn(
"migrations.0001_initial… faked",
out.getvalue().lower()
)
# Run migrations all the way
call_command("migrate", verbosity=0)
call_command("migrate", verbosity=0, database="other")
# Make sure the right tables exist
self.assertTableExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableExists("migrations_book")
self.assertTableNotExists("migrations_author", using="other")
self.assertTableNotExists("migrations_tribble", using="other")
self.assertTableNotExists("migrations_book", using="other")
# Fake a roll-back
call_command("migrate", "migrations", "zero", fake=True, verbosity=0)
call_command("migrate", "migrations", "zero", fake=True, verbosity=0, database="other")
# Make sure the tables still exist
self.assertTableExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableExists("migrations_book")
# Try to run initial migration
with self.assertRaises(DatabaseError):
call_command("migrate", "migrations", verbosity=0)
# Run initial migration with an explicit --fake-initial
with self.assertRaises(DatabaseError):
# Fails because "migrations_tribble" does not exist but needs to in
# order to make --fake-initial work.
call_command("migrate", "migrations", fake_initial=True, verbosity=0)
# Fake an apply
call_command("migrate", "migrations", fake=True, verbosity=0)
call_command("migrate", "migrations", fake=True, verbosity=0, database="other")
# Unmigrate everything
call_command("migrate", "migrations", "zero", verbosity=0)
call_command("migrate", "migrations", "zero", verbosity=0, database="other")
# Make sure it's all gone
for db in connections:
self.assertTableNotExists("migrations_author", using=db)
self.assertTableNotExists("migrations_tribble", using=db)
self.assertTableNotExists("migrations_book", using=db)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_fake_split_initial"})
def test_migrate_fake_split_initial(self):
"""
Split initial migrations can be faked with --fake-initial.
"""
call_command("migrate", "migrations", "0002", verbosity=0)
call_command("migrate", "migrations", "zero", fake=True, verbosity=0)
out = io.StringIO()
with mock.patch('django.core.management.color.supports_color', lambda *args: False):
call_command("migrate", "migrations", "0002", fake_initial=True, stdout=out, verbosity=1)
value = out.getvalue().lower()
self.assertIn("migrations.0001_initial… faked", value)
self.assertIn("migrations.0002_second… faked", value)
# Fake an apply
call_command("migrate", "migrations", fake=True, verbosity=0)
# Unmigrate everything
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_conflict"})
def test_migrate_conflict_exit(self):
"""
migrate exits if it detects a conflict.
"""
with self.assertRaisesMessage(CommandError, "Conflicting migrations detected"):
call_command("migrate", "migrations")
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_showmigrations_list(self):
"""
showmigrations --list displays migrations and whether or not they're
applied.
"""
out = io.StringIO()
with mock.patch('django.core.management.color.supports_color', lambda *args: True):
call_command("showmigrations", format='list', stdout=out, verbosity=0, no_color=False)
self.assertEqual(
'\x1b[1mmigrations\n\x1b[0m'
' [ ] 0001_initial\n'
' [ ] 0002_second\n',
out.getvalue().lower()
)
call_command("migrate", "migrations", "0001", verbosity=0)
out = io.StringIO()
# Giving the explicit app_label tests for selective `show_list` in the command
call_command("showmigrations", "migrations", format='list', stdout=out, verbosity=0, no_color=True)
self.assertEqual(
'migrations\n'
' [x] 0001_initial\n'
' [ ] 0002_second\n',
out.getvalue().lower()
)
# Cleanup by unmigrating everything
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_run_before"})
def test_showmigrations_plan(self):
"""
Tests --plan output of showmigrations command
"""
out = io.StringIO()
call_command("showmigrations", format='plan', stdout=out)
self.assertEqual(
"[ ] migrations.0001_initial\n"
"[ ] migrations.0003_third\n"
"[ ] migrations.0002_second\n",
out.getvalue().lower()
)
out = io.StringIO()
call_command("showmigrations", format='plan', stdout=out, verbosity=2)
self.assertEqual(
"[ ] migrations.0001_initial\n"
"[ ] migrations.0003_third … (migrations.0001_initial)\n"
"[ ] migrations.0002_second … (migrations.0001_initial, migrations.0003_third)\n",
out.getvalue().lower()
)
call_command("migrate", "migrations", "0003", verbosity=0)
out = io.StringIO()
call_command("showmigrations", format='plan', stdout=out)
self.assertEqual(
"[x] migrations.0001_initial\n"
"[x] migrations.0003_third\n"
"[ ] migrations.0002_second\n",
out.getvalue().lower()
)
out = io.StringIO()
call_command("showmigrations", format='plan', stdout=out, verbosity=2)
self.assertEqual(
"[x] migrations.0001_initial\n"
"[x] migrations.0003_third … (migrations.0001_initial)\n"
"[ ] migrations.0002_second … (migrations.0001_initial, migrations.0003_third)\n",
out.getvalue().lower()
)
# Cleanup by unmigrating everything
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_plan'})
def test_migrate_plan(self):
"""Tests migrate --plan output."""
out = io.StringIO()
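        # call_command('migrate', ..., plan=True) is roughly the in-process
        # equivalent of "python manage.py migrate migrations 0003 --plan".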
# Show the plan up to the third migration.
call_command('migrate', 'migrations', '0003', plan=True, stdout=out, no_color=True)
self.assertEqual(
'Planned operations:\n'
'migrations.0001_initial\n'
' Create model Salamander\n'
' Raw Python operation -> Grow salamander tail.\n'
'migrations.0002_second\n'
' Create model Book\n'
" Raw SQL operation -> ['SELECT * FROM migrations_book']\n"
'migrations.0003_third\n'
' Create model Author\n'
" Raw SQL operation -> ['SELECT * FROM migrations_author']\n",
out.getvalue()
)
# Migrate to the third migration.
call_command('migrate', 'migrations', '0003', verbosity=0)
out = io.StringIO()
# Show the plan for when there is nothing to apply.
call_command('migrate', 'migrations', '0003', plan=True, stdout=out, no_color=True)
self.assertEqual(
'Planned operations:\n'
' No planned migration operations.\n',
out.getvalue()
)
out = io.StringIO()
# Show the plan for reverse migration back to 0001.
call_command('migrate', 'migrations', '0001', plan=True, stdout=out, no_color=True)
self.assertEqual(
'Planned operations:\n'
'migrations.0003_third\n'
' Undo Create model Author\n'
" Raw SQL operation -> ['SELECT * FROM migrations_book']\n"
'migrations.0002_second\n'
' Undo Create model Book\n'
" Raw SQL operation -> ['SELECT * FROM migrations_salamand…\n",
out.getvalue()
)
out = io.StringIO()
# Show the migration plan to fourth, with truncated details.
call_command('migrate', 'migrations', '0004', plan=True, stdout=out, no_color=True)
self.assertEqual(
'Planned operations:\n'
'migrations.0004_fourth\n'
' Raw SQL operation -> SELECT * FROM migrations_author WHE…\n',
out.getvalue()
)
# Show the plan when an operation is irreversible.
# Migrate to the fourth migration.
call_command('migrate', 'migrations', '0004', verbosity=0)
out = io.StringIO()
call_command('migrate', 'migrations', '0003', plan=True, stdout=out, no_color=True)
self.assertEqual(
'Planned operations:\n'
'migrations.0004_fourth\n'
' Raw SQL operation -> IRREVERSIBLE\n',
out.getvalue()
)
# Cleanup by unmigrating everything: fake the irreversible, then
# migrate all to zero.
call_command('migrate', 'migrations', '0003', fake=True, verbosity=0)
call_command('migrate', 'migrations', 'zero', verbosity=0)
@override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_empty'})
def test_showmigrations_no_migrations(self):
out = io.StringIO()
call_command('showmigrations', stdout=out, no_color=True)
self.assertEqual('migrations\n (no migrations)\n', out.getvalue().lower())
@override_settings(INSTALLED_APPS=['migrations.migrations_test_apps.unmigrated_app'])
def test_showmigrations_unmigrated_app(self):
out = io.StringIO()
call_command('showmigrations', 'unmigrated_app', stdout=out, no_color=True)
self.assertEqual('unmigrated_app\n (no migrations)\n', out.getvalue().lower())
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_empty"})
def test_showmigrations_plan_no_migrations(self):
"""
Tests --plan output of showmigrations command without migrations
"""
out = io.StringIO()
call_command('showmigrations', format='plan', stdout=out, no_color=True)
self.assertEqual('(no migrations)\n', out.getvalue().lower())
out = io.StringIO()
call_command('showmigrations', format='plan', stdout=out, verbosity=2, no_color=True)
self.assertEqual('(no migrations)\n', out.getvalue().lower())
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed_complex"})
def test_showmigrations_plan_squashed(self):
"""
Tests --plan output of showmigrations command with squashed migrations.
"""
out = io.StringIO()
call_command("showmigrations", format='plan', stdout=out)
self.assertEqual(
"[ ] migrations.1_auto\n"
"[ ] migrations.2_auto\n"
"[ ] migrations.3_squashed_5\n"
"[ ] migrations.6_auto\n"
"[ ] migrations.7_auto\n",
out.getvalue().lower()
)
out = io.StringIO()
call_command("showmigrations", format='plan', stdout=out, verbosity=2)
self.assertEqual(
"[ ] migrations.1_auto\n"
"[ ] migrations.2_auto … (migrations.1_auto)\n"
"[ ] migrations.3_squashed_5 … (migrations.2_auto)\n"
"[ ] migrations.6_auto … (migrations.3_squashed_5)\n"
"[ ] migrations.7_auto … (migrations.6_auto)\n",
out.getvalue().lower()
)
call_command("migrate", "migrations", "3_squashed_5", verbosity=0)
out = io.StringIO()
call_command("showmigrations", format='plan', stdout=out)
self.assertEqual(
"[x] migrations.1_auto\n"
"[x] migrations.2_auto\n"
"[x] migrations.3_squashed_5\n"
"[ ] migrations.6_auto\n"
"[ ] migrations.7_auto\n",
out.getvalue().lower()
)
out = io.StringIO()
call_command("showmigrations", format='plan', stdout=out, verbosity=2)
self.assertEqual(
"[x] migrations.1_auto\n"
"[x] migrations.2_auto … (migrations.1_auto)\n"
"[x] migrations.3_squashed_5 … (migrations.2_auto)\n"
"[ ] migrations.6_auto … (migrations.3_squashed_5)\n"
"[ ] migrations.7_auto … (migrations.6_auto)\n",
out.getvalue().lower()
)
@override_settings(INSTALLED_APPS=[
'migrations.migrations_test_apps.mutate_state_b',
'migrations.migrations_test_apps.alter_fk.author_app',
'migrations.migrations_test_apps.alter_fk.book_app',
])
def test_showmigrations_plan_single_app_label(self):
"""
`showmigrations --plan app_label` output with a single app_label.
"""
# Single app with no dependencies on other apps.
out = io.StringIO()
call_command('showmigrations', 'mutate_state_b', format='plan', stdout=out)
self.assertEqual(
'[ ] mutate_state_b.0001_initial\n'
'[ ] mutate_state_b.0002_add_field\n',
out.getvalue()
)
# Single app with dependencies.
out = io.StringIO()
call_command('showmigrations', 'author_app', format='plan', stdout=out)
self.assertEqual(
'[ ] author_app.0001_initial\n'
'[ ] book_app.0001_initial\n'
'[ ] author_app.0002_alter_id\n',
out.getvalue()
)
# Some migrations already applied.
call_command('migrate', 'author_app', '0001', verbosity=0)
out = io.StringIO()
call_command('showmigrations', 'author_app', format='plan', stdout=out)
self.assertEqual(
'[X] author_app.0001_initial\n'
'[ ] book_app.0001_initial\n'
'[ ] author_app.0002_alter_id\n',
out.getvalue()
)
# Cleanup by unmigrating author_app.
call_command('migrate', 'author_app', 'zero', verbosity=0)
@override_settings(INSTALLED_APPS=[
'migrations.migrations_test_apps.mutate_state_b',
'migrations.migrations_test_apps.alter_fk.author_app',
'migrations.migrations_test_apps.alter_fk.book_app',
])
def test_showmigrations_plan_multiple_app_labels(self):
"""
`showmigrations --plan app_label` output with multiple app_labels.
"""
# Multiple apps: author_app depends on book_app; mutate_state_b doesn't
# depend on other apps.
out = io.StringIO()
call_command('showmigrations', 'mutate_state_b', 'author_app', format='plan', stdout=out)
self.assertEqual(
'[ ] author_app.0001_initial\n'
'[ ] book_app.0001_initial\n'
'[ ] author_app.0002_alter_id\n'
'[ ] mutate_state_b.0001_initial\n'
'[ ] mutate_state_b.0002_add_field\n',
out.getvalue()
)
# Multiple apps: args order shouldn't matter (the same result is
# expected as above).
out = io.StringIO()
call_command('showmigrations', 'author_app', 'mutate_state_b', format='plan', stdout=out)
self.assertEqual(
'[ ] author_app.0001_initial\n'
'[ ] book_app.0001_initial\n'
'[ ] author_app.0002_alter_id\n'
'[ ] mutate_state_b.0001_initial\n'
'[ ] mutate_state_b.0002_add_field\n',
out.getvalue()
)
@override_settings(INSTALLED_APPS=['migrations.migrations_test_apps.unmigrated_app'])
def test_showmigrations_plan_app_label_no_migrations(self):
out = io.StringIO()
call_command('showmigrations', 'unmigrated_app', format='plan', stdout=out, no_color=True)
self.assertEqual('(no migrations)\n', out.getvalue())
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_sqlmigrate_forwards(self):
"""
        sqlmigrate outputs forward-looking SQL.
"""
out = io.StringIO()
call_command("sqlmigrate", "migrations", "0001", stdout=out)
output = out.getvalue().lower()
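        # Record where each fragment appears so their relative order can be
        # checked below.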
index_tx_start = output.find(connection.ops.start_transaction_sql().lower())
index_op_desc_author = output.find('-- create model author')
index_create_table = output.find('create table')
index_op_desc_tribble = output.find('-- create model tribble')
index_op_desc_unique_together = output.find('-- alter unique_together')
index_tx_end = output.find(connection.ops.end_transaction_sql().lower())
self.assertGreater(index_tx_start, -1, "Transaction start not found")
self.assertGreater(
index_op_desc_author, index_tx_start,
"Operation description (author) not found or found before transaction start"
)
self.assertGreater(
index_create_table, index_op_desc_author,
"CREATE TABLE not found or found before operation description (author)"
)
self.assertGreater(
index_op_desc_tribble, index_create_table,
"Operation description (tribble) not found or found before CREATE TABLE (author)"
)
self.assertGreater(
index_op_desc_unique_together, index_op_desc_tribble,
"Operation description (unique_together) not found or found before operation description (tribble)"
)
self.assertGreater(
index_tx_end, index_op_desc_unique_together,
"Transaction end not found or found before operation description (unique_together)"
)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_sqlmigrate_backwards(self):
"""
        sqlmigrate outputs reverse-looking SQL.
"""
# Cannot generate the reverse SQL unless we've applied the migration.
call_command("migrate", "migrations", verbosity=0)
out = io.StringIO()
call_command("sqlmigrate", "migrations", "0001", stdout=out, backwards=True)
output = out.getvalue().lower()
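        # The reverse SQL lists the operations in the opposite order and drops
        # the tables instead of creating them.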
index_tx_start = output.find(connection.ops.start_transaction_sql().lower())
index_op_desc_unique_together = output.find('-- alter unique_together')
index_op_desc_tribble = output.find('-- create model tribble')
index_op_desc_author = output.find('-- create model author')
index_drop_table = output.rfind('drop table')
index_tx_end = output.find(connection.ops.end_transaction_sql().lower())
self.assertGreater(index_tx_start, -1, "Transaction start not found")
self.assertGreater(
index_op_desc_unique_together, index_tx_start,
"Operation description (unique_together) not found or found before transaction start"
)
self.assertGreater(
index_op_desc_tribble, index_op_desc_unique_together,
"Operation description (tribble) not found or found before operation description (unique_together)"
)
self.assertGreater(
index_op_desc_author, index_op_desc_tribble,
"Operation description (author) not found or found before operation description (tribble)"
)
self.assertGreater(
index_drop_table, index_op_desc_author,
"DROP TABLE not found or found before operation description (author)"
)
self.assertGreater(
index_tx_end, index_op_desc_unique_together,
"Transaction end not found or found before DROP TABLE"
)
# Cleanup by unmigrating everything
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_non_atomic"})
def test_sqlmigrate_for_non_atomic_migration(self):
"""
Transaction wrappers aren't shown for non-atomic migrations.
"""
out = io.StringIO()
call_command("sqlmigrate", "migrations", "0001", stdout=out)
output = out.getvalue().lower()
queries = [q.strip() for q in output.splitlines()]
if connection.ops.start_transaction_sql():
self.assertNotIn(connection.ops.start_transaction_sql().lower(), queries)
self.assertNotIn(connection.ops.end_transaction_sql().lower(), queries)
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.migrated_app",
"migrations.migrations_test_apps.migrated_unapplied_app",
"migrations.migrations_test_apps.unmigrated_app",
],
)
def test_regression_22823_unmigrated_fk_to_migrated_model(self):
"""
Assuming you have 3 apps, `A`, `B`, and `C`, such that:
* `A` has migrations
* `B` has a migration we want to apply
* `C` has no migrations, but has an FK to `A`
        When we try to migrate "B", an exception occurs because "B" was not
        included in the ProjectState that is used to detect
soft-applied migrations (#22823).
"""
call_command("migrate", "migrated_unapplied_app", stdout=io.StringIO())
# unmigrated_app.SillyModel has a foreign key to 'migrations.Tribble',
# but that model is only defined in a migration, so the global app
# registry never sees it and the reference is left dangling. Remove it
# to avoid problems in subsequent tests.
del apps._pending_operations[('migrations', 'tribble')]
@override_settings(INSTALLED_APPS=['migrations.migrations_test_apps.unmigrated_app_syncdb'])
def test_migrate_syncdb_deferred_sql_executed_with_schemaeditor(self):
"""
For an app without migrations, editor.execute() is used for executing
the syncdb deferred SQL.
"""
stdout = io.StringIO()
with mock.patch.object(BaseDatabaseSchemaEditor, 'execute') as execute:
call_command('migrate', run_syncdb=True, verbosity=1, stdout=stdout, no_color=True)
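            # One CREATE TABLE statement is executed per model in the app.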
create_table_count = len([call for call in execute.mock_calls if 'CREATE TABLE' in str(call)])
self.assertEqual(create_table_count, 2)
# There's at least one deferred SQL for creating the foreign key
# index.
self.assertGreater(len(execute.mock_calls), 2)
stdout = stdout.getvalue()
self.assertIn('Synchronize unmigrated apps: unmigrated_app_syncdb', stdout)
self.assertIn('Creating tables…', stdout)
table_name = truncate_name('unmigrated_app_syncdb_classroom', connection.ops.max_name_length())
self.assertIn('Creating table %s' % table_name, stdout)
@override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations'})
def test_migrate_syncdb_app_with_migrations(self):
msg = "Can't use run_syncdb with app 'migrations' as it has migrations."
with self.assertRaisesMessage(CommandError, msg):
call_command('migrate', 'migrations', run_syncdb=True, verbosity=0)
@override_settings(INSTALLED_APPS=[
'migrations.migrations_test_apps.unmigrated_app_syncdb',
'migrations.migrations_test_apps.unmigrated_app_simple',
])
def test_migrate_syncdb_app_label(self):
"""
Running migrate --run-syncdb with an app_label only creates tables for
the specified app.
"""
stdout = io.StringIO()
with mock.patch.object(BaseDatabaseSchemaEditor, 'execute') as execute:
call_command('migrate', 'unmigrated_app_syncdb', run_syncdb=True, stdout=stdout)
create_table_count = len([call for call in execute.mock_calls if 'CREATE TABLE' in str(call)])
self.assertEqual(create_table_count, 2)
self.assertGreater(len(execute.mock_calls), 2)
self.assertIn('Synchronize unmigrated app: unmigrated_app_syncdb', stdout.getvalue())
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"})
def test_migrate_record_replaced(self):
"""
Running a single squashed migration should record all of the original
replaced migrations as run.
"""
recorder = MigrationRecorder(connection)
out = io.StringIO()
call_command("migrate", "migrations", verbosity=0)
call_command("showmigrations", "migrations", stdout=out, no_color=True)
self.assertEqual(
'migrations\n'
' [x] 0001_squashed_0002 (2 squashed migrations)\n',
out.getvalue().lower()
)
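        # Both the replaced migrations and the squashed migration are recorded
        # as applied.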
applied_migrations = recorder.applied_migrations()
self.assertIn(("migrations", "0001_initial"), applied_migrations)
self.assertIn(("migrations", "0002_second"), applied_migrations)
self.assertIn(("migrations", "0001_squashed_0002"), applied_migrations)
# Rollback changes
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"})
def test_migrate_record_squashed(self):
"""
        Running migrate for a squashed migration should record it as run
if all of the replaced migrations have been run (#25231).
"""
recorder = MigrationRecorder(connection)
recorder.record_applied("migrations", "0001_initial")
recorder.record_applied("migrations", "0002_second")
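        # With the replaced migrations already recorded, migrate only has to
        # record the squashed migration as applied.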
out = io.StringIO()
call_command("migrate", "migrations", verbosity=0)
call_command("showmigrations", "migrations", stdout=out, no_color=True)
self.assertEqual(
'migrations\n'
' [x] 0001_squashed_0002 (2 squashed migrations)\n',
out.getvalue().lower()
)
self.assertIn(
("migrations", "0001_squashed_0002"),
recorder.applied_migrations()
)
        # No changes were actually applied, so there is nothing to roll back.
@override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations'})
def test_migrate_inconsistent_history(self):
"""
Running migrate with some migrations applied before their dependencies
should not be allowed.
"""
recorder = MigrationRecorder(connection)
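        # Record 0002_second as applied without its dependency 0001_initial.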
recorder.record_applied("migrations", "0002_second")
msg = "Migration migrations.0002_second is applied before its dependency migrations.0001_initial"
with self.assertRaisesMessage(InconsistentMigrationHistory, msg):
call_command("migrate")
applied_migrations = recorder.applied_migrations()
self.assertNotIn(("migrations", "0001_initial"), applied_migrations)
class MakeMigrationsTests(MigrationTestBase):
"""
Tests running the makemigrations command.
"""
def setUp(self):
super().setUp()
self._old_models = apps.app_configs['migrations'].models.copy()
def tearDown(self):
apps.app_configs['migrations'].models = self._old_models
apps.all_models['migrations'] = self._old_models
apps.clear_cache()
super().tearDown()
def test_files_content(self):
self.assertTableNotExists("migrations_unicodemodel")
apps.register_model('migrations', UnicodeModel)
with self.temporary_migration_module() as migration_dir:
call_command("makemigrations", "migrations", verbosity=0)
# Check for empty __init__.py file in migrations folder
init_file = os.path.join(migration_dir, "__init__.py")
self.assertTrue(os.path.exists(init_file))
with open(init_file, 'r') as fp:
content = fp.read()
self.assertEqual(content, '')
# Check for existing 0001_initial.py file in migration folder
initial_file = os.path.join(migration_dir, "0001_initial.py")
self.assertTrue(os.path.exists(initial_file))
with open(initial_file, 'r', encoding='utf-8') as fp:
content = fp.read()
self.assertIn('migrations.CreateModel', content)
self.assertIn('initial = True', content)
self.assertIn('úñí©óðé µóðéø', content) # Meta.verbose_name
self.assertIn('úñí©óðé µóðéøß', content) # Meta.verbose_name_plural
self.assertIn('ÚÑÍ¢ÓÐÉ', content) # title.verbose_name
self.assertIn('“Ðjáñgó”', content) # title.default
def test_makemigrations_order(self):
"""
makemigrations should recognize number-only migrations (0001.py).
"""
module = 'migrations.test_migrations_order'
with self.temporary_migration_module(module=module) as migration_dir:
if hasattr(importlib, 'invalidate_caches'):
# importlib caches os.listdir() on some platforms like macOS
# (#23850).
importlib.invalidate_caches()
call_command('makemigrations', 'migrations', '--empty', '-n', 'a', '-v', '0')
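            # The existing number-only migration (0001.py) is detected, so the
            # new empty migration is numbered 0002.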
self.assertTrue(os.path.exists(os.path.join(migration_dir, '0002_a.py')))
def test_makemigrations_empty_connections(self):
empty_connections = ConnectionHandler({'default': {}})
with mock.patch('django.core.management.commands.makemigrations.connections', new=empty_connections):
# with no apps
out = io.StringIO()
call_command('makemigrations', stdout=out)
self.assertIn('No changes detected', out.getvalue())
# with an app
with self.temporary_migration_module() as migration_dir:
call_command('makemigrations', 'migrations', verbosity=0)
init_file = os.path.join(migration_dir, '__init__.py')
self.assertTrue(os.path.exists(init_file))
@override_settings(INSTALLED_APPS=['migrations', 'migrations2'])
def test_makemigrations_consistency_checks_respect_routers(self):
"""
The history consistency checks in makemigrations respect
settings.DATABASE_ROUTERS.
"""
def patched_has_table(migration_recorder):
if migration_recorder.connection is connections['other']:
raise Exception('Other connection')
else:
return mock.DEFAULT
self.assertTableNotExists('migrations_unicodemodel')
apps.register_model('migrations', UnicodeModel)
with mock.patch.object(
MigrationRecorder, 'has_table',
autospec=True, side_effect=patched_has_table) as has_table:
with self.temporary_migration_module() as migration_dir:
call_command("makemigrations", "migrations", verbosity=0)
initial_file = os.path.join(migration_dir, "0001_initial.py")
self.assertTrue(os.path.exists(initial_file))
self.assertEqual(has_table.call_count, 1) # 'default' is checked
# Router says not to migrate 'other' so consistency shouldn't
# be checked.
with self.settings(DATABASE_ROUTERS=['migrations.routers.TestRouter']):
call_command('makemigrations', 'migrations', verbosity=0)
self.assertEqual(has_table.call_count, 2) # 'default' again
# With a router that doesn't prohibit migrating 'other',
# consistency is checked.
with self.settings(DATABASE_ROUTERS=['migrations.routers.EmptyRouter']):
with self.assertRaisesMessage(Exception, 'Other connection'):
call_command('makemigrations', 'migrations', verbosity=0)
self.assertEqual(has_table.call_count, 4) # 'default' and 'other'
# With a router that doesn't allow migrating on any database,
# no consistency checks are made.
with self.settings(DATABASE_ROUTERS=['migrations.routers.TestRouter']):
with mock.patch.object(TestRouter, 'allow_migrate', return_value=False) as allow_migrate:
call_command('makemigrations', 'migrations', verbosity=0)
allow_migrate.assert_any_call('other', 'migrations', model_name='UnicodeModel')
# allow_migrate() is called with the correct arguments.
self.assertGreater(len(allow_migrate.mock_calls), 0)
for mock_call in allow_migrate.mock_calls:
_, call_args, call_kwargs = mock_call
connection_alias, app_name = call_args
self.assertIn(connection_alias, ['default', 'other'])
# Raises an error if invalid app_name/model_name occurs.
apps.get_app_config(app_name).get_model(call_kwargs['model_name'])
self.assertEqual(has_table.call_count, 4)
def test_failing_migration(self):
# If a migration fails to serialize, it shouldn't generate an empty file. #21280
apps.register_model('migrations', UnserializableModel)
with self.temporary_migration_module() as migration_dir:
with self.assertRaisesMessage(ValueError, 'Cannot serialize'):
call_command("makemigrations", "migrations", verbosity=0)
initial_file = os.path.join(migration_dir, "0001_initial.py")
self.assertFalse(os.path.exists(initial_file))
def test_makemigrations_conflict_exit(self):
"""
makemigrations exits if it detects a conflict.
"""
with self.temporary_migration_module(module="migrations.test_migrations_conflict"):
with self.assertRaises(CommandError) as context:
call_command("makemigrations")
exception_message = str(context.exception)
self.assertIn(
'Conflicting migrations detected; multiple leaf nodes '
'in the migration graph:',
exception_message
)
self.assertIn('0002_second', exception_message)
self.assertIn('0002_conflicting_second', exception_message)
self.assertIn('in migrations', exception_message)
self.assertIn("To fix them run 'python manage.py makemigrations --merge'", exception_message)
def test_makemigrations_merge_no_conflict(self):
"""
makemigrations exits if in merge mode with no conflicts.
"""
out = io.StringIO()
with self.temporary_migration_module(module="migrations.test_migrations"):
call_command("makemigrations", merge=True, stdout=out)
self.assertIn("No conflicts detected to merge.", out.getvalue())
def test_makemigrations_empty_no_app_specified(self):
"""
makemigrations exits if no app is specified with 'empty' mode.
"""
msg = 'You must supply at least one app label when using --empty.'
with self.assertRaisesMessage(CommandError, msg):
call_command("makemigrations", empty=True)
def test_makemigrations_empty_migration(self):
"""
makemigrations properly constructs an empty migration.
"""
with self.temporary_migration_module() as migration_dir:
call_command("makemigrations", "migrations", empty=True, verbosity=0)
# Check for existing 0001_initial.py file in migration folder
initial_file = os.path.join(migration_dir, "0001_initial.py")
self.assertTrue(os.path.exists(initial_file))
with open(initial_file, 'r', encoding='utf-8') as fp:
content = fp.read()
# Remove all whitespace to check for empty dependencies and operations
content = content.replace(' ', '')
self.assertIn('dependencies=[\n]', content)
self.assertIn('operations=[\n]', content)
@override_settings(MIGRATION_MODULES={"migrations": None})
def test_makemigrations_disabled_migrations_for_app(self):
"""
makemigrations raises a nice error when migrations are disabled for an
app.
"""
msg = (
"Django can't create migrations for app 'migrations' because migrations "
"have been disabled via the MIGRATION_MODULES setting."
)
with self.assertRaisesMessage(ValueError, msg):
call_command("makemigrations", "migrations", empty=True, verbosity=0)
def test_makemigrations_no_changes_no_apps(self):
"""
makemigrations exits when there are no changes and no apps are specified.
"""
out = io.StringIO()
call_command("makemigrations", stdout=out)
self.assertIn("No changes detected", out.getvalue())
def test_makemigrations_no_changes(self):
"""
makemigrations exits when there are no changes to an app.
"""
out = io.StringIO()
with self.temporary_migration_module(module="migrations.test_migrations_no_changes"):
call_command("makemigrations", "migrations", stdout=out)
self.assertIn("No changes detected in app 'migrations'", out.getvalue())
def test_makemigrations_no_apps_initial(self):
"""
        makemigrations should detect that an initial migration is needed on
        empty migration modules if no app is provided.
"""
out = io.StringIO()
with self.temporary_migration_module(module="migrations.test_migrations_empty"):
call_command("makemigrations", stdout=out)
self.assertIn("0001_initial.py", out.getvalue())
def test_makemigrations_no_init(self):
"""Migration directories without an __init__.py file are allowed."""
out = io.StringIO()
with self.temporary_migration_module(module='migrations.test_migrations_no_init'):
call_command('makemigrations', stdout=out)
self.assertIn('0001_initial.py', out.getvalue())
def test_makemigrations_migrations_announce(self):
"""
makemigrations announces the migration at the default verbosity level.
"""
out = io.StringIO()
with self.temporary_migration_module():
call_command("makemigrations", "migrations", stdout=out)
self.assertIn("Migrations for 'migrations'", out.getvalue())
def test_makemigrations_no_common_ancestor(self):
"""
makemigrations fails to merge migrations with no common ancestor.
"""
with self.assertRaises(ValueError) as context:
with self.temporary_migration_module(module="migrations.test_migrations_no_ancestor"):
call_command("makemigrations", "migrations", merge=True)
exception_message = str(context.exception)
self.assertIn("Could not find common ancestor of", exception_message)
self.assertIn("0002_second", exception_message)
self.assertIn("0002_conflicting_second", exception_message)
def test_makemigrations_interactive_reject(self):
"""
makemigrations enters and exits interactive mode properly.
"""
# Monkeypatch interactive questioner to auto reject
with mock.patch('builtins.input', mock.Mock(return_value='N')):
with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir:
call_command("makemigrations", "migrations", name="merge", merge=True, interactive=True, verbosity=0)
merge_file = os.path.join(migration_dir, '0003_merge.py')
self.assertFalse(os.path.exists(merge_file))
def test_makemigrations_interactive_accept(self):
"""
makemigrations enters interactive mode and merges properly.
"""
# Monkeypatch interactive questioner to auto accept
with mock.patch('builtins.input', mock.Mock(return_value='y')):
out = io.StringIO()
with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir:
call_command("makemigrations", "migrations", name="merge", merge=True, interactive=True, stdout=out)
merge_file = os.path.join(migration_dir, '0003_merge.py')
self.assertTrue(os.path.exists(merge_file))
self.assertIn("Created new merge migration", out.getvalue())
@mock.patch('django.db.migrations.utils.datetime')
def test_makemigrations_default_merge_name(self, mock_datetime):
mock_datetime.datetime.now.return_value = datetime.datetime(2016, 1, 2, 3, 4)
with mock.patch('builtins.input', mock.Mock(return_value='y')):
out = io.StringIO()
with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir:
call_command("makemigrations", "migrations", merge=True, interactive=True, stdout=out)
merge_file = os.path.join(migration_dir, '0003_merge_20160102_0304.py')
self.assertTrue(os.path.exists(merge_file))
self.assertIn("Created new merge migration", out.getvalue())
def test_makemigrations_non_interactive_not_null_addition(self):
"""
Non-interactive makemigrations fails when a default is missing on a
new not-null field.
"""
class SillyModel(models.Model):
silly_field = models.BooleanField(default=False)
silly_int = models.IntegerField()
class Meta:
app_label = "migrations"
out = io.StringIO()
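        # The new non-null silly_int field has no default, so non-interactive
        # mode cannot proceed.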
with self.assertRaises(SystemExit):
with self.temporary_migration_module(module="migrations.test_migrations_no_default"):
call_command("makemigrations", "migrations", interactive=False, stdout=out)
def test_makemigrations_non_interactive_not_null_alteration(self):
"""
Non-interactive makemigrations fails when a default is missing on a
field changed to not-null.
"""
class Author(models.Model):
name = models.CharField(max_length=255)
slug = models.SlugField()
age = models.IntegerField(default=0)
class Meta:
app_label = "migrations"
out = io.StringIO()
with self.temporary_migration_module(module="migrations.test_migrations"):
call_command("makemigrations", "migrations", interactive=False, stdout=out)
self.assertIn("Alter field slug on author", out.getvalue())
def test_makemigrations_non_interactive_no_model_rename(self):
"""
        In non-interactive mode, makemigrations creates the new model and
        deletes the old one instead of prompting about a possible model rename.
"""
class RenamedModel(models.Model):
silly_field = models.BooleanField(default=False)
class Meta:
app_label = "migrations"
out = io.StringIO()
with self.temporary_migration_module(module="migrations.test_migrations_no_default"):
call_command("makemigrations", "migrations", interactive=False, stdout=out)
self.assertIn("Delete model SillyModel", out.getvalue())
self.assertIn("Create model RenamedModel", out.getvalue())
def test_makemigrations_non_interactive_no_field_rename(self):
"""
        In non-interactive mode, makemigrations adds the new field and removes
        the old one instead of prompting about a possible field rename.
"""
class SillyModel(models.Model):
silly_rename = models.BooleanField(default=False)
class Meta:
app_label = "migrations"
out = io.StringIO()
with self.temporary_migration_module(module="migrations.test_migrations_no_default"):
call_command("makemigrations", "migrations", interactive=False, stdout=out)
self.assertIn("Remove field silly_field from sillymodel", out.getvalue())
self.assertIn("Add field silly_rename to sillymodel", out.getvalue())
def test_makemigrations_handle_merge(self):
"""
makemigrations properly merges the conflicting migrations with --noinput.
"""
out = io.StringIO()
with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir:
call_command("makemigrations", "migrations", name="merge", merge=True, interactive=False, stdout=out)
merge_file = os.path.join(migration_dir, '0003_merge.py')
self.assertTrue(os.path.exists(merge_file))
output = out.getvalue()
self.assertIn("Merging migrations", output)
self.assertIn("Branch 0002_second", output)
self.assertIn("Branch 0002_conflicting_second", output)
self.assertIn("Created new merge migration", output)
def test_makemigration_merge_dry_run(self):
"""
makemigrations respects --dry-run option when fixing migration
conflicts (#24427).
"""
out = io.StringIO()
with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir:
call_command(
"makemigrations", "migrations", name="merge", dry_run=True,
merge=True, interactive=False, stdout=out,
)
merge_file = os.path.join(migration_dir, '0003_merge.py')
self.assertFalse(os.path.exists(merge_file))
output = out.getvalue()
self.assertIn("Merging migrations", output)
self.assertIn("Branch 0002_second", output)
self.assertIn("Branch 0002_conflicting_second", output)
self.assertNotIn("Created new merge migration", output)
def test_makemigration_merge_dry_run_verbosity_3(self):
"""
`makemigrations --merge --dry-run` writes the merge migration file to
stdout with `verbosity == 3` (#24427).
"""
out = io.StringIO()
with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir:
call_command(
"makemigrations", "migrations", name="merge", dry_run=True,
merge=True, interactive=False, stdout=out, verbosity=3,
)
merge_file = os.path.join(migration_dir, '0003_merge.py')
self.assertFalse(os.path.exists(merge_file))
output = out.getvalue()
self.assertIn("Merging migrations", output)
self.assertIn("Branch 0002_second", output)
self.assertIn("Branch 0002_conflicting_second", output)
self.assertNotIn("Created new merge migration", output)
# Additional output caused by verbosity 3
# The complete merge migration file that would be written
self.assertIn("class Migration(migrations.Migration):", output)
self.assertIn("dependencies = [", output)
self.assertIn("('migrations', '0002_second')", output)
self.assertIn("('migrations', '0002_conflicting_second')", output)
self.assertIn("operations = [", output)
self.assertIn("]", output)
def test_makemigrations_dry_run(self):
"""
`makemigrations --dry-run` should not ask for defaults.
"""
class SillyModel(models.Model):
silly_field = models.BooleanField(default=False)
silly_date = models.DateField() # Added field without a default
class Meta:
app_label = "migrations"
out = io.StringIO()
with self.temporary_migration_module(module="migrations.test_migrations_no_default"):
call_command("makemigrations", "migrations", dry_run=True, stdout=out)
# Output the expected changes directly, without asking for defaults
self.assertIn("Add field silly_date to sillymodel", out.getvalue())
def test_makemigrations_dry_run_verbosity_3(self):
"""
Allow `makemigrations --dry-run` to output the migrations file to
stdout (with verbosity == 3).
"""
class SillyModel(models.Model):
silly_field = models.BooleanField(default=False)
silly_char = models.CharField(default="")
class Meta:
app_label = "migrations"
out = io.StringIO()
with self.temporary_migration_module(module="migrations.test_migrations_no_default"):
call_command("makemigrations", "migrations", dry_run=True, stdout=out, verbosity=3)
# Normal --dry-run output
self.assertIn("- Add field silly_char to sillymodel", out.getvalue())
# Additional output caused by verbosity 3
# The complete migrations file that would be written
self.assertIn("class Migration(migrations.Migration):", out.getvalue())
self.assertIn("dependencies = [", out.getvalue())
self.assertIn("('migrations', '0001_initial'),", out.getvalue())
self.assertIn("migrations.AddField(", out.getvalue())
self.assertIn("model_name='sillymodel',", out.getvalue())
self.assertIn("name='silly_char',", out.getvalue())
def test_makemigrations_migrations_modules_path_not_exist(self):
"""
makemigrations creates migrations when specifying a custom location
for migration files using MIGRATION_MODULES if the custom path
doesn't already exist.
"""
class SillyModel(models.Model):
silly_field = models.BooleanField(default=False)
class Meta:
app_label = "migrations"
out = io.StringIO()
migration_module = "migrations.test_migrations_path_doesnt_exist.foo.bar"
with self.temporary_migration_module(module=migration_module) as migration_dir:
call_command("makemigrations", "migrations", stdout=out)
# Migrations file is actually created in the expected path.
initial_file = os.path.join(migration_dir, "0001_initial.py")
self.assertTrue(os.path.exists(initial_file))
# Command output indicates the migration is created.
self.assertIn(" - Create model SillyModel", out.getvalue())
@override_settings(MIGRATION_MODULES={'migrations': 'some.nonexistent.path'})
def test_makemigrations_migrations_modules_nonexistent_toplevel_package(self):
msg = (
'Could not locate an appropriate location to create migrations '
'package some.nonexistent.path. Make sure the toplevel package '
'exists and can be imported.'
)
with self.assertRaisesMessage(ValueError, msg):
call_command('makemigrations', 'migrations', empty=True, verbosity=0)
def test_makemigrations_interactive_by_default(self):
"""
        The user is prompted to merge by default if there are conflicts and
        merge is True. Answering 'N' here distinguishes this behavior from the
        one triggered when --noinput is specified.
"""
# Monkeypatch interactive questioner to auto reject
out = io.StringIO()
with mock.patch('builtins.input', mock.Mock(return_value='N')):
with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir:
call_command("makemigrations", "migrations", name="merge", merge=True, stdout=out)
merge_file = os.path.join(migration_dir, '0003_merge.py')
# This will fail if interactive is False by default
self.assertFalse(os.path.exists(merge_file))
self.assertNotIn("Created new merge migration", out.getvalue())
@override_settings(
INSTALLED_APPS=[
"migrations",
"migrations.migrations_test_apps.unspecified_app_with_conflict"])
def test_makemigrations_unspecified_app_with_conflict_no_merge(self):
"""
makemigrations does not raise a CommandError when an unspecified app
has conflicting migrations.
"""
with self.temporary_migration_module(module="migrations.test_migrations_no_changes"):
call_command("makemigrations", "migrations", merge=False, verbosity=0)
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.migrated_app",
"migrations.migrations_test_apps.unspecified_app_with_conflict"])
def test_makemigrations_unspecified_app_with_conflict_merge(self):
"""
makemigrations does not create a merge for an unspecified app even if
it has conflicting migrations.
"""
# Monkeypatch interactive questioner to auto accept
with mock.patch('builtins.input', mock.Mock(return_value='y')):
out = io.StringIO()
with self.temporary_migration_module(app_label="migrated_app") as migration_dir:
call_command("makemigrations", "migrated_app", name="merge", merge=True, interactive=True, stdout=out)
merge_file = os.path.join(migration_dir, '0003_merge.py')
self.assertFalse(os.path.exists(merge_file))
self.assertIn("No conflicts detected to merge.", out.getvalue())
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.migrated_app",
"migrations.migrations_test_apps.conflicting_app_with_dependencies"])
def test_makemigrations_merge_dont_output_dependency_operations(self):
"""
        makemigrations --merge does not output any operations from apps other
        than the given app.
"""
# Monkeypatch interactive questioner to auto accept
with mock.patch('builtins.input', mock.Mock(return_value='N')):
out = io.StringIO()
with mock.patch('django.core.management.color.supports_color', lambda *args: False):
call_command(
"makemigrations", "conflicting_app_with_dependencies",
merge=True, interactive=True, stdout=out
)
val = out.getvalue().lower()
self.assertIn('merging conflicting_app_with_dependencies\n', val)
self.assertIn(
' branch 0002_conflicting_second\n'
' - create model something\n',
val
)
self.assertIn(
' branch 0002_second\n'
' - delete model tribble\n'
' - remove field silly_field from author\n'
' - add field rating to author\n'
' - create model book\n',
val
)
def test_makemigrations_with_custom_name(self):
"""
        makemigrations --name generates a custom migration name.
"""
with self.temporary_migration_module() as migration_dir:
def cmd(migration_count, migration_name, *args):
call_command("makemigrations", "migrations", "--verbosity", "0", "--name", migration_name, *args)
migration_file = os.path.join(migration_dir, "%s_%s.py" % (migration_count, migration_name))
# Check for existing migration file in migration folder
self.assertTrue(os.path.exists(migration_file))
with open(migration_file, "r", encoding="utf-8") as fp:
content = fp.read()
content = content.replace(" ", "")
return content
# generate an initial migration
migration_name_0001 = "my_initial_migration"
content = cmd("0001", migration_name_0001)
self.assertIn("dependencies=[\n]", content)
# importlib caches os.listdir() on some platforms like macOS
# (#23850).
if hasattr(importlib, 'invalidate_caches'):
importlib.invalidate_caches()
# generate an empty migration
migration_name_0002 = "my_custom_migration"
content = cmd("0002", migration_name_0002, "--empty")
self.assertIn("dependencies=[\n('migrations','0001_%s'),\n]" % migration_name_0001, content)
self.assertIn("operations=[\n]", content)
def test_makemigrations_with_invalid_custom_name(self):
msg = 'The migration name must be a valid Python identifier.'
with self.assertRaisesMessage(CommandError, msg):
call_command('makemigrations', 'migrations', '--name', 'invalid name', '--empty')
def test_makemigrations_check(self):
"""
makemigrations --check should exit with a non-zero status when
there are changes to an app requiring migrations.
"""
with self.temporary_migration_module():
with self.assertRaises(SystemExit):
call_command("makemigrations", "--check", "migrations", verbosity=0)
with self.temporary_migration_module(module="migrations.test_migrations_no_changes"):
call_command("makemigrations", "--check", "migrations", verbosity=0)
def test_makemigrations_migration_path_output(self):
"""
makemigrations should print the relative paths to the migrations unless
they are outside of the current tree, in which case the absolute path
should be shown.
"""
out = io.StringIO()
apps.register_model('migrations', UnicodeModel)
with self.temporary_migration_module() as migration_dir:
call_command("makemigrations", "migrations", stdout=out)
self.assertIn(os.path.join(migration_dir, '0001_initial.py'), out.getvalue())
def test_makemigrations_migration_path_output_valueerror(self):
"""
makemigrations prints the absolute path if os.path.relpath() raises a
ValueError when it's impossible to obtain a relative path, e.g. on
Windows if Django is installed on a different drive than where the
migration files are created.
"""
out = io.StringIO()
with self.temporary_migration_module() as migration_dir:
with mock.patch('os.path.relpath', side_effect=ValueError):
call_command('makemigrations', 'migrations', stdout=out)
self.assertIn(os.path.join(migration_dir, '0001_initial.py'), out.getvalue())
def test_makemigrations_inconsistent_history(self):
"""
        makemigrations should raise an InconsistentMigrationHistory exception
        if some migrations are applied before their dependencies.
"""
recorder = MigrationRecorder(connection)
recorder.record_applied('migrations', '0002_second')
msg = "Migration migrations.0002_second is applied before its dependency migrations.0001_initial"
with self.temporary_migration_module(module="migrations.test_migrations"):
with self.assertRaisesMessage(InconsistentMigrationHistory, msg):
call_command("makemigrations")
@mock.patch('builtins.input', return_value='1')
@mock.patch('django.db.migrations.questioner.sys.stdin', mock.MagicMock(encoding=sys.getdefaultencoding()))
def test_makemigrations_auto_now_add_interactive(self, *args):
"""
makemigrations prompts the user when adding auto_now_add to an existing
model.
"""
class Entry(models.Model):
title = models.CharField(max_length=255)
creation_date = models.DateTimeField(auto_now_add=True)
class Meta:
app_label = 'migrations'
# Monkeypatch interactive questioner to auto accept
with mock.patch('django.db.migrations.questioner.sys.stdout', new_callable=io.StringIO) as prompt_stdout:
out = io.StringIO()
with self.temporary_migration_module(module='migrations.test_auto_now_add'):
call_command('makemigrations', 'migrations', interactive=True, stdout=out)
output = out.getvalue()
prompt_output = prompt_stdout.getvalue()
self.assertIn("You can accept the default 'timezone.now' by pressing 'Enter'", prompt_output)
self.assertIn("Add field creation_date to entry", output)
class SquashMigrationsTests(MigrationTestBase):
"""
Tests running the squashmigrations command.
"""
def test_squashmigrations_squashes(self):
"""
squashmigrations squashes migrations.
"""
with self.temporary_migration_module(module="migrations.test_migrations") as migration_dir:
call_command("squashmigrations", "migrations", "0002", interactive=False, verbosity=0)
squashed_migration_file = os.path.join(migration_dir, "0001_squashed_0002_second.py")
self.assertTrue(os.path.exists(squashed_migration_file))
def test_squashmigrations_initial_attribute(self):
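        """
        Squashing from the initial migration keeps `initial = True` on the
        squashed migration.
        """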
with self.temporary_migration_module(module="migrations.test_migrations") as migration_dir:
call_command("squashmigrations", "migrations", "0002", interactive=False, verbosity=0)
squashed_migration_file = os.path.join(migration_dir, "0001_squashed_0002_second.py")
with open(squashed_migration_file, "r", encoding="utf-8") as fp:
content = fp.read()
self.assertIn("initial = True", content)
def test_squashmigrations_optimizes(self):
"""
squashmigrations optimizes operations.
"""
out = io.StringIO()
with self.temporary_migration_module(module="migrations.test_migrations"):
call_command("squashmigrations", "migrations", "0002", interactive=False, verbosity=1, stdout=out)
self.assertIn("Optimized from 8 operations to 2 operations.", out.getvalue())
def test_ticket_23799_squashmigrations_no_optimize(self):
"""
squashmigrations --no-optimize doesn't optimize operations.
"""
out = io.StringIO()
with self.temporary_migration_module(module="migrations.test_migrations"):
call_command("squashmigrations", "migrations", "0002",
interactive=False, verbosity=1, no_optimize=True, stdout=out)
self.assertIn("Skipping optimization", out.getvalue())
def test_squashmigrations_valid_start(self):
"""
squashmigrations accepts a starting migration.
"""
out = io.StringIO()
with self.temporary_migration_module(module="migrations.test_migrations_no_changes") as migration_dir:
call_command("squashmigrations", "migrations", "0002", "0003",
interactive=False, verbosity=1, stdout=out)
squashed_migration_file = os.path.join(migration_dir, "0002_second_squashed_0003_third.py")
with open(squashed_migration_file, "r", encoding="utf-8") as fp:
content = fp.read()
self.assertIn(" ('migrations', '0001_initial')", content)
self.assertNotIn("initial = True", content)
out = out.getvalue()
self.assertNotIn(" - 0001_initial", out)
self.assertIn(" - 0002_second", out)
self.assertIn(" - 0003_third", out)
def test_squashmigrations_invalid_start(self):
"""
squashmigrations doesn't accept a starting migration after the ending migration.
"""
with self.temporary_migration_module(module="migrations.test_migrations_no_changes"):
msg = (
"The migration 'migrations.0003_third' cannot be found. Maybe "
"it comes after the migration 'migrations.0002_second'"
)
with self.assertRaisesMessage(CommandError, msg):
call_command("squashmigrations", "migrations", "0003", "0002", interactive=False, verbosity=0)
def test_squashed_name_with_start_migration_name(self):
"""--squashed-name specifies the new migration's name."""
squashed_name = 'squashed_name'
with self.temporary_migration_module(module='migrations.test_migrations') as migration_dir:
call_command(
'squashmigrations', 'migrations', '0001', '0002',
squashed_name=squashed_name, interactive=False, verbosity=0,
)
squashed_migration_file = os.path.join(migration_dir, '0001_%s.py' % squashed_name)
self.assertTrue(os.path.exists(squashed_migration_file))
def test_squashed_name_without_start_migration_name(self):
"""--squashed-name also works if a start migration is omitted."""
squashed_name = 'squashed_name'
with self.temporary_migration_module(module="migrations.test_migrations") as migration_dir:
call_command(
'squashmigrations', 'migrations', '0001',
squashed_name=squashed_name, interactive=False, verbosity=0,
)
squashed_migration_file = os.path.join(migration_dir, '0001_%s.py' % squashed_name)
self.assertTrue(os.path.exists(squashed_migration_file))
class AppLabelErrorTests(TestCase):
"""
This class inherits TestCase because MigrationTestBase uses
    `available_apps = ['migrations']`, which means that it's the only installed
app. 'django.contrib.auth' must be in INSTALLED_APPS for some of these
tests.
"""
nonexistent_app_error = "No installed app with label 'nonexistent_app'."
did_you_mean_auth_error = (
"No installed app with label 'django.contrib.auth'. Did you mean "
"'auth'?"
)
def test_makemigrations_nonexistent_app_label(self):
err = io.StringIO()
with self.assertRaises(SystemExit):
call_command('makemigrations', 'nonexistent_app', stderr=err)
self.assertIn(self.nonexistent_app_error, err.getvalue())
def test_makemigrations_app_name_specified_as_label(self):
err = io.StringIO()
with self.assertRaises(SystemExit):
call_command('makemigrations', 'django.contrib.auth', stderr=err)
self.assertIn(self.did_you_mean_auth_error, err.getvalue())
def test_migrate_nonexistent_app_label(self):
with self.assertRaisesMessage(CommandError, self.nonexistent_app_error):
call_command('migrate', 'nonexistent_app')
def test_migrate_app_name_specified_as_label(self):
with self.assertRaisesMessage(CommandError, self.did_you_mean_auth_error):
call_command('migrate', 'django.contrib.auth')
def test_showmigrations_nonexistent_app_label(self):
err = io.StringIO()
with self.assertRaises(SystemExit):
call_command('showmigrations', 'nonexistent_app', stderr=err)
self.assertIn(self.nonexistent_app_error, err.getvalue())
def test_showmigrations_app_name_specified_as_label(self):
err = io.StringIO()
with self.assertRaises(SystemExit):
call_command('showmigrations', 'django.contrib.auth', stderr=err)
self.assertIn(self.did_you_mean_auth_error, err.getvalue())
def test_sqlmigrate_nonexistent_app_label(self):
with self.assertRaisesMessage(CommandError, self.nonexistent_app_error):
call_command('sqlmigrate', 'nonexistent_app', '0002')
def test_sqlmigrate_app_name_specified_as_label(self):
with self.assertRaisesMessage(CommandError, self.did_you_mean_auth_error):
call_command('sqlmigrate', 'django.contrib.auth', '0002')
def test_squashmigrations_nonexistent_app_label(self):
with self.assertRaisesMessage(CommandError, self.nonexistent_app_error):
call_command('squashmigrations', 'nonexistent_app', '0002')
def test_squashmigrations_app_name_specified_as_label(self):
with self.assertRaisesMessage(CommandError, self.did_you_mean_auth_error):
call_command('squashmigrations', 'django.contrib.auth', '0002')
|
2a67c92c11deae99a465ba61a0f54839b81ae375f5e1236d98cff879af5c8df2 | import datetime
import decimal
import enum
import functools
import math
import os
import re
import uuid
from unittest import mock
import custom_migration_operations.more_operations
import custom_migration_operations.operations
from django import get_version
from django.conf import SettingsReference, settings
from django.core.validators import EmailValidator, RegexValidator
from django.db import migrations, models
from django.db.migrations.serializer import BaseSerializer
from django.db.migrations.writer import MigrationWriter, OperationWriter
from django.test import SimpleTestCase
from django.utils.deconstruct import deconstructible
from django.utils.functional import SimpleLazyObject
from django.utils.timezone import get_default_timezone, get_fixed_timezone, utc
from django.utils.translation import gettext_lazy as _
from django.utils.version import PY36
from .models import FoodManager, FoodQuerySet
class Money(decimal.Decimal):
def deconstruct(self):
return (
'%s.%s' % (self.__class__.__module__, self.__class__.__name__),
[str(self)],
{}
)
class TestModel1:
def upload_to(self):
return '/somewhere/dynamic/'
thing = models.FileField(upload_to=upload_to)
class OperationWriterTests(SimpleTestCase):
def test_empty_signature(self):
operation = custom_migration_operations.operations.TestOperation()
buff, imports = OperationWriter(operation, indentation=0).serialize()
self.assertEqual(imports, {'import custom_migration_operations.operations'})
self.assertEqual(
buff,
'custom_migration_operations.operations.TestOperation(\n'
'),'
)
def test_args_signature(self):
operation = custom_migration_operations.operations.ArgsOperation(1, 2)
buff, imports = OperationWriter(operation, indentation=0).serialize()
self.assertEqual(imports, {'import custom_migration_operations.operations'})
self.assertEqual(
buff,
'custom_migration_operations.operations.ArgsOperation(\n'
' arg1=1,\n'
' arg2=2,\n'
'),'
)
def test_kwargs_signature(self):
operation = custom_migration_operations.operations.KwargsOperation(kwarg1=1)
buff, imports = OperationWriter(operation, indentation=0).serialize()
self.assertEqual(imports, {'import custom_migration_operations.operations'})
self.assertEqual(
buff,
'custom_migration_operations.operations.KwargsOperation(\n'
' kwarg1=1,\n'
'),'
)
def test_args_kwargs_signature(self):
operation = custom_migration_operations.operations.ArgsKwargsOperation(1, 2, kwarg2=4)
buff, imports = OperationWriter(operation, indentation=0).serialize()
self.assertEqual(imports, {'import custom_migration_operations.operations'})
self.assertEqual(
buff,
'custom_migration_operations.operations.ArgsKwargsOperation(\n'
' arg1=1,\n'
' arg2=2,\n'
' kwarg2=4,\n'
'),'
)
def test_nested_args_signature(self):
operation = custom_migration_operations.operations.ArgsOperation(
custom_migration_operations.operations.ArgsOperation(1, 2),
custom_migration_operations.operations.KwargsOperation(kwarg1=3, kwarg2=4)
)
buff, imports = OperationWriter(operation, indentation=0).serialize()
self.assertEqual(imports, {'import custom_migration_operations.operations'})
self.assertEqual(
buff,
'custom_migration_operations.operations.ArgsOperation(\n'
' arg1=custom_migration_operations.operations.ArgsOperation(\n'
' arg1=1,\n'
' arg2=2,\n'
' ),\n'
' arg2=custom_migration_operations.operations.KwargsOperation(\n'
' kwarg1=3,\n'
' kwarg2=4,\n'
' ),\n'
'),'
)
def test_multiline_args_signature(self):
operation = custom_migration_operations.operations.ArgsOperation("test\n arg1", "test\narg2")
buff, imports = OperationWriter(operation, indentation=0).serialize()
self.assertEqual(imports, {'import custom_migration_operations.operations'})
self.assertEqual(
buff,
"custom_migration_operations.operations.ArgsOperation(\n"
" arg1='test\\n arg1',\n"
" arg2='test\\narg2',\n"
"),"
)
def test_expand_args_signature(self):
operation = custom_migration_operations.operations.ExpandArgsOperation([1, 2])
buff, imports = OperationWriter(operation, indentation=0).serialize()
self.assertEqual(imports, {'import custom_migration_operations.operations'})
self.assertEqual(
buff,
'custom_migration_operations.operations.ExpandArgsOperation(\n'
' arg=[\n'
' 1,\n'
' 2,\n'
' ],\n'
'),'
)
def test_nested_operation_expand_args_signature(self):
operation = custom_migration_operations.operations.ExpandArgsOperation(
arg=[
custom_migration_operations.operations.KwargsOperation(
kwarg1=1,
kwarg2=2,
),
]
)
buff, imports = OperationWriter(operation, indentation=0).serialize()
self.assertEqual(imports, {'import custom_migration_operations.operations'})
self.assertEqual(
buff,
'custom_migration_operations.operations.ExpandArgsOperation(\n'
' arg=[\n'
' custom_migration_operations.operations.KwargsOperation(\n'
' kwarg1=1,\n'
' kwarg2=2,\n'
' ),\n'
' ],\n'
'),'
)
class WriterTests(SimpleTestCase):
"""
Tests the migration writer (makes migration files from Migration instances)
"""
def safe_exec(self, string, value=None):
d = {}
try:
exec(string, globals(), d)
except Exception as e:
if value:
self.fail("Could not exec %r (from value %r): %s" % (string.strip(), value, e))
else:
self.fail("Could not exec %r: %s" % (string.strip(), e))
return d
def serialize_round_trip(self, value):
string, imports = MigrationWriter.serialize(value)
return self.safe_exec("%s\ntest_value_result = %s" % ("\n".join(imports), string), value)['test_value_result']
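        # For example, serialize_round_trip(decimal.Decimal('1.3')) execs
        # "from decimal import Decimal" followed by
        # "test_value_result = Decimal('1.3')" and returns that Decimal.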
def assertSerializedEqual(self, value):
self.assertEqual(self.serialize_round_trip(value), value)
def assertSerializedResultEqual(self, value, target):
self.assertEqual(MigrationWriter.serialize(value), target)
def assertSerializedFieldEqual(self, value):
new_value = self.serialize_round_trip(value)
self.assertEqual(value.__class__, new_value.__class__)
self.assertEqual(value.max_length, new_value.max_length)
self.assertEqual(value.null, new_value.null)
self.assertEqual(value.unique, new_value.unique)
def test_serialize_numbers(self):
self.assertSerializedEqual(1)
self.assertSerializedEqual(1.2)
self.assertTrue(math.isinf(self.serialize_round_trip(float("inf"))))
self.assertTrue(math.isinf(self.serialize_round_trip(float("-inf"))))
self.assertTrue(math.isnan(self.serialize_round_trip(float("nan"))))
self.assertSerializedEqual(decimal.Decimal('1.3'))
self.assertSerializedResultEqual(
decimal.Decimal('1.3'),
("Decimal('1.3')", {'from decimal import Decimal'})
)
self.assertSerializedEqual(Money('1.3'))
self.assertSerializedResultEqual(
Money('1.3'),
("migrations.test_writer.Money('1.3')", {'import migrations.test_writer'})
)
def test_serialize_constants(self):
self.assertSerializedEqual(None)
self.assertSerializedEqual(True)
self.assertSerializedEqual(False)
def test_serialize_strings(self):
self.assertSerializedEqual(b"foobar")
string, imports = MigrationWriter.serialize(b"foobar")
self.assertEqual(string, "b'foobar'")
self.assertSerializedEqual("föobár")
string, imports = MigrationWriter.serialize("foobar")
self.assertEqual(string, "'foobar'")
def test_serialize_multiline_strings(self):
self.assertSerializedEqual(b"foo\nbar")
string, imports = MigrationWriter.serialize(b"foo\nbar")
self.assertEqual(string, "b'foo\\nbar'")
self.assertSerializedEqual("föo\nbár")
string, imports = MigrationWriter.serialize("foo\nbar")
self.assertEqual(string, "'foo\\nbar'")
def test_serialize_collections(self):
self.assertSerializedEqual({1: 2})
self.assertSerializedEqual(["a", 2, True, None])
self.assertSerializedEqual({2, 3, "eighty"})
self.assertSerializedEqual({"lalalala": ["yeah", "no", "maybe"]})
self.assertSerializedEqual(_('Hello'))
def test_serialize_builtin_types(self):
self.assertSerializedEqual([list, tuple, dict, set, frozenset])
self.assertSerializedResultEqual(
[list, tuple, dict, set, frozenset],
("[list, tuple, dict, set, frozenset]", set())
)
def test_serialize_lazy_objects(self):
pattern = re.compile(r'^foo$')
lazy_pattern = SimpleLazyObject(lambda: pattern)
self.assertEqual(self.serialize_round_trip(lazy_pattern), pattern)
def test_serialize_enums(self):
class TextEnum(enum.Enum):
A = 'a-value'
B = 'value-b'
class BinaryEnum(enum.Enum):
A = b'a-value'
B = b'value-b'
class IntEnum(enum.IntEnum):
A = 1
B = 2
self.assertSerializedResultEqual(
TextEnum.A,
("migrations.test_writer.TextEnum('a-value')", {'import migrations.test_writer'})
)
self.assertSerializedResultEqual(
BinaryEnum.A,
("migrations.test_writer.BinaryEnum(b'a-value')", {'import migrations.test_writer'})
)
self.assertSerializedResultEqual(
IntEnum.B,
("migrations.test_writer.IntEnum(2)", {'import migrations.test_writer'})
)
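        # A field whose default or choices contain enum members serializes each
        # member as an EnumClass(value) call, as the assertions below show.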
field = models.CharField(default=TextEnum.B, choices=[(m.value, m) for m in TextEnum])
string = MigrationWriter.serialize(field)[0]
self.assertEqual(
string,
"models.CharField(choices=["
"('a-value', migrations.test_writer.TextEnum('a-value')), "
"('value-b', migrations.test_writer.TextEnum('value-b'))], "
"default=migrations.test_writer.TextEnum('value-b'))"
)
field = models.CharField(default=BinaryEnum.B, choices=[(m.value, m) for m in BinaryEnum])
string = MigrationWriter.serialize(field)[0]
self.assertEqual(
string,
"models.CharField(choices=["
"(b'a-value', migrations.test_writer.BinaryEnum(b'a-value')), "
"(b'value-b', migrations.test_writer.BinaryEnum(b'value-b'))], "
"default=migrations.test_writer.BinaryEnum(b'value-b'))"
)
field = models.IntegerField(default=IntEnum.A, choices=[(m.value, m) for m in IntEnum])
string = MigrationWriter.serialize(field)[0]
self.assertEqual(
string,
"models.IntegerField(choices=["
"(1, migrations.test_writer.IntEnum(1)), "
"(2, migrations.test_writer.IntEnum(2))], "
"default=migrations.test_writer.IntEnum(1))"
)
def test_serialize_uuid(self):
self.assertSerializedEqual(uuid.uuid1())
self.assertSerializedEqual(uuid.uuid4())
uuid_a = uuid.UUID('5c859437-d061-4847-b3f7-e6b78852f8c8')
uuid_b = uuid.UUID('c7853ec1-2ea3-4359-b02d-b54e8f1bcee2')
self.assertSerializedResultEqual(
uuid_a,
("uuid.UUID('5c859437-d061-4847-b3f7-e6b78852f8c8')", {'import uuid'})
)
self.assertSerializedResultEqual(
uuid_b,
("uuid.UUID('c7853ec1-2ea3-4359-b02d-b54e8f1bcee2')", {'import uuid'})
)
field = models.UUIDField(choices=((uuid_a, 'UUID A'), (uuid_b, 'UUID B')), default=uuid_a)
string = MigrationWriter.serialize(field)[0]
self.assertEqual(
string,
"models.UUIDField(choices=["
"(uuid.UUID('5c859437-d061-4847-b3f7-e6b78852f8c8'), 'UUID A'), "
"(uuid.UUID('c7853ec1-2ea3-4359-b02d-b54e8f1bcee2'), 'UUID B')], "
"default=uuid.UUID('5c859437-d061-4847-b3f7-e6b78852f8c8'))"
)
def test_serialize_functions(self):
with self.assertRaisesMessage(ValueError, 'Cannot serialize function: lambda'):
self.assertSerializedEqual(lambda x: 42)
self.assertSerializedEqual(models.SET_NULL)
string, imports = MigrationWriter.serialize(models.SET(42))
self.assertEqual(string, 'models.SET(42)')
self.serialize_round_trip(models.SET(42))
def test_serialize_datetime(self):
self.assertSerializedEqual(datetime.datetime.utcnow())
self.assertSerializedEqual(datetime.datetime.utcnow)
self.assertSerializedEqual(datetime.datetime.today())
self.assertSerializedEqual(datetime.datetime.today)
self.assertSerializedEqual(datetime.date.today())
self.assertSerializedEqual(datetime.date.today)
self.assertSerializedEqual(datetime.datetime.now().time())
self.assertSerializedEqual(datetime.datetime(2014, 1, 1, 1, 1, tzinfo=get_default_timezone()))
self.assertSerializedEqual(datetime.datetime(2013, 12, 31, 22, 1, tzinfo=get_fixed_timezone(180)))
self.assertSerializedResultEqual(
datetime.datetime(2014, 1, 1, 1, 1),
("datetime.datetime(2014, 1, 1, 1, 1)", {'import datetime'})
)
self.assertSerializedResultEqual(
datetime.datetime(2012, 1, 1, 1, 1, tzinfo=utc),
(
"datetime.datetime(2012, 1, 1, 1, 1, tzinfo=utc)",
{'import datetime', 'from django.utils.timezone import utc'},
)
)
def test_serialize_fields(self):
self.assertSerializedFieldEqual(models.CharField(max_length=255))
self.assertSerializedResultEqual(
models.CharField(max_length=255),
("models.CharField(max_length=255)", {"from django.db import models"})
)
self.assertSerializedFieldEqual(models.TextField(null=True, blank=True))
self.assertSerializedResultEqual(
models.TextField(null=True, blank=True),
("models.TextField(blank=True, null=True)", {'from django.db import models'})
)
def test_serialize_settings(self):
self.assertSerializedEqual(SettingsReference(settings.AUTH_USER_MODEL, "AUTH_USER_MODEL"))
self.assertSerializedResultEqual(
SettingsReference("someapp.model", "AUTH_USER_MODEL"),
("settings.AUTH_USER_MODEL", {"from django.conf import settings"})
)
def test_serialize_iterators(self):
self.assertSerializedResultEqual(
((x, x * x) for x in range(3)),
("((0, 0), (1, 1), (2, 4))", set())
)
def test_serialize_compiled_regex(self):
"""
Make sure compiled regex can be serialized.
"""
regex = re.compile(r'^\w+$')
self.assertSerializedEqual(regex)
def test_serialize_class_based_validators(self):
"""
Ticket #22943: Test serialization of class-based validators, including
compiled regexes.
"""
validator = RegexValidator(message="hello")
string = MigrationWriter.serialize(validator)[0]
self.assertEqual(string, "django.core.validators.RegexValidator(message='hello')")
self.serialize_round_trip(validator)
# Test with a compiled regex.
validator = RegexValidator(regex=re.compile(r'^\w+$'))
string = MigrationWriter.serialize(validator)[0]
self.assertEqual(string, "django.core.validators.RegexValidator(regex=re.compile('^\\\\w+$'))")
self.serialize_round_trip(validator)
# Test a string regex with flag
validator = RegexValidator(r'^[0-9]+$', flags=re.S)
string = MigrationWriter.serialize(validator)[0]
if PY36:
self.assertEqual(string, "django.core.validators.RegexValidator('^[0-9]+$', flags=re.RegexFlag(16))")
else:
self.assertEqual(string, "django.core.validators.RegexValidator('^[0-9]+$', flags=16)")
self.serialize_round_trip(validator)
# Test message and code
validator = RegexValidator('^[-a-zA-Z0-9_]+$', 'Invalid', 'invalid')
string = MigrationWriter.serialize(validator)[0]
self.assertEqual(string, "django.core.validators.RegexValidator('^[-a-zA-Z0-9_]+$', 'Invalid', 'invalid')")
self.serialize_round_trip(validator)
# Test with a subclass.
validator = EmailValidator(message="hello")
string = MigrationWriter.serialize(validator)[0]
self.assertEqual(string, "django.core.validators.EmailValidator(message='hello')")
self.serialize_round_trip(validator)
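        # @deconstructible(path=...) overrides the dotted path written by the
        # serializer; a path whose module can't be imported raises ImportError,
        # and a missing attribute in an importable module raises ValueError.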
validator = deconstructible(path="migrations.test_writer.EmailValidator")(EmailValidator)(message="hello")
string = MigrationWriter.serialize(validator)[0]
self.assertEqual(string, "migrations.test_writer.EmailValidator(message='hello')")
validator = deconstructible(path="custom.EmailValidator")(EmailValidator)(message="hello")
with self.assertRaisesMessage(ImportError, "No module named 'custom'"):
MigrationWriter.serialize(validator)
validator = deconstructible(path="django.core.validators.EmailValidator2")(EmailValidator)(message="hello")
with self.assertRaisesMessage(ValueError, "Could not find object EmailValidator2 in django.core.validators."):
MigrationWriter.serialize(validator)
def test_serialize_empty_nonempty_tuple(self):
"""
Ticket #22679: makemigrations generates invalid code for (an empty
tuple) default_permissions = ()
"""
empty_tuple = ()
one_item_tuple = ('a',)
many_items_tuple = ('a', 'b', 'c')
self.assertSerializedEqual(empty_tuple)
self.assertSerializedEqual(one_item_tuple)
self.assertSerializedEqual(many_items_tuple)
def test_serialize_builtins(self):
string, imports = MigrationWriter.serialize(range)
self.assertEqual(string, 'range')
self.assertEqual(imports, set())
def test_serialize_unbound_method_reference(self):
"""An unbound method used within a class body can be serialized."""
self.serialize_round_trip(TestModel1.thing)
def test_serialize_local_function_reference(self):
"""A reference in a local scope can't be serialized."""
class TestModel2:
def upload_to(self):
return "somewhere dynamic"
thing = models.FileField(upload_to=upload_to)
with self.assertRaisesMessage(ValueError, 'Could not find function upload_to in migrations.test_writer'):
self.serialize_round_trip(TestModel2.thing)
def test_serialize_managers(self):
self.assertSerializedEqual(models.Manager())
self.assertSerializedResultEqual(
FoodQuerySet.as_manager(),
('migrations.models.FoodQuerySet.as_manager()', {'import migrations.models'})
)
self.assertSerializedEqual(FoodManager('a', 'b'))
self.assertSerializedEqual(FoodManager('x', 'y', c=3, d=4))
def test_serialize_frozensets(self):
self.assertSerializedEqual(frozenset())
self.assertSerializedEqual(frozenset("let it go"))
def test_serialize_set(self):
self.assertSerializedEqual(set())
self.assertSerializedResultEqual(set(), ('set()', set()))
self.assertSerializedEqual({'a'})
self.assertSerializedResultEqual({'a'}, ("{'a'}", set()))
def test_serialize_timedelta(self):
self.assertSerializedEqual(datetime.timedelta())
self.assertSerializedEqual(datetime.timedelta(minutes=42))
def test_serialize_functools_partial(self):
value = functools.partial(datetime.timedelta, 1, seconds=2)
result = self.serialize_round_trip(value)
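        # functools.partial objects don't define __eq__, so compare the
        # func/args/keywords attributes individually.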
self.assertEqual(result.func, value.func)
self.assertEqual(result.args, value.args)
self.assertEqual(result.keywords, value.keywords)
def test_serialize_functools_partialmethod(self):
value = functools.partialmethod(datetime.timedelta, 1, seconds=2)
result = self.serialize_round_trip(value)
self.assertIsInstance(result, functools.partialmethod)
self.assertEqual(result.func, value.func)
self.assertEqual(result.args, value.args)
self.assertEqual(result.keywords, value.keywords)
def test_serialize_type_none(self):
self.assertSerializedEqual(type(None))
def test_simple_migration(self):
"""
Tests serializing a simple migration.
"""
fields = {
'charfield': models.DateTimeField(default=datetime.datetime.utcnow),
'datetimefield': models.DateTimeField(default=datetime.datetime.utcnow),
}
options = {
'verbose_name': 'My model',
'verbose_name_plural': 'My models',
}
migration = type("Migration", (migrations.Migration,), {
"operations": [
migrations.CreateModel("MyModel", tuple(fields.items()), options, (models.Model,)),
migrations.CreateModel("MyModel2", tuple(fields.items()), bases=(models.Model,)),
migrations.CreateModel(
name="MyModel3", fields=tuple(fields.items()), options=options, bases=(models.Model,)
),
migrations.DeleteModel("MyModel"),
migrations.AddField("OtherModel", "datetimefield", fields["datetimefield"]),
],
"dependencies": [("testapp", "some_other_one")],
})
writer = MigrationWriter(migration)
output = writer.as_string()
# We don't test the output formatting - that's too fragile.
# Just make sure it runs for now, and that things look alright.
result = self.safe_exec(output)
self.assertIn("Migration", result)
def test_migration_path(self):
test_apps = [
'migrations.migrations_test_apps.normal',
'migrations.migrations_test_apps.with_package_model',
'migrations.migrations_test_apps.without_init_file',
]
base_dir = os.path.dirname(os.path.dirname(__file__))
for app in test_apps:
with self.modify_settings(INSTALLED_APPS={'append': app}):
migration = migrations.Migration('0001_initial', app.split('.')[-1])
expected_path = os.path.join(base_dir, *(app.split('.') + ['migrations', '0001_initial.py']))
writer = MigrationWriter(migration)
self.assertEqual(writer.path, expected_path)
def test_custom_operation(self):
migration = type("Migration", (migrations.Migration,), {
"operations": [
custom_migration_operations.operations.TestOperation(),
custom_migration_operations.operations.CreateModel(),
migrations.CreateModel("MyModel", (), {}, (models.Model,)),
custom_migration_operations.more_operations.TestOperation()
],
"dependencies": []
})
writer = MigrationWriter(migration)
output = writer.as_string()
result = self.safe_exec(output)
self.assertIn("custom_migration_operations", result)
self.assertNotEqual(
result['custom_migration_operations'].operations.TestOperation,
result['custom_migration_operations'].more_operations.TestOperation
)
def test_sorted_imports(self):
"""
#24155 - Tests ordering of imports.
"""
migration = type("Migration", (migrations.Migration,), {
"operations": [
migrations.AddField("mymodel", "myfield", models.DateTimeField(
default=datetime.datetime(2012, 1, 1, 1, 1, tzinfo=utc),
)),
]
})
writer = MigrationWriter(migration)
output = writer.as_string()
self.assertIn(
"import datetime\n"
"from django.db import migrations, models\n"
"from django.utils.timezone import utc\n",
output
)
def test_migration_file_header_comments(self):
"""
Test comments at top of file.
"""
migration = type("Migration", (migrations.Migration,), {
"operations": []
})
dt = datetime.datetime(2015, 7, 31, 4, 40, 0, 0, tzinfo=utc)
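        # The header timestamp comes from django.db.migrations.writer.now, so
        # mock it to make the generated comment deterministic.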
with mock.patch('django.db.migrations.writer.now', lambda: dt):
for include_header in (True, False):
with self.subTest(include_header=include_header):
writer = MigrationWriter(migration, include_header)
output = writer.as_string()
self.assertEqual(
include_header,
output.startswith(
"# Generated by Django %s on 2015-07-31 04:40\n\n" % get_version()
)
)
if not include_header:
# Make sure the output starts with something that's not
# a comment or indentation or blank line
self.assertRegex(output.splitlines(keepends=True)[0], r"^[^#\s]+")
def test_models_import_omitted(self):
"""
django.db.models shouldn't be imported if unused.
"""
migration = type("Migration", (migrations.Migration,), {
"operations": [
migrations.AlterModelOptions(
name='model',
options={'verbose_name': 'model', 'verbose_name_plural': 'models'},
),
]
})
writer = MigrationWriter(migration)
output = writer.as_string()
self.assertIn("from django.db import migrations\n", output)
def test_deconstruct_class_arguments(self):
# Yes, it doesn't make sense to use a class as a default for a
# CharField. It does make sense for custom fields though, for example
# an enumfield that takes the enum class as an argument.
class DeconstructibleInstances:
def deconstruct(self):
return ('DeconstructibleInstances', [], {})
string = MigrationWriter.serialize(models.CharField(default=DeconstructibleInstances))[0]
self.assertEqual(string, "models.CharField(default=migrations.test_writer.DeconstructibleInstances)")
def test_register_serializer(self):
class ComplexSerializer(BaseSerializer):
def serialize(self):
return 'complex(%r)' % self.value, {}
MigrationWriter.register_serializer(complex, ComplexSerializer)
self.assertSerializedEqual(complex(1, 2))
MigrationWriter.unregister_serializer(complex)
with self.assertRaisesMessage(ValueError, 'Cannot serialize: (1+2j)'):
self.assertSerializedEqual(complex(1, 2))
def test_register_non_serializer(self):
with self.assertRaisesMessage(ValueError, "'TestModel1' must inherit from 'BaseSerializer'."):
MigrationWriter.register_serializer(complex, TestModel1)
|
8df93b930a777084c5b4de7eb8e31e1585c363b4d3bd1f3f64ff94e0eeaf4c2d | import compileall
import os
from django.db import connection, connections
from django.db.migrations.exceptions import (
AmbiguityError, InconsistentMigrationHistory, NodeNotFoundError,
)
from django.db.migrations.loader import MigrationLoader
from django.db.migrations.recorder import MigrationRecorder
from django.test import TestCase, modify_settings, override_settings
from .test_base import MigrationTestBase
class RecorderTests(TestCase):
"""
Tests recording migrations as applied or not.
"""
databases = {'default', 'other'}
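    # Opting into both database aliases lets test_apply check that recording a
    # migration on 'default' doesn't leak into the 'other' connection.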
def test_apply(self):
"""
Tests marking migrations as applied/unapplied.
"""
recorder = MigrationRecorder(connection)
self.assertEqual(
{(x, y) for (x, y) in recorder.applied_migrations() if x == "myapp"},
set(),
)
recorder.record_applied("myapp", "0432_ponies")
self.assertEqual(
{(x, y) for (x, y) in recorder.applied_migrations() if x == "myapp"},
{("myapp", "0432_ponies")},
)
        # This should not affect the records of another database.
recorder_other = MigrationRecorder(connections['other'])
self.assertEqual(
{(x, y) for (x, y) in recorder_other.applied_migrations() if x == "myapp"},
set(),
)
recorder.record_unapplied("myapp", "0432_ponies")
self.assertEqual(
{(x, y) for (x, y) in recorder.applied_migrations() if x == "myapp"},
set(),
)
class LoaderTests(TestCase):
"""
Tests the disk and database loader, and running through migrations
in memory.
"""
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
@modify_settings(INSTALLED_APPS={'append': 'basic'})
def test_load(self):
"""
Makes sure the loader can load the migrations for the test apps,
and then render them out to a new Apps.
"""
# Load and test the plan
migration_loader = MigrationLoader(connection)
self.assertEqual(
migration_loader.graph.forwards_plan(("migrations", "0002_second")),
[
("migrations", "0001_initial"),
("migrations", "0002_second"),
],
)
# Now render it out!
project_state = migration_loader.project_state(("migrations", "0002_second"))
self.assertEqual(len(project_state.models), 2)
author_state = project_state.models["migrations", "author"]
self.assertEqual(
[x for x, y in author_state.fields],
["id", "name", "slug", "age", "rating"]
)
book_state = project_state.models["migrations", "book"]
self.assertEqual(
[x for x, y in book_state.fields],
["id", "author"]
)
# Ensure we've included unmigrated apps in there too
self.assertIn("basic", project_state.real_apps)
@override_settings(MIGRATION_MODULES={
'migrations': 'migrations.test_migrations',
'migrations2': 'migrations2.test_migrations_2',
})
@modify_settings(INSTALLED_APPS={'append': 'migrations2'})
def test_plan_handles_repeated_migrations(self):
"""
        _generate_plan() doesn't re-add migrations already in the plan (#29180).
"""
migration_loader = MigrationLoader(connection)
nodes = [('migrations', '0002_second'), ('migrations2', '0001_initial')]
self.assertEqual(
migration_loader.graph._generate_plan(nodes, at_end=True),
[('migrations', '0001_initial'), ('migrations', '0002_second'), ('migrations2', '0001_initial')]
)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_unmigdep"})
def test_load_unmigrated_dependency(self):
"""
Makes sure the loader can load migrations with a dependency on an unmigrated app.
"""
# Load and test the plan
migration_loader = MigrationLoader(connection)
self.assertEqual(
migration_loader.graph.forwards_plan(("migrations", "0001_initial")),
[
('contenttypes', '0001_initial'),
('auth', '0001_initial'),
("migrations", "0001_initial"),
],
)
# Now render it out!
project_state = migration_loader.project_state(("migrations", "0001_initial"))
self.assertEqual(len([m for a, m in project_state.models if a == "migrations"]), 1)
book_state = project_state.models["migrations", "book"]
self.assertEqual(
[x for x, y in book_state.fields],
["id", "user"]
)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_run_before"})
def test_run_before(self):
"""
Makes sure the loader uses Migration.run_before.
"""
# Load and test the plan
migration_loader = MigrationLoader(connection)
self.assertEqual(
migration_loader.graph.forwards_plan(("migrations", "0002_second")),
[
("migrations", "0001_initial"),
("migrations", "0003_third"),
("migrations", "0002_second"),
],
)
@override_settings(MIGRATION_MODULES={
"migrations": "migrations.test_migrations_first",
"migrations2": "migrations2.test_migrations_2_first",
})
@modify_settings(INSTALLED_APPS={'append': 'migrations2'})
def test_first(self):
"""
Makes sure the '__first__' migrations build correctly.
"""
migration_loader = MigrationLoader(connection)
self.assertEqual(
migration_loader.graph.forwards_plan(("migrations", "second")),
[
("migrations", "thefirst"),
("migrations2", "0001_initial"),
("migrations2", "0002_second"),
("migrations", "second"),
],
)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_name_match(self):
"Tests prefix name matching"
migration_loader = MigrationLoader(connection)
self.assertEqual(
migration_loader.get_migration_by_prefix("migrations", "0001").name,
"0001_initial",
)
with self.assertRaises(AmbiguityError):
migration_loader.get_migration_by_prefix("migrations", "0")
with self.assertRaises(KeyError):
migration_loader.get_migration_by_prefix("migrations", "blarg")
def test_load_import_error(self):
with override_settings(MIGRATION_MODULES={"migrations": "import_error_package"}):
with self.assertRaises(ImportError):
MigrationLoader(connection)
def test_load_module_file(self):
with override_settings(MIGRATION_MODULES={"migrations": "migrations.faulty_migrations.file"}):
loader = MigrationLoader(connection)
self.assertIn(
"migrations", loader.unmigrated_apps,
"App with migrations module file not in unmigrated apps."
)
def test_load_empty_dir(self):
with override_settings(MIGRATION_MODULES={"migrations": "migrations.faulty_migrations.namespace"}):
loader = MigrationLoader(connection)
self.assertIn(
"migrations", loader.unmigrated_apps,
"App missing __init__.py in migrations module not in unmigrated apps."
)
@override_settings(
INSTALLED_APPS=['migrations.migrations_test_apps.migrated_app'],
)
def test_marked_as_migrated(self):
"""
Undefined MIGRATION_MODULES implies default migration module.
"""
migration_loader = MigrationLoader(connection)
self.assertEqual(migration_loader.migrated_apps, {'migrated_app'})
self.assertEqual(migration_loader.unmigrated_apps, set())
@override_settings(
INSTALLED_APPS=['migrations.migrations_test_apps.migrated_app'],
MIGRATION_MODULES={"migrated_app": None},
)
def test_marked_as_unmigrated(self):
"""
MIGRATION_MODULES allows disabling of migrations for a particular app.
"""
migration_loader = MigrationLoader(connection)
self.assertEqual(migration_loader.migrated_apps, set())
self.assertEqual(migration_loader.unmigrated_apps, {'migrated_app'})
@override_settings(
INSTALLED_APPS=['migrations.migrations_test_apps.migrated_app'],
MIGRATION_MODULES={'migrated_app': 'missing-module'},
)
def test_explicit_missing_module(self):
"""
If a MIGRATION_MODULES override points to a missing module, the error
raised during the importation attempt should be propagated unless
`ignore_no_migrations=True`.
"""
with self.assertRaisesMessage(ImportError, 'missing-module'):
migration_loader = MigrationLoader(connection)
migration_loader = MigrationLoader(connection, ignore_no_migrations=True)
self.assertEqual(migration_loader.migrated_apps, set())
self.assertEqual(migration_loader.unmigrated_apps, {'migrated_app'})
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"})
def test_loading_squashed(self):
"Tests loading a squashed migration"
migration_loader = MigrationLoader(connection)
recorder = MigrationRecorder(connection)
self.addCleanup(recorder.flush)
# Loading with nothing applied should just give us the one node
self.assertEqual(
len([x for x in migration_loader.graph.nodes if x[0] == "migrations"]),
1,
)
# However, fake-apply one migration and it should now use the old two
recorder.record_applied("migrations", "0001_initial")
migration_loader.build_graph()
self.assertEqual(
len([x for x in migration_loader.graph.nodes if x[0] == "migrations"]),
2,
)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed_complex"})
def test_loading_squashed_complex(self):
"Tests loading a complex set of squashed migrations"
loader = MigrationLoader(connection)
recorder = MigrationRecorder(connection)
self.addCleanup(recorder.flush)
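        # num_nodes() counts how many migrations in the forwards plan to
        # ('migrations', '7_auto') are still unapplied after each graph rebuild.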
def num_nodes():
plan = set(loader.graph.forwards_plan(('migrations', '7_auto')))
return len(plan - loader.applied_migrations)
# Empty database: use squashed migration
loader.build_graph()
self.assertEqual(num_nodes(), 5)
# Starting at 1 or 2 should use the squashed migration too
recorder.record_applied("migrations", "1_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 4)
recorder.record_applied("migrations", "2_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 3)
# However, starting at 3 to 5 cannot use the squashed migration
recorder.record_applied("migrations", "3_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 4)
recorder.record_applied("migrations", "4_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 3)
        # Starting at 5 to 7, we are past the squashed migrations
recorder.record_applied("migrations", "5_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 2)
recorder.record_applied("migrations", "6_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 1)
recorder.record_applied("migrations", "7_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 0)
@override_settings(MIGRATION_MODULES={
"app1": "migrations.test_migrations_squashed_complex_multi_apps.app1",
"app2": "migrations.test_migrations_squashed_complex_multi_apps.app2",
})
@modify_settings(INSTALLED_APPS={'append': [
"migrations.test_migrations_squashed_complex_multi_apps.app1",
"migrations.test_migrations_squashed_complex_multi_apps.app2",
]})
def test_loading_squashed_complex_multi_apps(self):
loader = MigrationLoader(connection)
loader.build_graph()
plan = set(loader.graph.forwards_plan(('app1', '4_auto')))
expected_plan = {
('app1', '1_auto'),
('app2', '1_squashed_2'),
('app1', '2_squashed_3'),
('app1', '4_auto'),
}
self.assertEqual(plan, expected_plan)
@override_settings(MIGRATION_MODULES={
"app1": "migrations.test_migrations_squashed_complex_multi_apps.app1",
"app2": "migrations.test_migrations_squashed_complex_multi_apps.app2",
})
@modify_settings(INSTALLED_APPS={'append': [
"migrations.test_migrations_squashed_complex_multi_apps.app1",
"migrations.test_migrations_squashed_complex_multi_apps.app2",
]})
def test_loading_squashed_complex_multi_apps_partially_applied(self):
loader = MigrationLoader(connection)
recorder = MigrationRecorder(connection)
recorder.record_applied('app1', '1_auto')
recorder.record_applied('app1', '2_auto')
loader.build_graph()
plan = set(loader.graph.forwards_plan(('app1', '4_auto')))
plan = plan - loader.applied_migrations
expected_plan = {
('app2', '1_squashed_2'),
('app1', '3_auto'),
('app1', '4_auto'),
}
self.assertEqual(plan, expected_plan)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed_erroneous"})
def test_loading_squashed_erroneous(self):
"Tests loading a complex but erroneous set of squashed migrations"
loader = MigrationLoader(connection)
recorder = MigrationRecorder(connection)
self.addCleanup(recorder.flush)
def num_nodes():
plan = set(loader.graph.forwards_plan(('migrations', '7_auto')))
return len(plan - loader.applied_migrations)
# Empty database: use squashed migration
loader.build_graph()
self.assertEqual(num_nodes(), 5)
# Starting at 1 or 2 should use the squashed migration too
recorder.record_applied("migrations", "1_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 4)
recorder.record_applied("migrations", "2_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 3)
# However, starting at 3 or 4, nonexistent migrations would be needed.
msg = ("Migration migrations.6_auto depends on nonexistent node ('migrations', '5_auto'). "
"Django tried to replace migration migrations.5_auto with any of "
"[migrations.3_squashed_5] but wasn't able to because some of the replaced "
"migrations are already applied.")
recorder.record_applied("migrations", "3_auto")
with self.assertRaisesMessage(NodeNotFoundError, msg):
loader.build_graph()
recorder.record_applied("migrations", "4_auto")
with self.assertRaisesMessage(NodeNotFoundError, msg):
loader.build_graph()
        # Starting at 5 to 7, we are past the squashed migrations
recorder.record_applied("migrations", "5_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 2)
recorder.record_applied("migrations", "6_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 1)
recorder.record_applied("migrations", "7_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 0)
@override_settings(
MIGRATION_MODULES={'migrations': 'migrations.test_migrations'},
INSTALLED_APPS=['migrations'],
)
def test_check_consistent_history(self):
loader = MigrationLoader(connection=None)
loader.check_consistent_history(connection)
recorder = MigrationRecorder(connection)
recorder.record_applied('migrations', '0002_second')
msg = (
"Migration migrations.0002_second is applied before its dependency "
"migrations.0001_initial on database 'default'."
)
with self.assertRaisesMessage(InconsistentMigrationHistory, msg):
loader.check_consistent_history(connection)
@override_settings(
MIGRATION_MODULES={'migrations': 'migrations.test_migrations_squashed_extra'},
INSTALLED_APPS=['migrations'],
)
def test_check_consistent_history_squashed(self):
"""
MigrationLoader.check_consistent_history() should ignore unapplied
squashed migrations that have all of their `replaces` applied.
"""
loader = MigrationLoader(connection=None)
recorder = MigrationRecorder(connection)
recorder.record_applied('migrations', '0001_initial')
recorder.record_applied('migrations', '0002_second')
loader.check_consistent_history(connection)
recorder.record_applied('migrations', '0003_third')
loader.check_consistent_history(connection)
@override_settings(MIGRATION_MODULES={
"app1": "migrations.test_migrations_squashed_ref_squashed.app1",
"app2": "migrations.test_migrations_squashed_ref_squashed.app2",
})
@modify_settings(INSTALLED_APPS={'append': [
"migrations.test_migrations_squashed_ref_squashed.app1",
"migrations.test_migrations_squashed_ref_squashed.app2",
]})
def test_loading_squashed_ref_squashed(self):
"Tests loading a squashed migration with a new migration referencing it"
r"""
The sample migrations are structured like this:
app_1 1 --> 2 ---------------------*--> 3 *--> 4
\ / /
*-------------------*----/--> 2_sq_3 --*
\ / /
=============== \ ============= / == / ======================
app_2 *--> 1_sq_2 --* /
\ /
*--> 1 --> 2 --*
Where 2_sq_3 is a replacing migration for 2 and 3 in app_1,
as 1_sq_2 is a replacing migration for 1 and 2 in app_2.
"""
loader = MigrationLoader(connection)
recorder = MigrationRecorder(connection)
self.addCleanup(recorder.flush)
# Load with nothing applied: both migrations squashed.
loader.build_graph()
plan = set(loader.graph.forwards_plan(('app1', '4_auto')))
plan = plan - loader.applied_migrations
expected_plan = {
('app1', '1_auto'),
('app2', '1_squashed_2'),
('app1', '2_squashed_3'),
('app1', '4_auto'),
}
self.assertEqual(plan, expected_plan)
# Fake-apply a few from app1: unsquashes migration in app1.
recorder.record_applied('app1', '1_auto')
recorder.record_applied('app1', '2_auto')
loader.build_graph()
plan = set(loader.graph.forwards_plan(('app1', '4_auto')))
plan = plan - loader.applied_migrations
expected_plan = {
('app2', '1_squashed_2'),
('app1', '3_auto'),
('app1', '4_auto'),
}
self.assertEqual(plan, expected_plan)
# Fake-apply one from app2: unsquashes migration in app2 too.
recorder.record_applied('app2', '1_auto')
loader.build_graph()
plan = set(loader.graph.forwards_plan(('app1', '4_auto')))
plan = plan - loader.applied_migrations
expected_plan = {
('app2', '2_auto'),
('app1', '3_auto'),
('app1', '4_auto'),
}
self.assertEqual(plan, expected_plan)
@override_settings(MIGRATION_MODULES={'migrations': 'migrations.test_migrations_private'})
def test_ignore_files(self):
"""Files prefixed with underscore, tilde, or dot aren't loaded."""
loader = MigrationLoader(connection)
loader.load_disk()
migrations = [name for app, name in loader.disk_migrations if app == 'migrations']
self.assertEqual(migrations, ['0001_initial'])
class PycLoaderTests(MigrationTestBase):
def test_valid(self):
"""
To support frozen environments, MigrationLoader loads .pyc migrations.
"""
with self.temporary_migration_module(module='migrations.test_migrations') as migration_dir:
# Compile .py files to .pyc files and delete .py files.
compileall.compile_dir(migration_dir, force=True, quiet=1, legacy=True)
for name in os.listdir(migration_dir):
if name.endswith('.py'):
os.remove(os.path.join(migration_dir, name))
loader = MigrationLoader(connection)
self.assertIn(('migrations', '0001_initial'), loader.disk_migrations)
def test_invalid(self):
"""
MigrationLoader reraises ImportErrors caused by "bad magic number" pyc
files with a more helpful message.
"""
with self.temporary_migration_module(module='migrations.test_migrations_bad_pyc') as migration_dir:
# The -tpl suffix is to avoid the pyc exclusion in MANIFEST.in.
os.rename(
os.path.join(migration_dir, '0001_initial.pyc-tpl'),
os.path.join(migration_dir, '0001_initial.pyc'),
)
msg = (
r"Couldn't import '\w+.migrations.0001_initial' as it appears "
"to be a stale .pyc file."
)
with self.assertRaisesRegex(ImportError, msg):
MigrationLoader(connection)
|
75ba3e37e480be0b284b3d59219f178e0ebe7649d7935f8af98c18ad8199df07 | from django.core.exceptions import FieldDoesNotExist
from django.db import connection, migrations, models, transaction
from django.db.migrations.migration import Migration
from django.db.migrations.operations import CreateModel
from django.db.migrations.operations.fields import FieldOperation
from django.db.migrations.state import ModelState, ProjectState
from django.db.models.fields import NOT_PROVIDED
from django.db.transaction import atomic
from django.db.utils import IntegrityError
from django.test import SimpleTestCase, override_settings, skipUnlessDBFeature
from .models import FoodManager, FoodQuerySet, UnicodeModel
from .test_base import MigrationTestBase
class Mixin:
pass
class OperationTestBase(MigrationTestBase):
"""
Common functions to help test operations.
"""
def apply_operations(self, app_label, project_state, operations, atomic=True):
migration = Migration('name', app_label)
migration.operations = operations
with connection.schema_editor(atomic=atomic) as editor:
return migration.apply(project_state, editor)
def unapply_operations(self, app_label, project_state, operations, atomic=True):
migration = Migration('name', app_label)
migration.operations = operations
with connection.schema_editor(atomic=atomic) as editor:
return migration.unapply(project_state, editor)
def make_test_state(self, app_label, operation, **kwargs):
"""
Makes a test state using set_up_test_model and returns the
original state and the state after the migration is applied.
"""
project_state = self.set_up_test_model(app_label, **kwargs)
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
return project_state, new_state
def set_up_test_model(
self, app_label, second_model=False, third_model=False, index=False, multicol_index=False,
related_model=False, mti_model=False, proxy_model=False, manager_model=False,
unique_together=False, options=False, db_table=None, index_together=False, constraints=None):
"""
Creates a test model state and database table.
"""
# Delete the tables if they already exist
table_names = [
# Start with ManyToMany tables
'_pony_stables', '_pony_vans',
# Then standard model tables
'_pony', '_stable', '_van',
]
tables = [(app_label + table_name) for table_name in table_names]
with connection.cursor() as cursor:
table_names = connection.introspection.table_names(cursor)
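            # Constraint checking is disabled so interdependent test tables can
            # be dropped in any order with raw DROP TABLE statements.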
connection.disable_constraint_checking()
sql_delete_table = connection.schema_editor().sql_delete_table
with transaction.atomic():
for table in tables:
if table in table_names:
cursor.execute(sql_delete_table % {
"table": connection.ops.quote_name(table),
})
connection.enable_constraint_checking()
# Make the "current" state
model_options = {
"swappable": "TEST_SWAP_MODEL",
"index_together": [["weight", "pink"]] if index_together else [],
"unique_together": [["pink", "weight"]] if unique_together else [],
}
if options:
model_options["permissions"] = [("can_groom", "Can groom")]
if db_table:
model_options["db_table"] = db_table
operations = [migrations.CreateModel(
"Pony",
[
("id", models.AutoField(primary_key=True)),
("pink", models.IntegerField(default=3)),
("weight", models.FloatField()),
],
options=model_options,
)]
if index:
operations.append(migrations.AddIndex(
"Pony",
models.Index(fields=["pink"], name="pony_pink_idx")
))
if multicol_index:
operations.append(migrations.AddIndex(
"Pony",
models.Index(fields=["pink", "weight"], name="pony_test_idx")
))
if constraints:
for constraint in constraints:
operations.append(migrations.AddConstraint(
"Pony",
constraint,
))
if second_model:
operations.append(migrations.CreateModel(
"Stable",
[
("id", models.AutoField(primary_key=True)),
]
))
if third_model:
operations.append(migrations.CreateModel(
"Van",
[
("id", models.AutoField(primary_key=True)),
]
))
if related_model:
operations.append(migrations.CreateModel(
"Rider",
[
("id", models.AutoField(primary_key=True)),
("pony", models.ForeignKey("Pony", models.CASCADE)),
("friend", models.ForeignKey("self", models.CASCADE))
],
))
if mti_model:
operations.append(migrations.CreateModel(
"ShetlandPony",
fields=[
('pony_ptr', models.OneToOneField(
'Pony',
models.CASCADE,
auto_created=True,
parent_link=True,
primary_key=True,
to_field='id',
serialize=False,
)),
("cuteness", models.IntegerField(default=1)),
],
bases=['%s.Pony' % app_label],
))
if proxy_model:
operations.append(migrations.CreateModel(
"ProxyPony",
fields=[],
options={"proxy": True},
bases=['%s.Pony' % app_label],
))
if manager_model:
operations.append(migrations.CreateModel(
"Food",
fields=[
("id", models.AutoField(primary_key=True)),
],
managers=[
("food_qs", FoodQuerySet.as_manager()),
("food_mgr", FoodManager("a", "b")),
("food_mgr_kwargs", FoodManager("x", "y", 3, 4)),
]
))
return self.apply_operations(app_label, ProjectState(), operations)
class OperationTests(OperationTestBase):
"""
Tests running the operations and making sure they do what they say they do.
Each test looks at their state changing, and then their database operation -
both forwards and backwards.
"""
def test_create_model(self):
"""
Tests the CreateModel operation.
Most other tests use this operation as part of setup, so check failures here first.
"""
operation = migrations.CreateModel(
"Pony",
[
("id", models.AutoField(primary_key=True)),
("pink", models.IntegerField(default=1)),
],
)
self.assertEqual(operation.describe(), "Create model Pony")
# Test the state alteration
project_state = ProjectState()
new_state = project_state.clone()
operation.state_forwards("test_crmo", new_state)
self.assertEqual(new_state.models["test_crmo", "pony"].name, "Pony")
self.assertEqual(len(new_state.models["test_crmo", "pony"].fields), 2)
# Test the database alteration
self.assertTableNotExists("test_crmo_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_crmo", editor, project_state, new_state)
self.assertTableExists("test_crmo_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_crmo", editor, new_state, project_state)
self.assertTableNotExists("test_crmo_pony")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "CreateModel")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["fields", "name"])
        # A plain default manager named 'objects' is omitted from the deconstructed kwargs
operation = migrations.CreateModel("Foo", fields=[], managers=[("objects", models.Manager())])
definition = operation.deconstruct()
self.assertNotIn('managers', definition[2])
def test_create_model_with_duplicate_field_name(self):
with self.assertRaisesMessage(ValueError, 'Found duplicate value pink in CreateModel fields argument.'):
migrations.CreateModel(
"Pony",
[
("id", models.AutoField(primary_key=True)),
("pink", models.TextField()),
("pink", models.IntegerField(default=1)),
],
)
def test_create_model_with_duplicate_base(self):
message = 'Found duplicate value test_crmo.pony in CreateModel bases argument.'
with self.assertRaisesMessage(ValueError, message):
migrations.CreateModel(
"Pony",
fields=[],
bases=("test_crmo.Pony", "test_crmo.Pony",),
)
with self.assertRaisesMessage(ValueError, message):
migrations.CreateModel(
"Pony",
fields=[],
bases=("test_crmo.Pony", "test_crmo.pony",),
)
message = 'Found duplicate value migrations.unicodemodel in CreateModel bases argument.'
with self.assertRaisesMessage(ValueError, message):
migrations.CreateModel(
"Pony",
fields=[],
bases=(UnicodeModel, UnicodeModel,),
)
with self.assertRaisesMessage(ValueError, message):
migrations.CreateModel(
"Pony",
fields=[],
bases=(UnicodeModel, 'migrations.unicodemodel',),
)
with self.assertRaisesMessage(ValueError, message):
migrations.CreateModel(
"Pony",
fields=[],
bases=(UnicodeModel, 'migrations.UnicodeModel',),
)
message = "Found duplicate value <class 'django.db.models.base.Model'> in CreateModel bases argument."
with self.assertRaisesMessage(ValueError, message):
migrations.CreateModel(
"Pony",
fields=[],
bases=(models.Model, models.Model,),
)
message = "Found duplicate value <class 'migrations.test_operations.Mixin'> in CreateModel bases argument."
with self.assertRaisesMessage(ValueError, message):
migrations.CreateModel(
"Pony",
fields=[],
bases=(Mixin, Mixin,),
)
def test_create_model_with_duplicate_manager_name(self):
with self.assertRaisesMessage(ValueError, 'Found duplicate value objects in CreateModel managers argument.'):
migrations.CreateModel(
"Pony",
fields=[],
managers=[
("objects", models.Manager()),
("objects", models.Manager()),
],
)
def test_create_model_with_unique_after(self):
"""
Tests the CreateModel operation directly followed by an
AlterUniqueTogether (bug #22844 - sqlite remake issues)
"""
operation1 = migrations.CreateModel(
"Pony",
[
("id", models.AutoField(primary_key=True)),
("pink", models.IntegerField(default=1)),
],
)
operation2 = migrations.CreateModel(
"Rider",
[
("id", models.AutoField(primary_key=True)),
("number", models.IntegerField(default=1)),
("pony", models.ForeignKey("test_crmoua.Pony", models.CASCADE)),
],
)
operation3 = migrations.AlterUniqueTogether(
"Rider",
[
("number", "pony"),
],
)
# Test the database alteration
project_state = ProjectState()
self.assertTableNotExists("test_crmoua_pony")
self.assertTableNotExists("test_crmoua_rider")
with connection.schema_editor() as editor:
new_state = project_state.clone()
operation1.state_forwards("test_crmoua", new_state)
operation1.database_forwards("test_crmoua", editor, project_state, new_state)
project_state, new_state = new_state, new_state.clone()
operation2.state_forwards("test_crmoua", new_state)
operation2.database_forwards("test_crmoua", editor, project_state, new_state)
project_state, new_state = new_state, new_state.clone()
operation3.state_forwards("test_crmoua", new_state)
operation3.database_forwards("test_crmoua", editor, project_state, new_state)
self.assertTableExists("test_crmoua_pony")
self.assertTableExists("test_crmoua_rider")
def test_create_model_m2m(self):
"""
Test the creation of a model with a ManyToMany field and the
auto-created "through" model.
"""
project_state = self.set_up_test_model("test_crmomm")
operation = migrations.CreateModel(
"Stable",
[
("id", models.AutoField(primary_key=True)),
("ponies", models.ManyToManyField("Pony", related_name="stables"))
]
)
# Test the state alteration
new_state = project_state.clone()
operation.state_forwards("test_crmomm", new_state)
# Test the database alteration
self.assertTableNotExists("test_crmomm_stable_ponies")
with connection.schema_editor() as editor:
operation.database_forwards("test_crmomm", editor, project_state, new_state)
self.assertTableExists("test_crmomm_stable")
self.assertTableExists("test_crmomm_stable_ponies")
self.assertColumnNotExists("test_crmomm_stable", "ponies")
# Make sure the M2M field actually works
with atomic():
Pony = new_state.apps.get_model("test_crmomm", "Pony")
Stable = new_state.apps.get_model("test_crmomm", "Stable")
stable = Stable.objects.create()
p1 = Pony.objects.create(pink=False, weight=4.55)
p2 = Pony.objects.create(pink=True, weight=5.43)
stable.ponies.add(p1, p2)
self.assertEqual(stable.ponies.count(), 2)
stable.ponies.all().delete()
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_crmomm", editor, new_state, project_state)
self.assertTableNotExists("test_crmomm_stable")
self.assertTableNotExists("test_crmomm_stable_ponies")
def test_create_model_inheritance(self):
"""
Tests the CreateModel operation on a multi-table inheritance setup.
"""
project_state = self.set_up_test_model("test_crmoih")
# Test the state alteration
operation = migrations.CreateModel(
"ShetlandPony",
[
('pony_ptr', models.OneToOneField(
'test_crmoih.Pony',
models.CASCADE,
auto_created=True,
primary_key=True,
to_field='id',
serialize=False,
)),
("cuteness", models.IntegerField(default=1)),
],
)
new_state = project_state.clone()
operation.state_forwards("test_crmoih", new_state)
self.assertIn(("test_crmoih", "shetlandpony"), new_state.models)
# Test the database alteration
self.assertTableNotExists("test_crmoih_shetlandpony")
with connection.schema_editor() as editor:
operation.database_forwards("test_crmoih", editor, project_state, new_state)
self.assertTableExists("test_crmoih_shetlandpony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_crmoih", editor, new_state, project_state)
self.assertTableNotExists("test_crmoih_shetlandpony")
def test_create_proxy_model(self):
"""
CreateModel ignores proxy models.
"""
project_state = self.set_up_test_model("test_crprmo")
# Test the state alteration
operation = migrations.CreateModel(
"ProxyPony",
[],
options={"proxy": True},
bases=("test_crprmo.Pony",),
)
self.assertEqual(operation.describe(), "Create proxy model ProxyPony")
new_state = project_state.clone()
operation.state_forwards("test_crprmo", new_state)
self.assertIn(("test_crprmo", "proxypony"), new_state.models)
# Test the database alteration
self.assertTableNotExists("test_crprmo_proxypony")
self.assertTableExists("test_crprmo_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_crprmo", editor, project_state, new_state)
self.assertTableNotExists("test_crprmo_proxypony")
self.assertTableExists("test_crprmo_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_crprmo", editor, new_state, project_state)
self.assertTableNotExists("test_crprmo_proxypony")
self.assertTableExists("test_crprmo_pony")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "CreateModel")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["bases", "fields", "name", "options"])
def test_create_unmanaged_model(self):
"""
CreateModel ignores unmanaged models.
"""
project_state = self.set_up_test_model("test_crummo")
# Test the state alteration
operation = migrations.CreateModel(
"UnmanagedPony",
[],
options={"proxy": True},
bases=("test_crummo.Pony",),
)
self.assertEqual(operation.describe(), "Create proxy model UnmanagedPony")
new_state = project_state.clone()
operation.state_forwards("test_crummo", new_state)
self.assertIn(("test_crummo", "unmanagedpony"), new_state.models)
# Test the database alteration
self.assertTableNotExists("test_crummo_unmanagedpony")
self.assertTableExists("test_crummo_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_crummo", editor, project_state, new_state)
self.assertTableNotExists("test_crummo_unmanagedpony")
self.assertTableExists("test_crummo_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_crummo", editor, new_state, project_state)
self.assertTableNotExists("test_crummo_unmanagedpony")
self.assertTableExists("test_crummo_pony")
@skipUnlessDBFeature('supports_table_check_constraints')
def test_create_model_with_constraint(self):
where = models.Q(pink__gt=2)
check_constraint = models.CheckConstraint(check=where, name='test_constraint_pony_pink_gt_2')
operation = migrations.CreateModel(
"Pony",
[
("id", models.AutoField(primary_key=True)),
("pink", models.IntegerField(default=3)),
],
options={'constraints': [check_constraint]},
)
# Test the state alteration
project_state = ProjectState()
new_state = project_state.clone()
operation.state_forwards("test_crmo", new_state)
self.assertEqual(len(new_state.models['test_crmo', 'pony'].options['constraints']), 1)
# Test database alteration
self.assertTableNotExists("test_crmo_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_crmo", editor, project_state, new_state)
self.assertTableExists("test_crmo_pony")
with connection.cursor() as cursor:
with self.assertRaises(IntegrityError):
cursor.execute("INSERT INTO test_crmo_pony (id, pink) VALUES (1, 1)")
# Test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_crmo", editor, new_state, project_state)
self.assertTableNotExists("test_crmo_pony")
# Test deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "CreateModel")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2]['options']['constraints'], [check_constraint])
def test_create_model_with_partial_unique_constraint(self):
partial_unique_constraint = models.UniqueConstraint(
fields=['pink'],
condition=models.Q(weight__gt=5),
name='test_constraint_pony_pink_for_weight_gt_5_uniq',
)
operation = migrations.CreateModel(
'Pony',
[
('id', models.AutoField(primary_key=True)),
('pink', models.IntegerField(default=3)),
('weight', models.FloatField()),
],
options={'constraints': [partial_unique_constraint]},
)
# Test the state alteration
project_state = ProjectState()
new_state = project_state.clone()
operation.state_forwards('test_crmo', new_state)
self.assertEqual(len(new_state.models['test_crmo', 'pony'].options['constraints']), 1)
# Test database alteration
self.assertTableNotExists('test_crmo_pony')
with connection.schema_editor() as editor:
operation.database_forwards('test_crmo', editor, project_state, new_state)
self.assertTableExists('test_crmo_pony')
# Test constraint works
Pony = new_state.apps.get_model('test_crmo', 'Pony')
Pony.objects.create(pink=1, weight=4.0)
Pony.objects.create(pink=1, weight=4.0)
Pony.objects.create(pink=1, weight=6.0)
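        # Without partial index support the conditional constraint is a no-op, so the duplicate must be allowed.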
if connection.features.supports_partial_indexes:
with self.assertRaises(IntegrityError):
Pony.objects.create(pink=1, weight=7.0)
else:
Pony.objects.create(pink=1, weight=7.0)
# Test reversal
with connection.schema_editor() as editor:
operation.database_backwards('test_crmo', editor, new_state, project_state)
self.assertTableNotExists('test_crmo_pony')
# Test deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], 'CreateModel')
self.assertEqual(definition[1], [])
self.assertEqual(definition[2]['options']['constraints'], [partial_unique_constraint])
def test_create_model_managers(self):
"""
        The managers passed to CreateModel are set on the model state.
"""
project_state = self.set_up_test_model("test_cmoma")
# Test the state alteration
operation = migrations.CreateModel(
"Food",
fields=[
("id", models.AutoField(primary_key=True)),
],
managers=[
("food_qs", FoodQuerySet.as_manager()),
("food_mgr", FoodManager("a", "b")),
("food_mgr_kwargs", FoodManager("x", "y", 3, 4)),
]
)
self.assertEqual(operation.describe(), "Create model Food")
new_state = project_state.clone()
operation.state_forwards("test_cmoma", new_state)
self.assertIn(("test_cmoma", "food"), new_state.models)
managers = new_state.models["test_cmoma", "food"].managers
self.assertEqual(managers[0][0], "food_qs")
self.assertIsInstance(managers[0][1], models.Manager)
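        # FoodManager (declared alongside these tests) stores its constructor args; the two
        # explicit args are followed by its default values (1, 2), as asserted below.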
self.assertEqual(managers[1][0], "food_mgr")
self.assertIsInstance(managers[1][1], FoodManager)
self.assertEqual(managers[1][1].args, ("a", "b", 1, 2))
self.assertEqual(managers[2][0], "food_mgr_kwargs")
self.assertIsInstance(managers[2][1], FoodManager)
self.assertEqual(managers[2][1].args, ("x", "y", 3, 4))
def test_delete_model(self):
"""
Tests the DeleteModel operation.
"""
project_state = self.set_up_test_model("test_dlmo")
# Test the state alteration
operation = migrations.DeleteModel("Pony")
self.assertEqual(operation.describe(), "Delete model Pony")
new_state = project_state.clone()
operation.state_forwards("test_dlmo", new_state)
self.assertNotIn(("test_dlmo", "pony"), new_state.models)
# Test the database alteration
self.assertTableExists("test_dlmo_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_dlmo", editor, project_state, new_state)
self.assertTableNotExists("test_dlmo_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_dlmo", editor, new_state, project_state)
self.assertTableExists("test_dlmo_pony")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "DeleteModel")
self.assertEqual(definition[1], [])
self.assertEqual(list(definition[2]), ["name"])
def test_delete_proxy_model(self):
"""
        Tests that the DeleteModel operation ignores proxy models.
"""
project_state = self.set_up_test_model("test_dlprmo", proxy_model=True)
# Test the state alteration
operation = migrations.DeleteModel("ProxyPony")
new_state = project_state.clone()
operation.state_forwards("test_dlprmo", new_state)
self.assertIn(("test_dlprmo", "proxypony"), project_state.models)
self.assertNotIn(("test_dlprmo", "proxypony"), new_state.models)
# Test the database alteration
self.assertTableExists("test_dlprmo_pony")
self.assertTableNotExists("test_dlprmo_proxypony")
with connection.schema_editor() as editor:
operation.database_forwards("test_dlprmo", editor, project_state, new_state)
self.assertTableExists("test_dlprmo_pony")
self.assertTableNotExists("test_dlprmo_proxypony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_dlprmo", editor, new_state, project_state)
self.assertTableExists("test_dlprmo_pony")
self.assertTableNotExists("test_dlprmo_proxypony")
def test_delete_mti_model(self):
project_state = self.set_up_test_model('test_dlmtimo', mti_model=True)
# Test the state alteration
operation = migrations.DeleteModel('ShetlandPony')
new_state = project_state.clone()
operation.state_forwards('test_dlmtimo', new_state)
self.assertIn(('test_dlmtimo', 'shetlandpony'), project_state.models)
self.assertNotIn(('test_dlmtimo', 'shetlandpony'), new_state.models)
# Test the database alteration
self.assertTableExists('test_dlmtimo_pony')
self.assertTableExists('test_dlmtimo_shetlandpony')
self.assertColumnExists('test_dlmtimo_shetlandpony', 'pony_ptr_id')
with connection.schema_editor() as editor:
operation.database_forwards('test_dlmtimo', editor, project_state, new_state)
self.assertTableExists('test_dlmtimo_pony')
self.assertTableNotExists('test_dlmtimo_shetlandpony')
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards('test_dlmtimo', editor, new_state, project_state)
self.assertTableExists('test_dlmtimo_pony')
self.assertTableExists('test_dlmtimo_shetlandpony')
self.assertColumnExists('test_dlmtimo_shetlandpony', 'pony_ptr_id')
def test_rename_model(self):
"""
Tests the RenameModel operation.
"""
project_state = self.set_up_test_model("test_rnmo", related_model=True)
# Test the state alteration
operation = migrations.RenameModel("Pony", "Horse")
self.assertEqual(operation.describe(), "Rename model Pony to Horse")
# Test initial state and database
self.assertIn(("test_rnmo", "pony"), project_state.models)
self.assertNotIn(("test_rnmo", "horse"), project_state.models)
self.assertTableExists("test_rnmo_pony")
self.assertTableNotExists("test_rnmo_horse")
if connection.features.supports_foreign_keys:
self.assertFKExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id"))
self.assertFKNotExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id"))
# Migrate forwards
new_state = project_state.clone()
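        # Apply the rename non-atomically on backends (e.g. older SQLite) that can't rename a
        # table referenced by foreign keys inside a transaction.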
atomic_rename = connection.features.supports_atomic_references_rename
new_state = self.apply_operations("test_rnmo", new_state, [operation], atomic=atomic_rename)
# Test new state and database
self.assertNotIn(("test_rnmo", "pony"), new_state.models)
self.assertIn(("test_rnmo", "horse"), new_state.models)
# RenameModel also repoints all incoming FKs and M2Ms
self.assertEqual("test_rnmo.Horse", new_state.models["test_rnmo", "rider"].fields[1][1].remote_field.model)
self.assertTableNotExists("test_rnmo_pony")
self.assertTableExists("test_rnmo_horse")
if connection.features.supports_foreign_keys:
self.assertFKNotExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id"))
self.assertFKExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id"))
# Migrate backwards
original_state = self.unapply_operations("test_rnmo", project_state, [operation], atomic=atomic_rename)
# Test original state and database
self.assertIn(("test_rnmo", "pony"), original_state.models)
self.assertNotIn(("test_rnmo", "horse"), original_state.models)
self.assertEqual("Pony", original_state.models["test_rnmo", "rider"].fields[1][1].remote_field.model)
self.assertTableExists("test_rnmo_pony")
self.assertTableNotExists("test_rnmo_horse")
if connection.features.supports_foreign_keys:
self.assertFKExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id"))
self.assertFKNotExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id"))
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "RenameModel")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'old_name': "Pony", 'new_name': "Horse"})
def test_rename_model_state_forwards(self):
"""
        RenameModel operations shouldn't trigger the rendering (and caching) of
        apps on a state that hasn't rendered them yet.
"""
state = ProjectState()
state.add_model(ModelState('migrations', 'Foo', []))
operation = migrations.RenameModel('Foo', 'Bar')
operation.state_forwards('migrations', state)
self.assertNotIn('apps', state.__dict__)
self.assertNotIn(('migrations', 'foo'), state.models)
self.assertIn(('migrations', 'bar'), state.models)
# Now with apps cached.
apps = state.apps
operation = migrations.RenameModel('Bar', 'Foo')
operation.state_forwards('migrations', state)
self.assertIs(state.apps, apps)
self.assertNotIn(('migrations', 'bar'), state.models)
self.assertIn(('migrations', 'foo'), state.models)
def test_rename_model_with_self_referential_fk(self):
"""
        Tests the RenameModel operation on a model with a self-referential FK.
"""
project_state = self.set_up_test_model("test_rmwsrf", related_model=True)
# Test the state alteration
operation = migrations.RenameModel("Rider", "HorseRider")
self.assertEqual(operation.describe(), "Rename model Rider to HorseRider")
new_state = project_state.clone()
operation.state_forwards("test_rmwsrf", new_state)
self.assertNotIn(("test_rmwsrf", "rider"), new_state.models)
self.assertIn(("test_rmwsrf", "horserider"), new_state.models)
# Remember, RenameModel also repoints all incoming FKs and M2Ms
self.assertEqual(
'self',
new_state.models["test_rmwsrf", "horserider"].fields[2][1].remote_field.model
)
HorseRider = new_state.apps.get_model('test_rmwsrf', 'horserider')
self.assertIs(HorseRider._meta.get_field('horserider').remote_field.model, HorseRider)
# Test the database alteration
self.assertTableExists("test_rmwsrf_rider")
self.assertTableNotExists("test_rmwsrf_horserider")
if connection.features.supports_foreign_keys:
self.assertFKExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_rider", "id"))
self.assertFKNotExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_horserider", "id"))
atomic_rename = connection.features.supports_atomic_references_rename
with connection.schema_editor(atomic=atomic_rename) as editor:
operation.database_forwards("test_rmwsrf", editor, project_state, new_state)
self.assertTableNotExists("test_rmwsrf_rider")
self.assertTableExists("test_rmwsrf_horserider")
if connection.features.supports_foreign_keys:
self.assertFKNotExists("test_rmwsrf_horserider", ["friend_id"], ("test_rmwsrf_rider", "id"))
self.assertFKExists("test_rmwsrf_horserider", ["friend_id"], ("test_rmwsrf_horserider", "id"))
# And test reversal
with connection.schema_editor(atomic=atomic_rename) as editor:
operation.database_backwards("test_rmwsrf", editor, new_state, project_state)
self.assertTableExists("test_rmwsrf_rider")
self.assertTableNotExists("test_rmwsrf_horserider")
if connection.features.supports_foreign_keys:
self.assertFKExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_rider", "id"))
self.assertFKNotExists("test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_horserider", "id"))
def test_rename_model_with_superclass_fk(self):
"""
Tests the RenameModel operation on a model which has a superclass that
has a foreign key.
"""
project_state = self.set_up_test_model("test_rmwsc", related_model=True, mti_model=True)
# Test the state alteration
operation = migrations.RenameModel("ShetlandPony", "LittleHorse")
self.assertEqual(operation.describe(), "Rename model ShetlandPony to LittleHorse")
new_state = project_state.clone()
operation.state_forwards("test_rmwsc", new_state)
self.assertNotIn(("test_rmwsc", "shetlandpony"), new_state.models)
self.assertIn(("test_rmwsc", "littlehorse"), new_state.models)
# RenameModel shouldn't repoint the superclass's relations, only local ones
self.assertEqual(
project_state.models["test_rmwsc", "rider"].fields[1][1].remote_field.model,
new_state.models["test_rmwsc", "rider"].fields[1][1].remote_field.model
)
# Before running the migration we have a table for Shetland Pony, not Little Horse
self.assertTableExists("test_rmwsc_shetlandpony")
self.assertTableNotExists("test_rmwsc_littlehorse")
if connection.features.supports_foreign_keys:
# and the foreign key on rider points to pony, not shetland pony
self.assertFKExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_pony", "id"))
self.assertFKNotExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_shetlandpony", "id"))
with connection.schema_editor(atomic=connection.features.supports_atomic_references_rename) as editor:
operation.database_forwards("test_rmwsc", editor, project_state, new_state)
# Now we have a little horse table, not shetland pony
self.assertTableNotExists("test_rmwsc_shetlandpony")
self.assertTableExists("test_rmwsc_littlehorse")
if connection.features.supports_foreign_keys:
# but the Foreign keys still point at pony, not little horse
self.assertFKExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_pony", "id"))
self.assertFKNotExists("test_rmwsc_rider", ["pony_id"], ("test_rmwsc_littlehorse", "id"))
def test_rename_model_with_self_referential_m2m(self):
app_label = "test_rename_model_with_self_referential_m2m"
project_state = self.apply_operations(app_label, ProjectState(), operations=[
migrations.CreateModel("ReflexivePony", fields=[
("id", models.AutoField(primary_key=True)),
("ponies", models.ManyToManyField("self")),
]),
])
project_state = self.apply_operations(app_label, project_state, operations=[
migrations.RenameModel("ReflexivePony", "ReflexivePony2"),
], atomic=connection.features.supports_atomic_references_rename)
Pony = project_state.apps.get_model(app_label, "ReflexivePony2")
pony = Pony.objects.create()
pony.ponies.add(pony)
def test_rename_model_with_m2m(self):
app_label = "test_rename_model_with_m2m"
project_state = self.apply_operations(app_label, ProjectState(), operations=[
migrations.CreateModel("Rider", fields=[
("id", models.AutoField(primary_key=True)),
]),
migrations.CreateModel("Pony", fields=[
("id", models.AutoField(primary_key=True)),
("riders", models.ManyToManyField("Rider")),
]),
])
Pony = project_state.apps.get_model(app_label, "Pony")
Rider = project_state.apps.get_model(app_label, "Rider")
pony = Pony.objects.create()
rider = Rider.objects.create()
pony.riders.add(rider)
project_state = self.apply_operations(app_label, project_state, operations=[
migrations.RenameModel("Pony", "Pony2"),
], atomic=connection.features.supports_atomic_references_rename)
Pony = project_state.apps.get_model(app_label, "Pony2")
Rider = project_state.apps.get_model(app_label, "Rider")
pony = Pony.objects.create()
rider = Rider.objects.create()
pony.riders.add(rider)
self.assertEqual(Pony.objects.count(), 2)
self.assertEqual(Rider.objects.count(), 2)
self.assertEqual(Pony._meta.get_field('riders').remote_field.through.objects.count(), 2)
def test_rename_m2m_target_model(self):
app_label = "test_rename_m2m_target_model"
project_state = self.apply_operations(app_label, ProjectState(), operations=[
migrations.CreateModel("Rider", fields=[
("id", models.AutoField(primary_key=True)),
]),
migrations.CreateModel("Pony", fields=[
("id", models.AutoField(primary_key=True)),
("riders", models.ManyToManyField("Rider")),
]),
])
Pony = project_state.apps.get_model(app_label, "Pony")
Rider = project_state.apps.get_model(app_label, "Rider")
pony = Pony.objects.create()
rider = Rider.objects.create()
pony.riders.add(rider)
project_state = self.apply_operations(app_label, project_state, operations=[
migrations.RenameModel("Rider", "Rider2"),
], atomic=connection.features.supports_atomic_references_rename)
Pony = project_state.apps.get_model(app_label, "Pony")
Rider = project_state.apps.get_model(app_label, "Rider2")
pony = Pony.objects.create()
rider = Rider.objects.create()
pony.riders.add(rider)
self.assertEqual(Pony.objects.count(), 2)
self.assertEqual(Rider.objects.count(), 2)
self.assertEqual(Pony._meta.get_field('riders').remote_field.through.objects.count(), 2)
def test_rename_m2m_through_model(self):
app_label = "test_rename_through"
project_state = self.apply_operations(app_label, ProjectState(), operations=[
migrations.CreateModel("Rider", fields=[
("id", models.AutoField(primary_key=True)),
]),
migrations.CreateModel("Pony", fields=[
("id", models.AutoField(primary_key=True)),
]),
migrations.CreateModel("PonyRider", fields=[
("id", models.AutoField(primary_key=True)),
("rider", models.ForeignKey("test_rename_through.Rider", models.CASCADE)),
("pony", models.ForeignKey("test_rename_through.Pony", models.CASCADE)),
]),
migrations.AddField(
"Pony",
"riders",
models.ManyToManyField("test_rename_through.Rider", through="test_rename_through.PonyRider"),
),
])
Pony = project_state.apps.get_model(app_label, "Pony")
Rider = project_state.apps.get_model(app_label, "Rider")
PonyRider = project_state.apps.get_model(app_label, "PonyRider")
pony = Pony.objects.create()
rider = Rider.objects.create()
PonyRider.objects.create(pony=pony, rider=rider)
project_state = self.apply_operations(app_label, project_state, operations=[
migrations.RenameModel("PonyRider", "PonyRider2"),
])
Pony = project_state.apps.get_model(app_label, "Pony")
Rider = project_state.apps.get_model(app_label, "Rider")
PonyRider = project_state.apps.get_model(app_label, "PonyRider2")
pony = Pony.objects.first()
rider = Rider.objects.create()
PonyRider.objects.create(pony=pony, rider=rider)
self.assertEqual(Pony.objects.count(), 1)
self.assertEqual(Rider.objects.count(), 2)
self.assertEqual(PonyRider.objects.count(), 2)
self.assertEqual(pony.riders.count(), 2)
def test_rename_m2m_model_after_rename_field(self):
"""RenameModel renames a many-to-many column after a RenameField."""
app_label = 'test_rename_multiple'
project_state = self.apply_operations(app_label, ProjectState(), operations=[
migrations.CreateModel('Pony', fields=[
('id', models.AutoField(primary_key=True)),
('name', models.CharField(max_length=20)),
]),
migrations.CreateModel('Rider', fields=[
('id', models.AutoField(primary_key=True)),
('pony', models.ForeignKey('test_rename_multiple.Pony', models.CASCADE)),
]),
migrations.CreateModel('PonyRider', fields=[
('id', models.AutoField(primary_key=True)),
('riders', models.ManyToManyField('Rider')),
]),
migrations.RenameField(model_name='pony', old_name='name', new_name='fancy_name'),
migrations.RenameModel(old_name='Rider', new_name='Jockey'),
], atomic=connection.features.supports_atomic_references_rename)
Pony = project_state.apps.get_model(app_label, 'Pony')
Jockey = project_state.apps.get_model(app_label, 'Jockey')
PonyRider = project_state.apps.get_model(app_label, 'PonyRider')
# No "no such column" error means the column was renamed correctly.
pony = Pony.objects.create(fancy_name='a good name')
jockey = Jockey.objects.create(pony=pony)
ponyrider = PonyRider.objects.create()
ponyrider.riders.add(jockey)
def test_add_field(self):
"""
Tests the AddField operation.
"""
# Test the state alteration
operation = migrations.AddField(
"Pony",
"height",
models.FloatField(null=True, default=5),
)
self.assertEqual(operation.describe(), "Add field height to Pony")
project_state, new_state = self.make_test_state("test_adfl", operation)
self.assertEqual(len(new_state.models["test_adfl", "pony"].fields), 4)
field = [
f for n, f in new_state.models["test_adfl", "pony"].fields
if n == "height"
][0]
self.assertEqual(field.default, 5)
# Test the database alteration
self.assertColumnNotExists("test_adfl_pony", "height")
with connection.schema_editor() as editor:
operation.database_forwards("test_adfl", editor, project_state, new_state)
self.assertColumnExists("test_adfl_pony", "height")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_adfl", editor, new_state, project_state)
self.assertColumnNotExists("test_adfl_pony", "height")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AddField")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["field", "model_name", "name"])
def test_add_charfield(self):
"""
        Tests the AddField operation on CharField.
"""
project_state = self.set_up_test_model("test_adchfl")
Pony = project_state.apps.get_model("test_adchfl", "Pony")
pony = Pony.objects.create(weight=42)
new_state = self.apply_operations("test_adchfl", project_state, [
migrations.AddField(
"Pony",
"text",
models.CharField(max_length=10, default="some text"),
),
migrations.AddField(
"Pony",
"empty",
models.CharField(max_length=10, default=""),
),
            # If not properly quoted, digits would be interpreted as an int.
migrations.AddField(
"Pony",
"digits",
models.CharField(max_length=10, default="42"),
),
# Manual quoting is fragile and could trip on quotes. Refs #xyz.
migrations.AddField(
"Pony",
"quotes",
models.CharField(max_length=10, default='"\'"'),
),
])
Pony = new_state.apps.get_model("test_adchfl", "Pony")
pony = Pony.objects.get(pk=pony.pk)
self.assertEqual(pony.text, "some text")
self.assertEqual(pony.empty, "")
self.assertEqual(pony.digits, "42")
self.assertEqual(pony.quotes, '"\'"')
def test_add_textfield(self):
"""
Tests the AddField operation on TextField.
"""
project_state = self.set_up_test_model("test_adtxtfl")
Pony = project_state.apps.get_model("test_adtxtfl", "Pony")
pony = Pony.objects.create(weight=42)
new_state = self.apply_operations("test_adtxtfl", project_state, [
migrations.AddField(
"Pony",
"text",
models.TextField(default="some text"),
),
migrations.AddField(
"Pony",
"empty",
models.TextField(default=""),
),
            # If not properly quoted, digits would be interpreted as an int.
migrations.AddField(
"Pony",
"digits",
models.TextField(default="42"),
),
# Manual quoting is fragile and could trip on quotes. Refs #xyz.
migrations.AddField(
"Pony",
"quotes",
models.TextField(default='"\'"'),
),
])
Pony = new_state.apps.get_model("test_adtxtfl", "Pony")
pony = Pony.objects.get(pk=pony.pk)
self.assertEqual(pony.text, "some text")
self.assertEqual(pony.empty, "")
self.assertEqual(pony.digits, "42")
self.assertEqual(pony.quotes, '"\'"')
def test_add_binaryfield(self):
"""
        Tests the AddField operation on BinaryField.
"""
project_state = self.set_up_test_model("test_adbinfl")
Pony = project_state.apps.get_model("test_adbinfl", "Pony")
pony = Pony.objects.create(weight=42)
new_state = self.apply_operations("test_adbinfl", project_state, [
migrations.AddField(
"Pony",
"blob",
models.BinaryField(default=b"some text"),
),
migrations.AddField(
"Pony",
"empty",
models.BinaryField(default=b""),
),
            # If not properly quoted, digits would be interpreted as an int.
migrations.AddField(
"Pony",
"digits",
models.BinaryField(default=b"42"),
),
# Manual quoting is fragile and could trip on quotes. Refs #xyz.
migrations.AddField(
"Pony",
"quotes",
models.BinaryField(default=b'"\'"'),
),
])
Pony = new_state.apps.get_model("test_adbinfl", "Pony")
pony = Pony.objects.get(pk=pony.pk)
# SQLite returns buffer/memoryview, cast to bytes for checking.
self.assertEqual(bytes(pony.blob), b"some text")
self.assertEqual(bytes(pony.empty), b"")
self.assertEqual(bytes(pony.digits), b"42")
self.assertEqual(bytes(pony.quotes), b'"\'"')
def test_column_name_quoting(self):
"""
Column names that are SQL keywords shouldn't cause problems when used
in migrations (#22168).
"""
project_state = self.set_up_test_model("test_regr22168")
operation = migrations.AddField(
"Pony",
"order",
models.IntegerField(default=0),
)
new_state = project_state.clone()
operation.state_forwards("test_regr22168", new_state)
with connection.schema_editor() as editor:
operation.database_forwards("test_regr22168", editor, project_state, new_state)
self.assertColumnExists("test_regr22168_pony", "order")
def test_add_field_preserve_default(self):
"""
Tests the AddField operation's state alteration
when preserve_default = False.
"""
project_state = self.set_up_test_model("test_adflpd")
# Test the state alteration
operation = migrations.AddField(
"Pony",
"height",
models.FloatField(null=True, default=4),
preserve_default=False,
)
new_state = project_state.clone()
operation.state_forwards("test_adflpd", new_state)
self.assertEqual(len(new_state.models["test_adflpd", "pony"].fields), 4)
field = [
f for n, f in new_state.models["test_adflpd", "pony"].fields
if n == "height"
][0]
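        # With preserve_default=False the default is only used while adding the column and
        # isn't kept in the project state.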
self.assertEqual(field.default, NOT_PROVIDED)
# Test the database alteration
project_state.apps.get_model("test_adflpd", "pony").objects.create(
weight=4,
)
self.assertColumnNotExists("test_adflpd_pony", "height")
with connection.schema_editor() as editor:
operation.database_forwards("test_adflpd", editor, project_state, new_state)
self.assertColumnExists("test_adflpd_pony", "height")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AddField")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["field", "model_name", "name", "preserve_default"])
def test_add_field_m2m(self):
"""
Tests the AddField operation with a ManyToManyField.
"""
project_state = self.set_up_test_model("test_adflmm", second_model=True)
# Test the state alteration
operation = migrations.AddField("Pony", "stables", models.ManyToManyField("Stable", related_name="ponies"))
new_state = project_state.clone()
operation.state_forwards("test_adflmm", new_state)
self.assertEqual(len(new_state.models["test_adflmm", "pony"].fields), 4)
# Test the database alteration
self.assertTableNotExists("test_adflmm_pony_stables")
with connection.schema_editor() as editor:
operation.database_forwards("test_adflmm", editor, project_state, new_state)
self.assertTableExists("test_adflmm_pony_stables")
self.assertColumnNotExists("test_adflmm_pony", "stables")
# Make sure the M2M field actually works
with atomic():
Pony = new_state.apps.get_model("test_adflmm", "Pony")
p = Pony.objects.create(pink=False, weight=4.55)
p.stables.create()
self.assertEqual(p.stables.count(), 1)
p.stables.all().delete()
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_adflmm", editor, new_state, project_state)
self.assertTableNotExists("test_adflmm_pony_stables")
def test_alter_field_m2m(self):
project_state = self.set_up_test_model("test_alflmm", second_model=True)
project_state = self.apply_operations("test_alflmm", project_state, operations=[
migrations.AddField("Pony", "stables", models.ManyToManyField("Stable", related_name="ponies"))
])
Pony = project_state.apps.get_model("test_alflmm", "Pony")
self.assertFalse(Pony._meta.get_field('stables').blank)
project_state = self.apply_operations("test_alflmm", project_state, operations=[
migrations.AlterField(
"Pony", "stables", models.ManyToManyField(to="Stable", related_name="ponies", blank=True)
)
])
Pony = project_state.apps.get_model("test_alflmm", "Pony")
self.assertTrue(Pony._meta.get_field('stables').blank)
def test_repoint_field_m2m(self):
project_state = self.set_up_test_model("test_alflmm", second_model=True, third_model=True)
project_state = self.apply_operations("test_alflmm", project_state, operations=[
migrations.AddField("Pony", "places", models.ManyToManyField("Stable", related_name="ponies"))
])
Pony = project_state.apps.get_model("test_alflmm", "Pony")
project_state = self.apply_operations("test_alflmm", project_state, operations=[
migrations.AlterField("Pony", "places", models.ManyToManyField(to="Van", related_name="ponies"))
])
# Ensure the new field actually works
Pony = project_state.apps.get_model("test_alflmm", "Pony")
p = Pony.objects.create(pink=False, weight=4.55)
p.places.create()
self.assertEqual(p.places.count(), 1)
p.places.all().delete()
def test_remove_field_m2m(self):
project_state = self.set_up_test_model("test_rmflmm", second_model=True)
project_state = self.apply_operations("test_rmflmm", project_state, operations=[
migrations.AddField("Pony", "stables", models.ManyToManyField("Stable", related_name="ponies"))
])
self.assertTableExists("test_rmflmm_pony_stables")
with_field_state = project_state.clone()
operations = [migrations.RemoveField("Pony", "stables")]
project_state = self.apply_operations("test_rmflmm", project_state, operations=operations)
self.assertTableNotExists("test_rmflmm_pony_stables")
# And test reversal
self.unapply_operations("test_rmflmm", with_field_state, operations=operations)
self.assertTableExists("test_rmflmm_pony_stables")
def test_remove_field_m2m_with_through(self):
project_state = self.set_up_test_model("test_rmflmmwt", second_model=True)
self.assertTableNotExists("test_rmflmmwt_ponystables")
project_state = self.apply_operations("test_rmflmmwt", project_state, operations=[
migrations.CreateModel("PonyStables", fields=[
("pony", models.ForeignKey('test_rmflmmwt.Pony', models.CASCADE)),
("stable", models.ForeignKey('test_rmflmmwt.Stable', models.CASCADE)),
]),
migrations.AddField(
"Pony", "stables",
models.ManyToManyField("Stable", related_name="ponies", through='test_rmflmmwt.PonyStables')
)
])
self.assertTableExists("test_rmflmmwt_ponystables")
operations = [migrations.RemoveField("Pony", "stables"), migrations.DeleteModel("PonyStables")]
self.apply_operations("test_rmflmmwt", project_state, operations=operations)
def test_remove_field(self):
"""
Tests the RemoveField operation.
"""
project_state = self.set_up_test_model("test_rmfl")
# Test the state alteration
operation = migrations.RemoveField("Pony", "pink")
self.assertEqual(operation.describe(), "Remove field pink from Pony")
new_state = project_state.clone()
operation.state_forwards("test_rmfl", new_state)
self.assertEqual(len(new_state.models["test_rmfl", "pony"].fields), 2)
# Test the database alteration
self.assertColumnExists("test_rmfl_pony", "pink")
with connection.schema_editor() as editor:
operation.database_forwards("test_rmfl", editor, project_state, new_state)
self.assertColumnNotExists("test_rmfl_pony", "pink")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_rmfl", editor, new_state, project_state)
self.assertColumnExists("test_rmfl_pony", "pink")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "RemoveField")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'model_name': "Pony", 'name': 'pink'})
def test_remove_fk(self):
"""
Tests the RemoveField operation on a foreign key.
"""
project_state = self.set_up_test_model("test_rfk", related_model=True)
self.assertColumnExists("test_rfk_rider", "pony_id")
operation = migrations.RemoveField("Rider", "pony")
new_state = project_state.clone()
operation.state_forwards("test_rfk", new_state)
with connection.schema_editor() as editor:
operation.database_forwards("test_rfk", editor, project_state, new_state)
self.assertColumnNotExists("test_rfk_rider", "pony_id")
with connection.schema_editor() as editor:
operation.database_backwards("test_rfk", editor, new_state, project_state)
self.assertColumnExists("test_rfk_rider", "pony_id")
def test_alter_model_table(self):
"""
Tests the AlterModelTable operation.
"""
project_state = self.set_up_test_model("test_almota")
# Test the state alteration
operation = migrations.AlterModelTable("Pony", "test_almota_pony_2")
self.assertEqual(operation.describe(), "Rename table for Pony to test_almota_pony_2")
new_state = project_state.clone()
operation.state_forwards("test_almota", new_state)
self.assertEqual(new_state.models["test_almota", "pony"].options["db_table"], "test_almota_pony_2")
# Test the database alteration
self.assertTableExists("test_almota_pony")
self.assertTableNotExists("test_almota_pony_2")
with connection.schema_editor() as editor:
operation.database_forwards("test_almota", editor, project_state, new_state)
self.assertTableNotExists("test_almota_pony")
self.assertTableExists("test_almota_pony_2")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_almota", editor, new_state, project_state)
self.assertTableExists("test_almota_pony")
self.assertTableNotExists("test_almota_pony_2")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterModelTable")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'name': "Pony", 'table': "test_almota_pony_2"})
def test_alter_model_table_none(self):
"""
Tests the AlterModelTable operation if the table name is set to None.
"""
operation = migrations.AlterModelTable("Pony", None)
self.assertEqual(operation.describe(), "Rename table for Pony to (default)")
def test_alter_model_table_noop(self):
"""
Tests the AlterModelTable operation if the table name is not changed.
"""
project_state = self.set_up_test_model("test_almota")
# Test the state alteration
operation = migrations.AlterModelTable("Pony", "test_almota_pony")
new_state = project_state.clone()
operation.state_forwards("test_almota", new_state)
self.assertEqual(new_state.models["test_almota", "pony"].options["db_table"], "test_almota_pony")
# Test the database alteration
self.assertTableExists("test_almota_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_almota", editor, project_state, new_state)
self.assertTableExists("test_almota_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_almota", editor, new_state, project_state)
self.assertTableExists("test_almota_pony")
def test_alter_model_table_m2m(self):
"""
AlterModelTable should rename auto-generated M2M tables.
"""
app_label = "test_talflmltlm2m"
pony_db_table = 'pony_foo'
project_state = self.set_up_test_model(app_label, second_model=True, db_table=pony_db_table)
# Add the M2M field
first_state = project_state.clone()
operation = migrations.AddField("Pony", "stables", models.ManyToManyField("Stable"))
operation.state_forwards(app_label, first_state)
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, first_state)
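        # The auto-created M2M table name is derived from Pony's custom db_table
        # ('pony_foo_stables'), not from the default app_label-based name.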
original_m2m_table = "%s_%s" % (pony_db_table, "stables")
new_m2m_table = "%s_%s" % (app_label, "pony_stables")
self.assertTableExists(original_m2m_table)
self.assertTableNotExists(new_m2m_table)
# Rename the Pony db_table which should also rename the m2m table.
second_state = first_state.clone()
operation = migrations.AlterModelTable(name='pony', table=None)
operation.state_forwards(app_label, second_state)
atomic_rename = connection.features.supports_atomic_references_rename
with connection.schema_editor(atomic=atomic_rename) as editor:
operation.database_forwards(app_label, editor, first_state, second_state)
self.assertTableExists(new_m2m_table)
self.assertTableNotExists(original_m2m_table)
# And test reversal
with connection.schema_editor(atomic=atomic_rename) as editor:
operation.database_backwards(app_label, editor, second_state, first_state)
self.assertTableExists(original_m2m_table)
self.assertTableNotExists(new_m2m_table)
def test_alter_field(self):
"""
Tests the AlterField operation.
"""
project_state = self.set_up_test_model("test_alfl")
# Test the state alteration
operation = migrations.AlterField("Pony", "pink", models.IntegerField(null=True))
self.assertEqual(operation.describe(), "Alter field pink on Pony")
new_state = project_state.clone()
operation.state_forwards("test_alfl", new_state)
self.assertIs(project_state.models["test_alfl", "pony"].get_field_by_name("pink").null, False)
self.assertIs(new_state.models["test_alfl", "pony"].get_field_by_name("pink").null, True)
# Test the database alteration
self.assertColumnNotNull("test_alfl_pony", "pink")
with connection.schema_editor() as editor:
operation.database_forwards("test_alfl", editor, project_state, new_state)
self.assertColumnNull("test_alfl_pony", "pink")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_alfl", editor, new_state, project_state)
self.assertColumnNotNull("test_alfl_pony", "pink")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterField")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["field", "model_name", "name"])
def test_alter_field_pk(self):
"""
        Tests the AlterField operation on primary keys (for things like PostgreSQL's SERIAL behavior).
"""
project_state = self.set_up_test_model("test_alflpk")
# Test the state alteration
operation = migrations.AlterField("Pony", "id", models.IntegerField(primary_key=True))
new_state = project_state.clone()
operation.state_forwards("test_alflpk", new_state)
self.assertIsInstance(project_state.models["test_alflpk", "pony"].get_field_by_name("id"), models.AutoField)
self.assertIsInstance(new_state.models["test_alflpk", "pony"].get_field_by_name("id"), models.IntegerField)
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_alflpk", editor, project_state, new_state)
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_alflpk", editor, new_state, project_state)
@skipUnlessDBFeature('supports_foreign_keys')
def test_alter_field_pk_fk(self):
"""
        Tests that altering a primary key with AlterField also changes any FKs pointing to it.
"""
project_state = self.set_up_test_model("test_alflpkfk", related_model=True)
# Test the state alteration
operation = migrations.AlterField("Pony", "id", models.FloatField(primary_key=True))
new_state = project_state.clone()
operation.state_forwards("test_alflpkfk", new_state)
self.assertIsInstance(project_state.models["test_alflpkfk", "pony"].get_field_by_name("id"), models.AutoField)
self.assertIsInstance(new_state.models["test_alflpkfk", "pony"].get_field_by_name("id"), models.FloatField)
def assertIdTypeEqualsFkType():
with connection.cursor() as cursor:
id_type, id_null = [
(c.type_code, c.null_ok)
for c in connection.introspection.get_table_description(cursor, "test_alflpkfk_pony")
if c.name == "id"
][0]
fk_type, fk_null = [
(c.type_code, c.null_ok)
for c in connection.introspection.get_table_description(cursor, "test_alflpkfk_rider")
if c.name == "pony_id"
][0]
self.assertEqual(id_type, fk_type)
self.assertEqual(id_null, fk_null)
assertIdTypeEqualsFkType()
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_alflpkfk", editor, project_state, new_state)
assertIdTypeEqualsFkType()
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_alflpkfk", editor, new_state, project_state)
assertIdTypeEqualsFkType()
def test_alter_field_reloads_state_on_fk_target_changes(self):
"""
If AlterField doesn't reload state appropriately, the second AlterField
crashes on MySQL due to not dropping the PonyRider.pony foreign key
constraint before modifying the column.
"""
app_label = 'alter_alter_field_reloads_state_on_fk_target_changes'
project_state = self.apply_operations(app_label, ProjectState(), operations=[
migrations.CreateModel('Rider', fields=[
('id', models.CharField(primary_key=True, max_length=100)),
]),
migrations.CreateModel('Pony', fields=[
('id', models.CharField(primary_key=True, max_length=100)),
('rider', models.ForeignKey('%s.Rider' % app_label, models.CASCADE)),
]),
migrations.CreateModel('PonyRider', fields=[
('id', models.AutoField(primary_key=True)),
('pony', models.ForeignKey('%s.Pony' % app_label, models.CASCADE)),
]),
])
project_state = self.apply_operations(app_label, project_state, operations=[
migrations.AlterField('Rider', 'id', models.CharField(primary_key=True, max_length=99)),
migrations.AlterField('Pony', 'id', models.CharField(primary_key=True, max_length=99)),
])
def test_alter_field_reloads_state_on_fk_with_to_field_target_changes(self):
"""
If AlterField doesn't reload state appropriately, the second AlterField
crashes on MySQL due to not dropping the PonyRider.pony foreign key
constraint before modifying the column.
"""
app_label = 'alter_alter_field_reloads_state_on_fk_with_to_field_target_changes'
project_state = self.apply_operations(app_label, ProjectState(), operations=[
migrations.CreateModel('Rider', fields=[
('id', models.CharField(primary_key=True, max_length=100)),
('slug', models.CharField(unique=True, max_length=100)),
]),
migrations.CreateModel('Pony', fields=[
('id', models.CharField(primary_key=True, max_length=100)),
('rider', models.ForeignKey('%s.Rider' % app_label, models.CASCADE, to_field='slug')),
('slug', models.CharField(unique=True, max_length=100)),
]),
migrations.CreateModel('PonyRider', fields=[
('id', models.AutoField(primary_key=True)),
('pony', models.ForeignKey('%s.Pony' % app_label, models.CASCADE, to_field='slug')),
]),
])
project_state = self.apply_operations(app_label, project_state, operations=[
migrations.AlterField('Rider', 'slug', models.CharField(unique=True, max_length=99)),
migrations.AlterField('Pony', 'slug', models.CharField(unique=True, max_length=99)),
])
def test_rename_field_reloads_state_on_fk_target_changes(self):
"""
If RenameField doesn't reload state appropriately, the AlterField
crashes on MySQL due to not dropping the PonyRider.pony foreign key
constraint before modifying the column.
"""
app_label = 'alter_rename_field_reloads_state_on_fk_target_changes'
project_state = self.apply_operations(app_label, ProjectState(), operations=[
migrations.CreateModel('Rider', fields=[
('id', models.CharField(primary_key=True, max_length=100)),
]),
migrations.CreateModel('Pony', fields=[
('id', models.CharField(primary_key=True, max_length=100)),
('rider', models.ForeignKey('%s.Rider' % app_label, models.CASCADE)),
]),
migrations.CreateModel('PonyRider', fields=[
('id', models.AutoField(primary_key=True)),
('pony', models.ForeignKey('%s.Pony' % app_label, models.CASCADE)),
]),
])
project_state = self.apply_operations(app_label, project_state, operations=[
migrations.RenameField('Rider', 'id', 'id2'),
migrations.AlterField('Pony', 'id', models.CharField(primary_key=True, max_length=99)),
], atomic=connection.features.supports_atomic_references_rename)
def test_rename_field(self):
"""
Tests the RenameField operation.
"""
project_state = self.set_up_test_model("test_rnfl", unique_together=True, index_together=True)
# Test the state alteration
operation = migrations.RenameField("Pony", "pink", "blue")
self.assertEqual(operation.describe(), "Rename field pink on Pony to blue")
new_state = project_state.clone()
operation.state_forwards("test_rnfl", new_state)
self.assertIn("blue", [n for n, f in new_state.models["test_rnfl", "pony"].fields])
self.assertNotIn("pink", [n for n, f in new_state.models["test_rnfl", "pony"].fields])
# Make sure the unique_together has the renamed column too
self.assertIn("blue", new_state.models["test_rnfl", "pony"].options['unique_together'][0])
self.assertNotIn("pink", new_state.models["test_rnfl", "pony"].options['unique_together'][0])
# Make sure the index_together has the renamed column too
self.assertIn("blue", new_state.models["test_rnfl", "pony"].options['index_together'][0])
self.assertNotIn("pink", new_state.models["test_rnfl", "pony"].options['index_together'][0])
# Test the database alteration
self.assertColumnExists("test_rnfl_pony", "pink")
self.assertColumnNotExists("test_rnfl_pony", "blue")
with connection.schema_editor() as editor:
operation.database_forwards("test_rnfl", editor, project_state, new_state)
self.assertColumnExists("test_rnfl_pony", "blue")
self.assertColumnNotExists("test_rnfl_pony", "pink")
# Ensure the unique constraint has been ported over
with connection.cursor() as cursor:
cursor.execute("INSERT INTO test_rnfl_pony (blue, weight) VALUES (1, 1)")
with self.assertRaises(IntegrityError):
with atomic():
cursor.execute("INSERT INTO test_rnfl_pony (blue, weight) VALUES (1, 1)")
cursor.execute("DELETE FROM test_rnfl_pony")
# Ensure the index constraint has been ported over
self.assertIndexExists("test_rnfl_pony", ["weight", "blue"])
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_rnfl", editor, new_state, project_state)
self.assertColumnExists("test_rnfl_pony", "pink")
self.assertColumnNotExists("test_rnfl_pony", "blue")
# Ensure the index constraint has been reset
self.assertIndexExists("test_rnfl_pony", ["weight", "pink"])
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "RenameField")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'model_name': "Pony", 'old_name': "pink", 'new_name': "blue"})
def test_rename_missing_field(self):
state = ProjectState()
state.add_model(ModelState('app', 'model', []))
with self.assertRaisesMessage(FieldDoesNotExist, "app.model has no field named 'field'"):
migrations.RenameField('model', 'field', 'new_field').state_forwards('app', state)
def test_rename_referenced_field_state_forward(self):
state = ProjectState()
state.add_model(ModelState('app', 'Model', [
('id', models.AutoField(primary_key=True)),
('field', models.IntegerField(unique=True)),
]))
state.add_model(ModelState('app', 'OtherModel', [
('id', models.AutoField(primary_key=True)),
('fk', models.ForeignKey('Model', models.CASCADE, to_field='field')),
('fo', models.ForeignObject('Model', models.CASCADE, from_fields=('fk',), to_fields=('field',))),
]))
operation = migrations.RenameField('Model', 'field', 'renamed')
new_state = state.clone()
operation.state_forwards('app', new_state)
self.assertEqual(new_state.models['app', 'othermodel'].fields[1][1].remote_field.field_name, 'renamed')
self.assertEqual(new_state.models['app', 'othermodel'].fields[1][1].from_fields, ['self'])
self.assertEqual(new_state.models['app', 'othermodel'].fields[1][1].to_fields, ('renamed',))
self.assertEqual(new_state.models['app', 'othermodel'].fields[2][1].from_fields, ('fk',))
self.assertEqual(new_state.models['app', 'othermodel'].fields[2][1].to_fields, ('renamed',))
operation = migrations.RenameField('OtherModel', 'fk', 'renamed_fk')
new_state = state.clone()
operation.state_forwards('app', new_state)
self.assertEqual(new_state.models['app', 'othermodel'].fields[1][1].remote_field.field_name, 'renamed')
self.assertEqual(new_state.models['app', 'othermodel'].fields[1][1].from_fields, ('self',))
self.assertEqual(new_state.models['app', 'othermodel'].fields[1][1].to_fields, ('renamed',))
self.assertEqual(new_state.models['app', 'othermodel'].fields[2][1].from_fields, ('renamed_fk',))
self.assertEqual(new_state.models['app', 'othermodel'].fields[2][1].to_fields, ('renamed',))
def test_alter_unique_together(self):
"""
Tests the AlterUniqueTogether operation.
"""
project_state = self.set_up_test_model("test_alunto")
# Test the state alteration
operation = migrations.AlterUniqueTogether("Pony", [("pink", "weight")])
self.assertEqual(operation.describe(), "Alter unique_together for Pony (1 constraint(s))")
new_state = project_state.clone()
operation.state_forwards("test_alunto", new_state)
self.assertEqual(len(project_state.models["test_alunto", "pony"].options.get("unique_together", set())), 0)
self.assertEqual(len(new_state.models["test_alunto", "pony"].options.get("unique_together", set())), 1)
# Make sure we can insert duplicate rows
with connection.cursor() as cursor:
cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
cursor.execute("DELETE FROM test_alunto_pony")
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_alunto", editor, project_state, new_state)
cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
with self.assertRaises(IntegrityError):
with atomic():
cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
cursor.execute("DELETE FROM test_alunto_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_alunto", editor, new_state, project_state)
cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
cursor.execute("DELETE FROM test_alunto_pony")
# Test flat unique_together
operation = migrations.AlterUniqueTogether("Pony", ("pink", "weight"))
operation.state_forwards("test_alunto", new_state)
self.assertEqual(len(new_state.models["test_alunto", "pony"].options.get("unique_together", set())), 1)
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterUniqueTogether")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'name': "Pony", 'unique_together': {("pink", "weight")}})
def test_alter_unique_together_remove(self):
operation = migrations.AlterUniqueTogether("Pony", None)
self.assertEqual(operation.describe(), "Alter unique_together for Pony (0 constraint(s))")
def test_add_index(self):
"""
Test the AddIndex operation.
"""
project_state = self.set_up_test_model("test_adin")
msg = (
"Indexes passed to AddIndex operations require a name argument. "
"<Index: fields='pink'> doesn't have one."
)
with self.assertRaisesMessage(ValueError, msg):
migrations.AddIndex("Pony", models.Index(fields=["pink"]))
index = models.Index(fields=["pink"], name="test_adin_pony_pink_idx")
operation = migrations.AddIndex("Pony", index)
self.assertEqual(operation.describe(), "Create index test_adin_pony_pink_idx on field(s) pink of model Pony")
new_state = project_state.clone()
operation.state_forwards("test_adin", new_state)
# Test the database alteration
self.assertEqual(len(new_state.models["test_adin", "pony"].options['indexes']), 1)
self.assertIndexNotExists("test_adin_pony", ["pink"])
with connection.schema_editor() as editor:
operation.database_forwards("test_adin", editor, project_state, new_state)
self.assertIndexExists("test_adin_pony", ["pink"])
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_adin", editor, new_state, project_state)
self.assertIndexNotExists("test_adin_pony", ["pink"])
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AddIndex")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'model_name': "Pony", 'index': index})
def test_remove_index(self):
"""
Test the RemoveIndex operation.
"""
project_state = self.set_up_test_model("test_rmin", multicol_index=True)
self.assertTableExists("test_rmin_pony")
self.assertIndexExists("test_rmin_pony", ["pink", "weight"])
operation = migrations.RemoveIndex("Pony", "pony_test_idx")
self.assertEqual(operation.describe(), "Remove index pony_test_idx from Pony")
new_state = project_state.clone()
operation.state_forwards("test_rmin", new_state)
# Test the state alteration
self.assertEqual(len(new_state.models["test_rmin", "pony"].options['indexes']), 0)
self.assertIndexExists("test_rmin_pony", ["pink", "weight"])
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_rmin", editor, project_state, new_state)
self.assertIndexNotExists("test_rmin_pony", ["pink", "weight"])
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_rmin", editor, new_state, project_state)
self.assertIndexExists("test_rmin_pony", ["pink", "weight"])
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "RemoveIndex")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'model_name': "Pony", 'name': "pony_test_idx"})
        # Also test dropping a field that is part of an index (sqlite table remake issue).
operations = [
migrations.RemoveIndex("Pony", "pony_test_idx"),
migrations.RemoveField("Pony", "pink"),
]
self.assertColumnExists("test_rmin_pony", "pink")
self.assertIndexExists("test_rmin_pony", ["pink", "weight"])
# Test database alteration
new_state = project_state.clone()
self.apply_operations('test_rmin', new_state, operations=operations)
self.assertColumnNotExists("test_rmin_pony", "pink")
self.assertIndexNotExists("test_rmin_pony", ["pink", "weight"])
# And test reversal
self.unapply_operations("test_rmin", project_state, operations=operations)
self.assertIndexExists("test_rmin_pony", ["pink", "weight"])
def test_add_index_state_forwards(self):
project_state = self.set_up_test_model('test_adinsf')
index = models.Index(fields=['pink'], name='test_adinsf_pony_pink_idx')
old_model = project_state.apps.get_model('test_adinsf', 'Pony')
new_state = project_state.clone()
operation = migrations.AddIndex('Pony', index)
operation.state_forwards('test_adinsf', new_state)
new_model = new_state.apps.get_model('test_adinsf', 'Pony')
self.assertIsNot(old_model, new_model)
def test_remove_index_state_forwards(self):
project_state = self.set_up_test_model('test_rminsf')
index = models.Index(fields=['pink'], name='test_rminsf_pony_pink_idx')
migrations.AddIndex('Pony', index).state_forwards('test_rminsf', project_state)
old_model = project_state.apps.get_model('test_rminsf', 'Pony')
new_state = project_state.clone()
operation = migrations.RemoveIndex('Pony', 'test_rminsf_pony_pink_idx')
operation.state_forwards('test_rminsf', new_state)
new_model = new_state.apps.get_model('test_rminsf', 'Pony')
self.assertIsNot(old_model, new_model)
def test_alter_field_with_index(self):
"""
Test AlterField operation with an index to ensure indexes created via
Meta.indexes don't get dropped with sqlite3 remake.
"""
project_state = self.set_up_test_model("test_alflin", index=True)
operation = migrations.AlterField("Pony", "pink", models.IntegerField(null=True))
new_state = project_state.clone()
operation.state_forwards("test_alflin", new_state)
# Test the database alteration
self.assertColumnNotNull("test_alflin_pony", "pink")
with connection.schema_editor() as editor:
operation.database_forwards("test_alflin", editor, project_state, new_state)
# Index hasn't been dropped
self.assertIndexExists("test_alflin_pony", ["pink"])
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_alflin", editor, new_state, project_state)
# Ensure the index is still there
self.assertIndexExists("test_alflin_pony", ["pink"])
def test_alter_index_together(self):
"""
Tests the AlterIndexTogether operation.
"""
project_state = self.set_up_test_model("test_alinto")
# Test the state alteration
operation = migrations.AlterIndexTogether("Pony", [("pink", "weight")])
self.assertEqual(operation.describe(), "Alter index_together for Pony (1 constraint(s))")
new_state = project_state.clone()
operation.state_forwards("test_alinto", new_state)
self.assertEqual(len(project_state.models["test_alinto", "pony"].options.get("index_together", set())), 0)
self.assertEqual(len(new_state.models["test_alinto", "pony"].options.get("index_together", set())), 1)
# Make sure there's no matching index
self.assertIndexNotExists("test_alinto_pony", ["pink", "weight"])
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_alinto", editor, project_state, new_state)
self.assertIndexExists("test_alinto_pony", ["pink", "weight"])
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_alinto", editor, new_state, project_state)
self.assertIndexNotExists("test_alinto_pony", ["pink", "weight"])
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterIndexTogether")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'name': "Pony", 'index_together': {("pink", "weight")}})
def test_alter_index_together_remove(self):
operation = migrations.AlterIndexTogether("Pony", None)
self.assertEqual(operation.describe(), "Alter index_together for Pony (0 constraint(s))")
@skipUnlessDBFeature('supports_table_check_constraints')
def test_add_constraint(self):
project_state = self.set_up_test_model("test_addconstraint")
gt_check = models.Q(pink__gt=2)
gt_constraint = models.CheckConstraint(check=gt_check, name="test_add_constraint_pony_pink_gt_2")
gt_operation = migrations.AddConstraint("Pony", gt_constraint)
self.assertEqual(
gt_operation.describe(), "Create constraint test_add_constraint_pony_pink_gt_2 on model Pony"
)
# Test the state alteration
new_state = project_state.clone()
gt_operation.state_forwards("test_addconstraint", new_state)
self.assertEqual(len(new_state.models["test_addconstraint", "pony"].options["constraints"]), 1)
Pony = new_state.apps.get_model("test_addconstraint", "Pony")
self.assertEqual(len(Pony._meta.constraints), 1)
# Test the database alteration
with connection.schema_editor() as editor:
gt_operation.database_forwards("test_addconstraint", editor, project_state, new_state)
with self.assertRaises(IntegrityError), transaction.atomic():
Pony.objects.create(pink=1, weight=1.0)
# Add another one.
lt_check = models.Q(pink__lt=100)
lt_constraint = models.CheckConstraint(check=lt_check, name="test_add_constraint_pony_pink_lt_100")
lt_operation = migrations.AddConstraint("Pony", lt_constraint)
lt_operation.state_forwards("test_addconstraint", new_state)
self.assertEqual(len(new_state.models["test_addconstraint", "pony"].options["constraints"]), 2)
Pony = new_state.apps.get_model("test_addconstraint", "Pony")
self.assertEqual(len(Pony._meta.constraints), 2)
with connection.schema_editor() as editor:
lt_operation.database_forwards("test_addconstraint", editor, project_state, new_state)
with self.assertRaises(IntegrityError), transaction.atomic():
Pony.objects.create(pink=100, weight=1.0)
# Test reversal
with connection.schema_editor() as editor:
gt_operation.database_backwards("test_addconstraint", editor, new_state, project_state)
Pony.objects.create(pink=1, weight=1.0)
# Test deconstruction
definition = gt_operation.deconstruct()
self.assertEqual(definition[0], "AddConstraint")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'model_name': "Pony", 'constraint': gt_constraint})
@skipUnlessDBFeature('supports_table_check_constraints')
def test_remove_constraint(self):
project_state = self.set_up_test_model("test_removeconstraint", constraints=[
models.CheckConstraint(check=models.Q(pink__gt=2), name="test_remove_constraint_pony_pink_gt_2"),
models.CheckConstraint(check=models.Q(pink__lt=100), name="test_remove_constraint_pony_pink_lt_100"),
])
gt_operation = migrations.RemoveConstraint("Pony", "test_remove_constraint_pony_pink_gt_2")
self.assertEqual(
gt_operation.describe(), "Remove constraint test_remove_constraint_pony_pink_gt_2 from model Pony"
)
# Test state alteration
new_state = project_state.clone()
gt_operation.state_forwards("test_removeconstraint", new_state)
self.assertEqual(len(new_state.models["test_removeconstraint", "pony"].options['constraints']), 1)
Pony = new_state.apps.get_model("test_removeconstraint", "Pony")
self.assertEqual(len(Pony._meta.constraints), 1)
# Test database alteration
with connection.schema_editor() as editor:
gt_operation.database_forwards("test_removeconstraint", editor, project_state, new_state)
Pony.objects.create(pink=1, weight=1.0).delete()
with self.assertRaises(IntegrityError), transaction.atomic():
Pony.objects.create(pink=100, weight=1.0)
# Remove the other one.
lt_operation = migrations.RemoveConstraint("Pony", "test_remove_constraint_pony_pink_lt_100")
lt_operation.state_forwards("test_removeconstraint", new_state)
self.assertEqual(len(new_state.models["test_removeconstraint", "pony"].options['constraints']), 0)
Pony = new_state.apps.get_model("test_removeconstraint", "Pony")
self.assertEqual(len(Pony._meta.constraints), 0)
with connection.schema_editor() as editor:
lt_operation.database_forwards("test_removeconstraint", editor, project_state, new_state)
Pony.objects.create(pink=100, weight=1.0).delete()
# Test reversal
with connection.schema_editor() as editor:
gt_operation.database_backwards("test_removeconstraint", editor, new_state, project_state)
with self.assertRaises(IntegrityError), transaction.atomic():
Pony.objects.create(pink=1, weight=1.0)
# Test deconstruction
definition = gt_operation.deconstruct()
self.assertEqual(definition[0], "RemoveConstraint")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'model_name': "Pony", 'name': "test_remove_constraint_pony_pink_gt_2"})
def test_add_partial_unique_constraint(self):
project_state = self.set_up_test_model('test_addpartialuniqueconstraint')
partial_unique_constraint = models.UniqueConstraint(
fields=['pink'],
condition=models.Q(weight__gt=5),
name='test_constraint_pony_pink_for_weight_gt_5_uniq',
)
operation = migrations.AddConstraint('Pony', partial_unique_constraint)
self.assertEqual(
operation.describe(),
'Create constraint test_constraint_pony_pink_for_weight_gt_5_uniq '
'on model Pony'
)
# Test the state alteration
new_state = project_state.clone()
operation.state_forwards('test_addpartialuniqueconstraint', new_state)
self.assertEqual(len(new_state.models['test_addpartialuniqueconstraint', 'pony'].options['constraints']), 1)
Pony = new_state.apps.get_model('test_addpartialuniqueconstraint', 'Pony')
self.assertEqual(len(Pony._meta.constraints), 1)
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards('test_addpartialuniqueconstraint', editor, project_state, new_state)
# Test constraint works
Pony.objects.create(pink=1, weight=4.0)
Pony.objects.create(pink=1, weight=4.0)
Pony.objects.create(pink=1, weight=6.0)
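        # On backends without partial index support the conditional constraint is
        # never created, so the duplicate pink=1 row with weight > 5 is allowed.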
if connection.features.supports_partial_indexes:
with self.assertRaises(IntegrityError), transaction.atomic():
Pony.objects.create(pink=1, weight=7.0)
else:
Pony.objects.create(pink=1, weight=7.0)
# Test reversal
with connection.schema_editor() as editor:
operation.database_backwards('test_addpartialuniqueconstraint', editor, new_state, project_state)
# Test constraint doesn't work
Pony.objects.create(pink=1, weight=7.0)
# Test deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], 'AddConstraint')
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'model_name': 'Pony', 'constraint': partial_unique_constraint})
def test_remove_partial_unique_constraint(self):
project_state = self.set_up_test_model('test_removepartialuniqueconstraint', constraints=[
models.UniqueConstraint(
fields=['pink'],
condition=models.Q(weight__gt=5),
name='test_constraint_pony_pink_for_weight_gt_5_uniq',
),
])
gt_operation = migrations.RemoveConstraint('Pony', 'test_constraint_pony_pink_for_weight_gt_5_uniq')
self.assertEqual(
gt_operation.describe(), 'Remove constraint test_constraint_pony_pink_for_weight_gt_5_uniq from model Pony'
)
# Test state alteration
new_state = project_state.clone()
gt_operation.state_forwards('test_removepartialuniqueconstraint', new_state)
self.assertEqual(len(new_state.models['test_removepartialuniqueconstraint', 'pony'].options['constraints']), 0)
Pony = new_state.apps.get_model('test_removepartialuniqueconstraint', 'Pony')
self.assertEqual(len(Pony._meta.constraints), 0)
# Test database alteration
with connection.schema_editor() as editor:
gt_operation.database_forwards('test_removepartialuniqueconstraint', editor, project_state, new_state)
# Test constraint doesn't work
Pony.objects.create(pink=1, weight=4.0)
Pony.objects.create(pink=1, weight=4.0)
Pony.objects.create(pink=1, weight=6.0)
Pony.objects.create(pink=1, weight=7.0).delete()
# Test reversal
with connection.schema_editor() as editor:
gt_operation.database_backwards('test_removepartialuniqueconstraint', editor, new_state, project_state)
# Test constraint works
if connection.features.supports_partial_indexes:
with self.assertRaises(IntegrityError), transaction.atomic():
Pony.objects.create(pink=1, weight=7.0)
else:
Pony.objects.create(pink=1, weight=7.0)
# Test deconstruction
definition = gt_operation.deconstruct()
self.assertEqual(definition[0], 'RemoveConstraint')
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {
'model_name': 'Pony',
'name': 'test_constraint_pony_pink_for_weight_gt_5_uniq',
})
def test_alter_model_options(self):
"""
Tests the AlterModelOptions operation.
"""
project_state = self.set_up_test_model("test_almoop")
# Test the state alteration (no DB alteration to test)
operation = migrations.AlterModelOptions("Pony", {"permissions": [("can_groom", "Can groom")]})
self.assertEqual(operation.describe(), "Change Meta options on Pony")
new_state = project_state.clone()
operation.state_forwards("test_almoop", new_state)
self.assertEqual(len(project_state.models["test_almoop", "pony"].options.get("permissions", [])), 0)
self.assertEqual(len(new_state.models["test_almoop", "pony"].options.get("permissions", [])), 1)
self.assertEqual(new_state.models["test_almoop", "pony"].options["permissions"][0][0], "can_groom")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterModelOptions")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'name': "Pony", 'options': {"permissions": [("can_groom", "Can groom")]}})
def test_alter_model_options_emptying(self):
"""
The AlterModelOptions operation removes keys from the dict (#23121)
"""
project_state = self.set_up_test_model("test_almoop", options=True)
# Test the state alteration (no DB alteration to test)
operation = migrations.AlterModelOptions("Pony", {})
self.assertEqual(operation.describe(), "Change Meta options on Pony")
new_state = project_state.clone()
operation.state_forwards("test_almoop", new_state)
self.assertEqual(len(project_state.models["test_almoop", "pony"].options.get("permissions", [])), 1)
self.assertEqual(len(new_state.models["test_almoop", "pony"].options.get("permissions", [])), 0)
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterModelOptions")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'name': "Pony", 'options': {}})
def test_alter_order_with_respect_to(self):
"""
Tests the AlterOrderWithRespectTo operation.
"""
project_state = self.set_up_test_model("test_alorwrtto", related_model=True)
# Test the state alteration
operation = migrations.AlterOrderWithRespectTo("Rider", "pony")
self.assertEqual(operation.describe(), "Set order_with_respect_to on Rider to pony")
new_state = project_state.clone()
operation.state_forwards("test_alorwrtto", new_state)
self.assertIsNone(
project_state.models["test_alorwrtto", "rider"].options.get("order_with_respect_to", None)
)
self.assertEqual(
new_state.models["test_alorwrtto", "rider"].options.get("order_with_respect_to", None),
"pony"
)
        # Make sure the _order column doesn't exist yet
self.assertColumnNotExists("test_alorwrtto_rider", "_order")
# Create some rows before alteration
rendered_state = project_state.apps
pony = rendered_state.get_model("test_alorwrtto", "Pony").objects.create(weight=50)
rendered_state.get_model("test_alorwrtto", "Rider").objects.create(pony=pony, friend_id=1)
rendered_state.get_model("test_alorwrtto", "Rider").objects.create(pony=pony, friend_id=2)
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_alorwrtto", editor, project_state, new_state)
self.assertColumnExists("test_alorwrtto_rider", "_order")
# Check for correct value in rows
updated_riders = new_state.apps.get_model("test_alorwrtto", "Rider").objects.all()
self.assertEqual(updated_riders[0]._order, 0)
self.assertEqual(updated_riders[1]._order, 0)
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_alorwrtto", editor, new_state, project_state)
self.assertColumnNotExists("test_alorwrtto_rider", "_order")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterOrderWithRespectTo")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {'name': "Rider", 'order_with_respect_to': "pony"})
def test_alter_model_managers(self):
"""
The managers on a model are set.
"""
project_state = self.set_up_test_model("test_almoma")
# Test the state alteration
operation = migrations.AlterModelManagers(
"Pony",
managers=[
("food_qs", FoodQuerySet.as_manager()),
("food_mgr", FoodManager("a", "b")),
("food_mgr_kwargs", FoodManager("x", "y", 3, 4)),
]
)
self.assertEqual(operation.describe(), "Change managers on Pony")
managers = project_state.models["test_almoma", "pony"].managers
self.assertEqual(managers, [])
new_state = project_state.clone()
operation.state_forwards("test_almoma", new_state)
self.assertIn(("test_almoma", "pony"), new_state.models)
managers = new_state.models["test_almoma", "pony"].managers
self.assertEqual(managers[0][0], "food_qs")
self.assertIsInstance(managers[0][1], models.Manager)
self.assertEqual(managers[1][0], "food_mgr")
self.assertIsInstance(managers[1][1], FoodManager)
self.assertEqual(managers[1][1].args, ("a", "b", 1, 2))
self.assertEqual(managers[2][0], "food_mgr_kwargs")
self.assertIsInstance(managers[2][1], FoodManager)
self.assertEqual(managers[2][1].args, ("x", "y", 3, 4))
rendered_state = new_state.apps
model = rendered_state.get_model('test_almoma', 'pony')
self.assertIsInstance(model.food_qs, models.Manager)
self.assertIsInstance(model.food_mgr, FoodManager)
self.assertIsInstance(model.food_mgr_kwargs, FoodManager)
def test_alter_model_managers_emptying(self):
"""
The managers on a model are set.
"""
project_state = self.set_up_test_model("test_almomae", manager_model=True)
# Test the state alteration
operation = migrations.AlterModelManagers("Food", managers=[])
self.assertEqual(operation.describe(), "Change managers on Food")
self.assertIn(("test_almomae", "food"), project_state.models)
managers = project_state.models["test_almomae", "food"].managers
self.assertEqual(managers[0][0], "food_qs")
self.assertIsInstance(managers[0][1], models.Manager)
self.assertEqual(managers[1][0], "food_mgr")
self.assertIsInstance(managers[1][1], FoodManager)
self.assertEqual(managers[1][1].args, ("a", "b", 1, 2))
self.assertEqual(managers[2][0], "food_mgr_kwargs")
self.assertIsInstance(managers[2][1], FoodManager)
self.assertEqual(managers[2][1].args, ("x", "y", 3, 4))
new_state = project_state.clone()
operation.state_forwards("test_almomae", new_state)
managers = new_state.models["test_almomae", "food"].managers
self.assertEqual(managers, [])
def test_alter_fk(self):
"""
Creating and then altering an FK works correctly
and deals with the pending SQL (#23091)
"""
project_state = self.set_up_test_model("test_alfk")
# Test adding and then altering the FK in one go
create_operation = migrations.CreateModel(
name="Rider",
fields=[
("id", models.AutoField(primary_key=True)),
("pony", models.ForeignKey("Pony", models.CASCADE)),
],
)
create_state = project_state.clone()
create_operation.state_forwards("test_alfk", create_state)
alter_operation = migrations.AlterField(
model_name='Rider',
name='pony',
field=models.ForeignKey("Pony", models.CASCADE, editable=False),
)
alter_state = create_state.clone()
alter_operation.state_forwards("test_alfk", alter_state)
with connection.schema_editor() as editor:
create_operation.database_forwards("test_alfk", editor, project_state, create_state)
alter_operation.database_forwards("test_alfk", editor, create_state, alter_state)
def test_alter_fk_non_fk(self):
"""
Altering an FK to a non-FK works (#23244)
"""
# Test the state alteration
operation = migrations.AlterField(
model_name="Rider",
name="pony",
field=models.FloatField(),
)
project_state, new_state = self.make_test_state("test_afknfk", operation, related_model=True)
# Test the database alteration
self.assertColumnExists("test_afknfk_rider", "pony_id")
self.assertColumnNotExists("test_afknfk_rider", "pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_afknfk", editor, project_state, new_state)
self.assertColumnExists("test_afknfk_rider", "pony")
self.assertColumnNotExists("test_afknfk_rider", "pony_id")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_afknfk", editor, new_state, project_state)
self.assertColumnExists("test_afknfk_rider", "pony_id")
self.assertColumnNotExists("test_afknfk_rider", "pony")
def test_run_sql(self):
"""
Tests the RunSQL operation.
"""
project_state = self.set_up_test_model("test_runsql")
# Create the operation
operation = migrations.RunSQL(
            # Use a multi-line string with a comment to test statement splitting
            # on SQLite and MySQL.
"CREATE TABLE i_love_ponies (id int, special_thing varchar(15));\n"
"INSERT INTO i_love_ponies (id, special_thing) VALUES (1, 'i love ponies'); -- this is magic!\n"
"INSERT INTO i_love_ponies (id, special_thing) VALUES (2, 'i love django');\n"
"UPDATE i_love_ponies SET special_thing = 'Ponies' WHERE special_thing LIKE '%%ponies';"
"UPDATE i_love_ponies SET special_thing = 'Django' WHERE special_thing LIKE '%django';",
# Run delete queries to test for parameter substitution failure
# reported in #23426
"DELETE FROM i_love_ponies WHERE special_thing LIKE '%Django%';"
"DELETE FROM i_love_ponies WHERE special_thing LIKE '%%Ponies%%';"
"DROP TABLE i_love_ponies",
state_operations=[migrations.CreateModel("SomethingElse", [("id", models.AutoField(primary_key=True))])],
)
self.assertEqual(operation.describe(), "Raw SQL operation")
# Test the state alteration
new_state = project_state.clone()
operation.state_forwards("test_runsql", new_state)
self.assertEqual(len(new_state.models["test_runsql", "somethingelse"].fields), 1)
# Make sure there's no table
self.assertTableNotExists("i_love_ponies")
# Test SQL collection
with connection.schema_editor(collect_sql=True) as editor:
operation.database_forwards("test_runsql", editor, project_state, new_state)
self.assertIn("LIKE '%%ponies';", "\n".join(editor.collected_sql))
operation.database_backwards("test_runsql", editor, project_state, new_state)
self.assertIn("LIKE '%%Ponies%%';", "\n".join(editor.collected_sql))
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_runsql", editor, project_state, new_state)
self.assertTableExists("i_love_ponies")
# Make sure all the SQL was processed
with connection.cursor() as cursor:
cursor.execute("SELECT COUNT(*) FROM i_love_ponies")
self.assertEqual(cursor.fetchall()[0][0], 2)
cursor.execute("SELECT COUNT(*) FROM i_love_ponies WHERE special_thing = 'Django'")
self.assertEqual(cursor.fetchall()[0][0], 1)
cursor.execute("SELECT COUNT(*) FROM i_love_ponies WHERE special_thing = 'Ponies'")
self.assertEqual(cursor.fetchall()[0][0], 1)
# And test reversal
self.assertTrue(operation.reversible)
with connection.schema_editor() as editor:
operation.database_backwards("test_runsql", editor, new_state, project_state)
self.assertTableNotExists("i_love_ponies")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "RunSQL")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["reverse_sql", "sql", "state_operations"])
# And elidable reduction
self.assertIs(False, operation.reduce(operation, []))
elidable_operation = migrations.RunSQL('SELECT 1 FROM void;', elidable=True)
self.assertEqual(elidable_operation.reduce(operation, []), [operation])
def test_run_sql_params(self):
"""
#23426 - RunSQL should accept parameters.
"""
project_state = self.set_up_test_model("test_runsql")
# Create the operation
operation = migrations.RunSQL(
["CREATE TABLE i_love_ponies (id int, special_thing varchar(15));"],
["DROP TABLE i_love_ponies"],
)
param_operation = migrations.RunSQL(
# forwards
(
"INSERT INTO i_love_ponies (id, special_thing) VALUES (1, 'Django');",
["INSERT INTO i_love_ponies (id, special_thing) VALUES (2, %s);", ['Ponies']],
("INSERT INTO i_love_ponies (id, special_thing) VALUES (%s, %s);", (3, 'Python',)),
),
# backwards
[
"DELETE FROM i_love_ponies WHERE special_thing = 'Django';",
["DELETE FROM i_love_ponies WHERE special_thing = 'Ponies';", None],
("DELETE FROM i_love_ponies WHERE id = %s OR special_thing = %s;", [3, 'Python']),
]
)
# Make sure there's no table
self.assertTableNotExists("i_love_ponies")
new_state = project_state.clone()
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_runsql", editor, project_state, new_state)
# Test parameter passing
with connection.schema_editor() as editor:
param_operation.database_forwards("test_runsql", editor, project_state, new_state)
# Make sure all the SQL was processed
with connection.cursor() as cursor:
cursor.execute("SELECT COUNT(*) FROM i_love_ponies")
self.assertEqual(cursor.fetchall()[0][0], 3)
with connection.schema_editor() as editor:
param_operation.database_backwards("test_runsql", editor, new_state, project_state)
with connection.cursor() as cursor:
cursor.execute("SELECT COUNT(*) FROM i_love_ponies")
self.assertEqual(cursor.fetchall()[0][0], 0)
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_runsql", editor, new_state, project_state)
self.assertTableNotExists("i_love_ponies")
def test_run_sql_params_invalid(self):
"""
#23426 - RunSQL should fail when a list of statements with an incorrect
number of tuples is given.
"""
project_state = self.set_up_test_model("test_runsql")
new_state = project_state.clone()
operation = migrations.RunSQL(
# forwards
[
["INSERT INTO foo (bar) VALUES ('buz');"]
],
# backwards
(
("DELETE FROM foo WHERE bar = 'buz';", 'invalid', 'parameter count'),
),
)
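        # Each statement must be a plain string or a (sql, params) 2-tuple; the
        # 1-element list and the 3-tuple above should both be rejected.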
with connection.schema_editor() as editor:
with self.assertRaisesMessage(ValueError, "Expected a 2-tuple but got 1"):
operation.database_forwards("test_runsql", editor, project_state, new_state)
with connection.schema_editor() as editor:
with self.assertRaisesMessage(ValueError, "Expected a 2-tuple but got 3"):
operation.database_backwards("test_runsql", editor, new_state, project_state)
def test_run_sql_noop(self):
"""
#24098 - Tests no-op RunSQL operations.
"""
operation = migrations.RunSQL(migrations.RunSQL.noop, migrations.RunSQL.noop)
with connection.schema_editor() as editor:
operation.database_forwards("test_runsql", editor, None, None)
operation.database_backwards("test_runsql", editor, None, None)
def test_run_python(self):
"""
Tests the RunPython operation
"""
project_state = self.set_up_test_model("test_runpython", mti_model=True)
# Create the operation
def inner_method(models, schema_editor):
Pony = models.get_model("test_runpython", "Pony")
Pony.objects.create(pink=1, weight=3.55)
Pony.objects.create(weight=5)
def inner_method_reverse(models, schema_editor):
Pony = models.get_model("test_runpython", "Pony")
Pony.objects.filter(pink=1, weight=3.55).delete()
Pony.objects.filter(weight=5).delete()
operation = migrations.RunPython(inner_method, reverse_code=inner_method_reverse)
self.assertEqual(operation.describe(), "Raw Python operation")
# Test the state alteration does nothing
new_state = project_state.clone()
operation.state_forwards("test_runpython", new_state)
self.assertEqual(new_state, project_state)
# Test the database alteration
self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 0)
with connection.schema_editor() as editor:
operation.database_forwards("test_runpython", editor, project_state, new_state)
self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 2)
# Now test reversal
self.assertTrue(operation.reversible)
with connection.schema_editor() as editor:
operation.database_backwards("test_runpython", editor, project_state, new_state)
self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 0)
# Now test we can't use a string
with self.assertRaisesMessage(ValueError, 'RunPython must be supplied with a callable'):
migrations.RunPython("print 'ahahaha'")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "RunPython")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["code", "reverse_code"])
# Also test reversal fails, with an operation identical to above but without reverse_code set
no_reverse_operation = migrations.RunPython(inner_method)
self.assertFalse(no_reverse_operation.reversible)
with connection.schema_editor() as editor:
no_reverse_operation.database_forwards("test_runpython", editor, project_state, new_state)
with self.assertRaises(NotImplementedError):
no_reverse_operation.database_backwards("test_runpython", editor, new_state, project_state)
self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 2)
def create_ponies(models, schema_editor):
Pony = models.get_model("test_runpython", "Pony")
pony1 = Pony.objects.create(pink=1, weight=3.55)
self.assertIsNot(pony1.pk, None)
pony2 = Pony.objects.create(weight=5)
self.assertIsNot(pony2.pk, None)
self.assertNotEqual(pony1.pk, pony2.pk)
operation = migrations.RunPython(create_ponies)
with connection.schema_editor() as editor:
operation.database_forwards("test_runpython", editor, project_state, new_state)
self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 4)
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "RunPython")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["code"])
def create_shetlandponies(models, schema_editor):
ShetlandPony = models.get_model("test_runpython", "ShetlandPony")
pony1 = ShetlandPony.objects.create(weight=4.0)
self.assertIsNot(pony1.pk, None)
pony2 = ShetlandPony.objects.create(weight=5.0)
self.assertIsNot(pony2.pk, None)
self.assertNotEqual(pony1.pk, pony2.pk)
operation = migrations.RunPython(create_shetlandponies)
with connection.schema_editor() as editor:
operation.database_forwards("test_runpython", editor, project_state, new_state)
self.assertEqual(project_state.apps.get_model("test_runpython", "Pony").objects.count(), 6)
self.assertEqual(project_state.apps.get_model("test_runpython", "ShetlandPony").objects.count(), 2)
# And elidable reduction
self.assertIs(False, operation.reduce(operation, []))
elidable_operation = migrations.RunPython(inner_method, elidable=True)
self.assertEqual(elidable_operation.reduce(operation, []), [operation])
def test_run_python_atomic(self):
"""
Tests the RunPython operation correctly handles the "atomic" keyword
"""
project_state = self.set_up_test_model("test_runpythonatomic", mti_model=True)
def inner_method(models, schema_editor):
Pony = models.get_model("test_runpythonatomic", "Pony")
Pony.objects.create(pink=1, weight=3.55)
raise ValueError("Adrian hates ponies.")
# Verify atomicity when applying.
atomic_migration = Migration("test", "test_runpythonatomic")
atomic_migration.operations = [migrations.RunPython(inner_method, reverse_code=inner_method)]
non_atomic_migration = Migration("test", "test_runpythonatomic")
non_atomic_migration.operations = [migrations.RunPython(inner_method, reverse_code=inner_method, atomic=False)]
# If we're a fully-transactional database, both versions should rollback
if connection.features.can_rollback_ddl:
self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
atomic_migration.apply(project_state, editor)
self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
non_atomic_migration.apply(project_state, editor)
self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0)
# Otherwise, the non-atomic operation should leave a row there
else:
self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
atomic_migration.apply(project_state, editor)
self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
non_atomic_migration.apply(project_state, editor)
self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 1)
# Reset object count to zero and verify atomicity when unapplying.
project_state.apps.get_model("test_runpythonatomic", "Pony").objects.all().delete()
# On a fully-transactional database, both versions rollback.
if connection.features.can_rollback_ddl:
self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
atomic_migration.unapply(project_state, editor)
self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
non_atomic_migration.unapply(project_state, editor)
self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0)
# Otherwise, the non-atomic operation leaves a row there.
else:
self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
atomic_migration.unapply(project_state, editor)
self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 0)
with self.assertRaises(ValueError):
with connection.schema_editor() as editor:
non_atomic_migration.unapply(project_state, editor)
self.assertEqual(project_state.apps.get_model("test_runpythonatomic", "Pony").objects.count(), 1)
# Verify deconstruction.
definition = non_atomic_migration.operations[0].deconstruct()
self.assertEqual(definition[0], "RunPython")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["atomic", "code", "reverse_code"])
def test_run_python_related_assignment(self):
"""
#24282 - Model changes to a FK reverse side update the model
on the FK side as well.
"""
def inner_method(models, schema_editor):
Author = models.get_model("test_authors", "Author")
Book = models.get_model("test_books", "Book")
author = Author.objects.create(name="Hemingway")
Book.objects.create(title="Old Man and The Sea", author=author)
create_author = migrations.CreateModel(
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=100)),
],
options={},
)
create_book = migrations.CreateModel(
"Book",
[
("id", models.AutoField(primary_key=True)),
("title", models.CharField(max_length=100)),
("author", models.ForeignKey("test_authors.Author", models.CASCADE))
],
options={},
)
add_hometown = migrations.AddField(
"Author",
"hometown",
models.CharField(max_length=100),
)
create_old_man = migrations.RunPython(inner_method, inner_method)
project_state = ProjectState()
new_state = project_state.clone()
with connection.schema_editor() as editor:
create_author.state_forwards("test_authors", new_state)
create_author.database_forwards("test_authors", editor, project_state, new_state)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
create_book.state_forwards("test_books", new_state)
create_book.database_forwards("test_books", editor, project_state, new_state)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
add_hometown.state_forwards("test_authors", new_state)
add_hometown.database_forwards("test_authors", editor, project_state, new_state)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
create_old_man.state_forwards("test_books", new_state)
create_old_man.database_forwards("test_books", editor, project_state, new_state)
def test_model_with_bigautofield(self):
"""
A model with BigAutoField can be created.
"""
def create_data(models, schema_editor):
Author = models.get_model("test_author", "Author")
Book = models.get_model("test_book", "Book")
author1 = Author.objects.create(name="Hemingway")
Book.objects.create(title="Old Man and The Sea", author=author1)
Book.objects.create(id=2 ** 33, title="A farewell to arms", author=author1)
author2 = Author.objects.create(id=2 ** 33, name="Remarque")
Book.objects.create(title="All quiet on the western front", author=author2)
Book.objects.create(title="Arc de Triomphe", author=author2)
create_author = migrations.CreateModel(
"Author",
[
("id", models.BigAutoField(primary_key=True)),
("name", models.CharField(max_length=100)),
],
options={},
)
create_book = migrations.CreateModel(
"Book",
[
("id", models.BigAutoField(primary_key=True)),
("title", models.CharField(max_length=100)),
("author", models.ForeignKey(to="test_author.Author", on_delete=models.CASCADE))
],
options={},
)
fill_data = migrations.RunPython(create_data)
project_state = ProjectState()
new_state = project_state.clone()
with connection.schema_editor() as editor:
create_author.state_forwards("test_author", new_state)
create_author.database_forwards("test_author", editor, project_state, new_state)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
create_book.state_forwards("test_book", new_state)
create_book.database_forwards("test_book", editor, project_state, new_state)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
fill_data.state_forwards("fill_data", new_state)
fill_data.database_forwards("fill_data", editor, project_state, new_state)
def test_autofield_foreignfield_growth(self):
"""
A field may be migrated from AutoField to BigAutoField.
"""
def create_initial_data(models, schema_editor):
Article = models.get_model("test_article", "Article")
Blog = models.get_model("test_blog", "Blog")
blog = Blog.objects.create(name="web development done right")
Article.objects.create(name="Frameworks", blog=blog)
Article.objects.create(name="Programming Languages", blog=blog)
def create_big_data(models, schema_editor):
Article = models.get_model("test_article", "Article")
Blog = models.get_model("test_blog", "Blog")
blog2 = Blog.objects.create(name="Frameworks", id=2 ** 33)
Article.objects.create(name="Django", blog=blog2)
Article.objects.create(id=2 ** 33, name="Django2", blog=blog2)
create_blog = migrations.CreateModel(
"Blog",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=100)),
],
options={},
)
create_article = migrations.CreateModel(
"Article",
[
("id", models.AutoField(primary_key=True)),
("blog", models.ForeignKey(to="test_blog.Blog", on_delete=models.CASCADE)),
("name", models.CharField(max_length=100)),
("data", models.TextField(default="")),
],
options={},
)
fill_initial_data = migrations.RunPython(create_initial_data, create_initial_data)
fill_big_data = migrations.RunPython(create_big_data, create_big_data)
grow_article_id = migrations.AlterField("Article", "id", models.BigAutoField(primary_key=True))
grow_blog_id = migrations.AlterField("Blog", "id", models.BigAutoField(primary_key=True))
project_state = ProjectState()
new_state = project_state.clone()
with connection.schema_editor() as editor:
create_blog.state_forwards("test_blog", new_state)
create_blog.database_forwards("test_blog", editor, project_state, new_state)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
create_article.state_forwards("test_article", new_state)
create_article.database_forwards("test_article", editor, project_state, new_state)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
fill_initial_data.state_forwards("fill_initial_data", new_state)
fill_initial_data.database_forwards("fill_initial_data", editor, project_state, new_state)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
grow_article_id.state_forwards("test_article", new_state)
grow_article_id.database_forwards("test_article", editor, project_state, new_state)
state = new_state.clone()
article = state.apps.get_model("test_article.Article")
self.assertIsInstance(article._meta.pk, models.BigAutoField)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
grow_blog_id.state_forwards("test_blog", new_state)
grow_blog_id.database_forwards("test_blog", editor, project_state, new_state)
state = new_state.clone()
blog = state.apps.get_model("test_blog.Blog")
self.assertIsInstance(blog._meta.pk, models.BigAutoField)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
fill_big_data.state_forwards("fill_big_data", new_state)
fill_big_data.database_forwards("fill_big_data", editor, project_state, new_state)
def test_run_python_noop(self):
"""
#24098 - Tests no-op RunPython operations.
"""
project_state = ProjectState()
new_state = project_state.clone()
operation = migrations.RunPython(migrations.RunPython.noop, migrations.RunPython.noop)
with connection.schema_editor() as editor:
operation.database_forwards("test_runpython", editor, project_state, new_state)
operation.database_backwards("test_runpython", editor, new_state, project_state)
def test_separate_database_and_state(self):
"""
Tests the SeparateDatabaseAndState operation.
"""
project_state = self.set_up_test_model("test_separatedatabaseandstate")
# Create the operation
database_operation = migrations.RunSQL(
"CREATE TABLE i_love_ponies (id int, special_thing int);",
"DROP TABLE i_love_ponies;"
)
state_operation = migrations.CreateModel("SomethingElse", [("id", models.AutoField(primary_key=True))])
operation = migrations.SeparateDatabaseAndState(
state_operations=[state_operation],
database_operations=[database_operation]
)
self.assertEqual(operation.describe(), "Custom state/database change combination")
# Test the state alteration
new_state = project_state.clone()
operation.state_forwards("test_separatedatabaseandstate", new_state)
self.assertEqual(len(new_state.models["test_separatedatabaseandstate", "somethingelse"].fields), 1)
# Make sure there's no table
self.assertTableNotExists("i_love_ponies")
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards("test_separatedatabaseandstate", editor, project_state, new_state)
self.assertTableExists("i_love_ponies")
# And test reversal
self.assertTrue(operation.reversible)
with connection.schema_editor() as editor:
operation.database_backwards("test_separatedatabaseandstate", editor, new_state, project_state)
self.assertTableNotExists("i_love_ponies")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "SeparateDatabaseAndState")
self.assertEqual(definition[1], [])
self.assertEqual(sorted(definition[2]), ["database_operations", "state_operations"])
def test_separate_database_and_state2(self):
"""
A complex SeparateDatabaseAndState operation: Multiple operations both
for state and database. Verify the state dependencies within each list
and that state ops don't affect the database.
"""
app_label = "test_separatedatabaseandstate2"
project_state = self.set_up_test_model(app_label)
# Create the operation
database_operations = [
migrations.CreateModel(
"ILovePonies",
[("id", models.AutoField(primary_key=True))],
options={"db_table": "iloveponies"},
),
migrations.CreateModel(
"ILoveMorePonies",
# We use IntegerField and not AutoField because
# the model is going to be deleted immediately
# and with an AutoField this fails on Oracle
[("id", models.IntegerField(primary_key=True))],
options={"db_table": "ilovemoreponies"},
),
migrations.DeleteModel("ILoveMorePonies"),
migrations.CreateModel(
"ILoveEvenMorePonies",
[("id", models.AutoField(primary_key=True))],
options={"db_table": "iloveevenmoreponies"},
),
]
state_operations = [
migrations.CreateModel(
"SomethingElse",
[("id", models.AutoField(primary_key=True))],
options={"db_table": "somethingelse"},
),
migrations.DeleteModel("SomethingElse"),
migrations.CreateModel(
"SomethingCompletelyDifferent",
[("id", models.AutoField(primary_key=True))],
options={"db_table": "somethingcompletelydifferent"},
),
]
operation = migrations.SeparateDatabaseAndState(
state_operations=state_operations,
database_operations=database_operations,
)
# Test the state alteration
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
def assertModelsAndTables(after_db):
# Tables and models exist, or don't, as they should:
self.assertNotIn((app_label, "somethingelse"), new_state.models)
self.assertEqual(len(new_state.models[app_label, "somethingcompletelydifferent"].fields), 1)
self.assertNotIn((app_label, "iloveponiesonies"), new_state.models)
self.assertNotIn((app_label, "ilovemoreponies"), new_state.models)
self.assertNotIn((app_label, "iloveevenmoreponies"), new_state.models)
self.assertTableNotExists("somethingelse")
self.assertTableNotExists("somethingcompletelydifferent")
self.assertTableNotExists("ilovemoreponies")
if after_db:
self.assertTableExists("iloveponies")
self.assertTableExists("iloveevenmoreponies")
else:
self.assertTableNotExists("iloveponies")
self.assertTableNotExists("iloveevenmoreponies")
assertModelsAndTables(after_db=False)
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
assertModelsAndTables(after_db=True)
# And test reversal
self.assertTrue(operation.reversible)
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
assertModelsAndTables(after_db=False)
class SwappableOperationTests(OperationTestBase):
"""
Key operations ignore swappable models
(we don't want to replicate all of them here, as the functionality
is in a common base class anyway)
"""
available_apps = ['migrations']
@override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel")
def test_create_ignore_swapped(self):
"""
The CreateTable operation ignores swapped models.
"""
operation = migrations.CreateModel(
"Pony",
[
("id", models.AutoField(primary_key=True)),
("pink", models.IntegerField(default=1)),
],
options={
"swappable": "TEST_SWAP_MODEL",
},
)
# Test the state alteration (it should still be there!)
project_state = ProjectState()
new_state = project_state.clone()
operation.state_forwards("test_crigsw", new_state)
self.assertEqual(new_state.models["test_crigsw", "pony"].name, "Pony")
self.assertEqual(len(new_state.models["test_crigsw", "pony"].fields), 2)
# Test the database alteration
self.assertTableNotExists("test_crigsw_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_crigsw", editor, project_state, new_state)
self.assertTableNotExists("test_crigsw_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_crigsw", editor, new_state, project_state)
self.assertTableNotExists("test_crigsw_pony")
@override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel")
def test_delete_ignore_swapped(self):
"""
Tests the DeleteModel operation ignores swapped models.
"""
operation = migrations.DeleteModel("Pony")
project_state, new_state = self.make_test_state("test_dligsw", operation)
# Test the database alteration
self.assertTableNotExists("test_dligsw_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_dligsw", editor, project_state, new_state)
self.assertTableNotExists("test_dligsw_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_dligsw", editor, new_state, project_state)
self.assertTableNotExists("test_dligsw_pony")
@override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel")
def test_add_field_ignore_swapped(self):
"""
Tests the AddField operation.
"""
# Test the state alteration
operation = migrations.AddField(
"Pony",
"height",
models.FloatField(null=True, default=5),
)
project_state, new_state = self.make_test_state("test_adfligsw", operation)
# Test the database alteration
self.assertTableNotExists("test_adfligsw_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_adfligsw", editor, project_state, new_state)
self.assertTableNotExists("test_adfligsw_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_adfligsw", editor, new_state, project_state)
self.assertTableNotExists("test_adfligsw_pony")
@override_settings(TEST_SWAP_MODEL='migrations.SomeFakeModel')
def test_indexes_ignore_swapped(self):
"""
Add/RemoveIndex operations ignore swapped models.
"""
operation = migrations.AddIndex('Pony', models.Index(fields=['pink'], name='my_name_idx'))
project_state, new_state = self.make_test_state('test_adinigsw', operation)
with connection.schema_editor() as editor:
# No database queries should be run for swapped models
operation.database_forwards('test_adinigsw', editor, project_state, new_state)
operation.database_backwards('test_adinigsw', editor, new_state, project_state)
operation = migrations.RemoveIndex('Pony', models.Index(fields=['pink'], name='my_name_idx'))
project_state, new_state = self.make_test_state("test_rminigsw", operation)
with connection.schema_editor() as editor:
operation.database_forwards('test_rminigsw', editor, project_state, new_state)
operation.database_backwards('test_rminigsw', editor, new_state, project_state)
class TestCreateModel(SimpleTestCase):
def test_references_model_mixin(self):
CreateModel('name', [], bases=(Mixin, models.Model)).references_model('other_model')
class FieldOperationTests(SimpleTestCase):
def test_references_model(self):
operation = FieldOperation('MoDel', 'field', models.ForeignKey('Other', models.CASCADE))
# Model name match.
self.assertIs(operation.references_model('mOdEl'), True)
# Referenced field.
self.assertIs(operation.references_model('oTher'), True)
# Doesn't reference.
self.assertIs(operation.references_model('Whatever'), False)
def test_references_field_by_name(self):
operation = FieldOperation('MoDel', 'field', models.BooleanField(default=False))
self.assertIs(operation.references_field('model', 'field'), True)
def test_references_field_by_remote_field_model(self):
operation = FieldOperation('Model', 'field', models.ForeignKey('Other', models.CASCADE))
self.assertIs(operation.references_field('Other', 'whatever'), True)
self.assertIs(operation.references_field('Missing', 'whatever'), False)
def test_references_field_by_from_fields(self):
operation = FieldOperation(
'Model', 'field', models.fields.related.ForeignObject('Other', models.CASCADE, ['from'], ['to'])
)
self.assertIs(operation.references_field('Model', 'from'), True)
self.assertIs(operation.references_field('Model', 'to'), False)
self.assertIs(operation.references_field('Other', 'from'), False)
        self.assertIs(operation.references_field('Other', 'to'), True)
def test_references_field_by_to_fields(self):
operation = FieldOperation('Model', 'field', models.ForeignKey('Other', models.CASCADE, to_field='field'))
self.assertIs(operation.references_field('Other', 'field'), True)
self.assertIs(operation.references_field('Other', 'whatever'), False)
self.assertIs(operation.references_field('Missing', 'whatever'), False)
def test_references_field_by_through(self):
operation = FieldOperation('Model', 'field', models.ManyToManyField('Other', through='Through'))
self.assertIs(operation.references_field('Other', 'whatever'), True)
self.assertIs(operation.references_field('Through', 'whatever'), True)
self.assertIs(operation.references_field('Missing', 'whatever'), False)
def test_reference_field_by_through_fields(self):
operation = FieldOperation(
'Model', 'field', models.ManyToManyField('Other', through='Through', through_fields=('first', 'second'))
)
self.assertIs(operation.references_field('Other', 'whatever'), True)
self.assertIs(operation.references_field('Through', 'whatever'), False)
self.assertIs(operation.references_field('Through', 'first'), True)
self.assertIs(operation.references_field('Through', 'second'), True)
|
14324e9241db462dd6722a5fb89d6d23f4586c9c59af44a93b70b81b3523ac64 | from django.db import connection, migrations, models
from django.db.migrations.state import ProjectState
from django.test import override_settings
from .test_operations import OperationTestBase
class AgnosticRouter:
"""
A router that doesn't have an opinion regarding migrating.
"""
def allow_migrate(self, db, app_label, **hints):
return None
class MigrateNothingRouter:
"""
A router that doesn't allow migrating.
"""
def allow_migrate(self, db, app_label, **hints):
return False
class MigrateEverythingRouter:
"""
A router that always allows migrating.
"""
def allow_migrate(self, db, app_label, **hints):
return True
class MigrateWhenFooRouter:
"""
A router that allows migrating depending on a hint.
"""
def allow_migrate(self, db, app_label, **hints):
return hints.get('foo', False)
class MultiDBOperationTests(OperationTestBase):
databases = {'default', 'other'}
def _test_create_model(self, app_label, should_run):
"""
CreateModel honors multi-db settings.
"""
operation = migrations.CreateModel(
"Pony",
[("id", models.AutoField(primary_key=True))],
)
# Test the state alteration
project_state = ProjectState()
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
# Test the database alteration
self.assertTableNotExists("%s_pony" % app_label)
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
if should_run:
self.assertTableExists("%s_pony" % app_label)
else:
self.assertTableNotExists("%s_pony" % app_label)
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
self.assertTableNotExists("%s_pony" % app_label)
@override_settings(DATABASE_ROUTERS=[AgnosticRouter()])
def test_create_model(self):
"""
Test when router doesn't have an opinion (i.e. CreateModel should run).
"""
self._test_create_model("test_mltdb_crmo", should_run=True)
@override_settings(DATABASE_ROUTERS=[MigrateNothingRouter()])
def test_create_model2(self):
"""
Test when router returns False (i.e. CreateModel shouldn't run).
"""
self._test_create_model("test_mltdb_crmo2", should_run=False)
@override_settings(DATABASE_ROUTERS=[MigrateEverythingRouter()])
def test_create_model3(self):
"""
Test when router returns True (i.e. CreateModel should run).
"""
self._test_create_model("test_mltdb_crmo3", should_run=True)
def test_create_model4(self):
"""
Test multiple routers.
"""
with override_settings(DATABASE_ROUTERS=[AgnosticRouter(), AgnosticRouter()]):
self._test_create_model("test_mltdb_crmo4", should_run=True)
with override_settings(DATABASE_ROUTERS=[MigrateNothingRouter(), MigrateEverythingRouter()]):
self._test_create_model("test_mltdb_crmo4", should_run=False)
with override_settings(DATABASE_ROUTERS=[MigrateEverythingRouter(), MigrateNothingRouter()]):
self._test_create_model("test_mltdb_crmo4", should_run=True)
def _test_run_sql(self, app_label, should_run, hints=None):
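        # Create the test model under a router that allows everything so the table
        # exists regardless of the router configured for the actual test.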
with override_settings(DATABASE_ROUTERS=[MigrateEverythingRouter()]):
project_state = self.set_up_test_model(app_label)
sql = """
INSERT INTO {0}_pony (pink, weight) VALUES (1, 3.55);
INSERT INTO {0}_pony (pink, weight) VALUES (3, 5.0);
""".format(app_label)
operation = migrations.RunSQL(sql, hints=hints or {})
# Test the state alteration does nothing
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
self.assertEqual(new_state, project_state)
# Test the database alteration
self.assertEqual(project_state.apps.get_model(app_label, "Pony").objects.count(), 0)
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
Pony = project_state.apps.get_model(app_label, "Pony")
if should_run:
self.assertEqual(Pony.objects.count(), 2)
else:
self.assertEqual(Pony.objects.count(), 0)
@override_settings(DATABASE_ROUTERS=[MigrateNothingRouter()])
def test_run_sql(self):
self._test_run_sql("test_mltdb_runsql", should_run=False)
@override_settings(DATABASE_ROUTERS=[MigrateWhenFooRouter()])
def test_run_sql2(self):
self._test_run_sql("test_mltdb_runsql2", should_run=False)
self._test_run_sql("test_mltdb_runsql2", should_run=True, hints={'foo': True})
def _test_run_python(self, app_label, should_run, hints=None):
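        # As in _test_run_sql(), create the table under a permissive router first.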
with override_settings(DATABASE_ROUTERS=[MigrateEverythingRouter()]):
project_state = self.set_up_test_model(app_label)
# Create the operation
def inner_method(models, schema_editor):
Pony = models.get_model(app_label, "Pony")
Pony.objects.create(pink=1, weight=3.55)
Pony.objects.create(weight=5)
operation = migrations.RunPython(inner_method, hints=hints or {})
# Test the state alteration does nothing
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
self.assertEqual(new_state, project_state)
# Test the database alteration
self.assertEqual(project_state.apps.get_model(app_label, "Pony").objects.count(), 0)
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
Pony = project_state.apps.get_model(app_label, "Pony")
if should_run:
self.assertEqual(Pony.objects.count(), 2)
else:
self.assertEqual(Pony.objects.count(), 0)
@override_settings(DATABASE_ROUTERS=[MigrateNothingRouter()])
def test_run_python(self):
self._test_run_python("test_mltdb_runpython", should_run=False)
@override_settings(DATABASE_ROUTERS=[MigrateWhenFooRouter()])
def test_run_python2(self):
self._test_run_python("test_mltdb_runpython2", should_run=False)
self._test_run_python("test_mltdb_runpython2", should_run=True, hints={'foo': True})
|
3141476001d02f093887c7af13b3a38e70139d16aa3a465aa7669bfd94ba0864 | import os
import sys
import tempfile
import unittest
import warnings
from io import StringIO
from unittest import mock
from django.apps import apps
from django.contrib.sites.models import Site
from django.core import management
from django.core.files.temp import NamedTemporaryFile
from django.core.management import CommandError
from django.core.management.commands.dumpdata import ProxyModelWarning
from django.core.serializers.base import ProgressBar
from django.db import IntegrityError, connection
from django.test import TestCase, TransactionTestCase, skipUnlessDBFeature
from .models import (
Article, Category, NaturalKeyThing, PrimaryKeyUUIDModel, ProxySpy, Spy,
Tag, Visa,
)
class TestCaseFixtureLoadingTests(TestCase):
fixtures = ['fixture1.json', 'fixture2.json']
def testClassFixtures(self):
"Test case has installed 3 fixture objects"
self.assertEqual(Article.objects.count(), 3)
self.assertQuerysetEqual(Article.objects.all(), [
'<Article: Django conquers world!>',
'<Article: Copyright is fine the way it is>',
'<Article: Poker has no place on ESPN>',
])
class SubclassTestCaseFixtureLoadingTests(TestCaseFixtureLoadingTests):
"""
Make sure that subclasses can remove fixtures from parent class (#21089).
"""
fixtures = []
def testClassFixtures(self):
"There were no fixture objects installed"
self.assertEqual(Article.objects.count(), 0)
class DumpDataAssertMixin:
def _dumpdata_assert(self, args, output, format='json', filename=None,
natural_foreign_keys=False, natural_primary_keys=False,
use_base_manager=False, exclude_list=[], primary_keys=''):
new_io = StringIO()
filename = filename and os.path.join(tempfile.gettempdir(), filename)
management.call_command(
'dumpdata',
*args,
format=format,
stdout=new_io,
stderr=new_io,
output=filename,
use_natural_foreign_keys=natural_foreign_keys,
use_natural_primary_keys=natural_primary_keys,
use_base_manager=use_base_manager,
exclude=exclude_list,
primary_keys=primary_keys,
)
if filename:
with open(filename, "r") as f:
command_output = f.read()
os.remove(filename)
else:
command_output = new_io.getvalue().strip()
if format == "json":
self.assertJSONEqual(command_output, output)
elif format == "xml":
self.assertXMLEqual(command_output, output)
else:
self.assertEqual(command_output, output)
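# _dumpdata_assert is the workhorse for the dumpdata tests below: it invokes
# the management command with the given options, captures the output either
# from stdout (via StringIO) or from a temporary file when filename is given,
# and compares it using a format-aware assertion (assertJSONEqual for json,
# assertXMLEqual for xml, plain assertEqual otherwise).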
class FixtureLoadingTests(DumpDataAssertMixin, TestCase):
def test_loading_and_dumping(self):
apps.clear_cache()
Site.objects.all().delete()
# Load fixture 1. Single JSON file, with two objects.
management.call_command('loaddata', 'fixture1.json', verbosity=0)
self.assertQuerysetEqual(Article.objects.all(), [
'<Article: Time to reform copyright>',
'<Article: Poker has no place on ESPN>',
])
# Dump the current contents of the database as a JSON fixture
self._dumpdata_assert(
['fixtures'],
'[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": '
'"News Stories"}}, {"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place '
'on ESPN", "pub_date": "2006-06-16T12:00:00"}}, {"pk": 3, "model": "fixtures.article", "fields": '
'{"headline": "Time to reform copyright", "pub_date": "2006-06-16T13:00:00"}}]'
)
# Try just dumping the contents of fixtures.Category
self._dumpdata_assert(
['fixtures.Category'],
'[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", '
'"title": "News Stories"}}]'
)
# ...and just fixtures.Article
self._dumpdata_assert(
['fixtures.Article'],
'[{"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place on ESPN", '
'"pub_date": "2006-06-16T12:00:00"}}, {"pk": 3, "model": "fixtures.article", "fields": {"headline": '
'"Time to reform copyright", "pub_date": "2006-06-16T13:00:00"}}]'
)
# ...and both
self._dumpdata_assert(
['fixtures.Category', 'fixtures.Article'],
'[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", '
'"title": "News Stories"}}, {"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has '
'no place on ESPN", "pub_date": "2006-06-16T12:00:00"}}, {"pk": 3, "model": "fixtures.article", '
'"fields": {"headline": "Time to reform copyright", "pub_date": "2006-06-16T13:00:00"}}]'
)
# Specify a specific model twice
self._dumpdata_assert(
['fixtures.Article', 'fixtures.Article'],
(
'[{"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place on ESPN", '
'"pub_date": "2006-06-16T12:00:00"}}, {"pk": 3, "model": "fixtures.article", "fields": {"headline": '
'"Time to reform copyright", "pub_date": "2006-06-16T13:00:00"}}]'
)
)
# Specify a dump that specifies Article both explicitly and implicitly
self._dumpdata_assert(
['fixtures.Article', 'fixtures'],
'[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": '
'"News Stories"}}, {"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place '
'on ESPN", "pub_date": "2006-06-16T12:00:00"}}, {"pk": 3, "model": "fixtures.article", "fields": '
'{"headline": "Time to reform copyright", "pub_date": "2006-06-16T13:00:00"}}]'
)
# Specify a dump that specifies Article both explicitly and implicitly,
# but lists the app first (#22025).
self._dumpdata_assert(
['fixtures', 'fixtures.Article'],
'[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": '
'"News Stories"}}, {"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place '
'on ESPN", "pub_date": "2006-06-16T12:00:00"}}, {"pk": 3, "model": "fixtures.article", "fields": '
'{"headline": "Time to reform copyright", "pub_date": "2006-06-16T13:00:00"}}]'
)
# Same again, but specify in the reverse order
self._dumpdata_assert(
['fixtures'],
'[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": '
'"News Stories"}}, {"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no '
'place on ESPN", "pub_date": "2006-06-16T12:00:00"}}, {"pk": 3, "model": "fixtures.article", "fields":'
' {"headline": "Time to reform copyright", "pub_date": "2006-06-16T13:00:00"}}]'
)
# Specify one model from one application, and an entire other application.
self._dumpdata_assert(
['fixtures.Category', 'sites'],
'[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": '
'"News Stories"}}, {"pk": 1, "model": "sites.site", "fields": {"domain": "example.com", "name": '
'"example.com"}}]'
)
# Load fixture 2. JSON file imported by default. Overwrites some existing objects
management.call_command('loaddata', 'fixture2.json', verbosity=0)
self.assertQuerysetEqual(Article.objects.all(), [
'<Article: Django conquers world!>',
'<Article: Copyright is fine the way it is>',
'<Article: Poker has no place on ESPN>',
])
# Load fixture 3, XML format.
management.call_command('loaddata', 'fixture3.xml', verbosity=0)
self.assertQuerysetEqual(Article.objects.all(), [
'<Article: XML identified as leading cause of cancer>',
'<Article: Django conquers world!>',
'<Article: Copyright is fine the way it is>',
'<Article: Poker on TV is great!>',
])
# Load fixture 6, JSON file with dynamic ContentType fields. Testing ManyToOne.
management.call_command('loaddata', 'fixture6.json', verbosity=0)
self.assertQuerysetEqual(Tag.objects.all(), [
'<Tag: <Article: Copyright is fine the way it is> tagged "copyright">',
'<Tag: <Article: Copyright is fine the way it is> tagged "law">',
], ordered=False)
# Load fixture 7, XML file with dynamic ContentType fields. Testing ManyToOne.
management.call_command('loaddata', 'fixture7.xml', verbosity=0)
self.assertQuerysetEqual(Tag.objects.all(), [
'<Tag: <Article: Copyright is fine the way it is> tagged "copyright">',
'<Tag: <Article: Copyright is fine the way it is> tagged "legal">',
'<Tag: <Article: Django conquers world!> tagged "django">',
'<Tag: <Article: Django conquers world!> tagged "world domination">',
], ordered=False)
# Load fixture 8, JSON file with dynamic Permission fields. Testing ManyToMany.
management.call_command('loaddata', 'fixture8.json', verbosity=0)
self.assertQuerysetEqual(Visa.objects.all(), [
'<Visa: Django Reinhardt Can add user, Can change user, Can delete user>',
'<Visa: Stephane Grappelli Can add user>',
'<Visa: Prince >'
], ordered=False)
# Load fixture 9, XML file with dynamic Permission fields. Testing ManyToMany.
management.call_command('loaddata', 'fixture9.xml', verbosity=0)
self.assertQuerysetEqual(Visa.objects.all(), [
'<Visa: Django Reinhardt Can add user, Can change user, Can delete user>',
'<Visa: Stephane Grappelli Can add user, Can delete user>',
'<Visa: Artist formerly known as "Prince" Can change user>'
], ordered=False)
# object list is unaffected
self.assertQuerysetEqual(Article.objects.all(), [
'<Article: XML identified as leading cause of cancer>',
'<Article: Django conquers world!>',
'<Article: Copyright is fine the way it is>',
'<Article: Poker on TV is great!>',
])
# By default, you get raw keys on dumpdata
self._dumpdata_assert(
['fixtures.book'],
'[{"pk": 1, "model": "fixtures.book", "fields": {"name": "Music for all ages", "authors": [3, 1]}}]'
)
# But you can get natural keys if you ask for them and they are available
self._dumpdata_assert(
['fixtures.book'],
'[{"pk": 1, "model": "fixtures.book", "fields": {"name": "Music for all ages", "authors": [["Artist '
'formerly known as \\"Prince\\""], ["Django Reinhardt"]]}}]',
natural_foreign_keys=True
)
# You can also omit the primary keys for models that we can get later with natural keys.
self._dumpdata_assert(
['fixtures.person'],
'[{"fields": {"name": "Django Reinhardt"}, "model": "fixtures.person"}, {"fields": {"name": "Stephane '
'Grappelli"}, "model": "fixtures.person"}, {"fields": {"name": "Artist formerly known as '
'\\"Prince\\""}, "model": "fixtures.person"}]',
natural_primary_keys=True
)
# Dump the current contents of the database as a JSON fixture
self._dumpdata_assert(
['fixtures'],
'[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": '
'"News Stories"}}, {"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker on TV is '
'great!", "pub_date": "2006-06-16T11:00:00"}}, {"pk": 3, "model": "fixtures.article", "fields": '
'{"headline": "Copyright is fine the way it is", "pub_date": "2006-06-16T14:00:00"}}, {"pk": 4, '
'"model": "fixtures.article", "fields": {"headline": "Django conquers world!", "pub_date": '
'"2006-06-16T15:00:00"}}, {"pk": 5, "model": "fixtures.article", "fields": {"headline": "XML '
'identified as leading cause of cancer", "pub_date": "2006-06-16T16:00:00"}}, {"pk": 1, "model": '
'"fixtures.tag", "fields": {"tagged_type": ["fixtures", "article"], "name": "copyright", "tagged_id": '
'3}}, {"pk": 2, "model": "fixtures.tag", "fields": {"tagged_type": ["fixtures", "article"], "name": '
'"legal", "tagged_id": 3}}, {"pk": 3, "model": "fixtures.tag", "fields": {"tagged_type": ["fixtures", '
'"article"], "name": "django", "tagged_id": 4}}, {"pk": 4, "model": "fixtures.tag", "fields": '
'{"tagged_type": ["fixtures", "article"], "name": "world domination", "tagged_id": 4}}, {"pk": 1, '
'"model": "fixtures.person", "fields": {"name": "Django Reinhardt"}}, {"pk": 2, "model": '
'"fixtures.person", "fields": {"name": "Stephane Grappelli"}}, {"pk": 3, "model": "fixtures.person", '
'"fields": {"name": "Artist formerly known as \\"Prince\\""}}, {"pk": 1, "model": "fixtures.visa", '
'"fields": {"person": ["Django Reinhardt"], "permissions": [["add_user", "auth", "user"], '
'["change_user", "auth", "user"], ["delete_user", "auth", "user"]]}}, {"pk": 2, "model": '
'"fixtures.visa", "fields": {"person": ["Stephane Grappelli"], "permissions": [["add_user", "auth", '
'"user"], ["delete_user", "auth", "user"]]}}, {"pk": 3, "model": "fixtures.visa", "fields": {"person":'
' ["Artist formerly known as \\"Prince\\""], "permissions": [["change_user", "auth", "user"]]}}, '
'{"pk": 1, "model": "fixtures.book", "fields": {"name": "Music for all ages", "authors": [["Artist '
'formerly known as \\"Prince\\""], ["Django Reinhardt"]]}}]',
natural_foreign_keys=True
)
# Dump the current contents of the database as an XML fixture
self._dumpdata_assert(
['fixtures'],
'<?xml version="1.0" encoding="utf-8"?><django-objects version="1.0"><object pk="1" '
'model="fixtures.category"><field type="CharField" name="title">News Stories</field><field '
'type="TextField" name="description">Latest news stories</field></object><object pk="2" '
'model="fixtures.article"><field type="CharField" name="headline">Poker on TV is great!</field><field '
'type="DateTimeField" name="pub_date">2006-06-16T11:00:00</field></object><object pk="3" '
'model="fixtures.article"><field type="CharField" name="headline">Copyright is fine the way it '
'is</field><field type="DateTimeField" name="pub_date">2006-06-16T14:00:00</field></object><object '
'pk="4" model="fixtures.article"><field type="CharField" name="headline">Django conquers world!'
'</field><field type="DateTimeField" name="pub_date">2006-06-16T15:00:00</field></object><object '
'pk="5" model="fixtures.article"><field type="CharField" name="headline">XML identified as leading '
'cause of cancer</field><field type="DateTimeField" name="pub_date">2006-06-16T16:00:00</field>'
'</object><object pk="1" model="fixtures.tag"><field type="CharField" name="name">copyright</field>'
'<field to="contenttypes.contenttype" name="tagged_type" rel="ManyToOneRel"><natural>fixtures'
'</natural><natural>article</natural></field><field type="PositiveIntegerField" name="tagged_id">3'
'</field></object><object pk="2" model="fixtures.tag"><field type="CharField" name="name">legal'
'</field><field to="contenttypes.contenttype" name="tagged_type" rel="ManyToOneRel"><natural>'
'fixtures</natural><natural>article</natural></field><field type="PositiveIntegerField" '
'name="tagged_id">3</field></object><object pk="3" model="fixtures.tag"><field type="CharField" '
'name="name">django</field><field to="contenttypes.contenttype" name="tagged_type" '
'rel="ManyToOneRel"><natural>fixtures</natural><natural>article</natural></field><field '
'type="PositiveIntegerField" name="tagged_id">4</field></object><object pk="4" model="fixtures.tag">'
'<field type="CharField" name="name">world domination</field><field to="contenttypes.contenttype" '
'name="tagged_type" rel="ManyToOneRel"><natural>fixtures</natural><natural>article</natural></field>'
'<field type="PositiveIntegerField" name="tagged_id">4</field></object><object pk="1" '
'model="fixtures.person"><field type="CharField" name="name">Django Reinhardt</field></object>'
'<object pk="2" model="fixtures.person"><field type="CharField" name="name">Stephane Grappelli'
'</field></object><object pk="3" model="fixtures.person"><field type="CharField" name="name">'
'Artist formerly known as "Prince"</field></object><object pk="1" model="fixtures.visa"><field '
'to="fixtures.person" name="person" rel="ManyToOneRel"><natural>Django Reinhardt</natural></field>'
'<field to="auth.permission" name="permissions" rel="ManyToManyRel"><object><natural>add_user'
'</natural><natural>auth</natural><natural>user</natural></object><object><natural>change_user'
'</natural><natural>auth</natural><natural>user</natural></object><object><natural>delete_user'
'</natural><natural>auth</natural><natural>user</natural></object></field></object><object pk="2" '
'model="fixtures.visa"><field to="fixtures.person" name="person" rel="ManyToOneRel"><natural>Stephane'
' Grappelli</natural></field><field to="auth.permission" name="permissions" rel="ManyToManyRel">'
'<object><natural>add_user</natural><natural>auth</natural><natural>user</natural></object><object>'
'<natural>delete_user</natural><natural>auth</natural><natural>user</natural></object></field>'
'</object><object pk="3" model="fixtures.visa"><field to="fixtures.person" name="person" '
'rel="ManyToOneRel"><natural>Artist formerly known as "Prince"</natural></field><field '
'to="auth.permission" name="permissions" rel="ManyToManyRel"><object><natural>change_user</natural>'
'<natural>auth</natural><natural>user</natural></object></field></object><object pk="1" '
'model="fixtures.book"><field type="CharField" name="name">Music for all ages</field><field '
'to="fixtures.person" name="authors" rel="ManyToManyRel"><object><natural>Artist formerly known as '
'"Prince"</natural></object><object><natural>Django Reinhardt</natural></object></field></object>'
'</django-objects>',
format='xml', natural_foreign_keys=True
)
def test_dumpdata_with_excludes(self):
# Load fixture1 which has a site, two articles, and a category
Site.objects.all().delete()
management.call_command('loaddata', 'fixture1.json', verbosity=0)
# Excluding fixtures app should only leave sites
self._dumpdata_assert(
['sites', 'fixtures'],
'[{"pk": 1, "model": "sites.site", "fields": {"domain": "example.com", "name": "example.com"}}]',
exclude_list=['fixtures'],
)
# Excluding fixtures.Article/Book should leave fixtures.Category
self._dumpdata_assert(
['sites', 'fixtures'],
'[{"pk": 1, "model": "sites.site", "fields": {"domain": "example.com", "name": "example.com"}}, '
'{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": '
'"News Stories"}}]',
exclude_list=['fixtures.Article', 'fixtures.Book']
)
# Excluding fixtures and fixtures.Article/Book should be a no-op
self._dumpdata_assert(
['sites', 'fixtures'],
'[{"pk": 1, "model": "sites.site", "fields": {"domain": "example.com", "name": "example.com"}}, '
'{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": '
'"News Stories"}}]',
exclude_list=['fixtures.Article', 'fixtures.Book']
)
# Excluding sites and fixtures.Article/Book should only leave fixtures.Category
self._dumpdata_assert(
['sites', 'fixtures'],
'[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": '
'"News Stories"}}]',
exclude_list=['fixtures.Article', 'fixtures.Book', 'sites']
)
# Excluding a bogus app should throw an error
with self.assertRaisesMessage(management.CommandError, "No installed app with label 'foo_app'."):
self._dumpdata_assert(['fixtures', 'sites'], '', exclude_list=['foo_app'])
# Excluding a bogus model should throw an error
with self.assertRaisesMessage(management.CommandError, "Unknown model: fixtures.FooModel"):
self._dumpdata_assert(['fixtures', 'sites'], '', exclude_list=['fixtures.FooModel'])
@unittest.skipIf(sys.platform.startswith('win'), "Windows doesn't support '?' in filenames.")
def test_load_fixture_with_special_characters(self):
management.call_command('loaddata', 'fixture_with[special]chars', verbosity=0)
self.assertQuerysetEqual(Article.objects.all(), ['<Article: How To Deal With Special Characters>'])
def test_dumpdata_with_filtering_manager(self):
spy1 = Spy.objects.create(name='Paul')
spy2 = Spy.objects.create(name='Alex', cover_blown=True)
self.assertQuerysetEqual(Spy.objects.all(), ['<Spy: Paul>'])
# Use the default manager
self._dumpdata_assert(
['fixtures.Spy'],
'[{"pk": %d, "model": "fixtures.spy", "fields": {"cover_blown": false}}]' % spy1.pk
)
# Dump using Django's base manager. Should return all objects,
# even those normally filtered by the manager
self._dumpdata_assert(
['fixtures.Spy'],
'[{"pk": %d, "model": "fixtures.spy", "fields": {"cover_blown": true}}, {"pk": %d, "model": '
'"fixtures.spy", "fields": {"cover_blown": false}}]' % (spy2.pk, spy1.pk),
use_base_manager=True
)
def test_dumpdata_with_pks(self):
management.call_command('loaddata', 'fixture1.json', verbosity=0)
management.call_command('loaddata', 'fixture2.json', verbosity=0)
self._dumpdata_assert(
['fixtures.Article'],
'[{"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place on ESPN", '
'"pub_date": "2006-06-16T12:00:00"}}, {"pk": 3, "model": "fixtures.article", "fields": {"headline": '
'"Copyright is fine the way it is", "pub_date": "2006-06-16T14:00:00"}}]',
primary_keys='2,3'
)
self._dumpdata_assert(
['fixtures.Article'],
'[{"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place on ESPN", '
'"pub_date": "2006-06-16T12:00:00"}}]',
primary_keys='2'
)
with self.assertRaisesMessage(management.CommandError, "You can only use --pks option with one model"):
self._dumpdata_assert(
['fixtures'],
'[{"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place on ESPN", '
'"pub_date": "2006-06-16T12:00:00"}}, {"pk": 3, "model": "fixtures.article", "fields": '
'{"headline": "Copyright is fine the way it is", "pub_date": "2006-06-16T14:00:00"}}]',
primary_keys='2,3'
)
with self.assertRaisesMessage(management.CommandError, "You can only use --pks option with one model"):
self._dumpdata_assert(
'',
'[{"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place on ESPN", '
'"pub_date": "2006-06-16T12:00:00"}}, {"pk": 3, "model": "fixtures.article", "fields": '
'{"headline": "Copyright is fine the way it is", "pub_date": "2006-06-16T14:00:00"}}]',
primary_keys='2,3'
)
with self.assertRaisesMessage(management.CommandError, "You can only use --pks option with one model"):
self._dumpdata_assert(
['fixtures.Article', 'fixtures.category'],
'[{"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place on ESPN", '
'"pub_date": "2006-06-16T12:00:00"}}, {"pk": 3, "model": "fixtures.article", "fields": '
'{"headline": "Copyright is fine the way it is", "pub_date": "2006-06-16T14:00:00"}}]',
primary_keys='2,3'
)
def test_dumpdata_with_uuid_pks(self):
m1 = PrimaryKeyUUIDModel.objects.create()
m2 = PrimaryKeyUUIDModel.objects.create()
output = StringIO()
management.call_command(
'dumpdata', 'fixtures.PrimaryKeyUUIDModel', '--pks', ', '.join([str(m1.id), str(m2.id)]),
stdout=output,
)
result = output.getvalue()
self.assertIn('"pk": "%s"' % m1.id, result)
self.assertIn('"pk": "%s"' % m2.id, result)
def test_dumpdata_with_file_output(self):
management.call_command('loaddata', 'fixture1.json', verbosity=0)
self._dumpdata_assert(
['fixtures'],
'[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": '
'"News Stories"}}, {"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place '
'on ESPN", "pub_date": "2006-06-16T12:00:00"}}, {"pk": 3, "model": "fixtures.article", "fields": '
'{"headline": "Time to reform copyright", "pub_date": "2006-06-16T13:00:00"}}]',
filename='dumpdata.json'
)
def test_dumpdata_progressbar(self):
"""
Dumpdata shows a progress bar on the command line when --output is set,
stdout is a tty, and verbosity > 0.
"""
management.call_command('loaddata', 'fixture1.json', verbosity=0)
new_io = StringIO()
new_io.isatty = lambda: True
with NamedTemporaryFile() as file:
options = {
'format': 'json',
'stdout': new_io,
'stderr': new_io,
'output': file.name,
}
management.call_command('dumpdata', 'fixtures', **options)
self.assertTrue(new_io.getvalue().endswith('[' + '.' * ProgressBar.progress_width + ']\n'))
# Test no progress bar when verbosity = 0
options['verbosity'] = 0
new_io = StringIO()
new_io.isatty = lambda: True
options.update({'stdout': new_io, 'stderr': new_io})
management.call_command('dumpdata', 'fixtures', **options)
self.assertEqual(new_io.getvalue(), '')
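# The progress bar assertion above relies on ProgressBar rendering a fixed
# width bar of '.' characters between brackets once serialization finishes,
# e.g. a line like '[.....]' with as many dots as ProgressBar.progress_width.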
def test_dumpdata_proxy_without_concrete(self):
"""
A warning is displayed if a proxy model is dumped without its concrete
parent.
"""
ProxySpy.objects.create(name='Paul')
msg = "fixtures.ProxySpy is a proxy model and won't be serialized."
with self.assertWarnsMessage(ProxyModelWarning, msg):
self._dumpdata_assert(['fixtures.ProxySpy'], '[]')
def test_dumpdata_proxy_with_concrete(self):
"""
A warning isn't displayed if a proxy model is dumped with its concrete
parent.
"""
spy = ProxySpy.objects.create(name='Paul')
with warnings.catch_warnings(record=True) as warning_list:
warnings.simplefilter('always')
self._dumpdata_assert(
['fixtures.ProxySpy', 'fixtures.Spy'],
'[{"pk": %d, "model": "fixtures.spy", "fields": {"cover_blown": false}}]' % spy.pk
)
self.assertEqual(len(warning_list), 0)
def test_compress_format_loading(self):
# Load fixture 4 (compressed), using format specification
management.call_command('loaddata', 'fixture4.json', verbosity=0)
self.assertQuerysetEqual(Article.objects.all(), [
'<Article: Django pets kitten>',
])
def test_compressed_specified_loading(self):
# Load fixture 5 (compressed), using format *and* compression specification
management.call_command('loaddata', 'fixture5.json.zip', verbosity=0)
self.assertQuerysetEqual(Article.objects.all(), [
'<Article: WoW subscribers now outnumber readers>',
])
def test_compressed_loading(self):
# Load fixture 5 (compressed), only compression specification
management.call_command('loaddata', 'fixture5.zip', verbosity=0)
self.assertQuerysetEqual(Article.objects.all(), [
'<Article: WoW subscribers now outnumber readers>',
])
def test_ambiguous_compressed_fixture(self):
# The name "fixture5" is ambiguous, so loading it will raise an error
with self.assertRaises(management.CommandError) as cm:
management.call_command('loaddata', 'fixture5', verbosity=0)
self.assertIn("Multiple fixtures named 'fixture5'", cm.exception.args[0])
def test_db_loading(self):
# Load db fixtures 1 and 2. These will load using the 'default' database identifier implicitly
management.call_command('loaddata', 'db_fixture_1', verbosity=0)
management.call_command('loaddata', 'db_fixture_2', verbosity=0)
self.assertQuerysetEqual(Article.objects.all(), [
'<Article: Who needs more than one database?>',
'<Article: Who needs to use compressed data?>',
])
def test_loaddata_error_message(self):
"""
Loading a fixture which contains an invalid object outputs an error
message which contains the pk of the object that triggered the error.
"""
# MySQL needs a little prodding to reject invalid data.
# This won't affect other tests because the database connection
# is closed at the end of each test.
if connection.vendor == 'mysql':
connection.cursor().execute("SET sql_mode = 'TRADITIONAL'")
with self.assertRaises(IntegrityError) as cm:
management.call_command('loaddata', 'invalid.json', verbosity=0)
self.assertIn("Could not load fixtures.Article(pk=1):", cm.exception.args[0])
@unittest.skipUnless(connection.vendor == 'postgresql', 'psycopg2 prohibits null characters in data.')
def test_loaddata_null_characters_on_postgresql(self):
msg = (
'Could not load fixtures.Article(pk=2): '
'A string literal cannot contain NUL (0x00) characters.'
)
with self.assertRaisesMessage(ValueError, msg):
management.call_command('loaddata', 'null_character_in_field_value.json')
def test_loaddata_app_option(self):
with self.assertRaisesMessage(CommandError, "No fixture named 'db_fixture_1' found."):
management.call_command('loaddata', 'db_fixture_1', verbosity=0, app_label="someotherapp")
self.assertQuerysetEqual(Article.objects.all(), [])
management.call_command('loaddata', 'db_fixture_1', verbosity=0, app_label="fixtures")
self.assertQuerysetEqual(Article.objects.all(), [
'<Article: Who needs more than one database?>',
])
def test_loaddata_verbosity_three(self):
output = StringIO()
management.call_command('loaddata', 'fixture1.json', verbosity=3, stdout=output, stderr=output)
command_output = output.getvalue()
self.assertIn(
"\rProcessed 1 object(s).\rProcessed 2 object(s)."
"\rProcessed 3 object(s).\rProcessed 4 object(s).\n",
command_output
)
def test_loading_using(self):
# Load db fixtures 1 and 2. These will load using the 'default' database identifier explicitly
management.call_command('loaddata', 'db_fixture_1', verbosity=0, database='default')
management.call_command('loaddata', 'db_fixture_2', verbosity=0, database='default')
self.assertQuerysetEqual(Article.objects.all(), [
'<Article: Who needs more than one database?>',
'<Article: Who needs to use compressed data?>',
])
def test_unmatched_identifier_loading(self):
# Try to load db fixture 3. This won't load because the database identifier doesn't match
with self.assertRaisesMessage(CommandError, "No fixture named 'db_fixture_3' found."):
management.call_command('loaddata', 'db_fixture_3', verbosity=0)
with self.assertRaisesMessage(CommandError, "No fixture named 'db_fixture_3' found."):
management.call_command('loaddata', 'db_fixture_3', verbosity=0, database='default')
self.assertQuerysetEqual(Article.objects.all(), [])
def test_output_formats(self):
# Load back in fixture 1, we need the articles from it
management.call_command('loaddata', 'fixture1', verbosity=0)
# Try to load fixture 6 using format discovery
management.call_command('loaddata', 'fixture6', verbosity=0)
self.assertQuerysetEqual(Tag.objects.all(), [
'<Tag: <Article: Time to reform copyright> tagged "copyright">',
'<Tag: <Article: Time to reform copyright> tagged "law">'
], ordered=False)
# Dump the current contents of the database as a JSON fixture
self._dumpdata_assert(
['fixtures'],
'[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": '
'"News Stories"}}, {"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place '
'on ESPN", "pub_date": "2006-06-16T12:00:00"}}, {"pk": 3, "model": "fixtures.article", "fields": '
'{"headline": "Time to reform copyright", "pub_date": "2006-06-16T13:00:00"}}, {"pk": 1, "model": '
'"fixtures.tag", "fields": {"tagged_type": ["fixtures", "article"], "name": "copyright", "tagged_id": '
'3}}, {"pk": 2, "model": "fixtures.tag", "fields": {"tagged_type": ["fixtures", "article"], "name": '
'"law", "tagged_id": 3}}, {"pk": 1, "model": "fixtures.person", "fields": {"name": "Django '
'Reinhardt"}}, {"pk": 2, "model": "fixtures.person", "fields": {"name": "Stephane Grappelli"}}, '
'{"pk": 3, "model": "fixtures.person", "fields": {"name": "Prince"}}]',
natural_foreign_keys=True
)
# Dump the current contents of the database as an XML fixture
self._dumpdata_assert(
['fixtures'],
'<?xml version="1.0" encoding="utf-8"?><django-objects version="1.0"><object pk="1" '
'model="fixtures.category"><field type="CharField" name="title">News Stories</field><field '
'type="TextField" name="description">Latest news stories</field></object><object pk="2" '
'model="fixtures.article"><field type="CharField" name="headline">Poker has no place on ESPN</field>'
'<field type="DateTimeField" name="pub_date">2006-06-16T12:00:00</field></object><object pk="3" '
'model="fixtures.article"><field type="CharField" name="headline">Time to reform copyright</field>'
'<field type="DateTimeField" name="pub_date">2006-06-16T13:00:00</field></object><object pk="1" '
'model="fixtures.tag"><field type="CharField" name="name">copyright</field><field '
'to="contenttypes.contenttype" name="tagged_type" rel="ManyToOneRel"><natural>fixtures</natural>'
'<natural>article</natural></field><field type="PositiveIntegerField" name="tagged_id">3</field>'
'</object><object pk="2" model="fixtures.tag"><field type="CharField" name="name">law</field><field '
'to="contenttypes.contenttype" name="tagged_type" rel="ManyToOneRel"><natural>fixtures</natural>'
'<natural>article</natural></field><field type="PositiveIntegerField" name="tagged_id">3</field>'
'</object><object pk="1" model="fixtures.person"><field type="CharField" name="name">Django Reinhardt'
'</field></object><object pk="2" model="fixtures.person"><field type="CharField" name="name">Stephane '
'Grappelli</field></object><object pk="3" model="fixtures.person"><field type="CharField" name="name">'
'Prince</field></object></django-objects>',
format='xml', natural_foreign_keys=True
)
def test_loading_with_exclude_app(self):
Site.objects.all().delete()
management.call_command('loaddata', 'fixture1', exclude=['fixtures'], verbosity=0)
self.assertFalse(Article.objects.exists())
self.assertFalse(Category.objects.exists())
self.assertQuerysetEqual(Site.objects.all(), ['<Site: example.com>'])
def test_loading_with_exclude_model(self):
Site.objects.all().delete()
management.call_command('loaddata', 'fixture1', exclude=['fixtures.Article'], verbosity=0)
self.assertFalse(Article.objects.exists())
self.assertQuerysetEqual(Category.objects.all(), ['<Category: News Stories>'])
self.assertQuerysetEqual(Site.objects.all(), ['<Site: example.com>'])
def test_exclude_option_errors(self):
"""Excluding a bogus app or model should raise an error."""
msg = "No installed app with label 'foo_app'."
with self.assertRaisesMessage(management.CommandError, msg):
management.call_command('loaddata', 'fixture1', exclude=['foo_app'], verbosity=0)
msg = "Unknown model: fixtures.FooModel"
with self.assertRaisesMessage(management.CommandError, msg):
management.call_command('loaddata', 'fixture1', exclude=['fixtures.FooModel'], verbosity=0)
def test_stdin_without_format(self):
"""Reading from stdin raises an error if format isn't specified."""
msg = '--format must be specified when reading from stdin.'
with self.assertRaisesMessage(management.CommandError, msg):
management.call_command('loaddata', '-', verbosity=0)
def test_loading_stdin(self):
"""Loading fixtures from stdin with json and xml."""
tests_dir = os.path.dirname(__file__)
fixture_json = os.path.join(tests_dir, 'fixtures', 'fixture1.json')
fixture_xml = os.path.join(tests_dir, 'fixtures', 'fixture3.xml')
with mock.patch('django.core.management.commands.loaddata.sys.stdin', open(fixture_json, 'r')):
management.call_command('loaddata', '--format=json', '-', verbosity=0)
self.assertEqual(Article.objects.count(), 2)
self.assertQuerysetEqual(Article.objects.all(), [
'<Article: Time to reform copyright>',
'<Article: Poker has no place on ESPN>',
])
with mock.patch('django.core.management.commands.loaddata.sys.stdin', open(fixture_xml, 'r')):
management.call_command('loaddata', '--format=xml', '-', verbosity=0)
self.assertEqual(Article.objects.count(), 3)
self.assertQuerysetEqual(Article.objects.all(), [
'<Article: XML identified as leading cause of cancer>',
'<Article: Time to reform copyright>',
'<Article: Poker on TV is great!>',
])
class NonexistentFixtureTests(TestCase):
"""
Tests for loaddata behavior when the requested fixture doesn't exist.
"""
def test_loaddata_not_existent_fixture_file(self):
stdout_output = StringIO()
with self.assertRaisesMessage(CommandError, "No fixture named 'this_fixture_doesnt_exist' found."):
management.call_command('loaddata', 'this_fixture_doesnt_exist', stdout=stdout_output)
@mock.patch('django.db.connection.enable_constraint_checking')
@mock.patch('django.db.connection.disable_constraint_checking')
def test_nonexistent_fixture_no_constraint_checking(
self, disable_constraint_checking, enable_constraint_checking):
"""
If no fixtures match the loaddata command, constraint checks on the
database shouldn't be disabled. This is performance critical on MSSQL.
"""
with self.assertRaisesMessage(CommandError, "No fixture named 'this_fixture_doesnt_exist' found."):
management.call_command('loaddata', 'this_fixture_doesnt_exist', verbosity=0)
disable_constraint_checking.assert_not_called()
enable_constraint_checking.assert_not_called()
class FixtureTransactionTests(DumpDataAssertMixin, TransactionTestCase):
available_apps = [
'fixtures',
'django.contrib.sites',
]
@skipUnlessDBFeature('supports_forward_references')
def test_format_discovery(self):
# Load fixture 1 again, using format discovery
management.call_command('loaddata', 'fixture1', verbosity=0)
self.assertQuerysetEqual(Article.objects.all(), [
'<Article: Time to reform copyright>',
'<Article: Poker has no place on ESPN>',
])
# Try to load fixture 2 using format discovery; this will fail
# because there are two fixture2's in the fixtures directory
with self.assertRaises(management.CommandError) as cm:
management.call_command('loaddata', 'fixture2', verbosity=0)
self.assertIn("Multiple fixtures named 'fixture2'", cm.exception.args[0])
# object list is unaffected
self.assertQuerysetEqual(Article.objects.all(), [
'<Article: Time to reform copyright>',
'<Article: Poker has no place on ESPN>',
])
# Dump the current contents of the database as a JSON fixture
self._dumpdata_assert(
['fixtures'],
'[{"pk": 1, "model": "fixtures.category", "fields": {"description": "Latest news stories", "title": '
'"News Stories"}}, {"pk": 2, "model": "fixtures.article", "fields": {"headline": "Poker has no place '
'on ESPN", "pub_date": "2006-06-16T12:00:00"}}, {"pk": 3, "model": "fixtures.article", "fields": '
'{"headline": "Time to reform copyright", "pub_date": "2006-06-16T13:00:00"}}]'
)
# Load fixture 4 (compressed), using format discovery
management.call_command('loaddata', 'fixture4', verbosity=0)
self.assertQuerysetEqual(Article.objects.all(), [
'<Article: Django pets kitten>',
'<Article: Time to reform copyright>',
'<Article: Poker has no place on ESPN>',
])
class ForwardReferenceTests(TestCase):
def test_forward_reference_fk(self):
management.call_command('loaddata', 'forward_reference_fk.json', verbosity=0)
self.assertEqual(NaturalKeyThing.objects.count(), 2)
t1, t2 = NaturalKeyThing.objects.all()
self.assertEqual(t1.other_thing, t2)
self.assertEqual(t2.other_thing, t1)
def test_forward_reference_m2m(self):
management.call_command('loaddata', 'forward_reference_m2m.json', verbosity=0)
self.assertEqual(NaturalKeyThing.objects.count(), 3)
t1 = NaturalKeyThing.objects.get_by_natural_key('t1')
self.assertQuerysetEqual(
t1.other_things.order_by('key'),
['<NaturalKeyThing: t2>', '<NaturalKeyThing: t3>']
)
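# For context, a forward-reference fixture is one where an object refers to
# another object that only appears later in the same file, so the loader has
# to defer resolving the reference until the target has been saved. An
# illustrative (not the actual) forward_reference_fk.json could look like:
#
#     [
#         {"model": "fixtures.naturalkeything", "pk": 1,
#          "fields": {"key": "t1", "other_thing": ["t2"]}},
#         {"model": "fixtures.naturalkeything", "pk": 2,
#          "fields": {"key": "t2", "other_thing": ["t1"]}}
#     ]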
import os
import sys
import unittest
from types import ModuleType
from unittest import mock
from django.conf import ENVIRONMENT_VARIABLE, LazySettings, Settings, settings
from django.core.exceptions import ImproperlyConfigured
from django.http import HttpRequest
from django.test import (
SimpleTestCase, TestCase, TransactionTestCase, modify_settings,
override_settings, signals,
)
from django.test.utils import requires_tz_support
@modify_settings(ITEMS={
'prepend': ['b'],
'append': ['d'],
'remove': ['a', 'e']
})
@override_settings(ITEMS=['a', 'c', 'e'], ITEMS_OUTER=[1, 2, 3], TEST='override', TEST_OUTER='outer')
class FullyDecoratedTranTestCase(TransactionTestCase):
available_apps = []
def test_override(self):
self.assertEqual(settings.ITEMS, ['b', 'c', 'd'])
self.assertEqual(settings.ITEMS_OUTER, [1, 2, 3])
self.assertEqual(settings.TEST, 'override')
self.assertEqual(settings.TEST_OUTER, 'outer')
@modify_settings(ITEMS={
'append': ['e', 'f'],
'prepend': ['a'],
'remove': ['d', 'c'],
})
def test_method_list_override(self):
self.assertEqual(settings.ITEMS, ['a', 'b', 'e', 'f'])
self.assertEqual(settings.ITEMS_OUTER, [1, 2, 3])
@modify_settings(ITEMS={
'append': ['b'],
'prepend': ['d'],
'remove': ['a', 'c', 'e'],
})
def test_method_list_override_no_ops(self):
self.assertEqual(settings.ITEMS, ['b', 'd'])
@modify_settings(ITEMS={
'append': 'e',
'prepend': 'a',
'remove': 'c',
})
def test_method_list_override_strings(self):
self.assertEqual(settings.ITEMS, ['a', 'b', 'd', 'e'])
@modify_settings(ITEMS={'remove': ['b', 'd']})
@modify_settings(ITEMS={'append': ['b'], 'prepend': ['d']})
def test_method_list_override_nested_order(self):
self.assertEqual(settings.ITEMS, ['d', 'c', 'b'])
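# The expected value follows from the order in which the stacked decorators
# take effect at runtime: the class-level decorators first turn
# ['a', 'c', 'e'] into ['b', 'c', 'd'], then the topmost method decorator
# removes 'b' and 'd' leaving ['c'], and finally the decorator closest to the
# method appends 'b' and prepends 'd', giving ['d', 'c', 'b'].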
@override_settings(TEST='override2')
def test_method_override(self):
self.assertEqual(settings.TEST, 'override2')
self.assertEqual(settings.TEST_OUTER, 'outer')
def test_decorated_testcase_name(self):
self.assertEqual(FullyDecoratedTranTestCase.__name__, 'FullyDecoratedTranTestCase')
def test_decorated_testcase_module(self):
self.assertEqual(FullyDecoratedTranTestCase.__module__, __name__)
@modify_settings(ITEMS={
'prepend': ['b'],
'append': ['d'],
'remove': ['a', 'e']
})
@override_settings(ITEMS=['a', 'c', 'e'], TEST='override')
class FullyDecoratedTestCase(TestCase):
def test_override(self):
self.assertEqual(settings.ITEMS, ['b', 'c', 'd'])
self.assertEqual(settings.TEST, 'override')
@modify_settings(ITEMS={
'append': 'e',
'prepend': 'a',
'remove': 'c',
})
@override_settings(TEST='override2')
def test_method_override(self):
self.assertEqual(settings.ITEMS, ['a', 'b', 'd', 'e'])
self.assertEqual(settings.TEST, 'override2')
class ClassDecoratedTestCaseSuper(TestCase):
"""
Dummy class for testing max recursion error in child class call to
super(). Refs #17011.
"""
def test_max_recursion_error(self):
pass
@override_settings(TEST='override')
class ClassDecoratedTestCase(ClassDecoratedTestCaseSuper):
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.foo = getattr(settings, 'TEST', 'BUG')
def test_override(self):
self.assertEqual(settings.TEST, 'override')
def test_setupclass_override(self):
"""Settings are overridden within setUpClass (#21281)."""
self.assertEqual(self.foo, 'override')
@override_settings(TEST='override2')
def test_method_override(self):
self.assertEqual(settings.TEST, 'override2')
def test_max_recursion_error(self):
"""
Overriding a method on a super class and then calling that method on
the super class should not trigger infinite recursion. See #17011.
"""
super().test_max_recursion_error()
@modify_settings(ITEMS={'append': 'mother'})
@override_settings(ITEMS=['father'], TEST='override-parent')
class ParentDecoratedTestCase(TestCase):
pass
@modify_settings(ITEMS={'append': ['child']})
@override_settings(TEST='override-child')
class ChildDecoratedTestCase(ParentDecoratedTestCase):
def test_override_settings_inheritance(self):
self.assertEqual(settings.ITEMS, ['father', 'mother', 'child'])
self.assertEqual(settings.TEST, 'override-child')
class SettingsTests(SimpleTestCase):
def setUp(self):
self.testvalue = None
signals.setting_changed.connect(self.signal_callback)
def tearDown(self):
signals.setting_changed.disconnect(self.signal_callback)
def signal_callback(self, sender, setting, value, **kwargs):
if setting == 'TEST':
self.testvalue = value
def test_override(self):
settings.TEST = 'test'
self.assertEqual('test', settings.TEST)
with self.settings(TEST='override'):
self.assertEqual('override', settings.TEST)
self.assertEqual('test', settings.TEST)
del settings.TEST
def test_override_change(self):
settings.TEST = 'test'
self.assertEqual('test', settings.TEST)
with self.settings(TEST='override'):
self.assertEqual('override', settings.TEST)
settings.TEST = 'test2'
self.assertEqual('test', settings.TEST)
del settings.TEST
def test_override_doesnt_leak(self):
with self.assertRaises(AttributeError):
getattr(settings, 'TEST')
with self.settings(TEST='override'):
self.assertEqual('override', settings.TEST)
settings.TEST = 'test'
with self.assertRaises(AttributeError):
getattr(settings, 'TEST')
@override_settings(TEST='override')
def test_decorator(self):
self.assertEqual('override', settings.TEST)
def test_context_manager(self):
with self.assertRaises(AttributeError):
getattr(settings, 'TEST')
override = override_settings(TEST='override')
with self.assertRaises(AttributeError):
getattr(settings, 'TEST')
override.enable()
self.assertEqual('override', settings.TEST)
override.disable()
with self.assertRaises(AttributeError):
getattr(settings, 'TEST')
def test_class_decorator(self):
# SimpleTestCase can be decorated by override_settings, but not unittest.TestCase.
class SimpleTestCaseSubclass(SimpleTestCase):
pass
class UnittestTestCaseSubclass(unittest.TestCase):
pass
decorated = override_settings(TEST='override')(SimpleTestCaseSubclass)
self.assertIsInstance(decorated, type)
self.assertTrue(issubclass(decorated, SimpleTestCase))
with self.assertRaisesMessage(Exception, "Only subclasses of Django SimpleTestCase"):
decorated = override_settings(TEST='override')(UnittestTestCaseSubclass)
def test_signal_callback_context_manager(self):
with self.assertRaises(AttributeError):
getattr(settings, 'TEST')
with self.settings(TEST='override'):
self.assertEqual(self.testvalue, 'override')
self.assertIsNone(self.testvalue)
@override_settings(TEST='override')
def test_signal_callback_decorator(self):
self.assertEqual(self.testvalue, 'override')
#
# Regression tests for #10130: deleting settings.
#
def test_settings_delete(self):
settings.TEST = 'test'
self.assertEqual('test', settings.TEST)
del settings.TEST
msg = "'Settings' object has no attribute 'TEST'"
with self.assertRaisesMessage(AttributeError, msg):
getattr(settings, 'TEST')
def test_settings_delete_wrapped(self):
with self.assertRaisesMessage(TypeError, "can't delete _wrapped."):
delattr(settings, '_wrapped')
def test_override_settings_delete(self):
"""
Allow deletion of a setting in an overridden settings set (#18824)
"""
previous_i18n = settings.USE_I18N
previous_l10n = settings.USE_L10N
with self.settings(USE_I18N=False):
del settings.USE_I18N
with self.assertRaises(AttributeError):
getattr(settings, 'USE_I18N')
# Should also work for a non-overridden setting
del settings.USE_L10N
with self.assertRaises(AttributeError):
getattr(settings, 'USE_L10N')
self.assertNotIn('USE_I18N', dir(settings))
self.assertNotIn('USE_L10N', dir(settings))
self.assertEqual(settings.USE_I18N, previous_i18n)
self.assertEqual(settings.USE_L10N, previous_l10n)
def test_override_settings_nested(self):
"""
override_settings uses the actual _wrapped attribute at
runtime, not when it was instantiated.
"""
with self.assertRaises(AttributeError):
getattr(settings, 'TEST')
with self.assertRaises(AttributeError):
getattr(settings, 'TEST2')
inner = override_settings(TEST2='override')
with override_settings(TEST='override'):
self.assertEqual('override', settings.TEST)
with inner:
self.assertEqual('override', settings.TEST)
self.assertEqual('override', settings.TEST2)
# inner's __exit__ should have restored the settings of the outer
# context manager, not those when the class was instantiated
self.assertEqual('override', settings.TEST)
with self.assertRaises(AttributeError):
getattr(settings, 'TEST2')
with self.assertRaises(AttributeError):
getattr(settings, 'TEST')
with self.assertRaises(AttributeError):
getattr(settings, 'TEST2')
def test_no_secret_key(self):
settings_module = ModuleType('fake_settings_module')
sys.modules['fake_settings_module'] = settings_module
msg = 'The SECRET_KEY setting must not be empty.'
try:
with self.assertRaisesMessage(ImproperlyConfigured, msg):
Settings('fake_settings_module')
finally:
del sys.modules['fake_settings_module']
def test_no_settings_module(self):
msg = (
'Requested setting%s, but settings are not configured. You '
'must either define the environment variable DJANGO_SETTINGS_MODULE '
'or call settings.configure() before accessing settings.'
)
orig_settings = os.environ[ENVIRONMENT_VARIABLE]
os.environ[ENVIRONMENT_VARIABLE] = ''
try:
with self.assertRaisesMessage(ImproperlyConfigured, msg % 's'):
settings._setup()
with self.assertRaisesMessage(ImproperlyConfigured, msg % ' TEST'):
settings._setup('TEST')
finally:
os.environ[ENVIRONMENT_VARIABLE] = orig_settings
def test_already_configured(self):
with self.assertRaisesMessage(RuntimeError, 'Settings already configured.'):
settings.configure()
@requires_tz_support
@mock.patch('django.conf.global_settings.TIME_ZONE', 'test')
def test_incorrect_timezone(self):
with self.assertRaisesMessage(ValueError, 'Incorrect timezone setting: test'):
settings._setup()
class TestComplexSettingOverride(SimpleTestCase):
def setUp(self):
self.old_warn_override_settings = signals.COMPLEX_OVERRIDE_SETTINGS.copy()
signals.COMPLEX_OVERRIDE_SETTINGS.add('TEST_WARN')
def tearDown(self):
signals.COMPLEX_OVERRIDE_SETTINGS = self.old_warn_override_settings
self.assertNotIn('TEST_WARN', signals.COMPLEX_OVERRIDE_SETTINGS)
def test_complex_override_warning(self):
"""Regression test for #19031"""
msg = 'Overriding setting TEST_WARN can lead to unexpected behavior.'
with self.assertWarnsMessage(UserWarning, msg) as cm:
with override_settings(TEST_WARN='override'):
self.assertEqual(settings.TEST_WARN, 'override')
self.assertEqual(cm.filename, __file__)
class SecureProxySslHeaderTest(SimpleTestCase):
@override_settings(SECURE_PROXY_SSL_HEADER=None)
def test_none(self):
req = HttpRequest()
self.assertIs(req.is_secure(), False)
@override_settings(SECURE_PROXY_SSL_HEADER=('HTTP_X_FORWARDED_PROTOCOL', 'https'))
def test_set_without_xheader(self):
req = HttpRequest()
self.assertIs(req.is_secure(), False)
@override_settings(SECURE_PROXY_SSL_HEADER=('HTTP_X_FORWARDED_PROTOCOL', 'https'))
def test_set_with_xheader_wrong(self):
req = HttpRequest()
req.META['HTTP_X_FORWARDED_PROTOCOL'] = 'wrongvalue'
self.assertIs(req.is_secure(), False)
@override_settings(SECURE_PROXY_SSL_HEADER=('HTTP_X_FORWARDED_PROTOCOL', 'https'))
def test_set_with_xheader_right(self):
req = HttpRequest()
req.META['HTTP_X_FORWARDED_PROTOCOL'] = 'https'
self.assertIs(req.is_secure(), True)
class IsOverriddenTest(SimpleTestCase):
def test_configure(self):
s = LazySettings()
s.configure(SECRET_KEY='foo')
self.assertTrue(s.is_overridden('SECRET_KEY'))
def test_module(self):
settings_module = ModuleType('fake_settings_module')
settings_module.SECRET_KEY = 'foo'
sys.modules['fake_settings_module'] = settings_module
try:
s = Settings('fake_settings_module')
self.assertTrue(s.is_overridden('SECRET_KEY'))
self.assertFalse(s.is_overridden('ALLOWED_HOSTS'))
finally:
del sys.modules['fake_settings_module']
def test_override(self):
self.assertFalse(settings.is_overridden('ALLOWED_HOSTS'))
with override_settings(ALLOWED_HOSTS=[]):
self.assertTrue(settings.is_overridden('ALLOWED_HOSTS'))
def test_unevaluated_lazysettings_repr(self):
lazy_settings = LazySettings()
expected = '<LazySettings [Unevaluated]>'
self.assertEqual(repr(lazy_settings), expected)
def test_evaluated_lazysettings_repr(self):
lazy_settings = LazySettings()
module = os.environ.get(ENVIRONMENT_VARIABLE)
expected = '<LazySettings "%s">' % module
# Force evaluation of the lazy object.
lazy_settings.APPEND_SLASH
self.assertEqual(repr(lazy_settings), expected)
def test_usersettingsholder_repr(self):
lazy_settings = LazySettings()
lazy_settings.configure(APPEND_SLASH=False)
expected = '<UserSettingsHolder>'
self.assertEqual(repr(lazy_settings._wrapped), expected)
def test_settings_repr(self):
module = os.environ.get(ENVIRONMENT_VARIABLE)
lazy_settings = Settings(module)
expected = '<Settings "%s">' % module
self.assertEqual(repr(lazy_settings), expected)
class TestListSettings(unittest.TestCase):
"""
Make sure settings that should be lists or tuples throw
ImproperlyConfigured if they are set to a string instead of a list or tuple.
"""
list_or_tuple_settings = (
"INSTALLED_APPS",
"TEMPLATE_DIRS",
"LOCALE_PATHS",
)
def test_tuple_settings(self):
settings_module = ModuleType('fake_settings_module')
settings_module.SECRET_KEY = 'foo'
for setting in self.list_or_tuple_settings:
setattr(settings_module, setting, ('non_list_or_tuple_value'))
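# Note: ('non_list_or_tuple_value') is just a plain string -- without a
# trailing comma the parentheses don't create a tuple -- which is exactly
# the misconfiguration this test expects Settings() to reject.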
sys.modules['fake_settings_module'] = settings_module
try:
with self.assertRaises(ImproperlyConfigured):
Settings('fake_settings_module')
finally:
del sys.modules['fake_settings_module']
delattr(settings_module, setting)
class SettingChangeEnterException(Exception):
pass
class SettingChangeExitException(Exception):
pass
class OverrideSettingsIsolationOnExceptionTests(SimpleTestCase):
"""
The override_settings context manager restores settings if one of the
receivers of the "setting_changed" signal fails. Check the three cases of
receiver failure detailed in receiver(). In each case, ALL receivers are
called when exiting the context manager.
"""
def setUp(self):
signals.setting_changed.connect(self.receiver)
self.addCleanup(signals.setting_changed.disconnect, self.receiver)
# Create a spy that's connected to the `setting_changed` signal and
# executed AFTER `self.receiver`.
self.spy_receiver = mock.Mock()
signals.setting_changed.connect(self.spy_receiver)
self.addCleanup(signals.setting_changed.disconnect, self.spy_receiver)
def receiver(self, **kwargs):
"""
A receiver that fails while certain settings are being changed.
- SETTING_BOTH raises an error while receiving the signal
on both entering and exiting the context manager.
- SETTING_ENTER raises an error only on enter.
- SETTING_EXIT raises an error only on exit.
"""
setting = kwargs['setting']
enter = kwargs['enter']
if setting in ('SETTING_BOTH', 'SETTING_ENTER') and enter:
raise SettingChangeEnterException
if setting in ('SETTING_BOTH', 'SETTING_EXIT') and not enter:
raise SettingChangeExitException
def check_settings(self):
"""Assert that settings for these tests aren't present."""
self.assertFalse(hasattr(settings, 'SETTING_BOTH'))
self.assertFalse(hasattr(settings, 'SETTING_ENTER'))
self.assertFalse(hasattr(settings, 'SETTING_EXIT'))
self.assertFalse(hasattr(settings, 'SETTING_PASS'))
def check_spy_receiver_exit_calls(self, call_count):
"""
Assert that `self.spy_receiver` was called exactly `call_count` times
with the ``enter=False`` keyword argument.
"""
kwargs_with_exit = [
kwargs for args, kwargs in self.spy_receiver.call_args_list
if ('enter', False) in kwargs.items()
]
self.assertEqual(len(kwargs_with_exit), call_count)
def test_override_settings_both(self):
"""Receiver fails on both enter and exit."""
with self.assertRaises(SettingChangeEnterException):
with override_settings(SETTING_PASS='BOTH', SETTING_BOTH='BOTH'):
pass
self.check_settings()
# Two settings were touched, so expect two calls of `spy_receiver`.
self.check_spy_receiver_exit_calls(call_count=2)
def test_override_settings_enter(self):
"""Receiver fails on enter only."""
with self.assertRaises(SettingChangeEnterException):
with override_settings(SETTING_PASS='ENTER', SETTING_ENTER='ENTER'):
pass
self.check_settings()
# Two settings were touched, so expect two calls of `spy_receiver`.
self.check_spy_receiver_exit_calls(call_count=2)
def test_override_settings_exit(self):
"""Receiver fails on exit only."""
with self.assertRaises(SettingChangeExitException):
with override_settings(SETTING_PASS='EXIT', SETTING_EXIT='EXIT'):
pass
self.check_settings()
# Two settings were touched, so expect two calls of `spy_receiver`.
self.check_spy_receiver_exit_calls(call_count=2)
def test_override_settings_reusable_on_enter(self):
"""
Error is raised correctly when reusing the same override_settings
instance.
"""
@override_settings(SETTING_ENTER='ENTER')
def decorated_function():
pass
with self.assertRaises(SettingChangeEnterException):
decorated_function()
signals.setting_changed.disconnect(self.receiver)
# This call shouldn't raise any errors.
decorated_function()
import datetime
import itertools
import os
import re
from importlib import import_module
from unittest import mock
from urllib.parse import quote
from django.apps import apps
from django.conf import settings
from django.contrib.admin.models import LogEntry
from django.contrib.auth import (
BACKEND_SESSION_KEY, REDIRECT_FIELD_NAME, SESSION_KEY,
)
from django.contrib.auth.forms import (
AuthenticationForm, PasswordChangeForm, SetPasswordForm,
)
from django.contrib.auth.models import Permission, User
from django.contrib.auth.views import (
INTERNAL_RESET_SESSION_TOKEN, LoginView, logout_then_login,
redirect_to_login,
)
from django.contrib.contenttypes.models import ContentType
from django.contrib.sessions.middleware import SessionMiddleware
from django.contrib.sites.requests import RequestSite
from django.core import mail
from django.db import connection
from django.http import HttpRequest
from django.middleware.csrf import CsrfViewMiddleware, get_token
from django.test import Client, TestCase, override_settings
from django.test.client import RedirectCycleError
from django.urls import NoReverseMatch, reverse, reverse_lazy
from django.utils.http import urlsafe_base64_encode
from django.utils.translation import LANGUAGE_SESSION_KEY
from .client import PasswordResetConfirmClient
from .models import CustomUser, UUIDUser
from .settings import AUTH_TEMPLATES
@override_settings(
LANGUAGES=[('en', 'English')],
LANGUAGE_CODE='en',
TEMPLATES=AUTH_TEMPLATES,
ROOT_URLCONF='auth_tests.urls',
)
class AuthViewsTestCase(TestCase):
"""
Helper base class for all the following test cases.
"""
@classmethod
def setUpTestData(cls):
cls.u1 = User.objects.create_user(username='testclient', password='password', email='[email protected]')
cls.u3 = User.objects.create_user(username='staff', password='password', email='[email protected]')
def login(self, username='testclient', password='password'):
response = self.client.post('/login/', {
'username': username,
'password': password,
})
self.assertIn(SESSION_KEY, self.client.session)
return response
def logout(self):
response = self.client.get('/admin/logout/')
self.assertEqual(response.status_code, 200)
self.assertNotIn(SESSION_KEY, self.client.session)
def assertFormError(self, response, error):
"""Assert that error is found in response.context['form'] errors"""
form_errors = list(itertools.chain(*response.context['form'].errors.values()))
self.assertIn(str(error), form_errors)
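# Note: this overrides the assertFormError that Django's SimpleTestCase
# provides with a simpler helper, which flattens all form errors in the
# response context and checks that the given message is among them.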
@override_settings(ROOT_URLCONF='django.contrib.auth.urls')
class AuthViewNamedURLTests(AuthViewsTestCase):
def test_named_urls(self):
"Named URLs should be reversible"
expected_named_urls = [
('login', [], {}),
('logout', [], {}),
('password_change', [], {}),
('password_change_done', [], {}),
('password_reset', [], {}),
('password_reset_done', [], {}),
('password_reset_confirm', [], {
'uidb64': 'aaaaaaa',
'token': '1111-aaaaa',
}),
('password_reset_complete', [], {}),
]
for name, args, kwargs in expected_named_urls:
with self.subTest(name=name):
try:
reverse(name, args=args, kwargs=kwargs)
except NoReverseMatch:
self.fail("Reversal of url named '%s' failed with NoReverseMatch" % name)
class PasswordResetTest(AuthViewsTestCase):
def setUp(self):
self.client = PasswordResetConfirmClient()
def test_email_not_found(self):
"""If the provided email is not registered, don't raise any error but
also don't send any email."""
response = self.client.get('/password_reset/')
self.assertEqual(response.status_code, 200)
response = self.client.post('/password_reset/', {'email': '[email protected]'})
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 0)
def test_email_found(self):
"Email is sent if a valid email address is provided for password reset"
response = self.client.post('/password_reset/', {'email': '[email protected]'})
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
self.assertIn("http://", mail.outbox[0].body)
self.assertEqual(settings.DEFAULT_FROM_EMAIL, mail.outbox[0].from_email)
        # An optional multipart text/html email can also be sent by this view;
        # make sure the original, plain-text-only default behavior is unchanged.
self.assertFalse(mail.outbox[0].message().is_multipart())
def test_extra_email_context(self):
"""
extra_email_context should be available in the email template context.
"""
response = self.client.post(
'/password_reset_extra_email_context/',
{'email': '[email protected]'},
)
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
self.assertIn('Email email context: "Hello!"', mail.outbox[0].body)
def test_html_mail_template(self):
"""
A multipart email with text/plain and text/html is sent
if the html_email_template parameter is passed to the view
"""
response = self.client.post('/password_reset/html_email_template/', {'email': '[email protected]'})
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
message = mail.outbox[0].message()
self.assertEqual(len(message.get_payload()), 2)
self.assertTrue(message.is_multipart())
self.assertEqual(message.get_payload(0).get_content_type(), 'text/plain')
self.assertEqual(message.get_payload(1).get_content_type(), 'text/html')
self.assertNotIn('<html>', message.get_payload(0).get_payload())
self.assertIn('<html>', message.get_payload(1).get_payload())
def test_email_found_custom_from(self):
"Email is sent if a valid email address is provided for password reset when a custom from_email is provided."
response = self.client.post('/password_reset_from_email/', {'email': '[email protected]'})
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual("[email protected]", mail.outbox[0].from_email)
# Skip any 500 handler action (like sending more mail...)
@override_settings(DEBUG_PROPAGATE_EXCEPTIONS=True)
def test_poisoned_http_host(self):
"Poisoned HTTP_HOST headers can't be used for reset emails"
# This attack is based on the way browsers handle URLs. The colon
# should be used to separate the port, but if the URL contains an @,
# the colon is interpreted as part of a username for login purposes,
# making 'evil.com' the request domain. Since HTTP_HOST is used to
# produce a meaningful reset URL, we need to be certain that the
# HTTP_HOST header isn't poisoned. This is done as a check when get_host()
# is invoked, but we check here as a practical consequence.
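        # Illustrative note (an assumption about stdlib URL parsing, not part
        # of the original test): urllib.parse.urlsplit('//www.example:[email protected]')
        # reports hostname 'evil.com' and treats 'www.example'/'dev' as
        # credentials, which is why HTTP_HOST must be validated before being
        # embedded in the reset URL.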
with self.assertLogs('django.security.DisallowedHost', 'ERROR'):
response = self.client.post(
'/password_reset/',
{'email': '[email protected]'},
HTTP_HOST='www.example:[email protected]'
)
self.assertEqual(response.status_code, 400)
self.assertEqual(len(mail.outbox), 0)
# Skip any 500 handler action (like sending more mail...)
@override_settings(DEBUG_PROPAGATE_EXCEPTIONS=True)
def test_poisoned_http_host_admin_site(self):
"Poisoned HTTP_HOST headers can't be used for reset emails on admin views"
with self.assertLogs('django.security.DisallowedHost', 'ERROR'):
response = self.client.post(
'/admin_password_reset/',
{'email': '[email protected]'},
HTTP_HOST='www.example:[email protected]'
)
self.assertEqual(response.status_code, 400)
self.assertEqual(len(mail.outbox), 0)
def _test_confirm_start(self):
# Start by creating the email
self.client.post('/password_reset/', {'email': '[email protected]'})
self.assertEqual(len(mail.outbox), 1)
return self._read_signup_email(mail.outbox[0])
def _read_signup_email(self, email):
urlmatch = re.search(r"https?://[^/]*(/.*reset/\S*)", email.body)
self.assertIsNotNone(urlmatch, "No URL found in sent email")
return urlmatch.group(), urlmatch.groups()[0]
def test_confirm_valid(self):
url, path = self._test_confirm_start()
response = self.client.get(path)
# redirect to a 'complete' page:
self.assertContains(response, "Please enter your new password")
def test_confirm_invalid(self):
url, path = self._test_confirm_start()
        # Munge the token in the path but keep the same length, in case the
        # URLconf rejects a different length.
path = path[:-5] + ("0" * 4) + path[-1]
response = self.client.get(path)
self.assertContains(response, "The password reset link was invalid")
def test_confirm_invalid_user(self):
# A nonexistent user returns a 200 response, not a 404.
response = self.client.get('/reset/123456/1-1/')
self.assertContains(response, "The password reset link was invalid")
def test_confirm_overflow_user(self):
# A base36 user id that overflows int returns a 200 response.
response = self.client.get('/reset/zzzzzzzzzzzzz/1-1/')
self.assertContains(response, "The password reset link was invalid")
def test_confirm_invalid_post(self):
# Same as test_confirm_invalid, but trying to do a POST instead.
url, path = self._test_confirm_start()
path = path[:-5] + ("0" * 4) + path[-1]
self.client.post(path, {
'new_password1': 'anewpassword',
'new_password2': ' anewpassword',
})
# Check the password has not been changed
u = User.objects.get(email='[email protected]')
        self.assertFalse(u.check_password("anewpassword"))
def test_confirm_invalid_hash(self):
"""A POST with an invalid token is rejected."""
u = User.objects.get(email='[email protected]')
original_password = u.password
url, path = self._test_confirm_start()
path_parts = path.split('-')
        path_parts[-1] = '0' * 20 + '/'
path = '-'.join(path_parts)
response = self.client.post(path, {
'new_password1': 'anewpassword',
'new_password2': 'anewpassword',
})
self.assertIs(response.context['validlink'], False)
u.refresh_from_db()
self.assertEqual(original_password, u.password) # password hasn't changed
def test_confirm_complete(self):
url, path = self._test_confirm_start()
response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'anewpassword'})
# Check the password has been changed
u = User.objects.get(email='[email protected]')
self.assertTrue(u.check_password("anewpassword"))
# The reset token is deleted from the session.
self.assertNotIn(INTERNAL_RESET_SESSION_TOKEN, self.client.session)
# Check we can't use the link again
response = self.client.get(path)
self.assertContains(response, "The password reset link was invalid")
def test_confirm_different_passwords(self):
url, path = self._test_confirm_start()
response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'x'})
self.assertFormError(response, SetPasswordForm.error_messages['password_mismatch'])
def test_reset_redirect_default(self):
response = self.client.post('/password_reset/', {'email': '[email protected]'})
self.assertRedirects(response, '/password_reset/done/', fetch_redirect_response=False)
def test_reset_custom_redirect(self):
response = self.client.post('/password_reset/custom_redirect/', {'email': '[email protected]'})
self.assertRedirects(response, '/custom/', fetch_redirect_response=False)
def test_reset_custom_redirect_named(self):
response = self.client.post('/password_reset/custom_redirect/named/', {'email': '[email protected]'})
self.assertRedirects(response, '/password_reset/', fetch_redirect_response=False)
def test_confirm_redirect_default(self):
url, path = self._test_confirm_start()
response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'anewpassword'})
self.assertRedirects(response, '/reset/done/', fetch_redirect_response=False)
def test_confirm_redirect_custom(self):
url, path = self._test_confirm_start()
path = path.replace('/reset/', '/reset/custom/')
response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'anewpassword'})
self.assertRedirects(response, '/custom/', fetch_redirect_response=False)
def test_confirm_redirect_custom_named(self):
url, path = self._test_confirm_start()
path = path.replace('/reset/', '/reset/custom/named/')
response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'anewpassword'})
self.assertRedirects(response, '/password_reset/', fetch_redirect_response=False)
def test_confirm_login_post_reset(self):
url, path = self._test_confirm_start()
path = path.replace('/reset/', '/reset/post_reset_login/')
response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'anewpassword'})
self.assertRedirects(response, '/reset/done/', fetch_redirect_response=False)
self.assertIn(SESSION_KEY, self.client.session)
@override_settings(
AUTHENTICATION_BACKENDS=[
'django.contrib.auth.backends.ModelBackend',
'django.contrib.auth.backends.AllowAllUsersModelBackend',
]
)
def test_confirm_login_post_reset_custom_backend(self):
# This backend is specified in the URL pattern.
backend = 'django.contrib.auth.backends.AllowAllUsersModelBackend'
url, path = self._test_confirm_start()
path = path.replace('/reset/', '/reset/post_reset_login_custom_backend/')
response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'anewpassword'})
self.assertRedirects(response, '/reset/done/', fetch_redirect_response=False)
self.assertIn(SESSION_KEY, self.client.session)
self.assertEqual(self.client.session[BACKEND_SESSION_KEY], backend)
def test_confirm_login_post_reset_already_logged_in(self):
url, path = self._test_confirm_start()
path = path.replace('/reset/', '/reset/post_reset_login/')
self.login()
response = self.client.post(path, {'new_password1': 'anewpassword', 'new_password2': 'anewpassword'})
self.assertRedirects(response, '/reset/done/', fetch_redirect_response=False)
self.assertIn(SESSION_KEY, self.client.session)
def test_confirm_display_user_from_form(self):
url, path = self._test_confirm_start()
response = self.client.get(path)
# The password_reset_confirm() view passes the user object to the
        # SetPasswordForm, even on GET requests (#16919). For this test,
        # {{ form.user }} is rendered in the template
# registration/password_reset_confirm.html.
username = User.objects.get(email='[email protected]').username
self.assertContains(response, "Hello, %s." % username)
# However, the view should NOT pass any user object on a form if the
# password reset link was invalid.
response = self.client.get('/reset/zzzzzzzzzzzzz/1-1/')
self.assertContains(response, "Hello, .")
def test_confirm_link_redirects_to_set_password_page(self):
url, path = self._test_confirm_start()
# Don't use PasswordResetConfirmClient (self.client) here which
# automatically fetches the redirect page.
client = Client()
response = client.get(path)
token = response.resolver_match.kwargs['token']
uuidb64 = response.resolver_match.kwargs['uidb64']
self.assertRedirects(response, '/reset/%s/set-password/' % uuidb64)
self.assertEqual(client.session['_password_reset_token'], token)
def test_invalid_link_if_going_directly_to_the_final_reset_password_url(self):
url, path = self._test_confirm_start()
_, uuidb64, _ = path.strip('/').split('/')
response = Client().get('/reset/%s/set-password/' % uuidb64)
self.assertContains(response, 'The password reset link was invalid')
@override_settings(AUTH_USER_MODEL='auth_tests.CustomUser')
class CustomUserPasswordResetTest(AuthViewsTestCase):
user_email = '[email protected]'
@classmethod
def setUpTestData(cls):
cls.u1 = CustomUser.custom_objects.create(
email='[email protected]',
date_of_birth=datetime.date(1976, 11, 8),
)
cls.u1.set_password('password')
cls.u1.save()
def setUp(self):
self.client = PasswordResetConfirmClient()
def _test_confirm_start(self):
# Start by creating the email
response = self.client.post('/password_reset/', {'email': self.user_email})
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
return self._read_signup_email(mail.outbox[0])
def _read_signup_email(self, email):
urlmatch = re.search(r"https?://[^/]*(/.*reset/\S*)", email.body)
self.assertIsNotNone(urlmatch, "No URL found in sent email")
return urlmatch.group(), urlmatch.groups()[0]
def test_confirm_valid_custom_user(self):
url, path = self._test_confirm_start()
response = self.client.get(path)
# redirect to a 'complete' page:
self.assertContains(response, "Please enter your new password")
# then submit a new password
response = self.client.post(path, {
'new_password1': 'anewpassword',
'new_password2': 'anewpassword',
})
self.assertRedirects(response, '/reset/done/')
@override_settings(AUTH_USER_MODEL='auth_tests.UUIDUser')
class UUIDUserPasswordResetTest(CustomUserPasswordResetTest):
def _test_confirm_start(self):
        # Create the user here instead of relying on a fixture.
UUIDUser.objects.create_user(
email=self.user_email,
username='foo',
password='foo',
)
return super()._test_confirm_start()
def test_confirm_invalid_uuid(self):
"""A uidb64 that decodes to a non-UUID doesn't crash."""
_, path = self._test_confirm_start()
invalid_uidb64 = urlsafe_base64_encode('INVALID_UUID'.encode())
first, _uuidb64_, second = path.strip('/').split('/')
response = self.client.get('/' + '/'.join((first, invalid_uidb64, second)) + '/')
self.assertContains(response, 'The password reset link was invalid')
class ChangePasswordTest(AuthViewsTestCase):
def fail_login(self):
response = self.client.post('/login/', {
'username': 'testclient',
'password': 'password',
})
self.assertFormError(response, AuthenticationForm.error_messages['invalid_login'] % {
'username': User._meta.get_field('username').verbose_name
})
def logout(self):
self.client.get('/logout/')
def test_password_change_fails_with_invalid_old_password(self):
self.login()
response = self.client.post('/password_change/', {
'old_password': 'donuts',
'new_password1': 'password1',
'new_password2': 'password1',
})
self.assertFormError(response, PasswordChangeForm.error_messages['password_incorrect'])
def test_password_change_fails_with_mismatched_passwords(self):
self.login()
response = self.client.post('/password_change/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'donuts',
})
self.assertFormError(response, SetPasswordForm.error_messages['password_mismatch'])
def test_password_change_succeeds(self):
self.login()
self.client.post('/password_change/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'password1',
})
self.fail_login()
self.login(password='password1')
def test_password_change_done_succeeds(self):
self.login()
response = self.client.post('/password_change/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'password1',
})
self.assertRedirects(response, '/password_change/done/', fetch_redirect_response=False)
@override_settings(LOGIN_URL='/login/')
def test_password_change_done_fails(self):
response = self.client.get('/password_change/done/')
self.assertRedirects(response, '/login/?next=/password_change/done/', fetch_redirect_response=False)
def test_password_change_redirect_default(self):
self.login()
response = self.client.post('/password_change/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'password1',
})
self.assertRedirects(response, '/password_change/done/', fetch_redirect_response=False)
def test_password_change_redirect_custom(self):
self.login()
response = self.client.post('/password_change/custom/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'password1',
})
self.assertRedirects(response, '/custom/', fetch_redirect_response=False)
def test_password_change_redirect_custom_named(self):
self.login()
response = self.client.post('/password_change/custom/named/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'password1',
})
self.assertRedirects(response, '/password_reset/', fetch_redirect_response=False)
class SessionAuthenticationTests(AuthViewsTestCase):
def test_user_password_change_updates_session(self):
"""
#21649 - Ensure contrib.auth.views.password_change updates the user's
session auth hash after a password change so the session isn't logged out.
"""
self.login()
original_session_key = self.client.session.session_key
response = self.client.post('/password_change/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'password1',
})
# if the hash isn't updated, retrieving the redirection page will fail.
self.assertRedirects(response, '/password_change/done/')
# The session key is rotated.
self.assertNotEqual(original_session_key, self.client.session.session_key)
class LoginTest(AuthViewsTestCase):
def test_current_site_in_context_after_login(self):
response = self.client.get(reverse('login'))
self.assertEqual(response.status_code, 200)
if apps.is_installed('django.contrib.sites'):
Site = apps.get_model('sites.Site')
site = Site.objects.get_current()
self.assertEqual(response.context['site'], site)
self.assertEqual(response.context['site_name'], site.name)
else:
self.assertIsInstance(response.context['site'], RequestSite)
self.assertIsInstance(response.context['form'], AuthenticationForm)
def test_security_check(self):
login_url = reverse('login')
# These URLs should not pass the security check.
bad_urls = (
'http://example.com',
'http:///example.com',
'https://example.com',
'ftp://example.com',
'///example.com',
'//example.com',
'javascript:alert("XSS")',
)
for bad_url in bad_urls:
with self.subTest(bad_url=bad_url):
nasty_url = '%(url)s?%(next)s=%(bad_url)s' % {
'url': login_url,
'next': REDIRECT_FIELD_NAME,
'bad_url': quote(bad_url),
}
response = self.client.post(nasty_url, {
'username': 'testclient',
'password': 'password',
})
self.assertEqual(response.status_code, 302)
self.assertNotIn(bad_url, response.url, '%s should be blocked' % bad_url)
# These URLs should pass the security check.
good_urls = (
'/view/?param=http://example.com',
'/view/?param=https://example.com',
'/view?param=ftp://example.com',
'view/?param=//example.com',
'https://testserver/',
'HTTPS://testserver/',
'//testserver/',
'/url%20with%20spaces/',
)
for good_url in good_urls:
with self.subTest(good_url=good_url):
safe_url = '%(url)s?%(next)s=%(good_url)s' % {
'url': login_url,
'next': REDIRECT_FIELD_NAME,
'good_url': quote(good_url),
}
response = self.client.post(safe_url, {
'username': 'testclient',
'password': 'password',
})
self.assertEqual(response.status_code, 302)
self.assertIn(good_url, response.url, '%s should be allowed' % good_url)
def test_security_check_https(self):
login_url = reverse('login')
non_https_next_url = 'http://testserver/path'
not_secured_url = '%(url)s?%(next)s=%(next_url)s' % {
'url': login_url,
'next': REDIRECT_FIELD_NAME,
'next_url': quote(non_https_next_url),
}
post_data = {
'username': 'testclient',
'password': 'password',
}
response = self.client.post(not_secured_url, post_data, secure=True)
self.assertEqual(response.status_code, 302)
self.assertNotEqual(response.url, non_https_next_url)
self.assertEqual(response.url, settings.LOGIN_REDIRECT_URL)
def test_login_form_contains_request(self):
# The custom authentication form for this login requires a request to
# initialize it.
response = self.client.post('/custom_request_auth_login/', {
'username': 'testclient',
'password': 'password',
})
# The login was successful.
self.assertRedirects(response, settings.LOGIN_REDIRECT_URL, fetch_redirect_response=False)
def test_login_csrf_rotate(self):
"""
Makes sure that a login rotates the currently-used CSRF token.
"""
# Do a GET to establish a CSRF token
# The test client isn't used here as it's a test for middleware.
req = HttpRequest()
CsrfViewMiddleware().process_view(req, LoginView.as_view(), (), {})
# get_token() triggers CSRF token inclusion in the response
get_token(req)
resp = LoginView.as_view()(req)
resp2 = CsrfViewMiddleware().process_response(req, resp)
csrf_cookie = resp2.cookies.get(settings.CSRF_COOKIE_NAME, None)
token1 = csrf_cookie.coded_value
# Prepare the POST request
req = HttpRequest()
req.COOKIES[settings.CSRF_COOKIE_NAME] = token1
req.method = "POST"
req.POST = {'username': 'testclient', 'password': 'password', 'csrfmiddlewaretoken': token1}
# Use POST request to log in
SessionMiddleware().process_request(req)
CsrfViewMiddleware().process_view(req, LoginView.as_view(), (), {})
req.META["SERVER_NAME"] = "testserver" # Required to have redirect work in login view
req.META["SERVER_PORT"] = 80
resp = LoginView.as_view()(req)
resp2 = CsrfViewMiddleware().process_response(req, resp)
csrf_cookie = resp2.cookies.get(settings.CSRF_COOKIE_NAME, None)
token2 = csrf_cookie.coded_value
# Check the CSRF token switched
self.assertNotEqual(token1, token2)
def test_session_key_flushed_on_login(self):
"""
To avoid reusing another user's session, ensure a new, empty session is
created if the existing session corresponds to a different authenticated
user.
"""
self.login()
original_session_key = self.client.session.session_key
self.login(username='staff')
self.assertNotEqual(original_session_key, self.client.session.session_key)
def test_session_key_flushed_on_login_after_password_change(self):
"""
As above, but same user logging in after a password change.
"""
self.login()
original_session_key = self.client.session.session_key
# If no password change, session key should not be flushed.
self.login()
self.assertEqual(original_session_key, self.client.session.session_key)
user = User.objects.get(username='testclient')
user.set_password('foobar')
user.save()
self.login(password='foobar')
self.assertNotEqual(original_session_key, self.client.session.session_key)
def test_login_session_without_hash_session_key(self):
"""
        A session without django.contrib.auth.HASH_SESSION_KEY should log in
        without raising an exception.
"""
user = User.objects.get(username='testclient')
engine = import_module(settings.SESSION_ENGINE)
session = engine.SessionStore()
session[SESSION_KEY] = user.id
session.save()
original_session_key = session.session_key
self.client.cookies[settings.SESSION_COOKIE_NAME] = original_session_key
self.login()
self.assertNotEqual(original_session_key, self.client.session.session_key)
class LoginURLSettings(AuthViewsTestCase):
"""Tests for settings.LOGIN_URL."""
def assertLoginURLEquals(self, url):
response = self.client.get('/login_required/')
self.assertRedirects(response, url, fetch_redirect_response=False)
@override_settings(LOGIN_URL='/login/')
def test_standard_login_url(self):
self.assertLoginURLEquals('/login/?next=/login_required/')
@override_settings(LOGIN_URL='login')
def test_named_login_url(self):
self.assertLoginURLEquals('/login/?next=/login_required/')
@override_settings(LOGIN_URL='http://remote.example.com/login')
def test_remote_login_url(self):
quoted_next = quote('http://testserver/login_required/')
expected = 'http://remote.example.com/login?next=%s' % quoted_next
self.assertLoginURLEquals(expected)
@override_settings(LOGIN_URL='https:///login/')
def test_https_login_url(self):
quoted_next = quote('http://testserver/login_required/')
expected = 'https:///login/?next=%s' % quoted_next
self.assertLoginURLEquals(expected)
@override_settings(LOGIN_URL='/login/?pretty=1')
def test_login_url_with_querystring(self):
self.assertLoginURLEquals('/login/?pretty=1&next=/login_required/')
@override_settings(LOGIN_URL='http://remote.example.com/login/?next=/default/')
def test_remote_login_url_with_next_querystring(self):
quoted_next = quote('http://testserver/login_required/')
expected = 'http://remote.example.com/login/?next=%s' % quoted_next
self.assertLoginURLEquals(expected)
@override_settings(LOGIN_URL=reverse_lazy('login'))
def test_lazy_login_url(self):
self.assertLoginURLEquals('/login/?next=/login_required/')
class LoginRedirectUrlTest(AuthViewsTestCase):
"""Tests for settings.LOGIN_REDIRECT_URL."""
def assertLoginRedirectURLEqual(self, url):
response = self.login()
self.assertRedirects(response, url, fetch_redirect_response=False)
def test_default(self):
self.assertLoginRedirectURLEqual('/accounts/profile/')
@override_settings(LOGIN_REDIRECT_URL='/custom/')
def test_custom(self):
self.assertLoginRedirectURLEqual('/custom/')
@override_settings(LOGIN_REDIRECT_URL='password_reset')
def test_named(self):
self.assertLoginRedirectURLEqual('/password_reset/')
@override_settings(LOGIN_REDIRECT_URL='http://remote.example.com/welcome/')
def test_remote(self):
self.assertLoginRedirectURLEqual('http://remote.example.com/welcome/')
class RedirectToLoginTests(AuthViewsTestCase):
"""Tests for the redirect_to_login view"""
@override_settings(LOGIN_URL=reverse_lazy('login'))
def test_redirect_to_login_with_lazy(self):
login_redirect_response = redirect_to_login(next='/else/where/')
expected = '/login/?next=/else/where/'
self.assertEqual(expected, login_redirect_response.url)
@override_settings(LOGIN_URL=reverse_lazy('login'))
def test_redirect_to_login_with_lazy_and_unicode(self):
login_redirect_response = redirect_to_login(next='/else/where/झ/')
expected = '/login/?next=/else/where/%E0%A4%9D/'
self.assertEqual(expected, login_redirect_response.url)
class LogoutThenLoginTests(AuthViewsTestCase):
"""Tests for the logout_then_login view"""
def confirm_logged_out(self):
self.assertNotIn(SESSION_KEY, self.client.session)
@override_settings(LOGIN_URL='/login/')
def test_default_logout_then_login(self):
self.login()
req = HttpRequest()
req.method = 'GET'
req.session = self.client.session
response = logout_then_login(req)
self.confirm_logged_out()
self.assertRedirects(response, '/login/', fetch_redirect_response=False)
def test_logout_then_login_with_custom_login(self):
self.login()
req = HttpRequest()
req.method = 'GET'
req.session = self.client.session
response = logout_then_login(req, login_url='/custom/')
self.confirm_logged_out()
self.assertRedirects(response, '/custom/', fetch_redirect_response=False)
class LoginRedirectAuthenticatedUser(AuthViewsTestCase):
dont_redirect_url = '/login/redirect_authenticated_user_default/'
do_redirect_url = '/login/redirect_authenticated_user/'
def test_default(self):
"""Stay on the login page by default."""
self.login()
response = self.client.get(self.dont_redirect_url)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['next'], '')
def test_guest(self):
"""If not logged in, stay on the same page."""
response = self.client.get(self.do_redirect_url)
self.assertEqual(response.status_code, 200)
def test_redirect(self):
"""If logged in, go to default redirected URL."""
self.login()
response = self.client.get(self.do_redirect_url)
self.assertRedirects(response, '/accounts/profile/', fetch_redirect_response=False)
@override_settings(LOGIN_REDIRECT_URL='/custom/')
def test_redirect_url(self):
"""If logged in, go to custom redirected URL."""
self.login()
response = self.client.get(self.do_redirect_url)
self.assertRedirects(response, '/custom/', fetch_redirect_response=False)
def test_redirect_param(self):
"""If next is specified as a GET parameter, go there."""
self.login()
url = self.do_redirect_url + '?next=/custom_next/'
response = self.client.get(url)
self.assertRedirects(response, '/custom_next/', fetch_redirect_response=False)
def test_redirect_loop(self):
"""
Detect a redirect loop if LOGIN_REDIRECT_URL is not correctly set,
with and without custom parameters.
"""
self.login()
msg = (
"Redirection loop for authenticated user detected. Check that "
"your LOGIN_REDIRECT_URL doesn't point to a login page"
)
with self.settings(LOGIN_REDIRECT_URL=self.do_redirect_url):
with self.assertRaisesMessage(ValueError, msg):
self.client.get(self.do_redirect_url)
url = self.do_redirect_url + '?bla=2'
with self.assertRaisesMessage(ValueError, msg):
self.client.get(url)
def test_permission_required_not_logged_in(self):
# Not logged in ...
with self.settings(LOGIN_URL=self.do_redirect_url):
# redirected to login.
response = self.client.get('/permission_required_redirect/', follow=True)
self.assertEqual(response.status_code, 200)
# exception raised.
response = self.client.get('/permission_required_exception/', follow=True)
self.assertEqual(response.status_code, 403)
# redirected to login.
response = self.client.get('/login_and_permission_required_exception/', follow=True)
self.assertEqual(response.status_code, 200)
def test_permission_required_logged_in(self):
self.login()
# Already logged in...
with self.settings(LOGIN_URL=self.do_redirect_url):
# redirect loop encountered.
with self.assertRaisesMessage(RedirectCycleError, 'Redirect loop detected.'):
self.client.get('/permission_required_redirect/', follow=True)
# exception raised.
response = self.client.get('/permission_required_exception/', follow=True)
self.assertEqual(response.status_code, 403)
# exception raised.
response = self.client.get('/login_and_permission_required_exception/', follow=True)
self.assertEqual(response.status_code, 403)
class LoginSuccessURLAllowedHostsTest(AuthViewsTestCase):
def test_success_url_allowed_hosts_same_host(self):
response = self.client.post('/login/allowed_hosts/', {
'username': 'testclient',
'password': 'password',
'next': 'https://testserver/home',
})
self.assertIn(SESSION_KEY, self.client.session)
self.assertRedirects(response, 'https://testserver/home', fetch_redirect_response=False)
def test_success_url_allowed_hosts_safe_host(self):
response = self.client.post('/login/allowed_hosts/', {
'username': 'testclient',
'password': 'password',
'next': 'https://otherserver/home',
})
self.assertIn(SESSION_KEY, self.client.session)
self.assertRedirects(response, 'https://otherserver/home', fetch_redirect_response=False)
def test_success_url_allowed_hosts_unsafe_host(self):
response = self.client.post('/login/allowed_hosts/', {
'username': 'testclient',
'password': 'password',
'next': 'https://evil/home',
})
self.assertIn(SESSION_KEY, self.client.session)
self.assertRedirects(response, '/accounts/profile/', fetch_redirect_response=False)
class LogoutTest(AuthViewsTestCase):
def confirm_logged_out(self):
self.assertNotIn(SESSION_KEY, self.client.session)
def test_logout_default(self):
"Logout without next_page option renders the default template"
self.login()
response = self.client.get('/logout/')
self.assertContains(response, 'Logged out')
self.confirm_logged_out()
def test_logout_with_post(self):
self.login()
response = self.client.post('/logout/')
self.assertContains(response, 'Logged out')
self.confirm_logged_out()
def test_14377(self):
# Bug 14377
self.login()
response = self.client.get('/logout/')
self.assertIn('site', response.context)
def test_logout_doesnt_cache(self):
"""
The logout() view should send "no-cache" headers for reasons described
in #25490.
"""
response = self.client.get('/logout/')
self.assertIn('no-store', response['Cache-Control'])
def test_logout_with_overridden_redirect_url(self):
# Bug 11223
self.login()
response = self.client.get('/logout/next_page/')
self.assertRedirects(response, '/somewhere/', fetch_redirect_response=False)
response = self.client.get('/logout/next_page/?next=/login/')
self.assertRedirects(response, '/login/', fetch_redirect_response=False)
self.confirm_logged_out()
def test_logout_with_next_page_specified(self):
"Logout with next_page option given redirects to specified resource"
self.login()
response = self.client.get('/logout/next_page/')
self.assertRedirects(response, '/somewhere/', fetch_redirect_response=False)
self.confirm_logged_out()
def test_logout_with_redirect_argument(self):
"Logout with query string redirects to specified resource"
self.login()
response = self.client.get('/logout/?next=/login/')
self.assertRedirects(response, '/login/', fetch_redirect_response=False)
self.confirm_logged_out()
def test_logout_with_custom_redirect_argument(self):
"Logout with custom query string redirects to specified resource"
self.login()
response = self.client.get('/logout/custom_query/?follow=/somewhere/')
self.assertRedirects(response, '/somewhere/', fetch_redirect_response=False)
self.confirm_logged_out()
def test_logout_with_named_redirect(self):
"Logout resolves names or URLs passed as next_page."
self.login()
response = self.client.get('/logout/next_page/named/')
self.assertRedirects(response, '/password_reset/', fetch_redirect_response=False)
self.confirm_logged_out()
def test_success_url_allowed_hosts_same_host(self):
self.login()
response = self.client.get('/logout/allowed_hosts/?next=https://testserver/')
self.assertRedirects(response, 'https://testserver/', fetch_redirect_response=False)
self.confirm_logged_out()
def test_success_url_allowed_hosts_safe_host(self):
self.login()
response = self.client.get('/logout/allowed_hosts/?next=https://otherserver/')
self.assertRedirects(response, 'https://otherserver/', fetch_redirect_response=False)
self.confirm_logged_out()
def test_success_url_allowed_hosts_unsafe_host(self):
self.login()
response = self.client.get('/logout/allowed_hosts/?next=https://evil/')
self.assertRedirects(response, '/logout/allowed_hosts/', fetch_redirect_response=False)
self.confirm_logged_out()
def test_security_check(self):
logout_url = reverse('logout')
# These URLs should not pass the security check.
bad_urls = (
'http://example.com',
'http:///example.com',
'https://example.com',
'ftp://example.com',
'///example.com',
'//example.com',
'javascript:alert("XSS")',
)
for bad_url in bad_urls:
with self.subTest(bad_url=bad_url):
nasty_url = '%(url)s?%(next)s=%(bad_url)s' % {
'url': logout_url,
'next': REDIRECT_FIELD_NAME,
'bad_url': quote(bad_url),
}
self.login()
response = self.client.get(nasty_url)
self.assertEqual(response.status_code, 302)
self.assertNotIn(bad_url, response.url, '%s should be blocked' % bad_url)
self.confirm_logged_out()
# These URLs should pass the security check.
good_urls = (
'/view/?param=http://example.com',
'/view/?param=https://example.com',
'/view?param=ftp://example.com',
'view/?param=//example.com',
'https://testserver/',
'HTTPS://testserver/',
'//testserver/',
'/url%20with%20spaces/',
)
for good_url in good_urls:
with self.subTest(good_url=good_url):
safe_url = '%(url)s?%(next)s=%(good_url)s' % {
'url': logout_url,
'next': REDIRECT_FIELD_NAME,
'good_url': quote(good_url),
}
self.login()
response = self.client.get(safe_url)
self.assertEqual(response.status_code, 302)
self.assertIn(good_url, response.url, '%s should be allowed' % good_url)
self.confirm_logged_out()
def test_security_check_https(self):
logout_url = reverse('logout')
non_https_next_url = 'http://testserver/'
url = '%(url)s?%(next)s=%(next_url)s' % {
'url': logout_url,
'next': REDIRECT_FIELD_NAME,
'next_url': quote(non_https_next_url),
}
self.login()
response = self.client.get(url, secure=True)
self.assertRedirects(response, logout_url, fetch_redirect_response=False)
self.confirm_logged_out()
def test_logout_preserve_language(self):
"""Language stored in session is preserved after logout"""
# Create a new session with language
engine = import_module(settings.SESSION_ENGINE)
session = engine.SessionStore()
session[LANGUAGE_SESSION_KEY] = 'pl'
session.save()
self.client.cookies[settings.SESSION_COOKIE_NAME] = session.session_key
self.client.get('/logout/')
self.assertEqual(self.client.session[LANGUAGE_SESSION_KEY], 'pl')
@override_settings(LOGOUT_REDIRECT_URL='/custom/')
def test_logout_redirect_url_setting(self):
self.login()
response = self.client.get('/logout/')
self.assertRedirects(response, '/custom/', fetch_redirect_response=False)
@override_settings(LOGOUT_REDIRECT_URL='logout')
def test_logout_redirect_url_named_setting(self):
self.login()
response = self.client.get('/logout/')
self.assertRedirects(response, '/logout/', fetch_redirect_response=False)
def get_perm(Model, perm):
ct = ContentType.objects.get_for_model(Model)
return Permission.objects.get(content_type=ct, codename=perm)
# Redirect in test_user_change_password will fail if session auth hash
# isn't updated after password change (#21649)
@override_settings(ROOT_URLCONF='auth_tests.urls_admin')
class ChangelistTests(AuthViewsTestCase):
@classmethod
def setUpTestData(cls):
super().setUpTestData()
# Make me a superuser before logging in.
User.objects.filter(username='testclient').update(is_staff=True, is_superuser=True)
def setUp(self):
self.login()
# Get the latest last_login value.
self.admin = User.objects.get(pk=self.u1.pk)
def get_user_data(self, user):
return {
'username': user.username,
'password': user.password,
'email': user.email,
'is_active': user.is_active,
'is_staff': user.is_staff,
'is_superuser': user.is_superuser,
'last_login_0': user.last_login.strftime('%Y-%m-%d'),
'last_login_1': user.last_login.strftime('%H:%M:%S'),
'initial-last_login_0': user.last_login.strftime('%Y-%m-%d'),
'initial-last_login_1': user.last_login.strftime('%H:%M:%S'),
'date_joined_0': user.date_joined.strftime('%Y-%m-%d'),
'date_joined_1': user.date_joined.strftime('%H:%M:%S'),
'initial-date_joined_0': user.date_joined.strftime('%Y-%m-%d'),
'initial-date_joined_1': user.date_joined.strftime('%H:%M:%S'),
'first_name': user.first_name,
'last_name': user.last_name,
}
# #20078 - users shouldn't be allowed to guess password hashes via
# repeated password__startswith queries.
def test_changelist_disallows_password_lookups(self):
# A lookup that tries to filter on password isn't OK
with self.assertLogs('django.security.DisallowedModelAdminLookup', 'ERROR'):
response = self.client.get(reverse('auth_test_admin:auth_user_changelist') + '?password__startswith=sha1$')
self.assertEqual(response.status_code, 400)
def test_user_change_email(self):
data = self.get_user_data(self.admin)
data['email'] = 'new_' + data['email']
response = self.client.post(
reverse('auth_test_admin:auth_user_change', args=(self.admin.pk,)),
data
)
self.assertRedirects(response, reverse('auth_test_admin:auth_user_changelist'))
row = LogEntry.objects.latest('id')
self.assertEqual(row.get_change_message(), 'Changed email.')
def test_user_not_change(self):
response = self.client.post(
reverse('auth_test_admin:auth_user_change', args=(self.admin.pk,)),
self.get_user_data(self.admin)
)
self.assertRedirects(response, reverse('auth_test_admin:auth_user_changelist'))
row = LogEntry.objects.latest('id')
self.assertEqual(row.get_change_message(), 'No fields changed.')
def test_user_change_password(self):
user_change_url = reverse('auth_test_admin:auth_user_change', args=(self.admin.pk,))
password_change_url = reverse('auth_test_admin:auth_user_password_change', args=(self.admin.pk,))
response = self.client.get(user_change_url)
# Test the link inside password field help_text.
rel_link = re.search(
r'you can change the password using <a href="([^"]*)">this form</a>',
response.content.decode()
).groups()[0]
self.assertEqual(
os.path.normpath(user_change_url + rel_link),
os.path.normpath(password_change_url)
)
response = self.client.post(
password_change_url,
{
'password1': 'password1',
'password2': 'password1',
}
)
self.assertRedirects(response, user_change_url)
row = LogEntry.objects.latest('id')
self.assertEqual(row.get_change_message(), 'Changed password.')
self.logout()
self.login(password='password1')
def test_user_change_different_user_password(self):
u = User.objects.get(email='[email protected]')
response = self.client.post(
reverse('auth_test_admin:auth_user_password_change', args=(u.pk,)),
{
'password1': 'password1',
'password2': 'password1',
}
)
self.assertRedirects(response, reverse('auth_test_admin:auth_user_change', args=(u.pk,)))
row = LogEntry.objects.latest('id')
self.assertEqual(row.user_id, self.admin.pk)
self.assertEqual(row.object_id, str(u.pk))
self.assertEqual(row.get_change_message(), 'Changed password.')
def test_password_change_bad_url(self):
response = self.client.get(reverse('auth_test_admin:auth_user_password_change', args=('foobar',)))
self.assertEqual(response.status_code, 404)
@mock.patch('django.contrib.auth.admin.UserAdmin.has_change_permission')
def test_user_change_password_passes_user_to_has_change_permission(self, has_change_permission):
url = reverse('auth_test_admin:auth_user_password_change', args=(self.admin.pk,))
self.client.post(url, {'password1': 'password1', 'password2': 'password1'})
(_request, user), _kwargs = has_change_permission.call_args
self.assertEqual(user.pk, self.admin.pk)
def test_view_user_password_is_readonly(self):
u = User.objects.get(username='testclient')
u.is_superuser = False
u.save()
original_password = u.password
u.user_permissions.add(get_perm(User, 'view_user'))
response = self.client.get(reverse('auth_test_admin:auth_user_change', args=(u.pk,)),)
algo, salt, hash_string = (u.password.split('$'))
self.assertContains(response, '<div class="readonly">testclient</div>')
# ReadOnlyPasswordHashWidget is used to render the field.
self.assertContains(
response,
'<strong>algorithm</strong>: %s\n\n'
'<strong>salt</strong>: %s**********\n\n'
'<strong>hash</strong>: %s**************************\n\n' % (
algo, salt[:2], hash_string[:6],
),
html=True,
)
# Value in POST data is ignored.
data = self.get_user_data(u)
data['password'] = 'shouldnotchange'
change_url = reverse('auth_test_admin:auth_user_change', args=(u.pk,))
response = self.client.post(change_url, data)
self.assertRedirects(response, reverse('auth_test_admin:auth_user_changelist'))
u.refresh_from_db()
self.assertEqual(u.password, original_password)
@override_settings(
AUTH_USER_MODEL='auth_tests.UUIDUser',
ROOT_URLCONF='auth_tests.urls_custom_user_admin',
)
class UUIDUserTests(TestCase):
def test_admin_password_change(self):
u = UUIDUser.objects.create_superuser(username='uuid', email='[email protected]', password='test')
self.assertTrue(self.client.login(username='uuid', password='test'))
user_change_url = reverse('custom_user_admin:auth_tests_uuiduser_change', args=(u.pk,))
response = self.client.get(user_change_url)
self.assertEqual(response.status_code, 200)
password_change_url = reverse('custom_user_admin:auth_user_password_change', args=(u.pk,))
response = self.client.get(password_change_url)
self.assertEqual(response.status_code, 200)
# A LogEntry is created with pk=1 which breaks a FK constraint on MySQL
with connection.constraint_checks_disabled():
response = self.client.post(password_change_url, {
'password1': 'password1',
'password2': 'password1',
})
self.assertRedirects(response, user_change_url)
row = LogEntry.objects.latest('id')
self.assertEqual(row.user_id, 1) # hardcoded in CustomUserAdmin.log_change()
self.assertEqual(row.object_id, str(u.pk))
self.assertEqual(row.get_change_message(), 'Changed password.')
# The LogEntry.user column isn't altered to a UUID type so it's set to
# an integer manually in CustomUserAdmin to avoid an error. To avoid a
# constraint error, delete the entry before constraints are checked
# after the test.
row.delete()
from unittest import mock
from django.conf.global_settings import PASSWORD_HASHERS
from django.contrib.auth import get_user_model
from django.contrib.auth.base_user import AbstractBaseUser
from django.contrib.auth.hashers import get_hasher
from django.contrib.auth.models import (
AbstractUser, AnonymousUser, Group, Permission, User, UserManager,
)
from django.contrib.contenttypes.models import ContentType
from django.core import mail
from django.db.models.signals import post_save
from django.test import SimpleTestCase, TestCase, override_settings
from .models import IntegerUsernameUser
from .models.with_custom_email_field import CustomEmailField
class NaturalKeysTestCase(TestCase):
def test_user_natural_key(self):
staff_user = User.objects.create_user(username='staff')
self.assertEqual(User.objects.get_by_natural_key('staff'), staff_user)
self.assertEqual(staff_user.natural_key(), ('staff',))
def test_group_natural_key(self):
users_group = Group.objects.create(name='users')
self.assertEqual(Group.objects.get_by_natural_key('users'), users_group)
class LoadDataWithoutNaturalKeysTestCase(TestCase):
fixtures = ['regular.json']
def test_user_is_created_and_added_to_group(self):
user = User.objects.get(username='my_username')
group = Group.objects.get(name='my_group')
self.assertEqual(group, user.groups.get())
class LoadDataWithNaturalKeysTestCase(TestCase):
fixtures = ['natural.json']
def test_user_is_created_and_added_to_group(self):
user = User.objects.get(username='my_username')
group = Group.objects.get(name='my_group')
self.assertEqual(group, user.groups.get())
class LoadDataWithNaturalKeysAndMultipleDatabasesTestCase(TestCase):
databases = {'default', 'other'}
def test_load_data_with_user_permissions(self):
# Create test contenttypes for both databases
default_objects = [
ContentType.objects.db_manager('default').create(
model='examplemodela',
app_label='app_a',
),
ContentType.objects.db_manager('default').create(
model='examplemodelb',
app_label='app_b',
),
]
other_objects = [
ContentType.objects.db_manager('other').create(
model='examplemodelb',
app_label='app_b',
),
ContentType.objects.db_manager('other').create(
model='examplemodela',
app_label='app_a',
),
]
        # Now create the test permissions.
Permission.objects.db_manager("default").create(
name="Can delete example model b",
codename="delete_examplemodelb",
content_type=default_objects[1],
)
Permission.objects.db_manager("other").create(
name="Can delete example model b",
codename="delete_examplemodelb",
content_type=other_objects[0],
)
perm_default = Permission.objects.get_by_natural_key(
'delete_examplemodelb',
'app_b',
'examplemodelb',
)
perm_other = Permission.objects.db_manager('other').get_by_natural_key(
'delete_examplemodelb',
'app_b',
'examplemodelb',
)
self.assertEqual(perm_default.content_type_id, default_objects[1].id)
self.assertEqual(perm_other.content_type_id, other_objects[0].id)
class UserManagerTestCase(TestCase):
def test_create_user(self):
email_lowercase = '[email protected]'
user = User.objects.create_user('user', email_lowercase)
self.assertEqual(user.email, email_lowercase)
self.assertEqual(user.username, 'user')
self.assertFalse(user.has_usable_password())
def test_create_user_email_domain_normalize_rfc3696(self):
# According to https://tools.ietf.org/html/rfc3696#section-3
# the "@" symbol can be part of the local part of an email address
returned = UserManager.normalize_email(r'Abc\@[email protected]')
self.assertEqual(returned, r'Abc\@[email protected]')
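        # A minimal sketch of the normalization being asserted (an assumption
        # about BaseUserManager.normalize_email, kept in a comment so the test
        # module is unchanged): split on the last '@' and lowercase only the
        # domain part.
        #   name, domain = r'Abc\@[email protected]'.rsplit('@', 1)
        #   assert name + '@' + domain.lower() == r'Abc\@[email protected]'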
def test_create_user_email_domain_normalize(self):
returned = UserManager.normalize_email('[email protected]')
self.assertEqual(returned, '[email protected]')
def test_create_user_email_domain_normalize_with_whitespace(self):
returned = UserManager.normalize_email(r'email\ [email protected]')
self.assertEqual(returned, r'email\ [email protected]')
def test_empty_username(self):
with self.assertRaisesMessage(ValueError, 'The given username must be set'):
User.objects.create_user(username='')
def test_create_user_is_staff(self):
email = '[email protected]'
user = User.objects.create_user('user', email, is_staff=True)
self.assertEqual(user.email, email)
self.assertEqual(user.username, 'user')
self.assertTrue(user.is_staff)
def test_create_super_user_raises_error_on_false_is_superuser(self):
with self.assertRaisesMessage(ValueError, 'Superuser must have is_superuser=True.'):
User.objects.create_superuser(
username='test', email='[email protected]',
password='test', is_superuser=False,
)
def test_create_superuser_raises_error_on_false_is_staff(self):
with self.assertRaisesMessage(ValueError, 'Superuser must have is_staff=True.'):
User.objects.create_superuser(
username='test', email='[email protected]',
password='test', is_staff=False,
)
def test_make_random_password(self):
allowed_chars = 'abcdefg'
password = UserManager().make_random_password(5, allowed_chars)
self.assertEqual(len(password), 5)
for char in password:
self.assertIn(char, allowed_chars)
class AbstractBaseUserTests(SimpleTestCase):
def test_has_usable_password(self):
"""
Passwords are usable even if they don't correspond to a hasher in
settings.PASSWORD_HASHERS.
"""
self.assertIs(User(password='some-gibbberish').has_usable_password(), True)
def test_normalize_username(self):
self.assertEqual(IntegerUsernameUser().normalize_username(123), 123)
def test_clean_normalize_username(self):
# The normalization happens in AbstractBaseUser.clean()
ohm_username = 'iamtheΩ' # U+2126 OHM SIGN
for model in ('auth.User', 'auth_tests.CustomUser'):
with self.subTest(model=model), self.settings(AUTH_USER_MODEL=model):
User = get_user_model()
user = User(**{User.USERNAME_FIELD: ohm_username, 'password': 'foo'})
user.clean()
username = user.get_username()
self.assertNotEqual(username, ohm_username)
self.assertEqual(username, 'iamtheΩ') # U+03A9 GREEK CAPITAL LETTER OMEGA
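                # Hedged note: normalize_username() appears to apply
                # unicodedata.normalize('NFKC', ...), which maps U+2126 OHM
                # SIGN to U+03A9 GREEK CAPITAL LETTER OMEGA.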
def test_default_email(self):
user = AbstractBaseUser()
self.assertEqual(user.get_email_field_name(), 'email')
def test_custom_email(self):
user = CustomEmailField()
self.assertEqual(user.get_email_field_name(), 'email_address')
class AbstractUserTestCase(TestCase):
def test_email_user(self):
# valid send_mail parameters
kwargs = {
"fail_silently": False,
"auth_user": None,
"auth_password": None,
"connection": None,
"html_message": None,
}
abstract_user = AbstractUser(email='[email protected]')
abstract_user.email_user(
subject="Subject here",
message="This is a message",
from_email="[email protected]",
**kwargs
)
self.assertEqual(len(mail.outbox), 1)
message = mail.outbox[0]
self.assertEqual(message.subject, "Subject here")
self.assertEqual(message.body, "This is a message")
self.assertEqual(message.from_email, "[email protected]")
self.assertEqual(message.to, [abstract_user.email])
def test_last_login_default(self):
user1 = User.objects.create(username='user1')
self.assertIsNone(user1.last_login)
user2 = User.objects.create_user(username='user2')
self.assertIsNone(user2.last_login)
def test_user_clean_normalize_email(self):
user = User(username='user', password='foo', email='[email protected]')
user.clean()
self.assertEqual(user.email, '[email protected]')
def test_user_double_save(self):
"""
Calling user.save() twice should trigger password_changed() once.
"""
user = User.objects.create_user(username='user', password='foo')
user.set_password('bar')
with mock.patch('django.contrib.auth.password_validation.password_changed') as pw_changed:
user.save()
self.assertEqual(pw_changed.call_count, 1)
user.save()
self.assertEqual(pw_changed.call_count, 1)
@override_settings(PASSWORD_HASHERS=PASSWORD_HASHERS)
def test_check_password_upgrade(self):
"""
password_changed() shouldn't be called if User.check_password()
triggers a hash iteration upgrade.
"""
user = User.objects.create_user(username='user', password='foo')
initial_password = user.password
self.assertTrue(user.check_password('foo'))
hasher = get_hasher('default')
self.assertEqual('pbkdf2_sha256', hasher.algorithm)
old_iterations = hasher.iterations
try:
# Upgrade the password iterations
hasher.iterations = old_iterations + 1
with mock.patch('django.contrib.auth.password_validation.password_changed') as pw_changed:
user.check_password('foo')
self.assertEqual(pw_changed.call_count, 0)
self.assertNotEqual(initial_password, user.password)
finally:
hasher.iterations = old_iterations
class IsActiveTestCase(TestCase):
"""
Tests the behavior of the guaranteed is_active attribute
"""
def test_builtin_user_isactive(self):
user = User.objects.create(username='foo', email='[email protected]')
# is_active is true by default
self.assertIs(user.is_active, True)
user.is_active = False
user.save()
user_fetched = User.objects.get(pk=user.pk)
# the is_active flag is saved
self.assertFalse(user_fetched.is_active)
@override_settings(AUTH_USER_MODEL='auth_tests.IsActiveTestUser1')
def test_is_active_field_default(self):
"""
        The default value for is_active is provided.
"""
UserModel = get_user_model()
user = UserModel(username='foo')
self.assertIs(user.is_active, True)
        # The attribute can be set, but it won't be persisted.
        user.is_active = False
        # Saving succeeds, but the attribute isn't stored in the database.
        user.save()
        user_fetched = UserModel._default_manager.get(pk=user.pk)
        # The attribute is always True for a newly retrieved instance.
self.assertIs(user_fetched.is_active, True)
class TestCreateSuperUserSignals(TestCase):
"""
Simple test case for ticket #20541
"""
def post_save_listener(self, *args, **kwargs):
self.signals_count += 1
def setUp(self):
self.signals_count = 0
post_save.connect(self.post_save_listener, sender=User)
def tearDown(self):
post_save.disconnect(self.post_save_listener, sender=User)
def test_create_user(self):
User.objects.create_user("JohnDoe")
self.assertEqual(self.signals_count, 1)
def test_create_superuser(self):
User.objects.create_superuser("JohnDoe", "[email protected]", "1")
self.assertEqual(self.signals_count, 1)
class AnonymousUserTests(SimpleTestCase):
no_repr_msg = "Django doesn't provide a DB representation for AnonymousUser."
def setUp(self):
self.user = AnonymousUser()
def test_properties(self):
self.assertIsNone(self.user.pk)
self.assertEqual(self.user.username, '')
self.assertEqual(self.user.get_username(), '')
self.assertIs(self.user.is_anonymous, True)
self.assertIs(self.user.is_authenticated, False)
self.assertIs(self.user.is_staff, False)
self.assertIs(self.user.is_active, False)
self.assertIs(self.user.is_superuser, False)
self.assertEqual(self.user.groups.all().count(), 0)
self.assertEqual(self.user.user_permissions.all().count(), 0)
self.assertEqual(self.user.get_group_permissions(), set())
def test_str(self):
self.assertEqual(str(self.user), 'AnonymousUser')
def test_eq(self):
self.assertEqual(self.user, AnonymousUser())
self.assertNotEqual(self.user, User('super', '[email protected]', 'super'))
def test_hash(self):
self.assertEqual(hash(self.user), 1)
def test_int(self):
msg = (
'Cannot cast AnonymousUser to int. Are you trying to use it in '
'place of User?'
)
with self.assertRaisesMessage(TypeError, msg):
int(self.user)
def test_delete(self):
with self.assertRaisesMessage(NotImplementedError, self.no_repr_msg):
self.user.delete()
def test_save(self):
with self.assertRaisesMessage(NotImplementedError, self.no_repr_msg):
self.user.save()
def test_set_password(self):
with self.assertRaisesMessage(NotImplementedError, self.no_repr_msg):
self.user.set_password('password')
def test_check_password(self):
with self.assertRaisesMessage(NotImplementedError, self.no_repr_msg):
self.user.check_password('password')
class GroupTests(SimpleTestCase):
def test_str(self):
g = Group(name='Users')
self.assertEqual(str(g), 'Users')
class PermissionTests(TestCase):
def test_str(self):
p = Permission.objects.get(codename='view_customemailfield')
self.assertEqual(str(p), 'auth_tests | custom email field | Can view custom email field')
from importlib import import_module
from django.apps import apps
from django.contrib.auth.models import Permission, User
from django.contrib.contenttypes.models import ContentType
from django.test import TestCase
from .models import Proxy, UserProxy
update_proxy_permissions = import_module('django.contrib.auth.migrations.0011_update_proxy_permissions')
class ProxyModelWithDifferentAppLabelTests(TestCase):
available_apps = [
'auth_tests',
'django.contrib.auth',
'django.contrib.contenttypes',
]
def setUp(self):
"""
Create proxy permissions with content_type to the concrete model
rather than the proxy model (as they were before Django 2.2 and
migration 11).
"""
Permission.objects.all().delete()
self.concrete_content_type = ContentType.objects.get_for_model(UserProxy)
self.default_permission = Permission.objects.create(
content_type=self.concrete_content_type,
codename='add_userproxy',
name='Can add userproxy',
)
self.custom_permission = Permission.objects.create(
content_type=self.concrete_content_type,
codename='use_different_app_label',
name='May use a different app label',
)
def test_proxy_model_permissions_contenttype(self):
proxy_model_content_type = ContentType.objects.get_for_model(UserProxy, for_concrete_model=False)
self.assertEqual(self.default_permission.content_type, self.concrete_content_type)
self.assertEqual(self.custom_permission.content_type, self.concrete_content_type)
update_proxy_permissions.update_proxy_model_permissions(apps, None)
self.default_permission.refresh_from_db()
self.assertEqual(self.default_permission.content_type, proxy_model_content_type)
self.custom_permission.refresh_from_db()
self.assertEqual(self.custom_permission.content_type, proxy_model_content_type)
def test_user_has_now_proxy_model_permissions(self):
user = User.objects.create()
user.user_permissions.add(self.default_permission)
user.user_permissions.add(self.custom_permission)
for permission in [self.default_permission, self.custom_permission]:
self.assertTrue(user.has_perm('auth.' + permission.codename))
self.assertFalse(user.has_perm('auth_tests.' + permission.codename))
update_proxy_permissions.update_proxy_model_permissions(apps, None)
# Reload user to purge the _perm_cache.
user = User._default_manager.get(pk=user.pk)
for permission in [self.default_permission, self.custom_permission]:
self.assertFalse(user.has_perm('auth.' + permission.codename))
self.assertTrue(user.has_perm('auth_tests.' + permission.codename))
def test_migrate_backwards(self):
update_proxy_permissions.update_proxy_model_permissions(apps, None)
update_proxy_permissions.revert_proxy_model_permissions(apps, None)
self.default_permission.refresh_from_db()
self.assertEqual(self.default_permission.content_type, self.concrete_content_type)
self.custom_permission.refresh_from_db()
self.assertEqual(self.custom_permission.content_type, self.concrete_content_type)
def test_user_keeps_same_permissions_after_migrating_backward(self):
user = User.objects.create()
user.user_permissions.add(self.default_permission)
user.user_permissions.add(self.custom_permission)
for permission in [self.default_permission, self.custom_permission]:
self.assertTrue(user.has_perm('auth.' + permission.codename))
self.assertFalse(user.has_perm('auth_tests.' + permission.codename))
update_proxy_permissions.update_proxy_model_permissions(apps, None)
update_proxy_permissions.revert_proxy_model_permissions(apps, None)
# Reload user to purge the _perm_cache.
user = User._default_manager.get(pk=user.pk)
for permission in [self.default_permission, self.custom_permission]:
self.assertTrue(user.has_perm('auth.' + permission.codename))
self.assertFalse(user.has_perm('auth_tests.' + permission.codename))
class ProxyModelWithSameAppLabelTests(TestCase):
available_apps = [
'auth_tests',
'django.contrib.auth',
'django.contrib.contenttypes',
]
def setUp(self):
"""
Create proxy permissions with content_type to the concrete model
rather than the proxy model (as they were before Django 2.2 and
migration 11).
"""
Permission.objects.all().delete()
self.concrete_content_type = ContentType.objects.get_for_model(Proxy)
self.default_permission = Permission.objects.create(
content_type=self.concrete_content_type,
codename='add_proxy',
name='Can add proxy',
)
self.custom_permission = Permission.objects.create(
content_type=self.concrete_content_type,
codename='display_proxys',
name='May display proxys information',
)
def test_proxy_model_permissions_contenttype(self):
proxy_model_content_type = ContentType.objects.get_for_model(Proxy, for_concrete_model=False)
self.assertEqual(self.default_permission.content_type, self.concrete_content_type)
self.assertEqual(self.custom_permission.content_type, self.concrete_content_type)
update_proxy_permissions.update_proxy_model_permissions(apps, None)
self.default_permission.refresh_from_db()
self.custom_permission.refresh_from_db()
self.assertEqual(self.default_permission.content_type, proxy_model_content_type)
self.assertEqual(self.custom_permission.content_type, proxy_model_content_type)
def test_user_still_has_proxy_model_permissions(self):
user = User.objects.create()
user.user_permissions.add(self.default_permission)
user.user_permissions.add(self.custom_permission)
for permission in [self.default_permission, self.custom_permission]:
self.assertTrue(user.has_perm('auth_tests.' + permission.codename))
update_proxy_permissions.update_proxy_model_permissions(apps, None)
# Reload user to purge the _perm_cache.
user = User._default_manager.get(pk=user.pk)
for permission in [self.default_permission, self.custom_permission]:
self.assertTrue(user.has_perm('auth_tests.' + permission.codename))
def test_migrate_backwards(self):
update_proxy_permissions.update_proxy_model_permissions(apps, None)
update_proxy_permissions.revert_proxy_model_permissions(apps, None)
self.default_permission.refresh_from_db()
self.assertEqual(self.default_permission.content_type, self.concrete_content_type)
self.custom_permission.refresh_from_db()
self.assertEqual(self.custom_permission.content_type, self.concrete_content_type)
def test_user_keeps_same_permissions_after_migrating_backward(self):
user = User.objects.create()
user.user_permissions.add(self.default_permission)
user.user_permissions.add(self.custom_permission)
for permission in [self.default_permission, self.custom_permission]:
self.assertTrue(user.has_perm('auth_tests.' + permission.codename))
update_proxy_permissions.update_proxy_model_permissions(apps, None)
update_proxy_permissions.revert_proxy_model_permissions(apps, None)
# Reload user to purge the _perm_cache.
user = User._default_manager.get(pk=user.pk)
for permission in [self.default_permission, self.custom_permission]:
self.assertTrue(user.has_perm('auth_tests.' + permission.codename))
|
9214afe3d8cd17e349a5448a4617b3316acabc9a9560871f1e761d9e0b352247 | from datetime import date
from unittest import mock
from django.contrib.auth import (
BACKEND_SESSION_KEY, SESSION_KEY, authenticate, get_user, signals,
)
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.hashers import MD5PasswordHasher
from django.contrib.auth.models import AnonymousUser, Group, Permission, User
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ImproperlyConfigured, PermissionDenied
from django.http import HttpRequest
from django.test import (
SimpleTestCase, TestCase, modify_settings, override_settings,
)
from .models import (
CustomPermissionsUser, CustomUser, CustomUserWithoutIsActiveField,
ExtensionUser, UUIDUser,
)
class CountingMD5PasswordHasher(MD5PasswordHasher):
"""Hasher that counts how many times it computes a hash."""
calls = 0
def encode(self, *args, **kwargs):
type(self).calls += 1
return super().encode(*args, **kwargs)
class BaseModelBackendTest:
"""
A base class for tests that need to validate the ModelBackend
with different User models. Subclasses should define a class
level UserModel attribute, and a create_users() method to
construct two users for test purposes.
"""
backend = 'django.contrib.auth.backends.ModelBackend'
def setUp(self):
self.patched_settings = modify_settings(
AUTHENTICATION_BACKENDS={'append': self.backend},
)
self.patched_settings.enable()
self.create_users()
def tearDown(self):
self.patched_settings.disable()
# The custom_perms test messes with ContentTypes, which will
# be cached; flush the cache to ensure there are no side effects
# Refs #14975, #14925
ContentType.objects.clear_cache()
def test_has_perm(self):
user = self.UserModel._default_manager.get(pk=self.user.pk)
self.assertIs(user.has_perm('auth.test'), False)
user.is_staff = True
user.save()
self.assertIs(user.has_perm('auth.test'), False)
user.is_superuser = True
user.save()
self.assertIs(user.has_perm('auth.test'), True)
user.is_staff = True
user.is_superuser = True
user.is_active = False
user.save()
self.assertIs(user.has_perm('auth.test'), False)
def test_custom_perms(self):
user = self.UserModel._default_manager.get(pk=self.user.pk)
content_type = ContentType.objects.get_for_model(Group)
perm = Permission.objects.create(name='test', content_type=content_type, codename='test')
user.user_permissions.add(perm)
# reloading user to purge the _perm_cache
user = self.UserModel._default_manager.get(pk=self.user.pk)
self.assertEqual(user.get_all_permissions(), {'auth.test'})
self.assertEqual(user.get_group_permissions(), set())
self.assertIs(user.has_module_perms('Group'), False)
self.assertIs(user.has_module_perms('auth'), True)
perm = Permission.objects.create(name='test2', content_type=content_type, codename='test2')
user.user_permissions.add(perm)
perm = Permission.objects.create(name='test3', content_type=content_type, codename='test3')
user.user_permissions.add(perm)
user = self.UserModel._default_manager.get(pk=self.user.pk)
self.assertEqual(user.get_all_permissions(), {'auth.test2', 'auth.test', 'auth.test3'})
self.assertIs(user.has_perm('test'), False)
self.assertIs(user.has_perm('auth.test'), True)
self.assertIs(user.has_perms(['auth.test2', 'auth.test3']), True)
perm = Permission.objects.create(name='test_group', content_type=content_type, codename='test_group')
group = Group.objects.create(name='test_group')
group.permissions.add(perm)
user.groups.add(group)
user = self.UserModel._default_manager.get(pk=self.user.pk)
exp = {'auth.test2', 'auth.test', 'auth.test3', 'auth.test_group'}
self.assertEqual(user.get_all_permissions(), exp)
self.assertEqual(user.get_group_permissions(), {'auth.test_group'})
self.assertIs(user.has_perms(['auth.test3', 'auth.test_group']), True)
user = AnonymousUser()
self.assertIs(user.has_perm('test'), False)
self.assertIs(user.has_perms(['auth.test2', 'auth.test3']), False)
def test_has_no_object_perm(self):
"""Regressiontest for #12462"""
user = self.UserModel._default_manager.get(pk=self.user.pk)
content_type = ContentType.objects.get_for_model(Group)
perm = Permission.objects.create(name='test', content_type=content_type, codename='test')
user.user_permissions.add(perm)
self.assertIs(user.has_perm('auth.test', 'object'), False)
self.assertEqual(user.get_all_permissions('object'), set())
self.assertIs(user.has_perm('auth.test'), True)
self.assertEqual(user.get_all_permissions(), {'auth.test'})
def test_anonymous_has_no_permissions(self):
"""
#17903 -- Anonymous users shouldn't have permissions in
ModelBackend.get_(all|user|group)_permissions().
"""
backend = ModelBackend()
user = self.UserModel._default_manager.get(pk=self.user.pk)
content_type = ContentType.objects.get_for_model(Group)
user_perm = Permission.objects.create(name='test', content_type=content_type, codename='test_user')
group_perm = Permission.objects.create(name='test2', content_type=content_type, codename='test_group')
user.user_permissions.add(user_perm)
group = Group.objects.create(name='test_group')
user.groups.add(group)
group.permissions.add(group_perm)
self.assertEqual(backend.get_all_permissions(user), {'auth.test_user', 'auth.test_group'})
self.assertEqual(backend.get_user_permissions(user), {'auth.test_user'})
self.assertEqual(backend.get_group_permissions(user), {'auth.test_group'})
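# is_anonymous is a property on the user model, so it is patched on the class
# (not the instance) to simulate an anonymous user below.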
with mock.patch.object(self.UserModel, 'is_anonymous', True):
self.assertEqual(backend.get_all_permissions(user), set())
self.assertEqual(backend.get_user_permissions(user), set())
self.assertEqual(backend.get_group_permissions(user), set())
def test_inactive_has_no_permissions(self):
"""
#17903 -- Inactive users shouldn't have permissions in
ModelBackend.get_(all|user|group)_permissions().
"""
backend = ModelBackend()
user = self.UserModel._default_manager.get(pk=self.user.pk)
content_type = ContentType.objects.get_for_model(Group)
user_perm = Permission.objects.create(name='test', content_type=content_type, codename='test_user')
group_perm = Permission.objects.create(name='test2', content_type=content_type, codename='test_group')
user.user_permissions.add(user_perm)
group = Group.objects.create(name='test_group')
user.groups.add(group)
group.permissions.add(group_perm)
self.assertEqual(backend.get_all_permissions(user), {'auth.test_user', 'auth.test_group'})
self.assertEqual(backend.get_user_permissions(user), {'auth.test_user'})
self.assertEqual(backend.get_group_permissions(user), {'auth.test_group'})
user.is_active = False
user.save()
self.assertEqual(backend.get_all_permissions(user), set())
self.assertEqual(backend.get_user_permissions(user), set())
self.assertEqual(backend.get_group_permissions(user), set())
def test_get_all_superuser_permissions(self):
"""A superuser has all permissions. Refs #14795."""
user = self.UserModel._default_manager.get(pk=self.superuser.pk)
self.assertEqual(len(user.get_all_permissions()), len(Permission.objects.all()))
@override_settings(PASSWORD_HASHERS=['auth_tests.test_auth_backends.CountingMD5PasswordHasher'])
def test_authentication_timing(self):
"""Hasher is run once regardless of whether the user exists. Refs #20760."""
# Re-set the password, because this test overrides PASSWORD_HASHERS
self.user.set_password('test')
self.user.save()
CountingMD5PasswordHasher.calls = 0
username = getattr(self.user, self.UserModel.USERNAME_FIELD)
authenticate(username=username, password='test')
self.assertEqual(CountingMD5PasswordHasher.calls, 1)
CountingMD5PasswordHasher.calls = 0
authenticate(username='no_such_user', password='test')
self.assertEqual(CountingMD5PasswordHasher.calls, 1)
class ModelBackendTest(BaseModelBackendTest, TestCase):
"""
Tests for the ModelBackend using the default User model.
"""
UserModel = User
user_credentials = {'username': 'test', 'password': 'test'}
def create_users(self):
self.user = User.objects.create_user(email='[email protected]', **self.user_credentials)
self.superuser = User.objects.create_superuser(
username='test2',
email='[email protected]',
password='test',
)
def test_authenticate_inactive(self):
"""
An inactive user can't authenticate.
"""
self.assertEqual(authenticate(**self.user_credentials), self.user)
self.user.is_active = False
self.user.save()
self.assertIsNone(authenticate(**self.user_credentials))
@override_settings(AUTH_USER_MODEL='auth_tests.CustomUserWithoutIsActiveField')
def test_authenticate_user_without_is_active_field(self):
"""
A custom user without an `is_active` field is allowed to authenticate.
"""
user = CustomUserWithoutIsActiveField.objects._create_user(
username='test', email='[email protected]', password='test',
)
self.assertEqual(authenticate(username='test', password='test'), user)
@override_settings(AUTH_USER_MODEL='auth_tests.ExtensionUser')
class ExtensionUserModelBackendTest(BaseModelBackendTest, TestCase):
"""
Tests for the ModelBackend using the custom ExtensionUser model.
This isn't a perfect test, because both the User and ExtensionUser are
synchronized to the database, which wouldn't ordinarily happen in
production. As a result, it doesn't catch errors caused by the non-
existence of the User table.
The specific problem is queries on .filter(groups__user) et al., which
make an implicit assumption that the user model is called 'User'. In
production, the auth.User table won't exist, so the requested join
won't exist either; in testing, the auth.User *does* exist, and
so does the join. However, the join table won't contain any useful
data; for testing, we check that the data we expect actually does exist.
"""
UserModel = ExtensionUser
def create_users(self):
self.user = ExtensionUser._default_manager.create_user(
username='test',
email='[email protected]',
password='test',
date_of_birth=date(2006, 4, 25)
)
self.superuser = ExtensionUser._default_manager.create_superuser(
username='test2',
email='[email protected]',
password='test',
date_of_birth=date(1976, 11, 8)
)
@override_settings(AUTH_USER_MODEL='auth_tests.CustomPermissionsUser')
class CustomPermissionsUserModelBackendTest(BaseModelBackendTest, TestCase):
"""
Tests for the ModelBackend using the CustomPermissionsUser model.
As with the ExtensionUser test, this isn't a perfect test, because both
the User and CustomPermissionsUser are synchronized to the database,
which wouldn't ordinarily happen in production.
"""
UserModel = CustomPermissionsUser
def create_users(self):
self.user = CustomPermissionsUser._default_manager.create_user(
email='[email protected]',
password='test',
date_of_birth=date(2006, 4, 25)
)
self.superuser = CustomPermissionsUser._default_manager.create_superuser(
email='[email protected]',
password='test',
date_of_birth=date(1976, 11, 8)
)
@override_settings(AUTH_USER_MODEL='auth_tests.CustomUser')
class CustomUserModelBackendAuthenticateTest(TestCase):
"""
The model backend can accept a credentials kwarg labeled with the
custom user model's USERNAME_FIELD.
"""
def test_authenticate(self):
test_user = CustomUser._default_manager.create_user(
email='[email protected]',
password='test',
date_of_birth=date(2006, 4, 25)
)
authenticated_user = authenticate(email='[email protected]', password='test')
self.assertEqual(test_user, authenticated_user)
@override_settings(AUTH_USER_MODEL='auth_tests.UUIDUser')
class UUIDUserTests(TestCase):
def test_login(self):
"""
A custom user with a UUID primary key should be able to login.
"""
user = UUIDUser.objects.create_user(username='uuid', password='test')
self.assertTrue(self.client.login(username='uuid', password='test'))
self.assertEqual(UUIDUser.objects.get(pk=self.client.session[SESSION_KEY]), user)
class TestObj:
pass
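# Note: permission checks consult each entry in AUTHENTICATION_BACKENDS in
# turn; a backend that returns a falsy value simply passes the question on,
# while one that raises PermissionDenied stops the chain.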
class SimpleRowlevelBackend:
def has_perm(self, user, perm, obj=None):
if not obj:
return # We only support row level perms
if isinstance(obj, TestObj):
if user.username == 'test2':
return True
elif user.is_anonymous and perm == 'anon':
return True
elif not user.is_active and perm == 'inactive':
return True
return False
def has_module_perms(self, user, app_label):
return (user.is_anonymous or user.is_active) and app_label == 'app1'
def get_all_permissions(self, user, obj=None):
if not obj:
return [] # We only support row level perms
if not isinstance(obj, TestObj):
return ['none']
if user.is_anonymous:
return ['anon']
if user.username == 'test2':
return ['simple', 'advanced']
else:
return ['simple']
def get_group_permissions(self, user, obj=None):
if not obj:
return # We only support row level perms
if not isinstance(obj, TestObj):
return ['none']
if 'test_group' in [group.name for group in user.groups.all()]:
return ['group_perm']
else:
return ['none']
@modify_settings(AUTHENTICATION_BACKENDS={
'append': 'auth_tests.test_auth_backends.SimpleRowlevelBackend',
})
class RowlevelBackendTest(TestCase):
"""
Tests for an auth backend that supports object-level permissions.
"""
@classmethod
def setUpTestData(cls):
cls.user1 = User.objects.create_user('test', '[email protected]', 'test')
cls.user2 = User.objects.create_user('test2', '[email protected]', 'test')
cls.user3 = User.objects.create_user('test3', '[email protected]', 'test')
def tearDown(self):
# The get_group_permissions test messes with ContentTypes, which will
# be cached; flush the cache to ensure there are no side effects
# Refs #14975, #14925
ContentType.objects.clear_cache()
def test_has_perm(self):
self.assertIs(self.user1.has_perm('perm', TestObj()), False)
self.assertIs(self.user2.has_perm('perm', TestObj()), True)
self.assertIs(self.user2.has_perm('perm'), False)
self.assertIs(self.user2.has_perms(['simple', 'advanced'], TestObj()), True)
self.assertIs(self.user3.has_perm('perm', TestObj()), False)
self.assertIs(self.user3.has_perm('anon', TestObj()), False)
self.assertIs(self.user3.has_perms(['simple', 'advanced'], TestObj()), False)
def test_get_all_permissions(self):
self.assertEqual(self.user1.get_all_permissions(TestObj()), {'simple'})
self.assertEqual(self.user2.get_all_permissions(TestObj()), {'simple', 'advanced'})
self.assertEqual(self.user2.get_all_permissions(), set())
def test_get_group_permissions(self):
group = Group.objects.create(name='test_group')
self.user3.groups.add(group)
self.assertEqual(self.user3.get_group_permissions(TestObj()), {'group_perm'})
@override_settings(
AUTHENTICATION_BACKENDS=['auth_tests.test_auth_backends.SimpleRowlevelBackend'],
)
class AnonymousUserBackendTest(SimpleTestCase):
"""
Tests for AnonymousUser delegating to backend.
"""
def setUp(self):
self.user1 = AnonymousUser()
def test_has_perm(self):
self.assertIs(self.user1.has_perm('perm', TestObj()), False)
self.assertIs(self.user1.has_perm('anon', TestObj()), True)
def test_has_perms(self):
self.assertIs(self.user1.has_perms(['anon'], TestObj()), True)
self.assertIs(self.user1.has_perms(['anon', 'perm'], TestObj()), False)
def test_has_module_perms(self):
self.assertIs(self.user1.has_module_perms("app1"), True)
self.assertIs(self.user1.has_module_perms("app2"), False)
def test_get_all_permissions(self):
self.assertEqual(self.user1.get_all_permissions(TestObj()), {'anon'})
@override_settings(AUTHENTICATION_BACKENDS=[])
class NoBackendsTest(TestCase):
"""
An appropriate error is raised if no auth backends are provided.
"""
@classmethod
def setUpTestData(cls):
cls.user = User.objects.create_user('test', '[email protected]', 'test')
def test_raises_exception(self):
msg = (
'No authentication backends have been defined. '
'Does AUTHENTICATION_BACKENDS contain anything?'
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
self.user.has_perm(('perm', TestObj()))
@override_settings(AUTHENTICATION_BACKENDS=['auth_tests.test_auth_backends.SimpleRowlevelBackend'])
class InActiveUserBackendTest(TestCase):
"""
Tests for an inactive user
"""
@classmethod
def setUpTestData(cls):
cls.user1 = User.objects.create_user('test', '[email protected]', 'test')
cls.user1.is_active = False
cls.user1.save()
def test_has_perm(self):
self.assertIs(self.user1.has_perm('perm', TestObj()), False)
self.assertIs(self.user1.has_perm('inactive', TestObj()), True)
def test_has_module_perms(self):
self.assertIs(self.user1.has_module_perms("app1"), False)
self.assertIs(self.user1.has_module_perms("app2"), False)
class PermissionDeniedBackend:
"""
Always raises PermissionDenied in `authenticate`, `has_perm` and `has_module_perms`.
"""
def authenticate(self, request, username=None, password=None):
raise PermissionDenied
def has_perm(self, user_obj, perm, obj=None):
raise PermissionDenied
def has_module_perms(self, user_obj, app_label):
raise PermissionDenied
class PermissionDeniedBackendTest(TestCase):
"""
Other backends are not checked once a backend raises PermissionDenied
"""
backend = 'auth_tests.test_auth_backends.PermissionDeniedBackend'
@classmethod
def setUpTestData(cls):
cls.user1 = User.objects.create_user('test', '[email protected]', 'test')
def setUp(self):
self.user_login_failed = []
signals.user_login_failed.connect(self.user_login_failed_listener)
def tearDown(self):
signals.user_login_failed.disconnect(self.user_login_failed_listener)
def user_login_failed_listener(self, sender, credentials, **kwargs):
self.user_login_failed.append(credentials)
@modify_settings(AUTHENTICATION_BACKENDS={'prepend': backend})
def test_permission_denied(self):
"user is not authenticated after a backend raises permission denied #2550"
self.assertIsNone(authenticate(username='test', password='test'))
# user_login_failed signal is sent.
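# (authenticate() sanitizes the credentials before sending the signal, so the
# password appears as a run of asterisks rather than the real value.)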
self.assertEqual(self.user_login_failed, [{'password': '********************', 'username': 'test'}])
@modify_settings(AUTHENTICATION_BACKENDS={'append': backend})
def test_authenticates(self):
self.assertEqual(authenticate(username='test', password='test'), self.user1)
@modify_settings(AUTHENTICATION_BACKENDS={'prepend': backend})
def test_has_perm_denied(self):
content_type = ContentType.objects.get_for_model(Group)
perm = Permission.objects.create(name='test', content_type=content_type, codename='test')
self.user1.user_permissions.add(perm)
self.assertIs(self.user1.has_perm('auth.test'), False)
self.assertIs(self.user1.has_module_perms('auth'), False)
@modify_settings(AUTHENTICATION_BACKENDS={'append': backend})
def test_has_perm(self):
content_type = ContentType.objects.get_for_model(Group)
perm = Permission.objects.create(name='test', content_type=content_type, codename='test')
self.user1.user_permissions.add(perm)
self.assertIs(self.user1.has_perm('auth.test'), True)
self.assertIs(self.user1.has_module_perms('auth'), True)
class NewModelBackend(ModelBackend):
pass
class ChangedBackendSettingsTest(TestCase):
"""
Tests for changes in the settings.AUTHENTICATION_BACKENDS
"""
backend = 'auth_tests.test_auth_backends.NewModelBackend'
TEST_USERNAME = 'test_user'
TEST_PASSWORD = 'test_password'
TEST_EMAIL = '[email protected]'
@classmethod
def setUpTestData(cls):
User.objects.create_user(cls.TEST_USERNAME, cls.TEST_EMAIL, cls.TEST_PASSWORD)
@override_settings(AUTHENTICATION_BACKENDS=[backend])
def test_changed_backend_settings(self):
"""
Removing a backend configured in AUTHENTICATION_BACKENDS makes already
logged-in users disconnect.
"""
# Get a session for the test user
self.assertTrue(self.client.login(
username=self.TEST_USERNAME,
password=self.TEST_PASSWORD,
))
# Prepare a request object
request = HttpRequest()
request.session = self.client.session
# Remove NewModelBackend
with self.settings(AUTHENTICATION_BACKENDS=[
'django.contrib.auth.backends.ModelBackend']):
# Get the user from the request
user = get_user(request)
# Assert that the user retrieval is successful and the user is
# anonymous as the backend is no longer available.
self.assertIsNotNone(user)
self.assertTrue(user.is_anonymous)
class TypeErrorBackend:
"""
Always raises TypeError.
"""
def authenticate(self, request, username=None, password=None):
raise TypeError
class SkippedBackend:
def authenticate(self):
# Doesn't accept any credentials so is skipped by authenticate().
pass
class AuthenticateTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.user1 = User.objects.create_user('test', '[email protected]', 'test')
@override_settings(AUTHENTICATION_BACKENDS=['auth_tests.test_auth_backends.TypeErrorBackend'])
def test_type_error_raised(self):
"""A TypeError within a backend is propagated properly (#18171)."""
with self.assertRaises(TypeError):
authenticate(username='test', password='test')
@override_settings(AUTHENTICATION_BACKENDS=(
'auth_tests.test_auth_backends.SkippedBackend',
'django.contrib.auth.backends.ModelBackend',
))
def test_skips_backends_without_arguments(self):
"""
A backend (SkippedBackend) is ignored if it doesn't accept the
credentials as arguments.
"""
self.assertEqual(authenticate(username='test', password='test'), self.user1)
class ImproperlyConfiguredUserModelTest(TestCase):
"""
An exception from within get_user_model() is propagated and doesn't
raise an UnboundLocalError (#21439).
"""
@classmethod
def setUpTestData(cls):
cls.user1 = User.objects.create_user('test', '[email protected]', 'test')
def setUp(self):
self.client.login(username='test', password='test')
@override_settings(AUTH_USER_MODEL='thismodel.doesntexist')
def test_does_not_shadow_exception(self):
# Prepare a request object
request = HttpRequest()
request.session = self.client.session
msg = (
"AUTH_USER_MODEL refers to model 'thismodel.doesntexist' "
"that has not been installed"
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
get_user(request)
class ImportedModelBackend(ModelBackend):
pass
class CustomModelBackend(ModelBackend):
pass
class OtherModelBackend(ModelBackend):
pass
class ImportedBackendTests(TestCase):
"""
#23925 - The backend path added to the session should be the same
as the one defined in AUTHENTICATION_BACKENDS setting.
"""
backend = 'auth_tests.backend_alias.ImportedModelBackend'
@override_settings(AUTHENTICATION_BACKENDS=[backend])
def test_backend_path(self):
username = 'username'
password = 'password'
User.objects.create_user(username, 'email', password)
self.assertTrue(self.client.login(username=username, password=password))
request = HttpRequest()
request.session = self.client.session
self.assertEqual(request.session[BACKEND_SESSION_KEY], self.backend)
class SelectingBackendTests(TestCase):
backend = 'auth_tests.test_auth_backends.CustomModelBackend'
other_backend = 'auth_tests.test_auth_backends.OtherModelBackend'
username = 'username'
password = 'password'
def assertBackendInSession(self, backend):
request = HttpRequest()
request.session = self.client.session
self.assertEqual(request.session[BACKEND_SESSION_KEY], backend)
@override_settings(AUTHENTICATION_BACKENDS=[backend])
def test_backend_path_login_without_authenticate_single_backend(self):
user = User.objects.create_user(self.username, 'email', self.password)
self.client._login(user)
self.assertBackendInSession(self.backend)
@override_settings(AUTHENTICATION_BACKENDS=[backend, other_backend])
def test_backend_path_login_without_authenticate_multiple_backends(self):
user = User.objects.create_user(self.username, 'email', self.password)
expected_message = (
'You have multiple authentication backends configured and '
'therefore must provide the `backend` argument or set the '
'`backend` attribute on the user.'
)
with self.assertRaisesMessage(ValueError, expected_message):
self.client._login(user)
def test_non_string_backend(self):
user = User.objects.create_user(self.username, 'email', self.password)
expected_message = (
'backend must be a dotted import path string (got '
'<class \'django.contrib.auth.backends.ModelBackend\'>).'
)
with self.assertRaisesMessage(TypeError, expected_message):
self.client._login(user, backend=ModelBackend)
@override_settings(AUTHENTICATION_BACKENDS=[backend, other_backend])
def test_backend_path_login_with_explicit_backends(self):
user = User.objects.create_user(self.username, 'email', self.password)
self.client._login(user, self.other_backend)
self.assertBackendInSession(self.other_backend)
@override_settings(AUTHENTICATION_BACKENDS=['django.contrib.auth.backends.AllowAllUsersModelBackend'])
class AllowAllUsersModelBackendTest(TestCase):
"""
Inactive users may authenticate with the AllowAllUsersModelBackend.
"""
user_credentials = {'username': 'test', 'password': 'test'}
@classmethod
def setUpTestData(cls):
cls.user = User.objects.create_user(
email='[email protected]', is_active=False,
**cls.user_credentials
)
def test_authenticate(self):
self.assertFalse(self.user.is_active)
self.assertEqual(authenticate(**self.user_credentials), self.user)
def test_get_user(self):
self.client.force_login(self.user)
request = HttpRequest()
request.session = self.client.session
user = get_user(request)
self.assertEqual(user, self.user)
|
6a55cd23bf1dee70fcc6d192ec83c1cf4dfce2159b4e56a36eb325b47cb211ef | from django.contrib import admin
from django.contrib.auth import views
from django.contrib.auth.decorators import login_required, permission_required
from django.contrib.auth.forms import AuthenticationForm
from django.contrib.auth.urls import urlpatterns as auth_urlpatterns
from django.contrib.messages.api import info
from django.http import HttpRequest, HttpResponse
from django.shortcuts import render
from django.template import RequestContext, Template
from django.urls import path, re_path, reverse_lazy
from django.views.decorators.cache import never_cache
class CustomRequestAuthenticationForm(AuthenticationForm):
def __init__(self, request, *args, **kwargs):
assert isinstance(request, HttpRequest)
super().__init__(request, *args, **kwargs)
@never_cache
def remote_user_auth_view(request):
"Dummy view for remote user tests"
t = Template("Username is {{ user }}.")
c = RequestContext(request, {})
return HttpResponse(t.render(c))
def auth_processor_no_attr_access(request):
render(request, 'context_processors/auth_attrs_no_access.html')
# *After* rendering, we check whether the session was accessed
return render(request,
'context_processors/auth_attrs_test_access.html',
{'session_accessed': request.session.accessed})
def auth_processor_attr_access(request):
render(request, 'context_processors/auth_attrs_access.html')
return render(request,
'context_processors/auth_attrs_test_access.html',
{'session_accessed': request.session.accessed})
def auth_processor_user(request):
return render(request, 'context_processors/auth_attrs_user.html')
def auth_processor_perms(request):
return render(request, 'context_processors/auth_attrs_perms.html')
def auth_processor_perm_in_perms(request):
return render(request, 'context_processors/auth_attrs_perm_in_perms.html')
def auth_processor_messages(request):
info(request, "Message 1")
return render(request, 'context_processors/auth_attrs_messages.html')
def userpage(request):
pass
@permission_required('unknown.permission')
def permission_required_redirect(request):
pass
@permission_required('unknown.permission', raise_exception=True)
def permission_required_exception(request):
pass
@login_required
@permission_required('unknown.permission', raise_exception=True)
def login_and_permission_required_exception(request):
pass
uid_token = r'(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})'
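# For reference, this pattern matches path fragments such as
# 'NA/5tg-1a2b3c4d5e6f' (a uidb64 value followed by a dash-separated token;
# the values shown here are illustrative only).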
# special urls for auth test cases
urlpatterns = auth_urlpatterns + [
path('logout/custom_query/', views.LogoutView.as_view(redirect_field_name='follow')),
path('logout/next_page/', views.LogoutView.as_view(next_page='/somewhere/')),
path('logout/next_page/named/', views.LogoutView.as_view(next_page='password_reset')),
path('logout/allowed_hosts/', views.LogoutView.as_view(success_url_allowed_hosts={'otherserver'})),
path('remote_user/', remote_user_auth_view),
path('password_reset_from_email/', views.PasswordResetView.as_view(from_email='[email protected]')),
path(
'password_reset_extra_email_context/',
views.PasswordResetView.as_view(extra_email_context={'greeting': 'Hello!'})),
path(
'password_reset/custom_redirect/',
views.PasswordResetView.as_view(success_url='/custom/')),
path(
'password_reset/custom_redirect/named/',
views.PasswordResetView.as_view(success_url=reverse_lazy('password_reset'))),
path(
'password_reset/html_email_template/',
views.PasswordResetView.as_view(
html_email_template_name='registration/html_password_reset_email.html'
)),
re_path(
'^reset/custom/{}/$'.format(uid_token),
views.PasswordResetConfirmView.as_view(success_url='/custom/'),
),
re_path(
'^reset/custom/named/{}/$'.format(uid_token),
views.PasswordResetConfirmView.as_view(success_url=reverse_lazy('password_reset')),
),
re_path(
'^reset/post_reset_login/{}/$'.format(uid_token),
views.PasswordResetConfirmView.as_view(post_reset_login=True),
),
re_path(
'^reset/post_reset_login_custom_backend/{}/$'.format(uid_token),
views.PasswordResetConfirmView.as_view(
post_reset_login=True,
post_reset_login_backend='django.contrib.auth.backends.AllowAllUsersModelBackend',
),
),
path('password_change/custom/',
views.PasswordChangeView.as_view(success_url='/custom/')),
path('password_change/custom/named/',
views.PasswordChangeView.as_view(success_url=reverse_lazy('password_reset'))),
path('login_required/', login_required(views.PasswordResetView.as_view())),
path('login_required_login_url/', login_required(views.PasswordResetView.as_view(), login_url='/somewhere/')),
path('auth_processor_no_attr_access/', auth_processor_no_attr_access),
path('auth_processor_attr_access/', auth_processor_attr_access),
path('auth_processor_user/', auth_processor_user),
path('auth_processor_perms/', auth_processor_perms),
path('auth_processor_perm_in_perms/', auth_processor_perm_in_perms),
path('auth_processor_messages/', auth_processor_messages),
path(
'custom_request_auth_login/',
views.LoginView.as_view(authentication_form=CustomRequestAuthenticationForm)),
re_path('^userpage/(.+)/$', userpage, name='userpage'),
path('login/redirect_authenticated_user_default/', views.LoginView.as_view()),
path('login/redirect_authenticated_user/',
views.LoginView.as_view(redirect_authenticated_user=True)),
path('login/allowed_hosts/',
views.LoginView.as_view(success_url_allowed_hosts={'otherserver'})),
path('permission_required_redirect/', permission_required_redirect),
path('permission_required_exception/', permission_required_exception),
path('login_and_permission_required_exception/', login_and_permission_required_exception),
# This line is only required to render the password reset with is_admin=True
path('admin/', admin.site.urls),
]
|
6fcc95504251673322925d719dd05475988c7f4538e4af63b50d63355ea5bd89 | import os
from django.contrib.auth import validators
from django.contrib.auth.models import User
from django.contrib.auth.password_validation import (
CommonPasswordValidator, MinimumLengthValidator, NumericPasswordValidator,
UserAttributeSimilarityValidator, get_default_password_validators,
get_password_validators, password_changed,
password_validators_help_text_html, password_validators_help_texts,
validate_password,
)
from django.core.exceptions import ValidationError
from django.db import models
from django.test import SimpleTestCase, TestCase, override_settings
from django.test.utils import isolate_apps
from django.utils.html import conditional_escape
@override_settings(AUTH_PASSWORD_VALIDATORS=[
{'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator'},
{'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', 'OPTIONS': {
'min_length': 12,
}},
])
class PasswordValidationTest(SimpleTestCase):
def test_get_default_password_validators(self):
validators = get_default_password_validators()
self.assertEqual(len(validators), 2)
self.assertEqual(validators[0].__class__.__name__, 'CommonPasswordValidator')
self.assertEqual(validators[1].__class__.__name__, 'MinimumLengthValidator')
self.assertEqual(validators[1].min_length, 12)
def test_get_password_validators_custom(self):
validator_config = [{'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator'}]
validators = get_password_validators(validator_config)
self.assertEqual(len(validators), 1)
self.assertEqual(validators[0].__class__.__name__, 'CommonPasswordValidator')
self.assertEqual(get_password_validators([]), [])
def test_validate_password(self):
self.assertIsNone(validate_password('sufficiently-long'))
msg_too_short = 'This password is too short. It must contain at least 12 characters.'
with self.assertRaises(ValidationError) as cm:
validate_password('django4242')
self.assertEqual(cm.exception.messages, [msg_too_short])
self.assertEqual(cm.exception.error_list[0].code, 'password_too_short')
with self.assertRaises(ValidationError) as cm:
validate_password('password')
self.assertEqual(cm.exception.messages, ['This password is too common.', msg_too_short])
self.assertEqual(cm.exception.error_list[0].code, 'password_too_common')
self.assertIsNone(validate_password('password', password_validators=[]))
def test_password_changed(self):
self.assertIsNone(password_changed('password'))
def test_password_changed_with_custom_validator(self):
class Validator:
def password_changed(self, password, user):
self.password = password
self.user = user
user = object()
validator = Validator()
password_changed('password', user=user, password_validators=(validator,))
self.assertIs(validator.user, user)
self.assertEqual(validator.password, 'password')
def test_password_validators_help_texts(self):
help_texts = password_validators_help_texts()
self.assertEqual(len(help_texts), 2)
self.assertIn('12 characters', help_texts[1])
self.assertEqual(password_validators_help_texts(password_validators=[]), [])
def test_password_validators_help_text_html(self):
help_text = password_validators_help_text_html()
self.assertEqual(help_text.count('<li>'), 2)
self.assertIn('12 characters', help_text)
def test_password_validators_help_text_html_escaping(self):
class AmpersandValidator:
def get_help_text(self):
return 'Must contain &'
help_text = password_validators_help_text_html([AmpersandValidator()])
self.assertEqual(help_text, '<ul><li>Must contain &</li></ul>')
# help_text is marked safe and therefore unchanged by conditional_escape().
self.assertEqual(help_text, conditional_escape(help_text))
@override_settings(AUTH_PASSWORD_VALIDATORS=[])
def test_empty_password_validator_help_text_html(self):
self.assertEqual(password_validators_help_text_html(), '')
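# For context (illustrative sketch, not part of the original tests):
# application code typically calls validate_password() and catches
# ValidationError, e.g.
#
#   from django.contrib.auth.password_validation import validate_password
#   from django.core.exceptions import ValidationError
#
#   try:
#       validate_password('candidate-password', user=some_user)  # some_user: any user instance
#   except ValidationError as exc:
#       messages = exc.messages  # one message per failing validator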
class MinimumLengthValidatorTest(SimpleTestCase):
def test_validate(self):
expected_error = "This password is too short. It must contain at least %d characters."
self.assertIsNone(MinimumLengthValidator().validate('12345678'))
self.assertIsNone(MinimumLengthValidator(min_length=3).validate('123'))
with self.assertRaises(ValidationError) as cm:
MinimumLengthValidator().validate('1234567')
self.assertEqual(cm.exception.messages, [expected_error % 8])
self.assertEqual(cm.exception.error_list[0].code, 'password_too_short')
with self.assertRaises(ValidationError) as cm:
MinimumLengthValidator(min_length=3).validate('12')
self.assertEqual(cm.exception.messages, [expected_error % 3])
def test_help_text(self):
self.assertEqual(
MinimumLengthValidator().get_help_text(),
"Your password must contain at least 8 characters."
)
class UserAttributeSimilarityValidatorTest(TestCase):
def test_validate(self):
user = User.objects.create_user(
username='testclient', password='password', email='[email protected]',
first_name='Test', last_name='Client',
)
expected_error = "The password is too similar to the %s."
self.assertIsNone(UserAttributeSimilarityValidator().validate('testclient'))
with self.assertRaises(ValidationError) as cm:
UserAttributeSimilarityValidator().validate('testclient', user=user),
self.assertEqual(cm.exception.messages, [expected_error % "username"])
self.assertEqual(cm.exception.error_list[0].code, 'password_too_similar')
with self.assertRaises(ValidationError) as cm:
UserAttributeSimilarityValidator().validate('example.com', user=user),
self.assertEqual(cm.exception.messages, [expected_error % "email address"])
with self.assertRaises(ValidationError) as cm:
UserAttributeSimilarityValidator(
user_attributes=['first_name'],
max_similarity=0.3,
).validate('testclient', user=user)
self.assertEqual(cm.exception.messages, [expected_error % "first name"])
# max_similarity=1 doesn't allow passwords that are identical to the
# attribute's value.
with self.assertRaises(ValidationError) as cm:
UserAttributeSimilarityValidator(
user_attributes=['first_name'],
max_similarity=1,
).validate(user.first_name, user=user)
self.assertEqual(cm.exception.messages, [expected_error % "first name"])
# max_similarity=0 rejects all passwords.
with self.assertRaises(ValidationError) as cm:
UserAttributeSimilarityValidator(
user_attributes=['first_name'],
max_similarity=0,
).validate('XXX', user=user)
self.assertEqual(cm.exception.messages, [expected_error % "first name"])
# Passes validation.
self.assertIsNone(
UserAttributeSimilarityValidator(user_attributes=['first_name']).validate('testclient', user=user)
)
@isolate_apps('auth_tests')
def test_validate_property(self):
class TestUser(models.Model):
pass
@property
def username(self):
return 'foobar'
with self.assertRaises(ValidationError) as cm:
UserAttributeSimilarityValidator().validate('foobar', user=TestUser()),
self.assertEqual(cm.exception.messages, ['The password is too similar to the username.'])
def test_help_text(self):
self.assertEqual(
UserAttributeSimilarityValidator().get_help_text(),
"Your password can't be too similar to your other personal information."
)
class CommonPasswordValidatorTest(SimpleTestCase):
def test_validate(self):
expected_error = "This password is too common."
self.assertIsNone(CommonPasswordValidator().validate('a-safe-password'))
with self.assertRaises(ValidationError) as cm:
CommonPasswordValidator().validate('godzilla')
self.assertEqual(cm.exception.messages, [expected_error])
def test_validate_custom_list(self):
path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'common-passwords-custom.txt')
validator = CommonPasswordValidator(password_list_path=path)
expected_error = "This password is too common."
self.assertIsNone(validator.validate('a-safe-password'))
with self.assertRaises(ValidationError) as cm:
validator.validate('from-my-custom-list')
self.assertEqual(cm.exception.messages, [expected_error])
self.assertEqual(cm.exception.error_list[0].code, 'password_too_common')
def test_validate_django_supplied_file(self):
validator = CommonPasswordValidator()
for password in validator.passwords:
self.assertEqual(password, password.lower())
def test_help_text(self):
self.assertEqual(
CommonPasswordValidator().get_help_text(),
"Your password can't be a commonly used password."
)
class NumericPasswordValidatorTest(SimpleTestCase):
def test_validate(self):
expected_error = "This password is entirely numeric."
self.assertIsNone(NumericPasswordValidator().validate('a-safe-password'))
with self.assertRaises(ValidationError) as cm:
NumericPasswordValidator().validate('42424242')
self.assertEqual(cm.exception.messages, [expected_error])
self.assertEqual(cm.exception.error_list[0].code, 'password_entirely_numeric')
def test_help_text(self):
self.assertEqual(
NumericPasswordValidator().get_help_text(),
"Your password can't be entirely numeric."
)
class UsernameValidatorsTests(SimpleTestCase):
def test_unicode_validator(self):
valid_usernames = ['joe', 'René', 'ᴮᴵᴳᴮᴵᴿᴰ', 'أحمد']
invalid_usernames = [
"o'connell", "عبد ال",
"zerowidth\u200Bspace", "nonbreaking\u00A0space",
"en\u2013dash",
]
v = validators.UnicodeUsernameValidator()
for valid in valid_usernames:
with self.subTest(valid=valid):
v(valid)
for invalid in invalid_usernames:
with self.subTest(invalid=invalid):
with self.assertRaises(ValidationError):
v(invalid)
def test_ascii_validator(self):
valid_usernames = ['glenn', 'GLEnN', 'jean-marc']
invalid_usernames = ["o'connell", 'Éric', 'jean marc', "أحمد"]
v = validators.ASCIIUsernameValidator()
for valid in valid_usernames:
with self.subTest(valid=valid):
v(valid)
for invalid in invalid_usernames:
with self.subTest(invalid=invalid):
with self.assertRaises(ValidationError):
v(invalid)
|
81bf7e92d89cbe05a0e9e3694cebc7f60d2f852e232e188f96b9304cbfb3a2dc | import datetime
import re
from unittest import mock
from django import forms
from django.contrib.auth.forms import (
AdminPasswordChangeForm, AuthenticationForm, PasswordChangeForm,
PasswordResetForm, ReadOnlyPasswordHashField, ReadOnlyPasswordHashWidget,
SetPasswordForm, UserChangeForm, UserCreationForm,
)
from django.contrib.auth.models import User
from django.contrib.auth.signals import user_login_failed
from django.contrib.sites.models import Site
from django.core import mail
from django.core.mail import EmailMultiAlternatives
from django.forms.fields import CharField, Field, IntegerField
from django.test import SimpleTestCase, TestCase, override_settings
from django.utils import translation
from django.utils.text import capfirst
from django.utils.translation import gettext as _
from .models.custom_user import (
CustomUser, CustomUserWithoutIsActiveField, ExtensionUser,
)
from .models.with_custom_email_field import CustomEmailField
from .models.with_integer_username import IntegerUsernameUser
from .settings import AUTH_TEMPLATES
class TestDataMixin:
@classmethod
def setUpTestData(cls):
cls.u1 = User.objects.create_user(username='testclient', password='password', email='[email protected]')
cls.u2 = User.objects.create_user(username='inactive', password='password', is_active=False)
cls.u3 = User.objects.create_user(username='staff', password='password')
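# The following users are created with blank or malformed password hashes;
# presumably they exercise the read-only password hash display in form tests
# that use this mixin.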
cls.u4 = User.objects.create(username='empty_password', password='')
cls.u5 = User.objects.create(username='unmanageable_password', password='$')
cls.u6 = User.objects.create(username='unknown_password', password='foo$bar')
class UserCreationFormTest(TestDataMixin, TestCase):
def test_user_already_exists(self):
data = {
'username': 'testclient',
'password1': 'test123',
'password2': 'test123',
}
form = UserCreationForm(data)
self.assertFalse(form.is_valid())
self.assertEqual(form["username"].errors,
[str(User._meta.get_field('username').error_messages['unique'])])
def test_invalid_data(self):
data = {
'username': 'jsmith!',
'password1': 'test123',
'password2': 'test123',
}
form = UserCreationForm(data)
self.assertFalse(form.is_valid())
validator = next(v for v in User._meta.get_field('username').validators if v.code == 'invalid')
self.assertEqual(form["username"].errors, [str(validator.message)])
def test_password_verification(self):
# The verification password is incorrect.
data = {
'username': 'jsmith',
'password1': 'test123',
'password2': 'test',
}
form = UserCreationForm(data)
self.assertFalse(form.is_valid())
self.assertEqual(form["password2"].errors,
[str(form.error_messages['password_mismatch'])])
def test_both_passwords(self):
# One (or both) passwords weren't given
data = {'username': 'jsmith'}
form = UserCreationForm(data)
required_error = [str(Field.default_error_messages['required'])]
self.assertFalse(form.is_valid())
self.assertEqual(form['password1'].errors, required_error)
self.assertEqual(form['password2'].errors, required_error)
data['password2'] = 'test123'
form = UserCreationForm(data)
self.assertFalse(form.is_valid())
self.assertEqual(form['password1'].errors, required_error)
self.assertEqual(form['password2'].errors, [])
@mock.patch('django.contrib.auth.password_validation.password_changed')
def test_success(self, password_changed):
# The success case.
data = {
'username': '[email protected]',
'password1': 'test123',
'password2': 'test123',
}
form = UserCreationForm(data)
self.assertTrue(form.is_valid())
form.save(commit=False)
self.assertEqual(password_changed.call_count, 0)
u = form.save()
self.assertEqual(password_changed.call_count, 1)
self.assertEqual(repr(u), '<User: [email protected]>')
def test_unicode_username(self):
data = {
'username': '宝',
'password1': 'test123',
'password2': 'test123',
}
form = UserCreationForm(data)
self.assertTrue(form.is_valid())
u = form.save()
self.assertEqual(u.username, '宝')
def test_normalize_username(self):
# The normalization happens in AbstractBaseUser.clean() and ModelForm
# validation calls Model.clean().
ohm_username = 'testΩ' # U+2126 OHM SIGN
data = {
'username': ohm_username,
'password1': 'pwd2',
'password2': 'pwd2',
}
form = UserCreationForm(data)
self.assertTrue(form.is_valid())
user = form.save()
self.assertNotEqual(user.username, ohm_username)
self.assertEqual(user.username, 'testΩ') # U+03A9 GREEK CAPITAL LETTER OMEGA
def test_duplicate_normalized_unicode(self):
"""
To prevent nearly identical usernames (visually identical but differing
only in their Unicode code points), Unicode NFKC normalization should
make them appear equal to Django.
"""
omega_username = 'iamtheΩ' # U+03A9 GREEK CAPITAL LETTER OMEGA
ohm_username = 'iamtheΩ' # U+2126 OHM SIGN
self.assertNotEqual(omega_username, ohm_username)
User.objects.create_user(username=omega_username, password='pwd')
data = {
'username': ohm_username,
'password1': 'pwd2',
'password2': 'pwd2',
}
form = UserCreationForm(data)
self.assertFalse(form.is_valid())
self.assertEqual(
form.errors['username'], ["A user with that username already exists."]
)
@override_settings(AUTH_PASSWORD_VALIDATORS=[
{'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'},
{'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', 'OPTIONS': {
'min_length': 12,
}},
])
def test_validates_password(self):
data = {
'username': 'testclient',
'password1': 'testclient',
'password2': 'testclient',
}
form = UserCreationForm(data)
self.assertFalse(form.is_valid())
self.assertEqual(len(form['password2'].errors), 2)
self.assertIn('The password is too similar to the username.', form['password2'].errors)
self.assertIn(
'This password is too short. It must contain at least 12 characters.',
form['password2'].errors
)
def test_custom_form(self):
class CustomUserCreationForm(UserCreationForm):
class Meta(UserCreationForm.Meta):
model = ExtensionUser
fields = UserCreationForm.Meta.fields + ('date_of_birth',)
data = {
'username': 'testclient',
'password1': 'testclient',
'password2': 'testclient',
'date_of_birth': '1988-02-24',
}
form = CustomUserCreationForm(data)
self.assertTrue(form.is_valid())
def test_custom_form_with_different_username_field(self):
class CustomUserCreationForm(UserCreationForm):
class Meta(UserCreationForm.Meta):
model = CustomUser
fields = ('email', 'date_of_birth')
data = {
'email': '[email protected]',
'password1': 'testclient',
'password2': 'testclient',
'date_of_birth': '1988-02-24',
}
form = CustomUserCreationForm(data)
self.assertTrue(form.is_valid())
def test_custom_form_hidden_username_field(self):
class CustomUserCreationForm(UserCreationForm):
class Meta(UserCreationForm.Meta):
model = CustomUserWithoutIsActiveField
fields = ('email',) # without USERNAME_FIELD
data = {
'email': '[email protected]',
'password1': 'testclient',
'password2': 'testclient',
}
form = CustomUserCreationForm(data)
self.assertTrue(form.is_valid())
def test_password_whitespace_not_stripped(self):
data = {
'username': 'testuser',
'password1': ' testpassword ',
'password2': ' testpassword ',
}
form = UserCreationForm(data)
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data['password1'], data['password1'])
self.assertEqual(form.cleaned_data['password2'], data['password2'])
@override_settings(AUTH_PASSWORD_VALIDATORS=[
{'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'},
])
def test_password_help_text(self):
form = UserCreationForm()
self.assertEqual(
form.fields['password1'].help_text,
'<ul><li>Your password can&#39;t be too similar to your other personal information.</li></ul>'
)
@override_settings(AUTH_PASSWORD_VALIDATORS=[
{'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'},
])
def test_user_create_form_validates_password_with_all_data(self):
"""UserCreationForm password validation uses all of the form's data."""
class CustomUserCreationForm(UserCreationForm):
class Meta(UserCreationForm.Meta):
model = User
fields = ('username', 'email', 'first_name', 'last_name')
form = CustomUserCreationForm({
'username': 'testuser',
'password1': 'testpassword',
'password2': 'testpassword',
'first_name': 'testpassword',
'last_name': 'lastname',
})
self.assertFalse(form.is_valid())
self.assertEqual(
form.errors['password2'],
['The password is too similar to the first name.'],
)
# To verify that the login form rejects inactive users, use an authentication
# backend that allows them.
@override_settings(AUTHENTICATION_BACKENDS=['django.contrib.auth.backends.AllowAllUsersModelBackend'])
class AuthenticationFormTest(TestDataMixin, TestCase):
def test_invalid_username(self):
# The user submits an invalid username.
data = {
'username': 'jsmith_does_not_exist',
'password': 'test123',
}
form = AuthenticationForm(None, data)
self.assertFalse(form.is_valid())
self.assertEqual(
form.non_field_errors(), [
form.error_messages['invalid_login'] % {
'username': User._meta.get_field('username').verbose_name
}
]
)
def test_inactive_user(self):
# The user is inactive.
data = {
'username': 'inactive',
'password': 'password',
}
form = AuthenticationForm(None, data)
self.assertFalse(form.is_valid())
self.assertEqual(form.non_field_errors(), [str(form.error_messages['inactive'])])
# Use an authentication backend that rejects inactive users.
@override_settings(AUTHENTICATION_BACKENDS=['django.contrib.auth.backends.ModelBackend'])
def test_inactive_user_incorrect_password(self):
"""An invalid login doesn't leak the inactive status of a user."""
data = {
'username': 'inactive',
'password': 'incorrect',
}
form = AuthenticationForm(None, data)
self.assertFalse(form.is_valid())
self.assertEqual(
form.non_field_errors(), [
form.error_messages['invalid_login'] % {
'username': User._meta.get_field('username').verbose_name
}
]
)
def test_login_failed(self):
signal_calls = []
def signal_handler(**kwargs):
signal_calls.append(kwargs)
user_login_failed.connect(signal_handler)
fake_request = object()
try:
form = AuthenticationForm(fake_request, {
'username': 'testclient',
'password': 'incorrect',
})
self.assertFalse(form.is_valid())
self.assertIs(signal_calls[0]['request'], fake_request)
finally:
user_login_failed.disconnect(signal_handler)
def test_inactive_user_i18n(self):
with self.settings(USE_I18N=True), translation.override('pt-br', deactivate=True):
# The user is inactive.
data = {
'username': 'inactive',
'password': 'password',
}
form = AuthenticationForm(None, data)
self.assertFalse(form.is_valid())
self.assertEqual(form.non_field_errors(), [str(form.error_messages['inactive'])])
# Use an authentication backend that allows inactive users.
@override_settings(AUTHENTICATION_BACKENDS=['django.contrib.auth.backends.AllowAllUsersModelBackend'])
def test_custom_login_allowed_policy(self):
# The user is inactive, but our custom form policy allows them to log in.
data = {
'username': 'inactive',
'password': 'password',
}
class AuthenticationFormWithInactiveUsersOkay(AuthenticationForm):
def confirm_login_allowed(self, user):
pass
form = AuthenticationFormWithInactiveUsersOkay(None, data)
self.assertTrue(form.is_valid())
# If we want to disallow some logins according to custom logic,
# we should raise a django.forms.ValidationError in the form.
class PickyAuthenticationForm(AuthenticationForm):
def confirm_login_allowed(self, user):
if user.username == "inactive":
raise forms.ValidationError("This user is disallowed.")
raise forms.ValidationError("Sorry, nobody's allowed in.")
form = PickyAuthenticationForm(None, data)
self.assertFalse(form.is_valid())
self.assertEqual(form.non_field_errors(), ['This user is disallowed.'])
data = {
'username': 'testclient',
'password': 'password',
}
form = PickyAuthenticationForm(None, data)
self.assertFalse(form.is_valid())
self.assertEqual(form.non_field_errors(), ["Sorry, nobody's allowed in."])
def test_success(self):
# The success case
data = {
'username': 'testclient',
'password': 'password',
}
form = AuthenticationForm(None, data)
self.assertTrue(form.is_valid())
self.assertEqual(form.non_field_errors(), [])
def test_unicode_username(self):
User.objects.create_user(username='Σαρα', password='pwd')
data = {
'username': 'Σαρα',
'password': 'pwd',
}
form = AuthenticationForm(None, data)
self.assertTrue(form.is_valid())
self.assertEqual(form.non_field_errors(), [])
@override_settings(AUTH_USER_MODEL='auth_tests.CustomEmailField')
def test_username_field_max_length_matches_user_model(self):
self.assertEqual(CustomEmailField._meta.get_field('username').max_length, 255)
data = {
'username': 'u' * 255,
'password': 'pwd',
'email': '[email protected]',
}
CustomEmailField.objects.create_user(**data)
form = AuthenticationForm(None, data)
self.assertEqual(form.fields['username'].max_length, 255)
self.assertEqual(form.errors, {})
@override_settings(AUTH_USER_MODEL='auth_tests.IntegerUsernameUser')
def test_username_field_max_length_defaults_to_254(self):
self.assertIsNone(IntegerUsernameUser._meta.get_field('username').max_length)
data = {
'username': '0123456',
'password': 'password',
}
IntegerUsernameUser.objects.create_user(**data)
form = AuthenticationForm(None, data)
self.assertEqual(form.fields['username'].max_length, 254)
self.assertEqual(form.errors, {})
def test_username_field_label(self):
class CustomAuthenticationForm(AuthenticationForm):
username = CharField(label="Name", max_length=75)
form = CustomAuthenticationForm()
self.assertEqual(form['username'].label, "Name")
def test_username_field_label_not_set(self):
class CustomAuthenticationForm(AuthenticationForm):
username = CharField()
form = CustomAuthenticationForm()
username_field = User._meta.get_field(User.USERNAME_FIELD)
self.assertEqual(form.fields['username'].label, capfirst(username_field.verbose_name))
def test_username_field_label_empty_string(self):
class CustomAuthenticationForm(AuthenticationForm):
username = CharField(label='')
form = CustomAuthenticationForm()
self.assertEqual(form.fields['username'].label, "")
def test_password_whitespace_not_stripped(self):
data = {
'username': 'testuser',
'password': ' pass ',
}
form = AuthenticationForm(None, data)
        form.is_valid()  # Not necessary to have valid credentials for the test.
self.assertEqual(form.cleaned_data['password'], data['password'])
@override_settings(AUTH_USER_MODEL='auth_tests.IntegerUsernameUser')
def test_integer_username(self):
class CustomAuthenticationForm(AuthenticationForm):
username = IntegerField()
user = IntegerUsernameUser.objects.create_user(username=0, password='pwd')
data = {
'username': 0,
'password': 'pwd',
}
form = CustomAuthenticationForm(None, data)
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data['username'], data['username'])
self.assertEqual(form.cleaned_data['password'], data['password'])
self.assertEqual(form.errors, {})
self.assertEqual(form.user_cache, user)
def test_get_invalid_login_error(self):
error = AuthenticationForm().get_invalid_login_error()
self.assertIsInstance(error, forms.ValidationError)
self.assertEqual(
error.message,
'Please enter a correct %(username)s and password. Note that both '
'fields may be case-sensitive.',
)
self.assertEqual(error.code, 'invalid_login')
self.assertEqual(error.params, {'username': 'username'})
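# Illustrative sketch (not part of the original suite): the tests above exercise
# AuthenticationForm's confirm_login_allowed() and get_invalid_login_error()
# hooks. A minimal custom subclass combining both hooks could look like the
# following; the domain policy and the error code are hypothetical, and the
# imports are repeated here only so the sketch is self-contained.
from django import forms as _forms_sketch
from django.contrib.auth.forms import AuthenticationForm as _AuthenticationFormSketch
class ExampleDomainAuthenticationForm(_AuthenticationFormSketch):
    def confirm_login_allowed(self, user):
        # Assumed policy: only accounts on an example.com address may log in.
        if not user.email.endswith('@example.com'):
            raise _forms_sketch.ValidationError('Only example.com accounts may log in.', code='domain_not_allowed')
    def get_invalid_login_error(self):
        # Reuse the stock invalid_login error; extra params could be attached here.
        return super().get_invalid_login_error()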
class SetPasswordFormTest(TestDataMixin, TestCase):
def test_password_verification(self):
# The two new passwords do not match.
user = User.objects.get(username='testclient')
data = {
'new_password1': 'abc123',
'new_password2': 'abc',
}
form = SetPasswordForm(user, data)
self.assertFalse(form.is_valid())
self.assertEqual(
form["new_password2"].errors,
[str(form.error_messages['password_mismatch'])]
)
@mock.patch('django.contrib.auth.password_validation.password_changed')
def test_success(self, password_changed):
user = User.objects.get(username='testclient')
data = {
'new_password1': 'abc123',
'new_password2': 'abc123',
}
form = SetPasswordForm(user, data)
self.assertTrue(form.is_valid())
form.save(commit=False)
self.assertEqual(password_changed.call_count, 0)
form.save()
self.assertEqual(password_changed.call_count, 1)
@override_settings(AUTH_PASSWORD_VALIDATORS=[
{'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'},
{'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', 'OPTIONS': {
'min_length': 12,
}},
])
def test_validates_password(self):
user = User.objects.get(username='testclient')
data = {
'new_password1': 'testclient',
'new_password2': 'testclient',
}
form = SetPasswordForm(user, data)
self.assertFalse(form.is_valid())
self.assertEqual(len(form["new_password2"].errors), 2)
self.assertIn('The password is too similar to the username.', form["new_password2"].errors)
self.assertIn(
'This password is too short. It must contain at least 12 characters.',
form["new_password2"].errors
)
def test_password_whitespace_not_stripped(self):
user = User.objects.get(username='testclient')
data = {
'new_password1': ' password ',
'new_password2': ' password ',
}
form = SetPasswordForm(user, data)
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data['new_password1'], data['new_password1'])
self.assertEqual(form.cleaned_data['new_password2'], data['new_password2'])
@override_settings(AUTH_PASSWORD_VALIDATORS=[
{'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'},
{'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', 'OPTIONS': {
'min_length': 12,
}},
])
def test_help_text_translation(self):
french_help_texts = [
'Votre mot de passe ne peut pas trop ressembler à vos autres informations personnelles.',
'Votre mot de passe doit contenir au minimum 12 caractères.',
]
form = SetPasswordForm(self.u1)
with translation.override('fr'):
html = form.as_p()
for french_text in french_help_texts:
self.assertIn(french_text, html)
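# Illustrative sketch (not part of the original suite): SetPasswordForm runs the
# candidate password through the configured AUTH_PASSWORD_VALIDATORS. The same
# pipeline can be invoked directly through the password_validation module; the
# helper name below is made up for illustration.
from django.contrib.auth import password_validation
def _example_run_validators(candidate, user=None):
    # Raises ValidationError collecting every failed validator's message.
    password_validation.validate_password(candidate, user=user)
    # Help texts for the active validators, as rendered in the form's help text.
    return password_validation.password_validators_help_texts()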
class PasswordChangeFormTest(TestDataMixin, TestCase):
def test_incorrect_password(self):
user = User.objects.get(username='testclient')
data = {
'old_password': 'test',
'new_password1': 'abc123',
'new_password2': 'abc123',
}
form = PasswordChangeForm(user, data)
self.assertFalse(form.is_valid())
self.assertEqual(form["old_password"].errors, [str(form.error_messages['password_incorrect'])])
def test_password_verification(self):
# The two new passwords do not match.
user = User.objects.get(username='testclient')
data = {
'old_password': 'password',
'new_password1': 'abc123',
'new_password2': 'abc',
}
form = PasswordChangeForm(user, data)
self.assertFalse(form.is_valid())
self.assertEqual(form["new_password2"].errors, [str(form.error_messages['password_mismatch'])])
@mock.patch('django.contrib.auth.password_validation.password_changed')
def test_success(self, password_changed):
# The success case.
user = User.objects.get(username='testclient')
data = {
'old_password': 'password',
'new_password1': 'abc123',
'new_password2': 'abc123',
}
form = PasswordChangeForm(user, data)
self.assertTrue(form.is_valid())
form.save(commit=False)
self.assertEqual(password_changed.call_count, 0)
form.save()
self.assertEqual(password_changed.call_count, 1)
def test_field_order(self):
# Regression test - check the order of fields:
user = User.objects.get(username='testclient')
self.assertEqual(list(PasswordChangeForm(user, {}).fields), ['old_password', 'new_password1', 'new_password2'])
def test_password_whitespace_not_stripped(self):
user = User.objects.get(username='testclient')
user.set_password(' oldpassword ')
data = {
'old_password': ' oldpassword ',
'new_password1': ' pass ',
'new_password2': ' pass ',
}
form = PasswordChangeForm(user, data)
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data['old_password'], data['old_password'])
self.assertEqual(form.cleaned_data['new_password1'], data['new_password1'])
self.assertEqual(form.cleaned_data['new_password2'], data['new_password2'])
class UserChangeFormTest(TestDataMixin, TestCase):
def test_username_validity(self):
user = User.objects.get(username='testclient')
data = {'username': 'not valid'}
form = UserChangeForm(data, instance=user)
self.assertFalse(form.is_valid())
validator = next(v for v in User._meta.get_field('username').validators if v.code == 'invalid')
self.assertEqual(form["username"].errors, [str(validator.message)])
def test_bug_14242(self):
        # A regression test, introduced by adding an optimization for the
# UserChangeForm.
class MyUserForm(UserChangeForm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.fields['groups'].help_text = 'These groups give users different permissions'
class Meta(UserChangeForm.Meta):
fields = ('groups',)
# Just check we can create it
MyUserForm({})
def test_unusable_password(self):
user = User.objects.get(username='empty_password')
user.set_unusable_password()
user.save()
form = UserChangeForm(instance=user)
self.assertIn(_("No password set."), form.as_table())
def test_bug_17944_empty_password(self):
user = User.objects.get(username='empty_password')
form = UserChangeForm(instance=user)
self.assertIn(_("No password set."), form.as_table())
def test_bug_17944_unmanageable_password(self):
user = User.objects.get(username='unmanageable_password')
form = UserChangeForm(instance=user)
self.assertIn(_("Invalid password format or unknown hashing algorithm."), form.as_table())
def test_bug_17944_unknown_password_algorithm(self):
user = User.objects.get(username='unknown_password')
form = UserChangeForm(instance=user)
self.assertIn(_("Invalid password format or unknown hashing algorithm."), form.as_table())
def test_bug_19133(self):
"The change form does not return the password value"
# Use the form to construct the POST data
user = User.objects.get(username='testclient')
form_for_data = UserChangeForm(instance=user)
post_data = form_for_data.initial
# The password field should be readonly, so anything
# posted here should be ignored; the form will be
# valid, and give back the 'initial' value for the
# password field.
post_data['password'] = 'new password'
form = UserChangeForm(instance=user, data=post_data)
self.assertTrue(form.is_valid())
# original hashed password contains $
self.assertIn('$', form.cleaned_data['password'])
def test_bug_19349_bound_password_field(self):
user = User.objects.get(username='testclient')
form = UserChangeForm(data={}, instance=user)
# When rendering the bound password field,
# ReadOnlyPasswordHashWidget needs the initial
# value to render correctly
self.assertEqual(form.initial['password'], form['password'].value())
def test_custom_form(self):
class CustomUserChangeForm(UserChangeForm):
class Meta(UserChangeForm.Meta):
model = ExtensionUser
fields = ('username', 'password', 'date_of_birth',)
user = User.objects.get(username='testclient')
data = {
'username': 'testclient',
'password': 'testclient',
'date_of_birth': '1998-02-24',
}
form = CustomUserChangeForm(data, instance=user)
self.assertTrue(form.is_valid())
form.save()
self.assertEqual(form.cleaned_data['username'], 'testclient')
self.assertEqual(form.cleaned_data['date_of_birth'], datetime.date(1998, 2, 24))
def test_password_excluded(self):
class UserChangeFormWithoutPassword(UserChangeForm):
password = None
class Meta:
model = User
exclude = ['password']
form = UserChangeFormWithoutPassword()
self.assertNotIn('password', form.fields)
@override_settings(TEMPLATES=AUTH_TEMPLATES)
class PasswordResetFormTest(TestDataMixin, TestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
# This cleanup is necessary because contrib.sites cache
# makes tests interfere with each other, see #11505
Site.objects.clear_cache()
def create_dummy_user(self):
"""
Create a user and return a tuple (user_object, username, email).
"""
username = 'jsmith'
email = '[email protected]'
user = User.objects.create_user(username, email, 'test123')
return (user, username, email)
def test_invalid_email(self):
data = {'email': 'not valid'}
form = PasswordResetForm(data)
self.assertFalse(form.is_valid())
self.assertEqual(form['email'].errors, [_('Enter a valid email address.')])
def test_nonexistent_email(self):
"""
Test nonexistent email address. This should not fail because it would
expose information about registered users.
"""
data = {'email': '[email protected]'}
form = PasswordResetForm(data)
self.assertTrue(form.is_valid())
self.assertEqual(len(mail.outbox), 0)
def test_cleaned_data(self):
(user, username, email) = self.create_dummy_user()
data = {'email': email}
form = PasswordResetForm(data)
self.assertTrue(form.is_valid())
form.save(domain_override='example.com')
self.assertEqual(form.cleaned_data['email'], email)
self.assertEqual(len(mail.outbox), 1)
def test_custom_email_subject(self):
data = {'email': '[email protected]'}
form = PasswordResetForm(data)
self.assertTrue(form.is_valid())
# Since we're not providing a request object, we must provide a
# domain_override to prevent the save operation from failing in the
# potential case where contrib.sites is not installed. Refs #16412.
form.save(domain_override='example.com')
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, 'Custom password reset on example.com')
def test_custom_email_constructor(self):
data = {'email': '[email protected]'}
class CustomEmailPasswordResetForm(PasswordResetForm):
def send_mail(self, subject_template_name, email_template_name,
context, from_email, to_email,
html_email_template_name=None):
EmailMultiAlternatives(
"Forgot your password?",
"Sorry to hear you forgot your password.",
None, [to_email],
['[email protected]'],
headers={'Reply-To': '[email protected]'},
alternatives=[
("Really sorry to hear you forgot your password.", "text/html")
],
).send()
form = CustomEmailPasswordResetForm(data)
self.assertTrue(form.is_valid())
# Since we're not providing a request object, we must provide a
# domain_override to prevent the save operation from failing in the
# potential case where contrib.sites is not installed. Refs #16412.
form.save(domain_override='example.com')
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, 'Forgot your password?')
self.assertEqual(mail.outbox[0].bcc, ['[email protected]'])
self.assertEqual(mail.outbox[0].content_subtype, "plain")
def test_preserve_username_case(self):
"""
Preserve the case of the user name (before the @ in the email address)
when creating a user (#5605).
"""
user = User.objects.create_user('forms_test2', '[email protected]', 'test')
self.assertEqual(user.email, '[email protected]')
user = User.objects.create_user('forms_test3', 'tesT', 'test')
self.assertEqual(user.email, 'tesT')
def test_inactive_user(self):
"""
Inactive user cannot receive password reset email.
"""
(user, username, email) = self.create_dummy_user()
user.is_active = False
user.save()
form = PasswordResetForm({'email': email})
self.assertTrue(form.is_valid())
form.save()
self.assertEqual(len(mail.outbox), 0)
def test_unusable_password(self):
user = User.objects.create_user('testuser', '[email protected]', 'test')
data = {"email": "[email protected]"}
form = PasswordResetForm(data)
self.assertTrue(form.is_valid())
user.set_unusable_password()
user.save()
form = PasswordResetForm(data)
# The form itself is valid, but no email is sent
self.assertTrue(form.is_valid())
form.save()
self.assertEqual(len(mail.outbox), 0)
def test_save_plaintext_email(self):
"""
Test the PasswordResetForm.save() method with no html_email_template_name
parameter passed in.
Test to ensure original behavior is unchanged after the parameter was added.
"""
(user, username, email) = self.create_dummy_user()
form = PasswordResetForm({"email": email})
self.assertTrue(form.is_valid())
form.save()
self.assertEqual(len(mail.outbox), 1)
message = mail.outbox[0].message()
self.assertFalse(message.is_multipart())
self.assertEqual(message.get_content_type(), 'text/plain')
self.assertEqual(message.get('subject'), 'Custom password reset on example.com')
self.assertEqual(len(mail.outbox[0].alternatives), 0)
self.assertEqual(message.get_all('to'), [email])
self.assertTrue(re.match(r'^http://example.com/reset/[\w+/-]', message.get_payload()))
def test_save_html_email_template_name(self):
"""
        Test the PasswordResetForm.save() method with html_email_template_name
parameter specified.
Test to ensure that a multipart email is sent with both text/plain
and text/html parts.
"""
(user, username, email) = self.create_dummy_user()
form = PasswordResetForm({"email": email})
self.assertTrue(form.is_valid())
form.save(html_email_template_name='registration/html_password_reset_email.html')
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(len(mail.outbox[0].alternatives), 1)
message = mail.outbox[0].message()
self.assertEqual(message.get('subject'), 'Custom password reset on example.com')
self.assertEqual(len(message.get_payload()), 2)
self.assertTrue(message.is_multipart())
self.assertEqual(message.get_payload(0).get_content_type(), 'text/plain')
self.assertEqual(message.get_payload(1).get_content_type(), 'text/html')
self.assertEqual(message.get_all('to'), [email])
self.assertTrue(re.match(r'^http://example.com/reset/[\w/-]+', message.get_payload(0).get_payload()))
self.assertTrue(re.match(
r'^<html><a href="http://example.com/reset/[\w/-]+/">Link</a></html>$',
message.get_payload(1).get_payload()
))
@override_settings(AUTH_USER_MODEL='auth_tests.CustomEmailField')
def test_custom_email_field(self):
email = '[email protected]'
CustomEmailField.objects.create_user('test name', 'test password', email)
form = PasswordResetForm({'email': email})
self.assertTrue(form.is_valid())
form.save()
self.assertEqual(form.cleaned_data['email'], email)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].to, [email])
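# Illustrative sketch (not part of the original suite): the reset links matched
# by the assertions above embed a per-user token from default_token_generator;
# PasswordResetForm.save() builds the equivalent values when rendering the
# email. The helper name below is made up for illustration.
from django.contrib.auth.tokens import default_token_generator
def _example_reset_token_roundtrip(user):
    token = default_token_generator.make_token(user)
    # check_token() returns True while the token is still valid for the user.
    return default_token_generator.check_token(user, token)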
class ReadOnlyPasswordHashTest(SimpleTestCase):
def test_bug_19349_render_with_none_value(self):
# Rendering the widget with value set to None
# mustn't raise an exception.
widget = ReadOnlyPasswordHashWidget()
html = widget.render(name='password', value=None, attrs={})
self.assertIn(_("No password set."), html)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.PBKDF2PasswordHasher'])
def test_render(self):
widget = ReadOnlyPasswordHashWidget()
value = 'pbkdf2_sha256$100000$a6Pucb1qSFcD$WmCkn9Hqidj48NVe5x0FEM6A9YiOqQcl/83m2Z5udm0='
self.assertHTMLEqual(
widget.render('name', value, {'id': 'id_password'}),
"""
<div id="id_password">
<strong>algorithm</strong>: pbkdf2_sha256
<strong>iterations</strong>: 100000
<strong>salt</strong>: a6Pucb******
<strong>hash</strong>: WmCkn9**************************************
</div>
"""
)
def test_readonly_field_has_changed(self):
field = ReadOnlyPasswordHashField()
self.assertFalse(field.has_changed('aaa', 'bbb'))
class AdminPasswordChangeFormTest(TestDataMixin, TestCase):
@mock.patch('django.contrib.auth.password_validation.password_changed')
def test_success(self, password_changed):
user = User.objects.get(username='testclient')
data = {
'password1': 'test123',
'password2': 'test123',
}
form = AdminPasswordChangeForm(user, data)
self.assertTrue(form.is_valid())
form.save(commit=False)
self.assertEqual(password_changed.call_count, 0)
form.save()
self.assertEqual(password_changed.call_count, 1)
def test_password_whitespace_not_stripped(self):
user = User.objects.get(username='testclient')
data = {
'password1': ' pass ',
'password2': ' pass ',
}
form = AdminPasswordChangeForm(user, data)
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data['password1'], data['password1'])
self.assertEqual(form.cleaned_data['password2'], data['password2'])
def test_non_matching_passwords(self):
user = User.objects.get(username='testclient')
data = {'password1': 'password1', 'password2': 'password2'}
form = AdminPasswordChangeForm(user, data)
self.assertEqual(form.errors['password2'], [form.error_messages['password_mismatch']])
def test_missing_passwords(self):
user = User.objects.get(username='testclient')
data = {'password1': '', 'password2': ''}
form = AdminPasswordChangeForm(user, data)
required_error = [Field.default_error_messages['required']]
self.assertEqual(form.errors['password1'], required_error)
self.assertEqual(form.errors['password2'], required_error)
def test_one_password(self):
user = User.objects.get(username='testclient')
form1 = AdminPasswordChangeForm(user, {'password1': '', 'password2': 'test'})
required_error = [Field.default_error_messages['required']]
self.assertEqual(form1.errors['password1'], required_error)
self.assertNotIn('password2', form1.errors)
form2 = AdminPasswordChangeForm(user, {'password1': 'test', 'password2': ''})
self.assertEqual(form2.errors['password2'], required_error)
self.assertNotIn('password1', form2.errors)
|
3b97adf7fcf1c675f0760e90ef5970da7f211f053506f6e6aba5d6c825be179d | from django.conf import settings
from django.contrib.auth import models
from django.contrib.auth.decorators import login_required, permission_required
from django.core.exceptions import PermissionDenied
from django.http import HttpResponse
from django.test import TestCase, override_settings
from django.test.client import RequestFactory
from .test_views import AuthViewsTestCase
@override_settings(ROOT_URLCONF='auth_tests.urls')
class LoginRequiredTestCase(AuthViewsTestCase):
"""
    Tests the login_required decorator
"""
def testCallable(self):
"""
login_required is assignable to callable objects.
"""
class CallableView:
def __call__(self, *args, **kwargs):
pass
login_required(CallableView())
def testView(self):
"""
login_required is assignable to normal views.
"""
def normal_view(request):
pass
login_required(normal_view)
def testLoginRequired(self, view_url='/login_required/', login_url=None):
"""
login_required works on a simple view wrapped in a login_required
decorator.
"""
if login_url is None:
login_url = settings.LOGIN_URL
response = self.client.get(view_url)
self.assertEqual(response.status_code, 302)
self.assertIn(login_url, response.url)
self.login()
response = self.client.get(view_url)
self.assertEqual(response.status_code, 200)
def testLoginRequiredNextUrl(self):
"""
login_required works on a simple view wrapped in a login_required
decorator with a login_url set.
"""
self.testLoginRequired(view_url='/login_required_login_url/', login_url='/somewhere/')
class PermissionsRequiredDecoratorTest(TestCase):
"""
Tests for the permission_required decorator
"""
factory = RequestFactory()
@classmethod
def setUpTestData(cls):
cls.user = models.User.objects.create(username='joe', password='qwerty')
# Add permissions auth.add_customuser and auth.change_customuser
perms = models.Permission.objects.filter(codename__in=('add_customuser', 'change_customuser'))
cls.user.user_permissions.add(*perms)
def test_many_permissions_pass(self):
@permission_required(['auth_tests.add_customuser', 'auth_tests.change_customuser'])
def a_view(request):
return HttpResponse()
request = self.factory.get('/rand')
request.user = self.user
resp = a_view(request)
self.assertEqual(resp.status_code, 200)
def test_many_permissions_in_set_pass(self):
@permission_required({'auth_tests.add_customuser', 'auth_tests.change_customuser'})
def a_view(request):
return HttpResponse()
request = self.factory.get('/rand')
request.user = self.user
resp = a_view(request)
self.assertEqual(resp.status_code, 200)
def test_single_permission_pass(self):
@permission_required('auth_tests.add_customuser')
def a_view(request):
return HttpResponse()
request = self.factory.get('/rand')
request.user = self.user
resp = a_view(request)
self.assertEqual(resp.status_code, 200)
def test_permissioned_denied_redirect(self):
@permission_required(['auth_tests.add_customuser', 'auth_tests.change_customuser', 'nonexistent-permission'])
def a_view(request):
return HttpResponse()
request = self.factory.get('/rand')
request.user = self.user
resp = a_view(request)
self.assertEqual(resp.status_code, 302)
    def test_permission_denied_exception_raised(self):
@permission_required([
'auth_tests.add_customuser', 'auth_tests.change_customuser', 'nonexistent-permission'
], raise_exception=True)
def a_view(request):
return HttpResponse()
request = self.factory.get('/rand')
request.user = self.user
with self.assertRaises(PermissionDenied):
a_view(request)
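# Illustrative sketch (not part of the original tests): both decorators also
# accept a custom login_url for the redirect case, and permission_required can
# raise PermissionDenied instead of redirecting. The view bodies and URL below
# are placeholders.
@login_required(login_url='/accounts/login/')
def example_profile_view(request):
    return HttpResponse('profile')
@permission_required('auth_tests.change_customuser', raise_exception=True)
def example_restricted_view(request):
    return HttpResponse('restricted')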
|
ff55c5b83e3c59d7c64cb7e093792e147c5aa86ab4129396c788a5df8bf2388b | import os
import shutil
import sys
import tempfile
import threading
import time
import unittest
from datetime import datetime, timedelta
from io import StringIO
from urllib.request import urlopen
from django.core.cache import cache
from django.core.exceptions import SuspiciousFileOperation
from django.core.files.base import ContentFile, File
from django.core.files.storage import FileSystemStorage, get_storage_class
from django.core.files.uploadedfile import (
InMemoryUploadedFile, SimpleUploadedFile, TemporaryUploadedFile,
)
from django.db.models.fields.files import FileDescriptor
from django.test import (
LiveServerTestCase, SimpleTestCase, TestCase, override_settings,
)
from django.test.utils import requires_tz_support
from django.urls import NoReverseMatch, reverse_lazy
from django.utils import timezone
from .models import Storage, temp_storage, temp_storage_location
FILE_SUFFIX_REGEX = '[A-Za-z0-9]{7}'
class GetStorageClassTests(SimpleTestCase):
def test_get_filesystem_storage(self):
"""
get_storage_class returns the class for a storage backend name/path.
"""
self.assertEqual(
get_storage_class('django.core.files.storage.FileSystemStorage'),
FileSystemStorage)
def test_get_invalid_storage_module(self):
"""
        get_storage_class raises an error if the requested import doesn't exist.
"""
with self.assertRaisesMessage(ImportError, "No module named 'storage'"):
get_storage_class('storage.NonexistentStorage')
def test_get_nonexistent_storage_class(self):
"""
        get_storage_class raises an error if the requested class doesn't exist.
"""
with self.assertRaises(ImportError):
get_storage_class('django.core.files.storage.NonexistentStorage')
def test_get_nonexistent_storage_module(self):
"""
        get_storage_class raises an error if the requested module doesn't exist.
"""
with self.assertRaisesMessage(ImportError, "No module named 'django.core.files.nonexistent_storage'"):
get_storage_class('django.core.files.nonexistent_storage.NonexistentStorage')
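# Illustrative sketch (not part of the original tests): calling
# get_storage_class() with no argument falls back to the dotted path in
# settings.DEFAULT_FILE_STORAGE, which is FileSystemStorage in a stock project.
# The helper name below is made up for illustration.
def _example_default_storage():
    storage_class = get_storage_class()  # uses settings.DEFAULT_FILE_STORAGE
    return storage_class()  # instance bound to the MEDIA_ROOT / MEDIA_URL defaults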
class FileSystemStorageTests(unittest.TestCase):
def test_deconstruction(self):
path, args, kwargs = temp_storage.deconstruct()
self.assertEqual(path, "django.core.files.storage.FileSystemStorage")
self.assertEqual(args, ())
self.assertEqual(kwargs, {'location': temp_storage_location})
kwargs_orig = {
'location': temp_storage_location,
'base_url': 'http://myfiles.example.com/'
}
storage = FileSystemStorage(**kwargs_orig)
path, args, kwargs = storage.deconstruct()
self.assertEqual(kwargs, kwargs_orig)
def test_lazy_base_url_init(self):
"""
FileSystemStorage.__init__() shouldn't evaluate base_url.
"""
storage = FileSystemStorage(base_url=reverse_lazy('app:url'))
with self.assertRaises(NoReverseMatch):
storage.url(storage.base_url)
class FileStorageTests(SimpleTestCase):
storage_class = FileSystemStorage
def setUp(self):
self.temp_dir = tempfile.mkdtemp()
self.storage = self.storage_class(location=self.temp_dir, base_url='/test_media_url/')
# Set up a second temporary directory which is ensured to have a mixed
# case name.
self.temp_dir2 = tempfile.mkdtemp(suffix='aBc')
def tearDown(self):
shutil.rmtree(self.temp_dir)
shutil.rmtree(self.temp_dir2)
def test_empty_location(self):
"""
        An empty location falls back to the current working directory.
"""
storage = self.storage_class(location='')
self.assertEqual(storage.base_location, '')
self.assertEqual(storage.location, os.getcwd())
def test_file_access_options(self):
"""
Standard file access options are available, and work as expected.
"""
self.assertFalse(self.storage.exists('storage_test'))
f = self.storage.open('storage_test', 'w')
f.write('storage contents')
f.close()
self.assertTrue(self.storage.exists('storage_test'))
f = self.storage.open('storage_test', 'r')
self.assertEqual(f.read(), 'storage contents')
f.close()
self.storage.delete('storage_test')
self.assertFalse(self.storage.exists('storage_test'))
def _test_file_time_getter(self, getter):
# Check for correct behavior under both USE_TZ=True and USE_TZ=False.
# The tests are similar since they both set up a situation where the
# system time zone, Django's TIME_ZONE, and UTC are distinct.
self._test_file_time_getter_tz_handling_on(getter)
self._test_file_time_getter_tz_handling_off(getter)
@override_settings(USE_TZ=True, TIME_ZONE='Africa/Algiers')
def _test_file_time_getter_tz_handling_on(self, getter):
# Django's TZ (and hence the system TZ) is set to Africa/Algiers which
# is UTC+1 and has no DST change. We can set the Django TZ to something
# else so that UTC, Django's TIME_ZONE, and the system timezone are all
# different.
now_in_algiers = timezone.make_aware(datetime.now())
with timezone.override(timezone.get_fixed_timezone(-300)):
# At this point the system TZ is +1 and the Django TZ
# is -5. The following will be aware in UTC.
now = timezone.now()
self.assertFalse(self.storage.exists('test.file.tz.on'))
f = ContentFile('custom contents')
f_name = self.storage.save('test.file.tz.on', f)
self.addCleanup(self.storage.delete, f_name)
dt = getter(f_name)
# dt should be aware, in UTC
self.assertTrue(timezone.is_aware(dt))
self.assertEqual(now.tzname(), dt.tzname())
# The three timezones are indeed distinct.
naive_now = datetime.now()
algiers_offset = now_in_algiers.tzinfo.utcoffset(naive_now)
django_offset = timezone.get_current_timezone().utcoffset(naive_now)
utc_offset = timezone.utc.utcoffset(naive_now)
self.assertGreater(algiers_offset, utc_offset)
self.assertLess(django_offset, utc_offset)
# dt and now should be the same effective time.
self.assertLess(abs(dt - now), timedelta(seconds=2))
@override_settings(USE_TZ=False, TIME_ZONE='Africa/Algiers')
def _test_file_time_getter_tz_handling_off(self, getter):
# Django's TZ (and hence the system TZ) is set to Africa/Algiers which
# is UTC+1 and has no DST change. We can set the Django TZ to something
# else so that UTC, Django's TIME_ZONE, and the system timezone are all
# different.
now_in_algiers = timezone.make_aware(datetime.now())
with timezone.override(timezone.get_fixed_timezone(-300)):
# At this point the system TZ is +1 and the Django TZ
# is -5.
self.assertFalse(self.storage.exists('test.file.tz.off'))
f = ContentFile('custom contents')
f_name = self.storage.save('test.file.tz.off', f)
self.addCleanup(self.storage.delete, f_name)
dt = getter(f_name)
# dt should be naive, in system (+1) TZ
self.assertTrue(timezone.is_naive(dt))
# The three timezones are indeed distinct.
naive_now = datetime.now()
algiers_offset = now_in_algiers.tzinfo.utcoffset(naive_now)
django_offset = timezone.get_current_timezone().utcoffset(naive_now)
utc_offset = timezone.utc.utcoffset(naive_now)
self.assertGreater(algiers_offset, utc_offset)
self.assertLess(django_offset, utc_offset)
# dt and naive_now should be the same effective time.
self.assertLess(abs(dt - naive_now), timedelta(seconds=2))
# If we convert dt to an aware object using the Algiers
# timezone then it should be the same effective time to
# now_in_algiers.
_dt = timezone.make_aware(dt, now_in_algiers.tzinfo)
self.assertLess(abs(_dt - now_in_algiers), timedelta(seconds=2))
def test_file_get_accessed_time(self):
"""
        File storage returns a datetime for the last accessed time of
a file.
"""
self.assertFalse(self.storage.exists('test.file'))
f = ContentFile('custom contents')
f_name = self.storage.save('test.file', f)
self.addCleanup(self.storage.delete, f_name)
atime = self.storage.get_accessed_time(f_name)
self.assertEqual(atime, datetime.fromtimestamp(os.path.getatime(self.storage.path(f_name))))
self.assertLess(timezone.now() - self.storage.get_accessed_time(f_name), timedelta(seconds=2))
@requires_tz_support
def test_file_get_accessed_time_timezone(self):
self._test_file_time_getter(self.storage.get_accessed_time)
def test_file_get_created_time(self):
"""
File storage returns a datetime for the creation time of a file.
"""
self.assertFalse(self.storage.exists('test.file'))
f = ContentFile('custom contents')
f_name = self.storage.save('test.file', f)
self.addCleanup(self.storage.delete, f_name)
ctime = self.storage.get_created_time(f_name)
self.assertEqual(ctime, datetime.fromtimestamp(os.path.getctime(self.storage.path(f_name))))
self.assertLess(timezone.now() - self.storage.get_created_time(f_name), timedelta(seconds=2))
@requires_tz_support
def test_file_get_created_time_timezone(self):
self._test_file_time_getter(self.storage.get_created_time)
def test_file_get_modified_time(self):
"""
File storage returns a datetime for the last modified time of a file.
"""
self.assertFalse(self.storage.exists('test.file'))
f = ContentFile('custom contents')
f_name = self.storage.save('test.file', f)
self.addCleanup(self.storage.delete, f_name)
mtime = self.storage.get_modified_time(f_name)
self.assertEqual(mtime, datetime.fromtimestamp(os.path.getmtime(self.storage.path(f_name))))
self.assertLess(timezone.now() - self.storage.get_modified_time(f_name), timedelta(seconds=2))
@requires_tz_support
def test_file_get_modified_time_timezone(self):
self._test_file_time_getter(self.storage.get_modified_time)
def test_file_save_without_name(self):
"""
File storage extracts the filename from the content object if no
name is given explicitly.
"""
self.assertFalse(self.storage.exists('test.file'))
f = ContentFile('custom contents')
f.name = 'test.file'
storage_f_name = self.storage.save(None, f)
self.assertEqual(storage_f_name, f.name)
self.assertTrue(os.path.exists(os.path.join(self.temp_dir, f.name)))
self.storage.delete(storage_f_name)
def test_file_save_with_path(self):
"""
Saving a pathname should create intermediate directories as necessary.
"""
self.assertFalse(self.storage.exists('path/to'))
self.storage.save('path/to/test.file', ContentFile('file saved with path'))
self.assertTrue(self.storage.exists('path/to'))
with self.storage.open('path/to/test.file') as f:
self.assertEqual(f.read(), b'file saved with path')
self.assertTrue(os.path.exists(
os.path.join(self.temp_dir, 'path', 'to', 'test.file')))
self.storage.delete('path/to/test.file')
def test_save_doesnt_close(self):
with TemporaryUploadedFile('test', 'text/plain', 1, 'utf8') as file:
file.write(b'1')
file.seek(0)
self.assertFalse(file.closed)
self.storage.save('path/to/test.file', file)
self.assertFalse(file.closed)
self.assertFalse(file.file.closed)
file = InMemoryUploadedFile(StringIO('1'), '', 'test', 'text/plain', 1, 'utf8')
with file:
self.assertFalse(file.closed)
self.storage.save('path/to/test.file', file)
self.assertFalse(file.closed)
self.assertFalse(file.file.closed)
def test_file_path(self):
"""
File storage returns the full path of a file
"""
self.assertFalse(self.storage.exists('test.file'))
f = ContentFile('custom contents')
f_name = self.storage.save('test.file', f)
self.assertEqual(self.storage.path(f_name), os.path.join(self.temp_dir, f_name))
self.storage.delete(f_name)
def test_file_url(self):
"""
File storage returns a url to access a given file from the Web.
"""
self.assertEqual(self.storage.url('test.file'), self.storage.base_url + 'test.file')
# should encode special chars except ~!*()'
        # like the encodeURIComponent() JavaScript function does
self.assertEqual(
self.storage.url(r"~!*()'@#$%^&*abc`+ =.file"),
"/test_media_url/~!*()'%40%23%24%25%5E%26*abc%60%2B%20%3D.file"
)
self.assertEqual(self.storage.url("ab\0c"), "/test_media_url/ab%00c")
# should translate os path separator(s) to the url path separator
self.assertEqual(self.storage.url("""a/b\\c.file"""), "/test_media_url/a/b/c.file")
# #25905: remove leading slashes from file names to prevent unsafe url output
self.assertEqual(self.storage.url("/evil.com"), "/test_media_url/evil.com")
self.assertEqual(self.storage.url(r"\evil.com"), "/test_media_url/evil.com")
self.assertEqual(self.storage.url("///evil.com"), "/test_media_url/evil.com")
self.assertEqual(self.storage.url(r"\\\evil.com"), "/test_media_url/evil.com")
self.assertEqual(self.storage.url(None), "/test_media_url/")
def test_base_url(self):
"""
        url() raises ValueError when base_url is None, and a missing trailing
        slash in base_url is appended automatically.
"""
self.storage.base_url = None
with self.assertRaises(ValueError):
self.storage.url('test.file')
# #22717: missing ending slash in base_url should be auto-corrected
storage = self.storage_class(location=self.temp_dir, base_url='/no_ending_slash')
self.assertEqual(
storage.url('test.file'),
'%s%s' % (storage.base_url, 'test.file')
)
def test_listdir(self):
"""
File storage returns a tuple containing directories and files.
"""
self.assertFalse(self.storage.exists('storage_test_1'))
self.assertFalse(self.storage.exists('storage_test_2'))
self.assertFalse(self.storage.exists('storage_dir_1'))
self.storage.save('storage_test_1', ContentFile('custom content'))
self.storage.save('storage_test_2', ContentFile('custom content'))
os.mkdir(os.path.join(self.temp_dir, 'storage_dir_1'))
dirs, files = self.storage.listdir('')
self.assertEqual(set(dirs), {'storage_dir_1'})
self.assertEqual(set(files), {'storage_test_1', 'storage_test_2'})
self.storage.delete('storage_test_1')
self.storage.delete('storage_test_2')
os.rmdir(os.path.join(self.temp_dir, 'storage_dir_1'))
def test_file_storage_prevents_directory_traversal(self):
"""
File storage prevents directory traversal (files can only be accessed if
they're below the storage location).
"""
with self.assertRaises(SuspiciousFileOperation):
self.storage.exists('..')
with self.assertRaises(SuspiciousFileOperation):
self.storage.exists('/etc/passwd')
def test_file_storage_preserves_filename_case(self):
"""The storage backend should preserve case of filenames."""
# Create a storage backend associated with the mixed case name
# directory.
other_temp_storage = self.storage_class(location=self.temp_dir2)
# Ask that storage backend to store a file with a mixed case filename.
mixed_case = 'CaSe_SeNsItIvE'
file = other_temp_storage.open(mixed_case, 'w')
file.write('storage contents')
file.close()
self.assertEqual(os.path.join(self.temp_dir2, mixed_case), other_temp_storage.path(mixed_case))
other_temp_storage.delete(mixed_case)
def test_makedirs_race_handling(self):
"""
File storage should be robust against directory creation race conditions.
"""
real_makedirs = os.makedirs
# Monkey-patch os.makedirs, to simulate a normal call, a raced call,
# and an error.
def fake_makedirs(path):
if path == os.path.join(self.temp_dir, 'normal'):
real_makedirs(path)
elif path == os.path.join(self.temp_dir, 'raced'):
real_makedirs(path)
raise FileExistsError()
elif path == os.path.join(self.temp_dir, 'error'):
raise PermissionError()
else:
self.fail('unexpected argument %r' % path)
try:
os.makedirs = fake_makedirs
self.storage.save('normal/test.file', ContentFile('saved normally'))
with self.storage.open('normal/test.file') as f:
self.assertEqual(f.read(), b'saved normally')
self.storage.save('raced/test.file', ContentFile('saved with race'))
with self.storage.open('raced/test.file') as f:
self.assertEqual(f.read(), b'saved with race')
# Exceptions aside from FileExistsError are raised.
with self.assertRaises(PermissionError):
self.storage.save('error/test.file', ContentFile('not saved'))
finally:
os.makedirs = real_makedirs
def test_remove_race_handling(self):
"""
File storage should be robust against file removal race conditions.
"""
real_remove = os.remove
# Monkey-patch os.remove, to simulate a normal call, a raced call,
# and an error.
def fake_remove(path):
if path == os.path.join(self.temp_dir, 'normal.file'):
real_remove(path)
elif path == os.path.join(self.temp_dir, 'raced.file'):
real_remove(path)
raise FileNotFoundError()
elif path == os.path.join(self.temp_dir, 'error.file'):
raise PermissionError()
else:
self.fail('unexpected argument %r' % path)
try:
os.remove = fake_remove
self.storage.save('normal.file', ContentFile('delete normally'))
self.storage.delete('normal.file')
self.assertFalse(self.storage.exists('normal.file'))
self.storage.save('raced.file', ContentFile('delete with race'))
self.storage.delete('raced.file')
            self.assertFalse(self.storage.exists('raced.file'))
# Exceptions aside from FileNotFoundError are raised.
self.storage.save('error.file', ContentFile('delete with error'))
with self.assertRaises(PermissionError):
self.storage.delete('error.file')
finally:
os.remove = real_remove
def test_file_chunks_error(self):
"""
        Test behavior when file.chunks() raises an error.
"""
f1 = ContentFile('chunks fails')
def failing_chunks():
raise IOError
f1.chunks = failing_chunks
with self.assertRaises(IOError):
self.storage.save('error.file', f1)
def test_delete_no_name(self):
"""
Calling delete with an empty name should not try to remove the base
storage directory, but fail loudly (#20660).
"""
with self.assertRaises(AssertionError):
self.storage.delete('')
def test_delete_deletes_directories(self):
tmp_dir = tempfile.mkdtemp(dir=self.storage.location)
self.storage.delete(tmp_dir)
self.assertFalse(os.path.exists(tmp_dir))
@override_settings(
MEDIA_ROOT='media_root',
MEDIA_URL='media_url/',
FILE_UPLOAD_PERMISSIONS=0o777,
FILE_UPLOAD_DIRECTORY_PERMISSIONS=0o777,
)
def test_setting_changed(self):
"""
Properties using settings values as defaults should be updated on
referenced settings change while specified values should be unchanged.
"""
storage = self.storage_class(
location='explicit_location',
base_url='explicit_base_url/',
file_permissions_mode=0o666,
directory_permissions_mode=0o666,
)
defaults_storage = self.storage_class()
settings = {
            'MEDIA_ROOT': 'overridden_media_root',
            'MEDIA_URL': 'overridden_media_url/',
'FILE_UPLOAD_PERMISSIONS': 0o333,
'FILE_UPLOAD_DIRECTORY_PERMISSIONS': 0o333,
}
with self.settings(**settings):
self.assertEqual(storage.base_location, 'explicit_location')
self.assertIn('explicit_location', storage.location)
self.assertEqual(storage.base_url, 'explicit_base_url/')
self.assertEqual(storage.file_permissions_mode, 0o666)
self.assertEqual(storage.directory_permissions_mode, 0o666)
self.assertEqual(defaults_storage.base_location, settings['MEDIA_ROOT'])
self.assertIn(settings['MEDIA_ROOT'], defaults_storage.location)
self.assertEqual(defaults_storage.base_url, settings['MEDIA_URL'])
self.assertEqual(defaults_storage.file_permissions_mode, settings['FILE_UPLOAD_PERMISSIONS'])
self.assertEqual(
defaults_storage.directory_permissions_mode, settings['FILE_UPLOAD_DIRECTORY_PERMISSIONS']
)
class CustomStorage(FileSystemStorage):
def get_available_name(self, name, max_length=None):
"""
Append numbers to duplicate files rather than underscores, like Trac.
"""
basename, *ext = name.split('.')
number = 2
while self.exists(name):
name = '.'.join([basename, str(number)] + ext)
number += 1
return name
class CustomStorageTests(FileStorageTests):
storage_class = CustomStorage
def test_custom_get_available_name(self):
first = self.storage.save('custom_storage', ContentFile('custom contents'))
self.assertEqual(first, 'custom_storage')
second = self.storage.save('custom_storage', ContentFile('more contents'))
self.assertEqual(second, 'custom_storage.2')
self.storage.delete(first)
self.storage.delete(second)
class OverwritingStorage(FileSystemStorage):
"""
Overwrite existing files instead of appending a suffix to generate an
unused name.
"""
# Mask out O_EXCL so os.open() doesn't raise OSError if the file exists.
OS_OPEN_FLAGS = FileSystemStorage.OS_OPEN_FLAGS & ~os.O_EXCL
def get_available_name(self, name, max_length=None):
"""Override the effort to find an used name."""
return name
class OverwritingStorageTests(FileStorageTests):
storage_class = OverwritingStorage
def test_save_overwrite_behavior(self):
"""Saving to same file name twice overwrites the first file."""
name = 'test.file'
self.assertFalse(self.storage.exists(name))
content_1 = b'content one'
content_2 = b'second content'
f_1 = ContentFile(content_1)
f_2 = ContentFile(content_2)
stored_name_1 = self.storage.save(name, f_1)
try:
self.assertEqual(stored_name_1, name)
self.assertTrue(self.storage.exists(name))
self.assertTrue(os.path.exists(os.path.join(self.temp_dir, name)))
with self.storage.open(name) as fp:
self.assertEqual(fp.read(), content_1)
stored_name_2 = self.storage.save(name, f_2)
self.assertEqual(stored_name_2, name)
self.assertTrue(self.storage.exists(name))
self.assertTrue(os.path.exists(os.path.join(self.temp_dir, name)))
with self.storage.open(name) as fp:
self.assertEqual(fp.read(), content_2)
finally:
self.storage.delete(name)
class DiscardingFalseContentStorage(FileSystemStorage):
def _save(self, name, content):
if content:
return super()._save(name, content)
return ''
class DiscardingFalseContentStorageTests(FileStorageTests):
storage_class = DiscardingFalseContentStorage
def test_custom_storage_discarding_empty_content(self):
"""
When Storage.save() wraps a file-like object in File, it should include
the name argument so that bool(file) evaluates to True (#26495).
"""
output = StringIO('content')
self.storage.save('tests/stringio', output)
self.assertTrue(self.storage.exists('tests/stringio'))
with self.storage.open('tests/stringio') as f:
self.assertEqual(f.read(), b'content')
class FileFieldStorageTests(TestCase):
def tearDown(self):
shutil.rmtree(temp_storage_location)
def _storage_max_filename_length(self, storage):
"""
Query filesystem for maximum filename length (e.g. AUFS has 242).
"""
dir_to_test = storage.location
while not os.path.exists(dir_to_test):
dir_to_test = os.path.dirname(dir_to_test)
try:
return os.pathconf(dir_to_test, 'PC_NAME_MAX')
except Exception:
return 255 # Should be safe on most backends
def test_files(self):
self.assertIsInstance(Storage.normal, FileDescriptor)
# An object without a file has limited functionality.
obj1 = Storage()
self.assertEqual(obj1.normal.name, "")
with self.assertRaises(ValueError):
obj1.normal.size
# Saving a file enables full functionality.
obj1.normal.save("django_test.txt", ContentFile("content"))
self.assertEqual(obj1.normal.name, "tests/django_test.txt")
self.assertEqual(obj1.normal.size, 7)
self.assertEqual(obj1.normal.read(), b"content")
obj1.normal.close()
# File objects can be assigned to FileField attributes, but shouldn't
# get committed until the model it's attached to is saved.
obj1.normal = SimpleUploadedFile("assignment.txt", b"content")
dirs, files = temp_storage.listdir("tests")
self.assertEqual(dirs, [])
self.assertNotIn("assignment.txt", files)
obj1.save()
dirs, files = temp_storage.listdir("tests")
self.assertEqual(sorted(files), ["assignment.txt", "django_test.txt"])
# Save another file with the same name.
obj2 = Storage()
obj2.normal.save("django_test.txt", ContentFile("more content"))
obj2_name = obj2.normal.name
self.assertRegex(obj2_name, "tests/django_test_%s.txt" % FILE_SUFFIX_REGEX)
self.assertEqual(obj2.normal.size, 12)
obj2.normal.close()
# Deleting an object does not delete the file it uses.
obj2.delete()
obj2.normal.save("django_test.txt", ContentFile("more content"))
self.assertNotEqual(obj2_name, obj2.normal.name)
self.assertRegex(obj2.normal.name, "tests/django_test_%s.txt" % FILE_SUFFIX_REGEX)
obj2.normal.close()
def test_filefield_read(self):
# Files can be read in a little at a time, if necessary.
obj = Storage.objects.create(
normal=SimpleUploadedFile("assignment.txt", b"content"))
obj.normal.open()
self.assertEqual(obj.normal.read(3), b"con")
self.assertEqual(obj.normal.read(), b"tent")
self.assertEqual(list(obj.normal.chunks(chunk_size=2)), [b"co", b"nt", b"en", b"t"])
obj.normal.close()
def test_filefield_write(self):
# Files can be written to.
obj = Storage.objects.create(normal=SimpleUploadedFile('rewritten.txt', b'content'))
with obj.normal as normal:
normal.open('wb')
normal.write(b'updated')
obj.refresh_from_db()
self.assertEqual(obj.normal.read(), b'updated')
obj.normal.close()
def test_filefield_reopen(self):
obj = Storage.objects.create(normal=SimpleUploadedFile('reopen.txt', b'content'))
with obj.normal as normal:
normal.open()
obj.normal.open()
obj.normal.file.seek(0)
obj.normal.close()
def test_duplicate_filename(self):
# Multiple files with the same name get _(7 random chars) appended to them.
objs = [Storage() for i in range(2)]
for o in objs:
o.normal.save("multiple_files.txt", ContentFile("Same Content"))
try:
names = [o.normal.name for o in objs]
self.assertEqual(names[0], "tests/multiple_files.txt")
self.assertRegex(names[1], "tests/multiple_files_%s.txt" % FILE_SUFFIX_REGEX)
finally:
for o in objs:
o.delete()
def test_file_truncation(self):
        # When max_length is limited and multiple files are uploaded under the
        # same name, the filename gets truncated to make room for the
        # _(7 random chars) suffix. When most of max_length is taken by the
        # dirname + extension and there are not enough characters left in the
        # filename to truncate, an exception should be raised.
objs = [Storage() for i in range(2)]
filename = 'filename.ext'
for o in objs:
o.limited_length.save(filename, ContentFile('Same Content'))
try:
# Testing truncation.
names = [o.limited_length.name for o in objs]
self.assertEqual(names[0], 'tests/%s' % filename)
self.assertRegex(names[1], 'tests/fi_%s.ext' % FILE_SUFFIX_REGEX)
# Testing exception is raised when filename is too short to truncate.
filename = 'short.longext'
objs[0].limited_length.save(filename, ContentFile('Same Content'))
with self.assertRaisesMessage(SuspiciousFileOperation, 'Storage can not find an available filename'):
objs[1].limited_length.save(*(filename, ContentFile('Same Content')))
finally:
for o in objs:
o.delete()
@unittest.skipIf(
sys.platform.startswith('win'),
"Windows supports at most 260 characters in a path.",
)
def test_extended_length_storage(self):
# Testing FileField with max_length > 255. Most systems have filename
# length limitation of 255. Path takes extra chars.
filename = (self._storage_max_filename_length(temp_storage) - 4) * 'a' # 4 chars for extension.
obj = Storage()
obj.extended_length.save('%s.txt' % filename, ContentFile('Same Content'))
self.assertEqual(obj.extended_length.name, 'tests/%s.txt' % filename)
self.assertEqual(obj.extended_length.read(), b'Same Content')
obj.extended_length.close()
def test_filefield_default(self):
# Default values allow an object to access a single file.
temp_storage.save('tests/default.txt', ContentFile('default content'))
obj = Storage.objects.create()
self.assertEqual(obj.default.name, "tests/default.txt")
self.assertEqual(obj.default.read(), b"default content")
obj.default.close()
# But it shouldn't be deleted, even if there are no more objects using
# it.
obj.delete()
obj = Storage()
self.assertEqual(obj.default.read(), b"default content")
obj.default.close()
def test_empty_upload_to(self):
        # upload_to can be empty, meaning it does not use a subdirectory.
obj = Storage()
obj.empty.save('django_test.txt', ContentFile('more content'))
self.assertEqual(obj.empty.name, "django_test.txt")
self.assertEqual(obj.empty.read(), b"more content")
obj.empty.close()
def test_random_upload_to(self):
# Verify the fix for #5655, making sure the directory is only
# determined once.
obj = Storage()
obj.random.save("random_file", ContentFile("random content"))
self.assertTrue(obj.random.name.endswith("/random_file"))
obj.random.close()
def test_custom_valid_name_callable_upload_to(self):
"""
Storage.get_valid_name() should be called when upload_to is a callable.
"""
obj = Storage()
obj.custom_valid_name.save("random_file", ContentFile("random content"))
# CustomValidNameStorage.get_valid_name() appends '_valid' to the name
self.assertTrue(obj.custom_valid_name.name.endswith("/random_file_valid"))
obj.custom_valid_name.close()
def test_filefield_pickling(self):
# Push an object into the cache to make sure it pickles properly
obj = Storage()
obj.normal.save("django_test.txt", ContentFile("more content"))
obj.normal.close()
cache.set("obj", obj)
self.assertEqual(cache.get("obj").normal.name, "tests/django_test.txt")
def test_file_object(self):
# Create sample file
temp_storage.save('tests/example.txt', ContentFile('some content'))
# Load it as Python file object
with open(temp_storage.path('tests/example.txt')) as file_obj:
# Save it using storage and read its content
temp_storage.save('tests/file_obj', file_obj)
self.assertTrue(temp_storage.exists('tests/file_obj'))
with temp_storage.open('tests/file_obj') as f:
self.assertEqual(f.read(), b'some content')
def test_stringio(self):
# Test passing StringIO instance as content argument to save
output = StringIO()
output.write('content')
output.seek(0)
# Save it and read written file
temp_storage.save('tests/stringio', output)
self.assertTrue(temp_storage.exists('tests/stringio'))
with temp_storage.open('tests/stringio') as f:
self.assertEqual(f.read(), b'content')
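# Illustrative sketch (not part of the original models): the callable upload_to
# behaviour exercised by test_random_upload_to above is a plain function that
# receives the model instance and the original filename and returns the
# storage-relative path. The uuid-based layout here is just an example.
import uuid
def _example_upload_to(instance, filename):
    return 'tests/%s/%s' % (uuid.uuid4().hex, filename)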
# Tests for a race condition on file saving (#4948).
# This is written in such a way that it'll always pass on platforms
# without threading.
class SlowFile(ContentFile):
def chunks(self):
time.sleep(1)
return super().chunks()
class FileSaveRaceConditionTest(SimpleTestCase):
def setUp(self):
self.storage_dir = tempfile.mkdtemp()
self.storage = FileSystemStorage(self.storage_dir)
self.thread = threading.Thread(target=self.save_file, args=['conflict'])
def tearDown(self):
shutil.rmtree(self.storage_dir)
def save_file(self, name):
name = self.storage.save(name, SlowFile(b"Data"))
def test_race_condition(self):
self.thread.start()
self.save_file('conflict')
self.thread.join()
files = sorted(os.listdir(self.storage_dir))
self.assertEqual(files[0], 'conflict')
self.assertRegex(files[1], 'conflict_%s' % FILE_SUFFIX_REGEX)
@unittest.skipIf(sys.platform.startswith('win'), "Windows only partially supports umasks and chmod.")
class FileStoragePermissions(unittest.TestCase):
def setUp(self):
self.umask = 0o027
self.old_umask = os.umask(self.umask)
self.storage_dir = tempfile.mkdtemp()
def tearDown(self):
shutil.rmtree(self.storage_dir)
os.umask(self.old_umask)
@override_settings(FILE_UPLOAD_PERMISSIONS=0o654)
def test_file_upload_permissions(self):
self.storage = FileSystemStorage(self.storage_dir)
name = self.storage.save("the_file", ContentFile("data"))
actual_mode = os.stat(self.storage.path(name))[0] & 0o777
self.assertEqual(actual_mode, 0o654)
@override_settings(FILE_UPLOAD_PERMISSIONS=None)
def test_file_upload_default_permissions(self):
self.storage = FileSystemStorage(self.storage_dir)
fname = self.storage.save("some_file", ContentFile("data"))
mode = os.stat(self.storage.path(fname))[0] & 0o777
self.assertEqual(mode, 0o666 & ~self.umask)
@override_settings(FILE_UPLOAD_DIRECTORY_PERMISSIONS=0o765)
def test_file_upload_directory_permissions(self):
self.storage = FileSystemStorage(self.storage_dir)
name = self.storage.save("the_directory/the_file", ContentFile("data"))
dir_mode = os.stat(os.path.dirname(self.storage.path(name)))[0] & 0o777
self.assertEqual(dir_mode, 0o765)
@override_settings(FILE_UPLOAD_DIRECTORY_PERMISSIONS=None)
def test_file_upload_directory_default_permissions(self):
self.storage = FileSystemStorage(self.storage_dir)
name = self.storage.save("the_directory/the_file", ContentFile("data"))
dir_mode = os.stat(os.path.dirname(self.storage.path(name)))[0] & 0o777
self.assertEqual(dir_mode, 0o777 & ~self.umask)
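# Illustrative sketch (not part of the original tests): with the settings left
# at None, the assertions above expect files created as 0o666 and directories
# as 0o777, each masked by the process umask. With the 0o027 umask used in
# setUp() that works out to 0o640 for files and 0o750 for directories.
def _example_expected_default_modes(umask=0o027):
    return 0o666 & ~umask, 0o777 & ~umask  # (0o640, 0o750)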
class FileStoragePathParsing(SimpleTestCase):
def setUp(self):
self.storage_dir = tempfile.mkdtemp()
self.storage = FileSystemStorage(self.storage_dir)
def tearDown(self):
shutil.rmtree(self.storage_dir)
def test_directory_with_dot(self):
"""Regression test for #9610.
If the directory name contains a dot and the file name doesn't, make
sure we still mangle the file name instead of the directory name.
"""
self.storage.save('dotted.path/test', ContentFile("1"))
self.storage.save('dotted.path/test', ContentFile("2"))
files = sorted(os.listdir(os.path.join(self.storage_dir, 'dotted.path')))
self.assertFalse(os.path.exists(os.path.join(self.storage_dir, 'dotted_.path')))
self.assertEqual(files[0], 'test')
self.assertRegex(files[1], 'test_%s' % FILE_SUFFIX_REGEX)
def test_first_character_dot(self):
"""
File names with a dot as their first character don't have an extension,
and the underscore should get added to the end.
"""
self.storage.save('dotted.path/.test', ContentFile("1"))
self.storage.save('dotted.path/.test', ContentFile("2"))
files = sorted(os.listdir(os.path.join(self.storage_dir, 'dotted.path')))
self.assertFalse(os.path.exists(os.path.join(self.storage_dir, 'dotted_.path')))
self.assertEqual(files[0], '.test')
self.assertRegex(files[1], '.test_%s' % FILE_SUFFIX_REGEX)
class ContentFileStorageTestCase(unittest.TestCase):
def setUp(self):
self.storage_dir = tempfile.mkdtemp()
self.storage = FileSystemStorage(self.storage_dir)
def tearDown(self):
shutil.rmtree(self.storage_dir)
def test_content_saving(self):
"""
ContentFile can be saved correctly with the filesystem storage,
if it was initialized with either bytes or unicode content.
"""
self.storage.save('bytes.txt', ContentFile(b"content"))
self.storage.save('unicode.txt', ContentFile("español"))
@override_settings(ROOT_URLCONF='file_storage.urls')
class FileLikeObjectTestCase(LiveServerTestCase):
"""
Test file-like objects (#15644).
"""
available_apps = []
def setUp(self):
self.temp_dir = tempfile.mkdtemp()
self.storage = FileSystemStorage(location=self.temp_dir)
def tearDown(self):
shutil.rmtree(self.temp_dir)
def test_urllib_request_urlopen(self):
"""
Test the File storage API with a file-like object coming from
urllib.request.urlopen().
"""
file_like_object = urlopen(self.live_server_url + '/')
f = File(file_like_object)
stored_filename = self.storage.save("remote_file.html", f)
remote_file = urlopen(self.live_server_url + '/')
with self.storage.open(stored_filename) as stored_file:
self.assertEqual(stored_file.read(), remote_file.read())
|
531eefa4d04ee5fcf812870fdedd06fc8ced9e9a4312ce03f8df7fa11b3b8d8d | import uuid
from django.core.exceptions import ImproperlyConfigured
from django.test import SimpleTestCase
from django.test.utils import override_settings
from django.urls import Resolver404, path, resolve, reverse
from .converters import DynamicConverter
from .views import empty_view
included_kwargs = {'base': b'hello', 'value': b'world'}
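# Keyword arguments expected once the base64 converter has decoded both
# captured segments of the included URL patterns.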
converter_test_data = (
# ('url', ('url_name', 'app_name', {kwargs})),
# aGVsbG8= is 'hello' encoded in base64.
('/base64/aGVsbG8=/', ('base64', '', {'value': b'hello'})),
('/base64/aGVsbG8=/subpatterns/d29ybGQ=/', ('subpattern-base64', '', included_kwargs)),
('/base64/aGVsbG8=/namespaced/d29ybGQ=/', ('subpattern-base64', 'namespaced-base64', included_kwargs)),
)
@override_settings(ROOT_URLCONF='urlpatterns.path_urls')
class SimplifiedURLTests(SimpleTestCase):
def test_path_lookup_without_parameters(self):
match = resolve('/articles/2003/')
self.assertEqual(match.url_name, 'articles-2003')
self.assertEqual(match.args, ())
self.assertEqual(match.kwargs, {})
self.assertEqual(match.route, 'articles/2003/')
def test_path_lookup_with_typed_parameters(self):
match = resolve('/articles/2015/')
self.assertEqual(match.url_name, 'articles-year')
self.assertEqual(match.args, ())
self.assertEqual(match.kwargs, {'year': 2015})
self.assertEqual(match.route, 'articles/<int:year>/')
    def test_path_lookup_with_multiple_parameters(self):
match = resolve('/articles/2015/04/12/')
self.assertEqual(match.url_name, 'articles-year-month-day')
self.assertEqual(match.args, ())
self.assertEqual(match.kwargs, {'year': 2015, 'month': 4, 'day': 12})
self.assertEqual(match.route, 'articles/<int:year>/<int:month>/<int:day>/')
def test_two_variable_at_start_of_path_pattern(self):
match = resolve('/en/foo/')
self.assertEqual(match.url_name, 'lang-and-path')
self.assertEqual(match.kwargs, {'lang': 'en', 'url': 'foo'})
self.assertEqual(match.route, '<lang>/<path:url>/')
def test_re_path(self):
match = resolve('/regex/1/')
self.assertEqual(match.url_name, 'regex')
self.assertEqual(match.kwargs, {'pk': '1'})
self.assertEqual(match.route, '^regex/(?P<pk>[0-9]+)/$')
def test_path_lookup_with_inclusion(self):
match = resolve('/included_urls/extra/something/')
self.assertEqual(match.url_name, 'inner-extra')
self.assertEqual(match.route, 'included_urls/extra/<extra>/')
def test_path_lookup_with_empty_string_inclusion(self):
match = resolve('/more/99/')
self.assertEqual(match.url_name, 'inner-more')
self.assertEqual(match.route, r'^more/(?P<extra>\w+)/$')
def test_path_lookup_with_double_inclusion(self):
match = resolve('/included_urls/more/some_value/')
self.assertEqual(match.url_name, 'inner-more')
self.assertEqual(match.route, r'included_urls/more/(?P<extra>\w+)/$')
def test_path_reverse_without_parameter(self):
url = reverse('articles-2003')
self.assertEqual(url, '/articles/2003/')
def test_path_reverse_with_parameter(self):
url = reverse('articles-year-month-day', kwargs={'year': 2015, 'month': 4, 'day': 12})
self.assertEqual(url, '/articles/2015/4/12/')
@override_settings(ROOT_URLCONF='urlpatterns.path_base64_urls')
def test_converter_resolve(self):
for url, (url_name, app_name, kwargs) in converter_test_data:
with self.subTest(url=url):
match = resolve(url)
self.assertEqual(match.url_name, url_name)
self.assertEqual(match.app_name, app_name)
self.assertEqual(match.kwargs, kwargs)
@override_settings(ROOT_URLCONF='urlpatterns.path_base64_urls')
def test_converter_reverse(self):
for expected, (url_name, app_name, kwargs) in converter_test_data:
if app_name:
url_name = '%s:%s' % (app_name, url_name)
with self.subTest(url=url_name):
url = reverse(url_name, kwargs=kwargs)
self.assertEqual(url, expected)
@override_settings(ROOT_URLCONF='urlpatterns.path_base64_urls')
def test_converter_reverse_with_second_layer_instance_namespace(self):
kwargs = included_kwargs.copy()
kwargs['last_value'] = b'world'
url = reverse('instance-ns-base64:subsubpattern-base64', kwargs=kwargs)
self.assertEqual(url, '/base64/aGVsbG8=/subpatterns/d29ybGQ=/d29ybGQ=/')
def test_path_inclusion_is_matchable(self):
match = resolve('/included_urls/extra/something/')
self.assertEqual(match.url_name, 'inner-extra')
self.assertEqual(match.kwargs, {'extra': 'something'})
def test_path_inclusion_is_reversable(self):
url = reverse('inner-extra', kwargs={'extra': 'something'})
self.assertEqual(url, '/included_urls/extra/something/')
def test_invalid_converter(self):
msg = "URL route 'foo/<nonexistent:var>/' uses invalid converter 'nonexistent'."
with self.assertRaisesMessage(ImproperlyConfigured, msg):
path('foo/<nonexistent:var>/', empty_view)
@override_settings(ROOT_URLCONF='urlpatterns.converter_urls')
class ConverterTests(SimpleTestCase):
def test_matching_urls(self):
def no_converter(x):
return x
test_data = (
('int', {'0', '1', '01', 1234567890}, int),
('str', {'abcxyz'}, no_converter),
('path', {'allows.ANY*characters'}, no_converter),
('slug', {'abcxyz-ABCXYZ_01234567890'}, no_converter),
('uuid', {'39da9369-838e-4750-91a5-f7805cd82839'}, uuid.UUID),
)
for url_name, url_suffixes, converter in test_data:
for url_suffix in url_suffixes:
url = '/%s/%s/' % (url_name, url_suffix)
with self.subTest(url=url):
match = resolve(url)
self.assertEqual(match.url_name, url_name)
self.assertEqual(match.kwargs, {url_name: converter(url_suffix)})
# reverse() works with string parameters.
string_kwargs = {url_name: url_suffix}
self.assertEqual(reverse(url_name, kwargs=string_kwargs), url)
# reverse() also works with native types (int, UUID, etc.).
if converter is not no_converter:
# The converted value might be different for int (a
# leading zero is lost in the conversion).
converted_value = match.kwargs[url_name]
converted_url = '/%s/%s/' % (url_name, converted_value)
self.assertEqual(reverse(url_name, kwargs={url_name: converted_value}), converted_url)
def test_nonmatching_urls(self):
test_data = (
('int', {'-1', 'letters'}),
('str', {'', '/'}),
('path', {''}),
('slug', {'', 'stars*notallowed'}),
('uuid', {
'',
'9da9369-838e-4750-91a5-f7805cd82839',
'39da9369-838-4750-91a5-f7805cd82839',
'39da9369-838e-475-91a5-f7805cd82839',
'39da9369-838e-4750-91a-f7805cd82839',
'39da9369-838e-4750-91a5-f7805cd8283',
}),
)
for url_name, url_suffixes in test_data:
for url_suffix in url_suffixes:
url = '/%s/%s/' % (url_name, url_suffix)
with self.subTest(url=url), self.assertRaises(Resolver404):
resolve(url)
class ParameterRestrictionTests(SimpleTestCase):
def test_non_identifier_parameter_name_causes_exception(self):
msg = (
"URL route 'hello/<int:1>/' uses parameter name '1' which isn't "
"a valid Python identifier."
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
path(r'hello/<int:1>/', lambda r: None)
def test_allows_non_ascii_but_valid_identifiers(self):
# \u0394 is "GREEK CAPITAL LETTER DELTA", a valid identifier.
p = path('hello/<str:\u0394>/', lambda r: None)
match = p.resolve('hello/1/')
self.assertEqual(match.kwargs, {'\u0394': '1'})
@override_settings(ROOT_URLCONF='urlpatterns.path_dynamic_urls')
class ConversionExceptionTests(SimpleTestCase):
"""How are errors in Converter.to_python() and to_url() handled?"""
def test_resolve_value_error_means_no_match(self):
@DynamicConverter.register_to_python
def raises_value_error(value):
raise ValueError()
with self.assertRaises(Resolver404):
resolve('/dynamic/abc/')
    def test_resolve_type_error_propagates(self):
@DynamicConverter.register_to_python
def raises_type_error(value):
raise TypeError('This type error propagates.')
with self.assertRaisesMessage(TypeError, 'This type error propagates.'):
resolve('/dynamic/abc/')
def test_reverse_value_error_propagates(self):
@DynamicConverter.register_to_url
def raises_value_error(value):
raise ValueError('This value error propagates.')
with self.assertRaisesMessage(ValueError, 'This value error propagates.'):
reverse('dynamic', kwargs={'value': object()})
|
8ed4be73b3977a27a4a63ffef7e38783ca518dcad74a24df80bf35ef7ca5cce6 | import threading
import time
from unittest import mock
from multiple_database.routers import TestRouter
from django.core.exceptions import FieldError
from django.db import (
DatabaseError, NotSupportedError, connection, connections, router,
transaction,
)
from django.test import (
TransactionTestCase, override_settings, skipIfDBFeature,
skipUnlessDBFeature,
)
from django.test.utils import CaptureQueriesContext
from .models import City, Country, Person, PersonProfile
class SelectForUpdateTests(TransactionTestCase):
available_apps = ['select_for_update']
def setUp(self):
# This is executed in autocommit mode so that code in
# run_select_for_update can see this data.
self.country1 = Country.objects.create(name='Belgium')
self.country2 = Country.objects.create(name='France')
self.city1 = City.objects.create(name='Liberchies', country=self.country1)
self.city2 = City.objects.create(name='Samois-sur-Seine', country=self.country2)
self.person = Person.objects.create(name='Reinhardt', born=self.city1, died=self.city2)
self.person_profile = PersonProfile.objects.create(person=self.person)
# We need another database connection in transaction to test that one
# connection issuing a SELECT ... FOR UPDATE will block.
self.new_connection = connection.copy()
def tearDown(self):
try:
self.end_blocking_transaction()
except (DatabaseError, AttributeError):
pass
self.new_connection.close()
def start_blocking_transaction(self):
self.new_connection.set_autocommit(False)
# Start a blocking transaction. At some point,
# end_blocking_transaction() should be called.
self.cursor = self.new_connection.cursor()
sql = 'SELECT * FROM %(db_table)s %(for_update)s;' % {
'db_table': Person._meta.db_table,
'for_update': self.new_connection.ops.for_update_sql(),
}
self.cursor.execute(sql, ())
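        # Fetch a row so the SELECT ... FOR UPDATE has actually executed and
        # the row lock is held for the rest of the test.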
self.cursor.fetchone()
def end_blocking_transaction(self):
# Roll back the blocking transaction.
self.cursor.close()
self.new_connection.rollback()
self.new_connection.set_autocommit(True)
def has_for_update_sql(self, queries, **kwargs):
        # Examine the SQL that was executed to determine whether it
        # contains the 'SELECT ... FOR UPDATE' clause.
for_update_sql = connection.ops.for_update_sql(**kwargs)
return any(for_update_sql in query['sql'] for query in queries)
@skipUnlessDBFeature('has_select_for_update')
def test_for_update_sql_generated(self):
"""
The backend's FOR UPDATE variant appears in
generated SQL when select_for_update is invoked.
"""
with transaction.atomic(), CaptureQueriesContext(connection) as ctx:
list(Person.objects.all().select_for_update())
self.assertTrue(self.has_for_update_sql(ctx.captured_queries))
@skipUnlessDBFeature('has_select_for_update_nowait')
def test_for_update_sql_generated_nowait(self):
"""
The backend's FOR UPDATE NOWAIT variant appears in
generated SQL when select_for_update is invoked.
"""
with transaction.atomic(), CaptureQueriesContext(connection) as ctx:
list(Person.objects.all().select_for_update(nowait=True))
self.assertTrue(self.has_for_update_sql(ctx.captured_queries, nowait=True))
@skipUnlessDBFeature('has_select_for_update_skip_locked')
def test_for_update_sql_generated_skip_locked(self):
"""
The backend's FOR UPDATE SKIP LOCKED variant appears in
generated SQL when select_for_update is invoked.
"""
with transaction.atomic(), CaptureQueriesContext(connection) as ctx:
list(Person.objects.all().select_for_update(skip_locked=True))
self.assertTrue(self.has_for_update_sql(ctx.captured_queries, skip_locked=True))
@skipUnlessDBFeature('has_select_for_update_of')
def test_for_update_sql_generated_of(self):
"""
The backend's FOR UPDATE OF variant appears in the generated SQL when
select_for_update() is invoked.
"""
with transaction.atomic(), CaptureQueriesContext(connection) as ctx:
list(Person.objects.select_related(
'born__country',
).select_for_update(
of=('born__country',),
).select_for_update(
of=('self', 'born__country')
))
features = connections['default'].features
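        # Backends differ on whether FOR UPDATE OF names tables or qualified
        # columns; build the expected identifiers accordingly.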
if features.select_for_update_of_column:
expected = ['select_for_update_person"."id', 'select_for_update_country"."id']
else:
expected = ['select_for_update_person', 'select_for_update_country']
expected = [connection.ops.quote_name(value) for value in expected]
self.assertTrue(self.has_for_update_sql(ctx.captured_queries, of=expected))
@skipUnlessDBFeature('has_select_for_update_of')
def test_for_update_of_followed_by_values(self):
with transaction.atomic():
values = list(Person.objects.select_for_update(of=('self',)).values('pk'))
self.assertEqual(values, [{'pk': self.person.pk}])
@skipUnlessDBFeature('has_select_for_update_of')
def test_for_update_of_followed_by_values_list(self):
with transaction.atomic():
values = list(Person.objects.select_for_update(of=('self',)).values_list('pk'))
self.assertEqual(values, [(self.person.pk,)])
@skipUnlessDBFeature('has_select_for_update_of')
def test_for_update_of_self_when_self_is_not_selected(self):
"""
select_for_update(of=['self']) when the only columns selected are from
related tables.
"""
with transaction.atomic():
values = list(Person.objects.select_related('born').select_for_update(of=('self',)).values('born__name'))
self.assertEqual(values, [{'born__name': self.city1.name}])
@skipUnlessDBFeature('has_select_for_update_nowait')
def test_nowait_raises_error_on_block(self):
"""
If nowait is specified, we expect an error to be raised rather
than blocking.
"""
self.start_blocking_transaction()
status = []
thread = threading.Thread(
target=self.run_select_for_update,
args=(status,),
kwargs={'nowait': True},
)
thread.start()
time.sleep(1)
thread.join()
self.end_blocking_transaction()
self.assertIsInstance(status[-1], DatabaseError)
@skipUnlessDBFeature('has_select_for_update_skip_locked')
def test_skip_locked_skips_locked_rows(self):
"""
If skip_locked is specified, the locked row is skipped resulting in
Person.DoesNotExist.
"""
self.start_blocking_transaction()
status = []
thread = threading.Thread(
target=self.run_select_for_update,
args=(status,),
kwargs={'skip_locked': True},
)
thread.start()
time.sleep(1)
thread.join()
self.end_blocking_transaction()
self.assertIsInstance(status[-1], Person.DoesNotExist)
@skipIfDBFeature('has_select_for_update_nowait')
@skipUnlessDBFeature('has_select_for_update')
def test_unsupported_nowait_raises_error(self):
"""
NotSupportedError is raised if a SELECT...FOR UPDATE NOWAIT is run on
a database backend that supports FOR UPDATE but not NOWAIT.
"""
with self.assertRaisesMessage(NotSupportedError, 'NOWAIT is not supported on this database backend.'):
with transaction.atomic():
Person.objects.select_for_update(nowait=True).get()
@skipIfDBFeature('has_select_for_update_skip_locked')
@skipUnlessDBFeature('has_select_for_update')
def test_unsupported_skip_locked_raises_error(self):
"""
NotSupportedError is raised if a SELECT...FOR UPDATE SKIP LOCKED is run
on a database backend that supports FOR UPDATE but not SKIP LOCKED.
"""
with self.assertRaisesMessage(NotSupportedError, 'SKIP LOCKED is not supported on this database backend.'):
with transaction.atomic():
Person.objects.select_for_update(skip_locked=True).get()
@skipIfDBFeature('has_select_for_update_of')
@skipUnlessDBFeature('has_select_for_update')
def test_unsupported_of_raises_error(self):
"""
NotSupportedError is raised if a SELECT...FOR UPDATE OF... is run on
a database backend that supports FOR UPDATE but not OF.
"""
msg = 'FOR UPDATE OF is not supported on this database backend.'
with self.assertRaisesMessage(NotSupportedError, msg):
with transaction.atomic():
Person.objects.select_for_update(of=('self',)).get()
@skipUnlessDBFeature('has_select_for_update', 'has_select_for_update_of')
def test_unrelated_of_argument_raises_error(self):
"""
FieldError is raised if a non-relation field is specified in of=(...).
"""
msg = (
'Invalid field name(s) given in select_for_update(of=(...)): %s. '
'Only relational fields followed in the query are allowed. '
'Choices are: self, born, born__country.'
)
invalid_of = [
('nonexistent',),
('name',),
('born__nonexistent',),
('born__name',),
('born__nonexistent', 'born__name'),
]
for of in invalid_of:
with self.subTest(of=of):
with self.assertRaisesMessage(FieldError, msg % ', '.join(of)):
with transaction.atomic():
Person.objects.select_related('born__country').select_for_update(of=of).get()
@skipUnlessDBFeature('has_select_for_update', 'has_select_for_update_of')
def test_related_but_unselected_of_argument_raises_error(self):
"""
FieldError is raised if a relation field that is not followed in the
query is specified in of=(...).
"""
msg = (
'Invalid field name(s) given in select_for_update(of=(...)): %s. '
'Only relational fields followed in the query are allowed. '
'Choices are: self, born, profile.'
)
for name in ['born__country', 'died', 'died__country']:
with self.subTest(name=name):
with self.assertRaisesMessage(FieldError, msg % name):
with transaction.atomic():
Person.objects.select_related(
'born', 'profile',
).exclude(profile=None).select_for_update(of=(name,)).get()
@skipUnlessDBFeature('has_select_for_update', 'has_select_for_update_of')
def test_reverse_one_to_one_of_arguments(self):
"""
Reverse OneToOneFields may be included in of=(...) as long as NULLs
are excluded because LEFT JOIN isn't allowed in SELECT FOR UPDATE.
"""
with transaction.atomic():
person = Person.objects.select_related(
'profile',
).exclude(profile=None).select_for_update(of=('profile',)).get()
self.assertEqual(person.profile, self.person_profile)
@skipUnlessDBFeature('has_select_for_update')
def test_for_update_after_from(self):
features_class = connections['default'].features.__class__
attribute_to_patch = "%s.%s.for_update_after_from" % (features_class.__module__, features_class.__name__)
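        # Force the feature flag so the compiler places FOR UPDATE directly
        # after FROM, which makes it appear before WHERE in the generated SQL.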
with mock.patch(attribute_to_patch, return_value=True):
with transaction.atomic():
self.assertIn('FOR UPDATE WHERE', str(Person.objects.filter(name='foo').select_for_update().query))
@skipUnlessDBFeature('has_select_for_update')
def test_for_update_requires_transaction(self):
"""
A TransactionManagementError is raised
when a select_for_update query is executed outside of a transaction.
"""
msg = 'select_for_update cannot be used outside of a transaction.'
with self.assertRaisesMessage(transaction.TransactionManagementError, msg):
list(Person.objects.all().select_for_update())
@skipUnlessDBFeature('has_select_for_update')
def test_for_update_requires_transaction_only_in_execution(self):
"""
No TransactionManagementError is raised
when select_for_update is invoked outside of a transaction -
only when the query is executed.
"""
people = Person.objects.all().select_for_update()
msg = 'select_for_update cannot be used outside of a transaction.'
with self.assertRaisesMessage(transaction.TransactionManagementError, msg):
list(people)
@skipUnlessDBFeature('supports_select_for_update_with_limit')
def test_select_for_update_with_limit(self):
other = Person.objects.create(name='Grappeli', born=self.city1, died=self.city2)
with transaction.atomic():
qs = list(Person.objects.all().order_by('pk').select_for_update()[1:2])
self.assertEqual(qs[0], other)
@skipIfDBFeature('supports_select_for_update_with_limit')
def test_unsupported_select_for_update_with_limit(self):
msg = 'LIMIT/OFFSET is not supported with select_for_update on this database backend.'
with self.assertRaisesMessage(NotSupportedError, msg):
with transaction.atomic():
list(Person.objects.all().order_by('pk').select_for_update()[1:2])
def run_select_for_update(self, status, **kwargs):
"""
Utility method that runs a SELECT FOR UPDATE against all
Person instances. After the select_for_update, it attempts
to update the name of the only record, save, and commit.
This function expects to run in a separate thread.
"""
status.append('started')
try:
            # We need to enter transaction management again, as this is done
            # on a per-thread basis.
with transaction.atomic():
person = Person.objects.select_for_update(**kwargs).get()
person.name = 'Fred'
person.save()
except (DatabaseError, Person.DoesNotExist) as e:
status.append(e)
finally:
# This method is run in a separate thread. It uses its own
# database connection. Close it without waiting for the GC.
connection.close()
@skipUnlessDBFeature('has_select_for_update')
@skipUnlessDBFeature('supports_transactions')
def test_block(self):
"""
A thread running a select_for_update that accesses rows being touched
by a similar operation on another connection blocks correctly.
"""
# First, let's start the transaction in our thread.
self.start_blocking_transaction()
# Now, try it again using the ORM's select_for_update
# facility. Do this in a separate thread.
status = []
thread = threading.Thread(
target=self.run_select_for_update, args=(status,)
)
# The thread should immediately block, but we'll sleep
# for a bit to make sure.
thread.start()
sanity_count = 0
while len(status) != 1 and sanity_count < 10:
sanity_count += 1
time.sleep(1)
if sanity_count >= 10:
raise ValueError('Thread did not run and block')
# Check the person hasn't been updated. Since this isn't
# using FOR UPDATE, it won't block.
p = Person.objects.get(pk=self.person.pk)
self.assertEqual('Reinhardt', p.name)
# When we end our blocking transaction, our thread should
# be able to continue.
self.end_blocking_transaction()
thread.join(5.0)
# Check the thread has finished. Assuming it has, we should
# find that it has updated the person's name.
        self.assertFalse(thread.is_alive())
# We must commit the transaction to ensure that MySQL gets a fresh read,
# since by default it runs in REPEATABLE READ mode
transaction.commit()
p = Person.objects.get(pk=self.person.pk)
self.assertEqual('Fred', p.name)
@skipUnlessDBFeature('has_select_for_update')
def test_raw_lock_not_available(self):
"""
        Running a raw query that can't obtain a FOR UPDATE lock raises
        the correct exception.
"""
self.start_blocking_transaction()
def raw(status):
try:
list(
Person.objects.raw(
'SELECT * FROM %s %s' % (
Person._meta.db_table,
connection.ops.for_update_sql(nowait=True)
)
)
)
except DatabaseError as e:
status.append(e)
finally:
# This method is run in a separate thread. It uses its own
# database connection. Close it without waiting for the GC.
# Connection cannot be closed on Oracle because cursor is still
# open.
if connection.vendor != 'oracle':
connection.close()
status = []
thread = threading.Thread(target=raw, kwargs={'status': status})
thread.start()
time.sleep(1)
thread.join()
self.end_blocking_transaction()
self.assertIsInstance(status[-1], DatabaseError)
@skipUnlessDBFeature('has_select_for_update')
@override_settings(DATABASE_ROUTERS=[TestRouter()])
def test_select_for_update_on_multidb(self):
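        # Locking rows implies a write, so the queryset should use whichever
        # database the router's db_for_write() selects.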
query = Person.objects.select_for_update()
self.assertEqual(router.db_for_write(Person), query.db)
@skipUnlessDBFeature('has_select_for_update')
def test_select_for_update_with_get(self):
with transaction.atomic():
person = Person.objects.select_for_update().get(name='Reinhardt')
self.assertEqual(person.name, 'Reinhardt')
def test_nowait_and_skip_locked(self):
with self.assertRaisesMessage(ValueError, 'The nowait option cannot be used with skip_locked.'):
Person.objects.select_for_update(nowait=True, skip_locked=True)
def test_ordered_select_for_update(self):
"""
Subqueries should respect ordering as an ORDER BY clause may be useful
to specify a row locking order to prevent deadlocks (#27193).
"""
with transaction.atomic():
qs = Person.objects.filter(id__in=Person.objects.order_by('-id').select_for_update())
self.assertIn('ORDER BY', str(qs.query))
|
1a2bfed9dd14177c19e17d3f4578849de94ff99fc0454c172c81cdca2e5c70c8 | import re
from django.conf import settings
from django.contrib.sessions.backends.cache import SessionStore
from django.core.exceptions import ImproperlyConfigured
from django.http import HttpRequest
from django.middleware.csrf import (
CSRF_SESSION_KEY, CSRF_TOKEN_LENGTH, REASON_BAD_TOKEN,
REASON_NO_CSRF_COOKIE, CsrfViewMiddleware,
_compare_salted_tokens as equivalent_tokens, get_token,
)
from django.test import SimpleTestCase, override_settings
from django.views.decorators.csrf import csrf_exempt, requires_csrf_token
from .views import (
ensure_csrf_cookie_view, non_token_view_using_request_processor,
post_form_view, token_view,
)
class TestingHttpRequest(HttpRequest):
"""
    A version of HttpRequest that allows us to change some things
    more easily.
"""
def __init__(self):
super().__init__()
self.session = SessionStore()
def is_secure(self):
return getattr(self, '_is_secure_override', False)
class CsrfViewMiddlewareTestMixin:
"""
Shared methods and tests for session-based and cookie-based tokens.
"""
_csrf_id = _csrf_id_cookie = '1bcdefghij2bcdefghij3bcdefghij4bcdefghij5bcdefghij6bcdefghijABCD'
mw = CsrfViewMiddleware()
def _get_GET_no_csrf_cookie_request(self):
return TestingHttpRequest()
def _get_GET_csrf_cookie_request(self):
raise NotImplementedError('This method must be implemented by a subclass.')
def _get_POST_csrf_cookie_request(self):
req = self._get_GET_csrf_cookie_request()
req.method = "POST"
return req
def _get_POST_no_csrf_cookie_request(self):
req = self._get_GET_no_csrf_cookie_request()
req.method = "POST"
return req
def _get_POST_request_with_token(self):
req = self._get_POST_csrf_cookie_request()
req.POST['csrfmiddlewaretoken'] = self._csrf_id
return req
def _check_token_present(self, response, csrf_id=None):
text = str(response.content, response.charset)
match = re.search('name="csrfmiddlewaretoken" value="(.*?)"', text)
csrf_token = csrf_id or self._csrf_id
self.assertTrue(
match and equivalent_tokens(csrf_token, match.group(1)),
"Could not find csrfmiddlewaretoken to match %s" % csrf_token
)
def test_process_response_get_token_not_used(self):
"""
If get_token() is not called, the view middleware does not
add a cookie.
"""
# This is important to make pages cacheable. Pages which do call
# get_token(), assuming they use the token, are not cacheable because
# the token is specific to the user
req = self._get_GET_no_csrf_cookie_request()
# non_token_view_using_request_processor does not call get_token(), but
# does use the csrf request processor. By using this, we are testing
# that the view processor is properly lazy and doesn't call get_token()
# until needed.
self.mw.process_request(req)
self.mw.process_view(req, non_token_view_using_request_processor, (), {})
resp = non_token_view_using_request_processor(req)
resp2 = self.mw.process_response(req, resp)
csrf_cookie = resp2.cookies.get(settings.CSRF_COOKIE_NAME, False)
self.assertIs(csrf_cookie, False)
# Check the request processing
def test_process_request_no_csrf_cookie(self):
"""
        If no CSRF cookie is present, the middleware rejects the incoming
        request. This prevents login CSRF.
"""
req = self._get_POST_no_csrf_cookie_request()
self.mw.process_request(req)
with self.assertLogs('django.security.csrf', 'WARNING') as cm:
req2 = self.mw.process_view(req, post_form_view, (), {})
self.assertEqual(403, req2.status_code)
self.assertEqual(cm.records[0].getMessage(), 'Forbidden (%s): ' % REASON_NO_CSRF_COOKIE)
def test_process_request_csrf_cookie_no_token(self):
"""
If a CSRF cookie is present but no token, the middleware rejects
the incoming request.
"""
req = self._get_POST_csrf_cookie_request()
self.mw.process_request(req)
with self.assertLogs('django.security.csrf', 'WARNING') as cm:
req2 = self.mw.process_view(req, post_form_view, (), {})
self.assertEqual(403, req2.status_code)
self.assertEqual(cm.records[0].getMessage(), 'Forbidden (%s): ' % REASON_BAD_TOKEN)
def test_process_request_csrf_cookie_and_token(self):
"""
        If both a cookie and a token are present, the middleware lets it through.
"""
req = self._get_POST_request_with_token()
self.mw.process_request(req)
req2 = self.mw.process_view(req, post_form_view, (), {})
self.assertIsNone(req2)
def test_process_request_csrf_cookie_no_token_exempt_view(self):
"""
        If a CSRF cookie is present but no token, and the csrf_exempt decorator
        has been applied to the view, the middleware lets it through.
"""
req = self._get_POST_csrf_cookie_request()
self.mw.process_request(req)
req2 = self.mw.process_view(req, csrf_exempt(post_form_view), (), {})
self.assertIsNone(req2)
def test_csrf_token_in_header(self):
"""
The token may be passed in a header instead of in the form.
"""
req = self._get_POST_csrf_cookie_request()
req.META['HTTP_X_CSRFTOKEN'] = self._csrf_id
self.mw.process_request(req)
req2 = self.mw.process_view(req, post_form_view, (), {})
self.assertIsNone(req2)
@override_settings(CSRF_HEADER_NAME='HTTP_X_CSRFTOKEN_CUSTOMIZED')
def test_csrf_token_in_header_with_customized_name(self):
"""
settings.CSRF_HEADER_NAME can be used to customize the CSRF header name
"""
req = self._get_POST_csrf_cookie_request()
req.META['HTTP_X_CSRFTOKEN_CUSTOMIZED'] = self._csrf_id
self.mw.process_request(req)
req2 = self.mw.process_view(req, post_form_view, (), {})
self.assertIsNone(req2)
def test_put_and_delete_rejected(self):
"""
        HTTP PUT and DELETE methods are protected by the CSRF checks.
"""
req = TestingHttpRequest()
req.method = 'PUT'
with self.assertLogs('django.security.csrf', 'WARNING') as cm:
req2 = self.mw.process_view(req, post_form_view, (), {})
self.assertEqual(403, req2.status_code)
self.assertEqual(cm.records[0].getMessage(), 'Forbidden (%s): ' % REASON_NO_CSRF_COOKIE)
req = TestingHttpRequest()
req.method = 'DELETE'
with self.assertLogs('django.security.csrf', 'WARNING') as cm:
req2 = self.mw.process_view(req, post_form_view, (), {})
self.assertEqual(403, req2.status_code)
self.assertEqual(cm.records[0].getMessage(), 'Forbidden (%s): ' % REASON_NO_CSRF_COOKIE)
def test_put_and_delete_allowed(self):
"""
HTTP PUT and DELETE can get through with X-CSRFToken and a cookie.
"""
req = self._get_GET_csrf_cookie_request()
req.method = 'PUT'
req.META['HTTP_X_CSRFTOKEN'] = self._csrf_id
self.mw.process_request(req)
req2 = self.mw.process_view(req, post_form_view, (), {})
self.assertIsNone(req2)
req = self._get_GET_csrf_cookie_request()
req.method = 'DELETE'
req.META['HTTP_X_CSRFTOKEN'] = self._csrf_id
self.mw.process_request(req)
req2 = self.mw.process_view(req, post_form_view, (), {})
self.assertIsNone(req2)
# Tests for the template tag method
def test_token_node_no_csrf_cookie(self):
"""
CsrfTokenNode works when no CSRF cookie is set.
"""
req = self._get_GET_no_csrf_cookie_request()
resp = token_view(req)
token = get_token(req)
self.assertIsNotNone(token)
self._check_token_present(resp, token)
def test_token_node_empty_csrf_cookie(self):
"""
A new token is sent if the csrf_cookie is the empty string.
"""
req = self._get_GET_no_csrf_cookie_request()
req.COOKIES[settings.CSRF_COOKIE_NAME] = ""
self.mw.process_view(req, token_view, (), {})
resp = token_view(req)
token = get_token(req)
self.assertIsNotNone(token)
self._check_token_present(resp, token)
def test_token_node_with_csrf_cookie(self):
"""
CsrfTokenNode works when a CSRF cookie is set.
"""
req = self._get_GET_csrf_cookie_request()
self.mw.process_request(req)
self.mw.process_view(req, token_view, (), {})
resp = token_view(req)
self._check_token_present(resp)
def test_get_token_for_exempt_view(self):
"""
get_token still works for a view decorated with 'csrf_exempt'.
"""
req = self._get_GET_csrf_cookie_request()
self.mw.process_request(req)
self.mw.process_view(req, csrf_exempt(token_view), (), {})
resp = token_view(req)
self._check_token_present(resp)
def test_get_token_for_requires_csrf_token_view(self):
"""
get_token() works for a view decorated solely with requires_csrf_token.
"""
req = self._get_GET_csrf_cookie_request()
resp = requires_csrf_token(token_view)(req)
self._check_token_present(resp)
def test_token_node_with_new_csrf_cookie(self):
"""
CsrfTokenNode works when a CSRF cookie is created by
the middleware (when one was not already present)
"""
req = self._get_GET_no_csrf_cookie_request()
self.mw.process_view(req, token_view, (), {})
resp = token_view(req)
resp2 = self.mw.process_response(req, resp)
csrf_cookie = resp2.cookies[settings.CSRF_COOKIE_NAME]
self._check_token_present(resp, csrf_id=csrf_cookie.value)
def test_cookie_not_reset_on_accepted_request(self):
"""
        The CSRF token used in posts is changed on every request (although it
        stays equivalent). The CSRF cookie should not change on accepted
        requests. If it appears in the response, it should keep its value.
"""
req = self._get_POST_request_with_token()
self.mw.process_request(req)
self.mw.process_view(req, token_view, (), {})
resp = token_view(req)
resp = self.mw.process_response(req, resp)
csrf_cookie = resp.cookies.get(settings.CSRF_COOKIE_NAME, None)
if csrf_cookie:
self.assertEqual(
csrf_cookie.value, self._csrf_id_cookie,
"CSRF cookie was changed on an accepted request"
)
@override_settings(DEBUG=True, ALLOWED_HOSTS=['www.example.com'])
def test_https_bad_referer(self):
"""
A POST HTTPS request with a bad referer is rejected
"""
req = self._get_POST_request_with_token()
req._is_secure_override = True
req.META['HTTP_HOST'] = 'www.example.com'
req.META['HTTP_REFERER'] = 'https://www.evil.org/somepage'
req.META['SERVER_PORT'] = '443'
response = self.mw.process_view(req, post_form_view, (), {})
self.assertContains(
response,
'Referer checking failed - https://www.evil.org/somepage does not '
'match any trusted origins.',
status_code=403,
)
def test_https_malformed_host(self):
"""
CsrfViewMiddleware generates a 403 response if it receives an HTTPS
request with a bad host.
"""
req = self._get_GET_no_csrf_cookie_request()
req._is_secure_override = True
req.META['HTTP_HOST'] = '@malformed'
req.META['HTTP_REFERER'] = 'https://www.evil.org/somepage'
req.META['SERVER_PORT'] = '443'
response = self.mw.process_view(req, token_view, (), {})
self.assertEqual(response.status_code, 403)
@override_settings(DEBUG=True)
def test_https_malformed_referer(self):
"""
A POST HTTPS request with a bad referer is rejected.
"""
malformed_referer_msg = 'Referer checking failed - Referer is malformed.'
req = self._get_POST_request_with_token()
req._is_secure_override = True
req.META['HTTP_REFERER'] = 'http://http://www.example.com/'
response = self.mw.process_view(req, post_form_view, (), {})
self.assertContains(
response,
'Referer checking failed - Referer is insecure while host is secure.',
status_code=403,
)
# Empty
req.META['HTTP_REFERER'] = ''
response = self.mw.process_view(req, post_form_view, (), {})
self.assertContains(response, malformed_referer_msg, status_code=403)
# Non-ASCII
req.META['HTTP_REFERER'] = 'ØBöIß'
response = self.mw.process_view(req, post_form_view, (), {})
self.assertContains(response, malformed_referer_msg, status_code=403)
# missing scheme
# >>> urlparse('//example.com/')
# ParseResult(scheme='', netloc='example.com', path='/', params='', query='', fragment='')
req.META['HTTP_REFERER'] = '//example.com/'
response = self.mw.process_view(req, post_form_view, (), {})
self.assertContains(response, malformed_referer_msg, status_code=403)
# missing netloc
# >>> urlparse('https://')
# ParseResult(scheme='https', netloc='', path='', params='', query='', fragment='')
req.META['HTTP_REFERER'] = 'https://'
response = self.mw.process_view(req, post_form_view, (), {})
self.assertContains(response, malformed_referer_msg, status_code=403)
@override_settings(ALLOWED_HOSTS=['www.example.com'])
def test_https_good_referer(self):
"""
A POST HTTPS request with a good referer is accepted.
"""
req = self._get_POST_request_with_token()
req._is_secure_override = True
req.META['HTTP_HOST'] = 'www.example.com'
req.META['HTTP_REFERER'] = 'https://www.example.com/somepage'
self.mw.process_request(req)
req2 = self.mw.process_view(req, post_form_view, (), {})
self.assertIsNone(req2)
@override_settings(ALLOWED_HOSTS=['www.example.com'])
def test_https_good_referer_2(self):
"""
A POST HTTPS request with a good referer is accepted where the referer
contains no trailing slash.
"""
# See ticket #15617
req = self._get_POST_request_with_token()
req._is_secure_override = True
req.META['HTTP_HOST'] = 'www.example.com'
req.META['HTTP_REFERER'] = 'https://www.example.com'
self.mw.process_request(req)
req2 = self.mw.process_view(req, post_form_view, (), {})
self.assertIsNone(req2)
def _test_https_good_referer_behind_proxy(self):
req = self._get_POST_request_with_token()
req._is_secure_override = True
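        # Simulate a reverse proxy: the direct host/port differ from the
        # X-Forwarded-* values that referer checking should trust here.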
req.META.update({
'HTTP_HOST': '10.0.0.2',
'HTTP_REFERER': 'https://www.example.com/somepage',
'SERVER_PORT': '8080',
'HTTP_X_FORWARDED_HOST': 'www.example.com',
'HTTP_X_FORWARDED_PORT': '443',
})
self.mw.process_request(req)
req2 = self.mw.process_view(req, post_form_view, (), {})
self.assertIsNone(req2)
@override_settings(ALLOWED_HOSTS=['www.example.com'], CSRF_TRUSTED_ORIGINS=['dashboard.example.com'])
def test_https_csrf_trusted_origin_allowed(self):
"""
A POST HTTPS request with a referer added to the CSRF_TRUSTED_ORIGINS
setting is accepted.
"""
req = self._get_POST_request_with_token()
req._is_secure_override = True
req.META['HTTP_HOST'] = 'www.example.com'
req.META['HTTP_REFERER'] = 'https://dashboard.example.com'
self.mw.process_request(req)
req2 = self.mw.process_view(req, post_form_view, (), {})
self.assertIsNone(req2)
@override_settings(ALLOWED_HOSTS=['www.example.com'], CSRF_TRUSTED_ORIGINS=['.example.com'])
def test_https_csrf_wildcard_trusted_origin_allowed(self):
"""
A POST HTTPS request with a referer that matches a CSRF_TRUSTED_ORIGINS
wildcard is accepted.
"""
req = self._get_POST_request_with_token()
req._is_secure_override = True
req.META['HTTP_HOST'] = 'www.example.com'
req.META['HTTP_REFERER'] = 'https://dashboard.example.com'
self.mw.process_request(req)
response = self.mw.process_view(req, post_form_view, (), {})
self.assertIsNone(response)
def _test_https_good_referer_matches_cookie_domain(self):
req = self._get_POST_request_with_token()
req._is_secure_override = True
req.META['HTTP_REFERER'] = 'https://foo.example.com/'
req.META['SERVER_PORT'] = '443'
self.mw.process_request(req)
response = self.mw.process_view(req, post_form_view, (), {})
self.assertIsNone(response)
def _test_https_good_referer_matches_cookie_domain_with_different_port(self):
req = self._get_POST_request_with_token()
req._is_secure_override = True
req.META['HTTP_HOST'] = 'www.example.com'
req.META['HTTP_REFERER'] = 'https://foo.example.com:4443/'
req.META['SERVER_PORT'] = '4443'
self.mw.process_request(req)
response = self.mw.process_view(req, post_form_view, (), {})
self.assertIsNone(response)
def test_ensures_csrf_cookie_no_logging(self):
"""
ensure_csrf_cookie() doesn't log warnings (#19436).
"""
with self.assertRaisesMessage(AssertionError, 'no logs'):
with self.assertLogs('django.request', 'WARNING'):
req = self._get_GET_no_csrf_cookie_request()
ensure_csrf_cookie_view(req)
def test_post_data_read_failure(self):
"""
#20128 -- IOErrors during POST data reading should be caught and
treated as if the POST data wasn't there.
"""
class CsrfPostRequest(HttpRequest):
"""
HttpRequest that can raise an IOError when accessing POST data
"""
def __init__(self, token, raise_error):
super().__init__()
self.method = 'POST'
self.raise_error = False
self.COOKIES[settings.CSRF_COOKIE_NAME] = token
# Handle both cases here to prevent duplicate code in the
# session tests.
self.session = {}
self.session[CSRF_SESSION_KEY] = token
self.POST['csrfmiddlewaretoken'] = token
self.raise_error = raise_error
def _load_post_and_files(self):
raise IOError('error reading input data')
def _get_post(self):
if self.raise_error:
self._load_post_and_files()
return self._post
def _set_post(self, post):
self._post = post
POST = property(_get_post, _set_post)
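        # Build a token of the expected length; prefixing 'ABC' keeps it valid
        # while making it distinct from the class-level fixture value.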
token = ('ABC' + self._csrf_id)[:CSRF_TOKEN_LENGTH]
req = CsrfPostRequest(token, raise_error=False)
self.mw.process_request(req)
resp = self.mw.process_view(req, post_form_view, (), {})
self.assertIsNone(resp)
req = CsrfPostRequest(token, raise_error=True)
self.mw.process_request(req)
with self.assertLogs('django.security.csrf', 'WARNING') as cm:
resp = self.mw.process_view(req, post_form_view, (), {})
self.assertEqual(resp.status_code, 403)
self.assertEqual(cm.records[0].getMessage(), 'Forbidden (%s): ' % REASON_BAD_TOKEN)
class CsrfViewMiddlewareTests(CsrfViewMiddlewareTestMixin, SimpleTestCase):
def _get_GET_csrf_cookie_request(self):
req = TestingHttpRequest()
req.COOKIES[settings.CSRF_COOKIE_NAME] = self._csrf_id_cookie
return req
def _get_POST_bare_secret_csrf_cookie_request(self):
req = self._get_POST_no_csrf_cookie_request()
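        # Only the first 32 characters: the unsalted "bare secret" format that
        # older Django versions stored directly in the cookie.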
req.COOKIES[settings.CSRF_COOKIE_NAME] = self._csrf_id_cookie[:32]
return req
def _get_POST_bare_secret_csrf_cookie_request_with_token(self):
req = self._get_POST_bare_secret_csrf_cookie_request()
req.POST['csrfmiddlewaretoken'] = self._csrf_id_cookie[:32]
return req
def test_ensures_csrf_cookie_no_middleware(self):
"""
The ensure_csrf_cookie() decorator works without middleware.
"""
req = self._get_GET_no_csrf_cookie_request()
resp = ensure_csrf_cookie_view(req)
self.assertTrue(resp.cookies.get(settings.CSRF_COOKIE_NAME, False))
self.assertIn('Cookie', resp.get('Vary', ''))
def test_ensures_csrf_cookie_with_middleware(self):
"""
The ensure_csrf_cookie() decorator works with the CsrfViewMiddleware
enabled.
"""
req = self._get_GET_no_csrf_cookie_request()
self.mw.process_view(req, ensure_csrf_cookie_view, (), {})
resp = ensure_csrf_cookie_view(req)
resp2 = self.mw.process_response(req, resp)
self.assertTrue(resp2.cookies.get(settings.CSRF_COOKIE_NAME, False))
self.assertIn('Cookie', resp2.get('Vary', ''))
def test_csrf_cookie_age(self):
"""
CSRF cookie age can be set using settings.CSRF_COOKIE_AGE.
"""
req = self._get_GET_no_csrf_cookie_request()
MAX_AGE = 123
with self.settings(CSRF_COOKIE_NAME='csrfcookie',
CSRF_COOKIE_DOMAIN='.example.com',
CSRF_COOKIE_AGE=MAX_AGE,
CSRF_COOKIE_PATH='/test/',
CSRF_COOKIE_SECURE=True,
CSRF_COOKIE_HTTPONLY=True):
# token_view calls get_token() indirectly
self.mw.process_view(req, token_view, (), {})
resp = token_view(req)
resp2 = self.mw.process_response(req, resp)
max_age = resp2.cookies.get('csrfcookie').get('max-age')
self.assertEqual(max_age, MAX_AGE)
def test_csrf_cookie_age_none(self):
"""
        When CSRF_COOKIE_AGE is None, the cookie has no max-age set and
        therefore behaves as a session cookie.
"""
req = self._get_GET_no_csrf_cookie_request()
MAX_AGE = None
with self.settings(CSRF_COOKIE_NAME='csrfcookie',
CSRF_COOKIE_DOMAIN='.example.com',
CSRF_COOKIE_AGE=MAX_AGE,
CSRF_COOKIE_PATH='/test/',
CSRF_COOKIE_SECURE=True,
CSRF_COOKIE_HTTPONLY=True):
# token_view calls get_token() indirectly
self.mw.process_view(req, token_view, (), {})
resp = token_view(req)
resp2 = self.mw.process_response(req, resp)
max_age = resp2.cookies.get('csrfcookie').get('max-age')
self.assertEqual(max_age, '')
def test_csrf_cookie_samesite(self):
req = self._get_GET_no_csrf_cookie_request()
with self.settings(CSRF_COOKIE_NAME='csrfcookie', CSRF_COOKIE_SAMESITE='Strict'):
self.mw.process_view(req, token_view, (), {})
resp = token_view(req)
resp2 = self.mw.process_response(req, resp)
self.assertEqual(resp2.cookies['csrfcookie']['samesite'], 'Strict')
def test_process_view_token_too_long(self):
"""
If the token is longer than expected, it is ignored and a new token is
created.
"""
req = self._get_GET_no_csrf_cookie_request()
req.COOKIES[settings.CSRF_COOKIE_NAME] = 'x' * 100000
self.mw.process_view(req, token_view, (), {})
resp = token_view(req)
resp2 = self.mw.process_response(req, resp)
csrf_cookie = resp2.cookies.get(settings.CSRF_COOKIE_NAME, False)
self.assertEqual(len(csrf_cookie.value), CSRF_TOKEN_LENGTH)
def test_process_view_token_invalid_chars(self):
"""
If the token contains non-alphanumeric characters, it is ignored and a
new token is created.
"""
token = ('!@#' + self._csrf_id)[:CSRF_TOKEN_LENGTH]
req = self._get_GET_no_csrf_cookie_request()
req.COOKIES[settings.CSRF_COOKIE_NAME] = token
self.mw.process_view(req, token_view, (), {})
resp = token_view(req)
resp2 = self.mw.process_response(req, resp)
csrf_cookie = resp2.cookies.get(settings.CSRF_COOKIE_NAME, False)
self.assertEqual(len(csrf_cookie.value), CSRF_TOKEN_LENGTH)
self.assertNotEqual(csrf_cookie.value, token)
def test_bare_secret_accepted_and_replaced(self):
"""
The csrf token is reset from a bare secret.
"""
req = self._get_POST_bare_secret_csrf_cookie_request_with_token()
self.mw.process_request(req)
req2 = self.mw.process_view(req, token_view, (), {})
self.assertIsNone(req2)
resp = token_view(req)
resp = self.mw.process_response(req, resp)
self.assertIn(settings.CSRF_COOKIE_NAME, resp.cookies, "Cookie was not reset from bare secret")
csrf_cookie = resp.cookies[settings.CSRF_COOKIE_NAME]
self.assertEqual(len(csrf_cookie.value), CSRF_TOKEN_LENGTH)
self._check_token_present(resp, csrf_id=csrf_cookie.value)
@override_settings(ALLOWED_HOSTS=['www.example.com'], CSRF_COOKIE_DOMAIN='.example.com', USE_X_FORWARDED_PORT=True)
def test_https_good_referer_behind_proxy(self):
"""
A POST HTTPS request is accepted when USE_X_FORWARDED_PORT=True.
"""
self._test_https_good_referer_behind_proxy()
@override_settings(ALLOWED_HOSTS=['www.example.com'], CSRF_COOKIE_DOMAIN='.example.com')
def test_https_good_referer_matches_cookie_domain(self):
"""
A POST HTTPS request with a good referer should be accepted from a
subdomain that's allowed by CSRF_COOKIE_DOMAIN.
"""
self._test_https_good_referer_matches_cookie_domain()
@override_settings(ALLOWED_HOSTS=['www.example.com'], CSRF_COOKIE_DOMAIN='.example.com')
def test_https_good_referer_matches_cookie_domain_with_different_port(self):
"""
A POST HTTPS request with a good referer should be accepted from a
subdomain that's allowed by CSRF_COOKIE_DOMAIN and a non-443 port.
"""
self._test_https_good_referer_matches_cookie_domain_with_different_port()
@override_settings(CSRF_COOKIE_DOMAIN='.example.com', DEBUG=True)
def test_https_reject_insecure_referer(self):
"""
A POST HTTPS request from an insecure referer should be rejected.
"""
req = self._get_POST_request_with_token()
req._is_secure_override = True
req.META['HTTP_REFERER'] = 'http://example.com/'
req.META['SERVER_PORT'] = '443'
response = self.mw.process_view(req, post_form_view, (), {})
self.assertContains(
response,
'Referer checking failed - Referer is insecure while host is secure.',
status_code=403,
)
@override_settings(CSRF_USE_SESSIONS=True, CSRF_COOKIE_DOMAIN=None)
class CsrfViewMiddlewareUseSessionsTests(CsrfViewMiddlewareTestMixin, SimpleTestCase):
"""
CSRF tests with CSRF_USE_SESSIONS=True.
"""
def _get_POST_bare_secret_csrf_cookie_request(self):
req = self._get_POST_no_csrf_cookie_request()
req.session[CSRF_SESSION_KEY] = self._csrf_id_cookie[:32]
return req
def _get_GET_csrf_cookie_request(self):
req = TestingHttpRequest()
req.session[CSRF_SESSION_KEY] = self._csrf_id_cookie
return req
def test_no_session_on_request(self):
msg = (
'CSRF_USE_SESSIONS is enabled, but request.session is not set. '
'SessionMiddleware must appear before CsrfViewMiddleware in MIDDLEWARE.'
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
self.mw.process_request(HttpRequest())
def test_process_response_get_token_used(self):
"""The ensure_csrf_cookie() decorator works without middleware."""
req = self._get_GET_no_csrf_cookie_request()
ensure_csrf_cookie_view(req)
self.assertTrue(req.session.get(CSRF_SESSION_KEY, False))
def test_session_modify(self):
"""The session isn't saved if the CSRF cookie is unchanged."""
req = self._get_GET_no_csrf_cookie_request()
self.mw.process_view(req, ensure_csrf_cookie_view, (), {})
resp = ensure_csrf_cookie_view(req)
self.mw.process_response(req, resp)
self.assertIsNotNone(req.session.get(CSRF_SESSION_KEY))
req.session.modified = False
self.mw.process_view(req, ensure_csrf_cookie_view, (), {})
resp = ensure_csrf_cookie_view(req)
self.mw.process_response(req, resp)
self.assertFalse(req.session.modified)
def test_ensures_csrf_cookie_with_middleware(self):
"""
The ensure_csrf_cookie() decorator works with the CsrfViewMiddleware
enabled.
"""
req = self._get_GET_no_csrf_cookie_request()
self.mw.process_view(req, ensure_csrf_cookie_view, (), {})
resp = ensure_csrf_cookie_view(req)
self.mw.process_response(req, resp)
self.assertTrue(req.session.get(CSRF_SESSION_KEY, False))
def test_token_node_with_new_csrf_cookie(self):
"""
CsrfTokenNode works when a CSRF cookie is created by the middleware
(when one was not already present).
"""
req = self._get_GET_no_csrf_cookie_request()
self.mw.process_view(req, token_view, (), {})
resp = token_view(req)
self.mw.process_response(req, resp)
csrf_cookie = req.session[CSRF_SESSION_KEY]
self._check_token_present(resp, csrf_id=csrf_cookie)
@override_settings(
ALLOWED_HOSTS=['www.example.com'],
SESSION_COOKIE_DOMAIN='.example.com',
USE_X_FORWARDED_PORT=True,
DEBUG=True,
)
def test_https_good_referer_behind_proxy(self):
"""
A POST HTTPS request is accepted when USE_X_FORWARDED_PORT=True.
"""
self._test_https_good_referer_behind_proxy()
@override_settings(ALLOWED_HOSTS=['www.example.com'], SESSION_COOKIE_DOMAIN='.example.com')
def test_https_good_referer_matches_cookie_domain(self):
"""
A POST HTTPS request with a good referer should be accepted from a
subdomain that's allowed by SESSION_COOKIE_DOMAIN.
"""
self._test_https_good_referer_matches_cookie_domain()
@override_settings(ALLOWED_HOSTS=['www.example.com'], SESSION_COOKIE_DOMAIN='.example.com')
def test_https_good_referer_matches_cookie_domain_with_different_port(self):
"""
A POST HTTPS request with a good referer should be accepted from a
subdomain that's allowed by SESSION_COOKIE_DOMAIN and a non-443 port.
"""
self._test_https_good_referer_matches_cookie_domain_with_different_port()
@override_settings(SESSION_COOKIE_DOMAIN='.example.com', DEBUG=True)
def test_https_reject_insecure_referer(self):
"""
A POST HTTPS request from an insecure referer should be rejected.
"""
req = self._get_POST_request_with_token()
req._is_secure_override = True
req.META['HTTP_REFERER'] = 'http://example.com/'
req.META['SERVER_PORT'] = '443'
response = self.mw.process_view(req, post_form_view, (), {})
self.assertContains(
response,
'Referer checking failed - Referer is insecure while host is secure.',
status_code=403,
)
@override_settings(ROOT_URLCONF='csrf_tests.csrf_token_error_handler_urls', DEBUG=False)
class CsrfInErrorHandlingViewsTests(SimpleTestCase):
def test_csrf_token_on_404_stays_constant(self):
response = self.client.get('/does not exist/')
# The error handler returns status code 599.
self.assertEqual(response.status_code, 599)
token1 = response.content
response = self.client.get('/does not exist/')
self.assertEqual(response.status_code, 599)
token2 = response.content
self.assertTrue(equivalent_tokens(token1.decode('ascii'), token2.decode('ascii')))
|
b5153ded04be746e721fd552f71229b0feeff5fc27914a596fff3a6fd19582f5 | import datetime
import re
from decimal import Decimal
from django.core.exceptions import FieldError
from django.db import connection
from django.db.models import (
Avg, Count, DecimalField, DurationField, F, FloatField, Func, IntegerField,
Max, Min, Sum, Value,
)
from django.test import TestCase
from django.test.utils import Approximate, CaptureQueriesContext
from django.utils import timezone
from .models import Author, Book, Publisher, Store
class AggregateTestCase(TestCase):
@classmethod
def setUpTestData(cls):
cls.a1 = Author.objects.create(name='Adrian Holovaty', age=34)
cls.a2 = Author.objects.create(name='Jacob Kaplan-Moss', age=35)
cls.a3 = Author.objects.create(name='Brad Dayley', age=45)
cls.a4 = Author.objects.create(name='James Bennett', age=29)
cls.a5 = Author.objects.create(name='Jeffrey Forcier', age=37)
cls.a6 = Author.objects.create(name='Paul Bissex', age=29)
cls.a7 = Author.objects.create(name='Wesley J. Chun', age=25)
cls.a8 = Author.objects.create(name='Peter Norvig', age=57)
cls.a9 = Author.objects.create(name='Stuart Russell', age=46)
cls.a1.friends.add(cls.a2, cls.a4)
cls.a2.friends.add(cls.a1, cls.a7)
cls.a4.friends.add(cls.a1)
cls.a5.friends.add(cls.a6, cls.a7)
cls.a6.friends.add(cls.a5, cls.a7)
cls.a7.friends.add(cls.a2, cls.a5, cls.a6)
cls.a8.friends.add(cls.a9)
cls.a9.friends.add(cls.a8)
cls.p1 = Publisher.objects.create(name='Apress', num_awards=3, duration=datetime.timedelta(days=1))
cls.p2 = Publisher.objects.create(name='Sams', num_awards=1, duration=datetime.timedelta(days=2))
cls.p3 = Publisher.objects.create(name='Prentice Hall', num_awards=7)
cls.p4 = Publisher.objects.create(name='Morgan Kaufmann', num_awards=9)
cls.p5 = Publisher.objects.create(name="Jonno's House of Books", num_awards=0)
cls.b1 = Book.objects.create(
isbn='159059725', name='The Definitive Guide to Django: Web Development Done Right',
pages=447, rating=4.5, price=Decimal('30.00'), contact=cls.a1, publisher=cls.p1,
pubdate=datetime.date(2007, 12, 6)
)
cls.b2 = Book.objects.create(
isbn='067232959', name='Sams Teach Yourself Django in 24 Hours',
pages=528, rating=3.0, price=Decimal('23.09'), contact=cls.a3, publisher=cls.p2,
pubdate=datetime.date(2008, 3, 3)
)
cls.b3 = Book.objects.create(
isbn='159059996', name='Practical Django Projects',
pages=300, rating=4.0, price=Decimal('29.69'), contact=cls.a4, publisher=cls.p1,
pubdate=datetime.date(2008, 6, 23)
)
cls.b4 = Book.objects.create(
isbn='013235613', name='Python Web Development with Django',
pages=350, rating=4.0, price=Decimal('29.69'), contact=cls.a5, publisher=cls.p3,
pubdate=datetime.date(2008, 11, 3)
)
cls.b5 = Book.objects.create(
isbn='013790395', name='Artificial Intelligence: A Modern Approach',
pages=1132, rating=4.0, price=Decimal('82.80'), contact=cls.a8, publisher=cls.p3,
pubdate=datetime.date(1995, 1, 15)
)
cls.b6 = Book.objects.create(
isbn='155860191', name='Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp',
pages=946, rating=5.0, price=Decimal('75.00'), contact=cls.a8, publisher=cls.p4,
pubdate=datetime.date(1991, 10, 15)
)
cls.b1.authors.add(cls.a1, cls.a2)
cls.b2.authors.add(cls.a3)
cls.b3.authors.add(cls.a4)
cls.b4.authors.add(cls.a5, cls.a6, cls.a7)
cls.b5.authors.add(cls.a8, cls.a9)
cls.b6.authors.add(cls.a8)
s1 = Store.objects.create(
name='Amazon.com',
original_opening=datetime.datetime(1994, 4, 23, 9, 17, 42),
friday_night_closing=datetime.time(23, 59, 59)
)
s2 = Store.objects.create(
name='Books.com',
original_opening=datetime.datetime(2001, 3, 15, 11, 23, 37),
friday_night_closing=datetime.time(23, 59, 59)
)
s3 = Store.objects.create(
name="Mamma and Pappa's Books",
original_opening=datetime.datetime(1945, 4, 25, 16, 24, 14),
friday_night_closing=datetime.time(21, 30)
)
s1.books.add(cls.b1, cls.b2, cls.b3, cls.b4, cls.b5, cls.b6)
s2.books.add(cls.b1, cls.b3, cls.b5, cls.b6)
s3.books.add(cls.b3, cls.b4, cls.b6)
def test_empty_aggregate(self):
self.assertEqual(Author.objects.all().aggregate(), {})
def test_aggregate_in_order_by(self):
msg = (
'Using an aggregate in order_by() without also including it in '
'annotate() is not allowed: Avg(F(book__rating)'
)
with self.assertRaisesMessage(FieldError, msg):
Author.objects.values('age').order_by(Avg('book__rating'))
def test_single_aggregate(self):
vals = Author.objects.aggregate(Avg("age"))
self.assertEqual(vals, {"age__avg": Approximate(37.4, places=1)})
def test_multiple_aggregates(self):
vals = Author.objects.aggregate(Sum("age"), Avg("age"))
self.assertEqual(vals, {"age__sum": 337, "age__avg": Approximate(37.4, places=1)})
def test_filter_aggregate(self):
vals = Author.objects.filter(age__gt=29).aggregate(Sum("age"))
self.assertEqual(vals, {'age__sum': 254})
def test_related_aggregate(self):
vals = Author.objects.aggregate(Avg("friends__age"))
self.assertEqual(vals, {'friends__age__avg': Approximate(34.07, places=2)})
vals = Book.objects.filter(rating__lt=4.5).aggregate(Avg("authors__age"))
self.assertEqual(vals, {'authors__age__avg': Approximate(38.2857, places=2)})
vals = Author.objects.all().filter(name__contains="a").aggregate(Avg("book__rating"))
self.assertEqual(vals, {'book__rating__avg': 4.0})
vals = Book.objects.aggregate(Sum("publisher__num_awards"))
self.assertEqual(vals, {'publisher__num_awards__sum': 30})
vals = Publisher.objects.aggregate(Sum("book__price"))
self.assertEqual(vals, {'book__price__sum': Decimal('270.27')})
def test_aggregate_multi_join(self):
vals = Store.objects.aggregate(Max("books__authors__age"))
self.assertEqual(vals, {'books__authors__age__max': 57})
vals = Author.objects.aggregate(Min("book__publisher__num_awards"))
self.assertEqual(vals, {'book__publisher__num_awards__min': 1})
def test_aggregate_alias(self):
vals = Store.objects.filter(name="Amazon.com").aggregate(amazon_mean=Avg("books__rating"))
self.assertEqual(vals, {'amazon_mean': Approximate(4.08, places=2)})
def test_annotate_basic(self):
self.assertQuerysetEqual(
Book.objects.annotate().order_by('pk'), [
"The Definitive Guide to Django: Web Development Done Right",
"Sams Teach Yourself Django in 24 Hours",
"Practical Django Projects",
"Python Web Development with Django",
"Artificial Intelligence: A Modern Approach",
"Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp"
],
lambda b: b.name
)
books = Book.objects.annotate(mean_age=Avg("authors__age"))
b = books.get(pk=self.b1.pk)
self.assertEqual(
b.name,
'The Definitive Guide to Django: Web Development Done Right'
)
self.assertEqual(b.mean_age, 34.5)
def test_annotate_defer(self):
qs = Book.objects.annotate(
page_sum=Sum("pages")).defer('name').filter(pk=self.b1.pk)
rows = [
(self.b1.id, "159059725", 447, "The Definitive Guide to Django: Web Development Done Right")
]
self.assertQuerysetEqual(
qs.order_by('pk'), rows,
lambda r: (r.id, r.isbn, r.page_sum, r.name)
)
def test_annotate_defer_select_related(self):
qs = Book.objects.select_related('contact').annotate(
page_sum=Sum("pages")).defer('name').filter(pk=self.b1.pk)
rows = [
(self.b1.id, "159059725", 447, "Adrian Holovaty",
"The Definitive Guide to Django: Web Development Done Right")
]
self.assertQuerysetEqual(
qs.order_by('pk'), rows,
lambda r: (r.id, r.isbn, r.page_sum, r.contact.name, r.name)
)
def test_annotate_m2m(self):
books = Book.objects.filter(rating__lt=4.5).annotate(Avg("authors__age")).order_by("name")
self.assertQuerysetEqual(
books, [
('Artificial Intelligence: A Modern Approach', 51.5),
('Practical Django Projects', 29.0),
('Python Web Development with Django', Approximate(30.3, places=1)),
('Sams Teach Yourself Django in 24 Hours', 45.0)
],
lambda b: (b.name, b.authors__age__avg),
)
books = Book.objects.annotate(num_authors=Count("authors")).order_by("name")
self.assertQuerysetEqual(
books, [
('Artificial Intelligence: A Modern Approach', 2),
('Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp', 1),
('Practical Django Projects', 1),
('Python Web Development with Django', 3),
('Sams Teach Yourself Django in 24 Hours', 1),
('The Definitive Guide to Django: Web Development Done Right', 2)
],
lambda b: (b.name, b.num_authors)
)
def test_backwards_m2m_annotate(self):
authors = Author.objects.filter(name__contains="a").annotate(Avg("book__rating")).order_by("name")
self.assertQuerysetEqual(
authors, [
('Adrian Holovaty', 4.5),
('Brad Dayley', 3.0),
('Jacob Kaplan-Moss', 4.5),
('James Bennett', 4.0),
('Paul Bissex', 4.0),
('Stuart Russell', 4.0)
],
lambda a: (a.name, a.book__rating__avg)
)
authors = Author.objects.annotate(num_books=Count("book")).order_by("name")
self.assertQuerysetEqual(
authors, [
('Adrian Holovaty', 1),
('Brad Dayley', 1),
('Jacob Kaplan-Moss', 1),
('James Bennett', 1),
('Jeffrey Forcier', 1),
('Paul Bissex', 1),
('Peter Norvig', 2),
('Stuart Russell', 1),
('Wesley J. Chun', 1)
],
lambda a: (a.name, a.num_books)
)
def test_reverse_fkey_annotate(self):
books = Book.objects.annotate(Sum("publisher__num_awards")).order_by("name")
self.assertQuerysetEqual(
books, [
('Artificial Intelligence: A Modern Approach', 7),
('Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp', 9),
('Practical Django Projects', 3),
('Python Web Development with Django', 7),
('Sams Teach Yourself Django in 24 Hours', 1),
('The Definitive Guide to Django: Web Development Done Right', 3)
],
lambda b: (b.name, b.publisher__num_awards__sum)
)
publishers = Publisher.objects.annotate(Sum("book__price")).order_by("name")
self.assertQuerysetEqual(
publishers, [
('Apress', Decimal("59.69")),
("Jonno's House of Books", None),
('Morgan Kaufmann', Decimal("75.00")),
('Prentice Hall', Decimal("112.49")),
('Sams', Decimal("23.09"))
],
lambda p: (p.name, p.book__price__sum)
)
def test_annotate_values(self):
books = list(Book.objects.filter(pk=self.b1.pk).annotate(mean_age=Avg("authors__age")).values())
self.assertEqual(
books, [
{
"contact_id": self.a1.id,
"id": self.b1.id,
"isbn": "159059725",
"mean_age": 34.5,
"name": "The Definitive Guide to Django: Web Development Done Right",
"pages": 447,
"price": Approximate(Decimal("30")),
"pubdate": datetime.date(2007, 12, 6),
"publisher_id": self.p1.id,
"rating": 4.5,
}
]
)
books = (
Book.objects
.filter(pk=self.b1.pk)
.annotate(mean_age=Avg('authors__age'))
.values('pk', 'isbn', 'mean_age')
)
self.assertEqual(
list(books), [
{
"pk": self.b1.pk,
"isbn": "159059725",
"mean_age": 34.5,
}
]
)
books = Book.objects.filter(pk=self.b1.pk).annotate(mean_age=Avg("authors__age")).values("name")
self.assertEqual(
list(books),
[{'name': 'The Definitive Guide to Django: Web Development Done Right'}],
)
books = Book.objects.filter(pk=self.b1.pk).values().annotate(mean_age=Avg('authors__age'))
self.assertEqual(
list(books), [
{
"contact_id": self.a1.id,
"id": self.b1.id,
"isbn": "159059725",
"mean_age": 34.5,
"name": "The Definitive Guide to Django: Web Development Done Right",
"pages": 447,
"price": Approximate(Decimal("30")),
"pubdate": datetime.date(2007, 12, 6),
"publisher_id": self.p1.id,
"rating": 4.5,
}
]
)
books = (
Book.objects
.values("rating")
.annotate(n_authors=Count("authors__id"), mean_age=Avg("authors__age"))
.order_by("rating")
)
self.assertEqual(
list(books), [
{
"rating": 3.0,
"n_authors": 1,
"mean_age": 45.0,
},
{
"rating": 4.0,
"n_authors": 6,
"mean_age": Approximate(37.16, places=1)
},
{
"rating": 4.5,
"n_authors": 2,
"mean_age": 34.5,
},
{
"rating": 5.0,
"n_authors": 1,
"mean_age": 57.0,
}
]
)
authors = Author.objects.annotate(Avg("friends__age")).order_by("name")
self.assertQuerysetEqual(
authors, [
('Adrian Holovaty', 32.0),
('Brad Dayley', None),
('Jacob Kaplan-Moss', 29.5),
('James Bennett', 34.0),
('Jeffrey Forcier', 27.0),
('Paul Bissex', 31.0),
('Peter Norvig', 46.0),
('Stuart Russell', 57.0),
('Wesley J. Chun', Approximate(33.66, places=1))
],
lambda a: (a.name, a.friends__age__avg)
)
def test_count(self):
vals = Book.objects.aggregate(Count("rating"))
self.assertEqual(vals, {"rating__count": 6})
vals = Book.objects.aggregate(Count("rating", distinct=True))
self.assertEqual(vals, {"rating__count": 4})
def test_count_star(self):
with self.assertNumQueries(1) as ctx:
Book.objects.aggregate(n=Count("*"))
sql = ctx.captured_queries[0]['sql']
self.assertIn('SELECT COUNT(*) ', sql)
def test_non_grouped_annotation_not_in_group_by(self):
"""
An annotation not included in values() before an aggregate should be
excluded from the group by clause.
"""
qs = (
Book.objects.annotate(xprice=F('price')).filter(rating=4.0).values('rating')
.annotate(count=Count('publisher_id', distinct=True)).values('count', 'rating').order_by('count')
)
self.assertEqual(list(qs), [{'rating': 4.0, 'count': 2}])
def test_grouped_annotation_in_group_by(self):
"""
An annotation included in values() before an aggregate should be
included in the group by clause.
"""
qs = (
Book.objects.annotate(xprice=F('price')).filter(rating=4.0).values('rating', 'xprice')
.annotate(count=Count('publisher_id', distinct=True)).values('count', 'rating').order_by('count')
)
self.assertEqual(
list(qs), [
{'rating': 4.0, 'count': 1},
{'rating': 4.0, 'count': 2},
]
)
def test_fkey_aggregate(self):
explicit = list(Author.objects.annotate(Count('book__id')))
implicit = list(Author.objects.annotate(Count('book')))
self.assertEqual(explicit, implicit)
def test_annotate_ordering(self):
books = Book.objects.values('rating').annotate(oldest=Max('authors__age')).order_by('oldest', 'rating')
self.assertEqual(
list(books), [
{'rating': 4.5, 'oldest': 35},
{'rating': 3.0, 'oldest': 45},
{'rating': 4.0, 'oldest': 57},
{'rating': 5.0, 'oldest': 57},
]
)
books = Book.objects.values("rating").annotate(oldest=Max("authors__age")).order_by("-oldest", "-rating")
self.assertEqual(
list(books), [
{'rating': 5.0, 'oldest': 57},
{'rating': 4.0, 'oldest': 57},
{'rating': 3.0, 'oldest': 45},
{'rating': 4.5, 'oldest': 35},
]
)
def test_aggregate_annotation(self):
vals = Book.objects.annotate(num_authors=Count("authors__id")).aggregate(Avg("num_authors"))
self.assertEqual(vals, {"num_authors__avg": Approximate(1.66, places=1)})
def test_avg_duration_field(self):
# Explicit `output_field`.
self.assertEqual(
Publisher.objects.aggregate(Avg('duration', output_field=DurationField())),
{'duration__avg': datetime.timedelta(days=1, hours=12)}
)
# Implicit `output_field`.
self.assertEqual(
Publisher.objects.aggregate(Avg('duration')),
{'duration__avg': datetime.timedelta(days=1, hours=12)}
)
def test_sum_duration_field(self):
self.assertEqual(
Publisher.objects.aggregate(Sum('duration', output_field=DurationField())),
{'duration__sum': datetime.timedelta(days=3)}
)
def test_sum_distinct_aggregate(self):
"""
Sum on a distinct() QuerySet should aggregate only the distinct items.
"""
authors = Author.objects.filter(book__in=[self.b5, self.b6])
self.assertEqual(authors.count(), 3)
distinct_authors = authors.distinct()
self.assertEqual(distinct_authors.count(), 2)
# Selected author ages are 57 and 46
age_sum = distinct_authors.aggregate(Sum('age'))
self.assertEqual(age_sum['age__sum'], 103)
def test_filtering(self):
p = Publisher.objects.create(name='Expensive Publisher', num_awards=0)
Book.objects.create(
name='ExpensiveBook1',
pages=1,
isbn='111',
rating=3.5,
price=Decimal("1000"),
publisher=p,
contact_id=self.a1.id,
pubdate=datetime.date(2008, 12, 1)
)
Book.objects.create(
name='ExpensiveBook2',
pages=1,
isbn='222',
rating=4.0,
price=Decimal("1000"),
publisher=p,
contact_id=self.a1.id,
pubdate=datetime.date(2008, 12, 2)
)
Book.objects.create(
name='ExpensiveBook3',
pages=1,
isbn='333',
rating=4.5,
price=Decimal("35"),
publisher=p,
contact_id=self.a1.id,
pubdate=datetime.date(2008, 12, 3)
)
publishers = Publisher.objects.annotate(num_books=Count("book__id")).filter(num_books__gt=1).order_by("pk")
self.assertQuerysetEqual(
publishers,
['Apress', 'Prentice Hall', 'Expensive Publisher'],
lambda p: p.name,
)
publishers = Publisher.objects.filter(book__price__lt=Decimal("40.0")).order_by("pk")
self.assertQuerysetEqual(
publishers, [
"Apress",
"Apress",
"Sams",
"Prentice Hall",
"Expensive Publisher",
],
lambda p: p.name
)
publishers = (
Publisher.objects
.annotate(num_books=Count("book__id"))
.filter(num_books__gt=1, book__price__lt=Decimal("40.0"))
.order_by("pk")
)
self.assertQuerysetEqual(
publishers,
['Apress', 'Prentice Hall', 'Expensive Publisher'],
lambda p: p.name,
)
publishers = (
Publisher.objects
.filter(book__price__lt=Decimal("40.0"))
.annotate(num_books=Count("book__id"))
.filter(num_books__gt=1)
.order_by("pk")
)
self.assertQuerysetEqual(publishers, ['Apress'], lambda p: p.name)
publishers = Publisher.objects.annotate(num_books=Count("book")).filter(num_books__range=[1, 3]).order_by("pk")
self.assertQuerysetEqual(
publishers, [
"Apress",
"Sams",
"Prentice Hall",
"Morgan Kaufmann",
"Expensive Publisher",
],
lambda p: p.name
)
publishers = Publisher.objects.annotate(num_books=Count("book")).filter(num_books__range=[1, 2]).order_by("pk")
self.assertQuerysetEqual(
publishers,
['Apress', 'Sams', 'Prentice Hall', 'Morgan Kaufmann'],
lambda p: p.name
)
publishers = Publisher.objects.annotate(num_books=Count("book")).filter(num_books__in=[1, 3]).order_by("pk")
self.assertQuerysetEqual(
publishers,
['Sams', 'Morgan Kaufmann', 'Expensive Publisher'],
lambda p: p.name,
)
publishers = Publisher.objects.annotate(num_books=Count("book")).filter(num_books__isnull=True)
self.assertEqual(len(publishers), 0)
def test_annotation(self):
vals = Author.objects.filter(pk=self.a1.pk).aggregate(Count("friends__id"))
self.assertEqual(vals, {"friends__id__count": 2})
books = Book.objects.annotate(num_authors=Count("authors__name")).filter(num_authors__exact=2).order_by("pk")
self.assertQuerysetEqual(
books, [
"The Definitive Guide to Django: Web Development Done Right",
"Artificial Intelligence: A Modern Approach",
],
lambda b: b.name
)
authors = (
Author.objects
.annotate(num_friends=Count("friends__id", distinct=True))
.filter(num_friends=0)
.order_by("pk")
)
self.assertQuerysetEqual(authors, ['Brad Dayley'], lambda a: a.name)
publishers = Publisher.objects.annotate(num_books=Count("book__id")).filter(num_books__gt=1).order_by("pk")
self.assertQuerysetEqual(publishers, ['Apress', 'Prentice Hall'], lambda p: p.name)
publishers = (
Publisher.objects
.filter(book__price__lt=Decimal("40.0"))
.annotate(num_books=Count("book__id"))
.filter(num_books__gt=1)
)
self.assertQuerysetEqual(publishers, ['Apress'], lambda p: p.name)
books = (
Book.objects
.annotate(num_authors=Count("authors__id"))
.filter(authors__name__contains="Norvig", num_authors__gt=1)
)
self.assertQuerysetEqual(
books,
['Artificial Intelligence: A Modern Approach'],
lambda b: b.name
)
def test_more_aggregation(self):
a = Author.objects.get(name__contains='Norvig')
b = Book.objects.get(name__contains='Done Right')
b.authors.add(a)
b.save()
vals = (
Book.objects
.annotate(num_authors=Count("authors__id"))
.filter(authors__name__contains="Norvig", num_authors__gt=1)
.aggregate(Avg("rating"))
)
self.assertEqual(vals, {"rating__avg": 4.25})
def test_even_more_aggregate(self):
publishers = Publisher.objects.annotate(
earliest_book=Min("book__pubdate"),
).exclude(earliest_book=None).order_by("earliest_book").values(
'earliest_book',
'num_awards',
'id',
'name',
)
self.assertEqual(
list(publishers), [
{
'earliest_book': datetime.date(1991, 10, 15),
'num_awards': 9,
'id': self.p4.id,
'name': 'Morgan Kaufmann'
},
{
'earliest_book': datetime.date(1995, 1, 15),
'num_awards': 7,
'id': self.p3.id,
'name': 'Prentice Hall'
},
{
'earliest_book': datetime.date(2007, 12, 6),
'num_awards': 3,
'id': self.p1.id,
'name': 'Apress'
},
{
'earliest_book': datetime.date(2008, 3, 3),
'num_awards': 1,
'id': self.p2.id,
'name': 'Sams'
}
]
)
vals = Store.objects.aggregate(Max("friday_night_closing"), Min("original_opening"))
self.assertEqual(
vals,
{
"friday_night_closing__max": datetime.time(23, 59, 59),
"original_opening__min": datetime.datetime(1945, 4, 25, 16, 24, 14),
}
)
def test_annotate_values_list(self):
books = (
Book.objects
.filter(pk=self.b1.pk)
.annotate(mean_age=Avg("authors__age"))
.values_list("pk", "isbn", "mean_age")
)
self.assertEqual(list(books), [(self.b1.id, '159059725', 34.5)])
books = Book.objects.filter(pk=self.b1.pk).annotate(mean_age=Avg("authors__age")).values_list("isbn")
self.assertEqual(list(books), [('159059725',)])
books = Book.objects.filter(pk=self.b1.pk).annotate(mean_age=Avg("authors__age")).values_list("mean_age")
self.assertEqual(list(books), [(34.5,)])
books = (
Book.objects
.filter(pk=self.b1.pk)
.annotate(mean_age=Avg("authors__age"))
.values_list("mean_age", flat=True)
)
self.assertEqual(list(books), [34.5])
books = Book.objects.values_list("price").annotate(count=Count("price")).order_by("-count", "price")
self.assertEqual(
list(books), [
(Decimal("29.69"), 2),
(Decimal('23.09'), 1),
(Decimal('30'), 1),
(Decimal('75'), 1),
(Decimal('82.8'), 1),
]
)
def test_dates_with_aggregation(self):
"""
.dates() returns a distinct set of dates when applied to a
QuerySet with aggregation.
Refs #18056. Previously, .dates() would return distinct (date_kind,
aggregation) sets, in this case (year, num_authors), so 2008 would be
        returned twice because there are books from 2008 with different
        numbers of authors.
"""
dates = Book.objects.annotate(num_authors=Count("authors")).dates('pubdate', 'year')
self.assertQuerysetEqual(
dates, [
"datetime.date(1991, 1, 1)",
"datetime.date(1995, 1, 1)",
"datetime.date(2007, 1, 1)",
"datetime.date(2008, 1, 1)"
]
)
def test_values_aggregation(self):
# Refs #20782
max_rating = Book.objects.values('rating').aggregate(max_rating=Max('rating'))
self.assertEqual(max_rating['max_rating'], 5)
max_books_per_rating = Book.objects.values('rating').annotate(
books_per_rating=Count('id')
).aggregate(Max('books_per_rating'))
self.assertEqual(
max_books_per_rating,
{'books_per_rating__max': 3})
def test_ticket17424(self):
"""
Doing exclude() on a foreign model after annotate() doesn't crash.
"""
all_books = list(Book.objects.values_list('pk', flat=True).order_by('pk'))
annotated_books = Book.objects.order_by('pk').annotate(one=Count("id"))
        # The value doesn't matter; we just need any negative
        # constraint on a related model that's a no-op.
excluded_books = annotated_books.exclude(publisher__name="__UNLIKELY_VALUE__")
        # Generating the SQL for the query must not crash.
str(excluded_books.query)
self.assertQuerysetEqual(excluded_books, all_books, lambda x: x.pk)
# Check internal state
self.assertIsNone(annotated_books.query.alias_map["aggregation_book"].join_type)
self.assertIsNone(excluded_books.query.alias_map["aggregation_book"].join_type)
def test_ticket12886(self):
"""
Aggregation over sliced queryset works correctly.
"""
qs = Book.objects.all().order_by('-rating')[0:3]
vals = qs.aggregate(average_top3_rating=Avg('rating'))['average_top3_rating']
self.assertAlmostEqual(vals, 4.5, places=2)
def test_ticket11881(self):
"""
        Subqueries do not needlessly contain ORDER BY, SELECT FOR UPDATE, or
        select_related() joins.
"""
        qs = (
            Book.objects.all()
            .select_for_update()
            .order_by('pk')
            .select_related('publisher')
            .annotate(max_pk=Max('pk'))
        )
with CaptureQueriesContext(connection) as captured_queries:
qs.aggregate(avg_pk=Avg('max_pk'))
self.assertEqual(len(captured_queries), 1)
qstr = captured_queries[0]['sql'].lower()
self.assertNotIn('for update', qstr)
forced_ordering = connection.ops.force_no_ordering()
if forced_ordering:
# If the backend needs to force an ordering we make sure it's
# the only "ORDER BY" clause present in the query.
self.assertEqual(
re.findall(r'order by (\w+)', qstr),
[', '.join(f[1][0] for f in forced_ordering).lower()]
)
else:
self.assertNotIn('order by', qstr)
self.assertEqual(qstr.count(' join '), 0)
def test_decimal_max_digits_has_no_effect(self):
Book.objects.all().delete()
a1 = Author.objects.first()
p1 = Publisher.objects.first()
thedate = timezone.now()
for i in range(10):
Book.objects.create(
isbn="abcde{}".format(i), name="none", pages=10, rating=4.0,
price=9999.98, contact=a1, publisher=p1, pubdate=thedate)
book = Book.objects.aggregate(price_sum=Sum('price'))
self.assertEqual(book['price_sum'], Decimal("99999.80"))
def test_nonaggregate_aggregation_throws(self):
with self.assertRaisesMessage(TypeError, 'fail is not an aggregate expression'):
Book.objects.aggregate(fail=F('price'))
def test_nonfield_annotation(self):
book = Book.objects.annotate(val=Max(Value(2, output_field=IntegerField()))).first()
self.assertEqual(book.val, 2)
book = Book.objects.annotate(val=Max(Value(2), output_field=IntegerField())).first()
self.assertEqual(book.val, 2)
book = Book.objects.annotate(val=Max(2, output_field=IntegerField())).first()
self.assertEqual(book.val, 2)
def test_missing_output_field_raises_error(self):
with self.assertRaisesMessage(FieldError, 'Cannot resolve expression type, unknown output_field'):
Book.objects.annotate(val=Max(2)).first()
def test_annotation_expressions(self):
authors = Author.objects.annotate(combined_ages=Sum(F('age') + F('friends__age'))).order_by('name')
authors2 = Author.objects.annotate(combined_ages=Sum('age') + Sum('friends__age')).order_by('name')
for qs in (authors, authors2):
self.assertQuerysetEqual(
qs, [
('Adrian Holovaty', 132),
('Brad Dayley', None),
('Jacob Kaplan-Moss', 129),
('James Bennett', 63),
('Jeffrey Forcier', 128),
('Paul Bissex', 120),
('Peter Norvig', 103),
('Stuart Russell', 103),
('Wesley J. Chun', 176)
],
lambda a: (a.name, a.combined_ages)
)
def test_aggregation_expressions(self):
a1 = Author.objects.aggregate(av_age=Sum('age') / Count('*'))
a2 = Author.objects.aggregate(av_age=Sum('age') / Count('age'))
a3 = Author.objects.aggregate(av_age=Avg('age'))
self.assertEqual(a1, {'av_age': 37})
self.assertEqual(a2, {'av_age': 37})
self.assertEqual(a3, {'av_age': Approximate(37.4, places=1)})
def test_avg_decimal_field(self):
v = Book.objects.filter(rating=4).aggregate(avg_price=(Avg('price')))['avg_price']
self.assertIsInstance(v, Decimal)
self.assertEqual(v, Approximate(Decimal('47.39'), places=2))
def test_order_of_precedence(self):
p1 = Book.objects.filter(rating=4).aggregate(avg_price=(Avg('price') + 2) * 3)
self.assertEqual(p1, {'avg_price': Approximate(Decimal('148.18'), places=2)})
p2 = Book.objects.filter(rating=4).aggregate(avg_price=Avg('price') + 2 * 3)
self.assertEqual(p2, {'avg_price': Approximate(Decimal('53.39'), places=2)})
def test_combine_different_types(self):
msg = 'Expression contains mixed types. You must set output_field.'
qs = Book.objects.annotate(sums=Sum('rating') + Sum('pages') + Sum('price'))
with self.assertRaisesMessage(FieldError, msg):
qs.first()
with self.assertRaisesMessage(FieldError, msg):
qs.first()
b1 = Book.objects.annotate(sums=Sum(F('rating') + F('pages') + F('price'),
output_field=IntegerField())).get(pk=self.b4.pk)
self.assertEqual(b1.sums, 383)
b2 = Book.objects.annotate(sums=Sum(F('rating') + F('pages') + F('price'),
output_field=FloatField())).get(pk=self.b4.pk)
self.assertEqual(b2.sums, 383.69)
b3 = Book.objects.annotate(sums=Sum(F('rating') + F('pages') + F('price'),
output_field=DecimalField())).get(pk=self.b4.pk)
self.assertEqual(b3.sums, Approximate(Decimal("383.69"), places=2))
def test_complex_aggregations_require_kwarg(self):
with self.assertRaisesMessage(TypeError, 'Complex annotations require an alias'):
Author.objects.annotate(Sum(F('age') + F('friends__age')))
with self.assertRaisesMessage(TypeError, 'Complex aggregates require an alias'):
Author.objects.aggregate(Sum('age') / Count('age'))
with self.assertRaisesMessage(TypeError, 'Complex aggregates require an alias'):
Author.objects.aggregate(Sum(1))
def test_aggregate_over_complex_annotation(self):
qs = Author.objects.annotate(
combined_ages=Sum(F('age') + F('friends__age')))
age = qs.aggregate(max_combined_age=Max('combined_ages'))
self.assertEqual(age['max_combined_age'], 176)
age = qs.aggregate(max_combined_age_doubled=Max('combined_ages') * 2)
self.assertEqual(age['max_combined_age_doubled'], 176 * 2)
age = qs.aggregate(
max_combined_age_doubled=Max('combined_ages') + Max('combined_ages'))
self.assertEqual(age['max_combined_age_doubled'], 176 * 2)
age = qs.aggregate(
max_combined_age_doubled=Max('combined_ages') + Max('combined_ages'),
sum_combined_age=Sum('combined_ages'))
self.assertEqual(age['max_combined_age_doubled'], 176 * 2)
self.assertEqual(age['sum_combined_age'], 954)
age = qs.aggregate(
max_combined_age_doubled=Max('combined_ages') + Max('combined_ages'),
sum_combined_age_doubled=Sum('combined_ages') + Sum('combined_ages'))
self.assertEqual(age['max_combined_age_doubled'], 176 * 2)
self.assertEqual(age['sum_combined_age_doubled'], 954 * 2)
def test_values_annotation_with_expression(self):
# ensure the F() is promoted to the group by clause
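        # (Grouping is then on (name, age), so within Adrian Holovaty's
        # single-row group Sum('age') equals age and another_age comes out as
        # 34 + 34 = 68.)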
qs = Author.objects.values('name').annotate(another_age=Sum('age') + F('age'))
a = qs.get(name="Adrian Holovaty")
self.assertEqual(a['another_age'], 68)
qs = qs.annotate(friend_count=Count('friends'))
a = qs.get(name="Adrian Holovaty")
self.assertEqual(a['friend_count'], 2)
        qs = (
            qs.annotate(combined_age=Sum('age') + F('friends__age'))
            .filter(name="Adrian Holovaty")
            .order_by('-combined_age')
        )
self.assertEqual(
list(qs), [
{
"name": 'Adrian Holovaty',
"another_age": 68,
"friend_count": 1,
"combined_age": 69
},
{
"name": 'Adrian Holovaty',
"another_age": 68,
"friend_count": 1,
"combined_age": 63
}
]
)
vals = qs.values('name', 'combined_age')
self.assertEqual(
list(vals), [
{'name': 'Adrian Holovaty', 'combined_age': 69},
{'name': 'Adrian Holovaty', 'combined_age': 63},
]
)
def test_annotate_values_aggregate(self):
alias_age = Author.objects.annotate(
age_alias=F('age')
).values(
'age_alias',
).aggregate(sum_age=Sum('age_alias'))
age = Author.objects.values('age').aggregate(sum_age=Sum('age'))
self.assertEqual(alias_age['sum_age'], age['sum_age'])
def test_annotate_over_annotate(self):
author = Author.objects.annotate(
age_alias=F('age')
).annotate(
sum_age=Sum('age_alias')
).get(name="Adrian Holovaty")
other_author = Author.objects.annotate(
sum_age=Sum('age')
).get(name="Adrian Holovaty")
self.assertEqual(author.sum_age, other_author.sum_age)
def test_annotated_aggregate_over_annotated_aggregate(self):
with self.assertRaisesMessage(FieldError, "Cannot compute Sum('id__max'): 'id__max' is an aggregate"):
Book.objects.annotate(Max('id')).annotate(Sum('id__max'))
class MyMax(Max):
def as_sql(self, compiler, connection):
self.set_source_expressions(self.get_source_expressions()[0:1])
return super().as_sql(compiler, connection)
with self.assertRaisesMessage(FieldError, "Cannot compute Max('id__max'): 'id__max' is an aggregate"):
Book.objects.annotate(Max('id')).annotate(my_max=MyMax('id__max', 'price'))
def test_multi_arg_aggregate(self):
class MyMax(Max):
output_field = DecimalField()
def as_sql(self, compiler, connection):
copy = self.copy()
copy.set_source_expressions(copy.get_source_expressions()[0:1])
return super(MyMax, copy).as_sql(compiler, connection)
with self.assertRaisesMessage(TypeError, 'Complex aggregates require an alias'):
Book.objects.aggregate(MyMax('pages', 'price'))
with self.assertRaisesMessage(TypeError, 'Complex annotations require an alias'):
Book.objects.annotate(MyMax('pages', 'price'))
Book.objects.aggregate(max_field=MyMax('pages', 'price'))
def test_add_implementation(self):
class MySum(Sum):
pass
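        # Expressions look for a backend-specific "as_<vendor>" method (e.g.
        # as_sqlite or as_postgresql) before falling back to as_sql(), so the
        # setattr() calls below only affect compilation on the current
        # backend.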
# test completely changing how the output is rendered
def lower_case_function_override(self, compiler, connection):
sql, params = compiler.compile(self.source_expressions[0])
substitutions = {'function': self.function.lower(), 'expressions': sql, 'distinct': ''}
substitutions.update(self.extra)
return self.template % substitutions, params
setattr(MySum, 'as_' + connection.vendor, lower_case_function_override)
qs = Book.objects.annotate(
sums=MySum(F('rating') + F('pages') + F('price'), output_field=IntegerField())
)
self.assertEqual(str(qs.query).count('sum('), 1)
b1 = qs.get(pk=self.b4.pk)
self.assertEqual(b1.sums, 383)
# test changing the dict and delegating
def lower_case_function_super(self, compiler, connection):
self.extra['function'] = self.function.lower()
return super(MySum, self).as_sql(compiler, connection)
setattr(MySum, 'as_' + connection.vendor, lower_case_function_super)
qs = Book.objects.annotate(
sums=MySum(F('rating') + F('pages') + F('price'), output_field=IntegerField())
)
self.assertEqual(str(qs.query).count('sum('), 1)
b1 = qs.get(pk=self.b4.pk)
self.assertEqual(b1.sums, 383)
# test overriding all parts of the template
def be_evil(self, compiler, connection):
substitutions = {'function': 'MAX', 'expressions': '2', 'distinct': ''}
substitutions.update(self.extra)
return self.template % substitutions, ()
setattr(MySum, 'as_' + connection.vendor, be_evil)
qs = Book.objects.annotate(
sums=MySum(F('rating') + F('pages') + F('price'), output_field=IntegerField())
)
self.assertEqual(str(qs.query).count('MAX('), 1)
b1 = qs.get(pk=self.b4.pk)
self.assertEqual(b1.sums, 2)
def test_complex_values_aggregation(self):
max_rating = Book.objects.values('rating').aggregate(
double_max_rating=Max('rating') + Max('rating'))
self.assertEqual(max_rating['double_max_rating'], 5 * 2)
max_books_per_rating = Book.objects.values('rating').annotate(
books_per_rating=Count('id') + 5
).aggregate(Max('books_per_rating'))
self.assertEqual(
max_books_per_rating,
{'books_per_rating__max': 3 + 5})
def test_expression_on_aggregation(self):
# Create a plain expression
class Greatest(Func):
function = 'GREATEST'
def as_sqlite(self, compiler, connection, **extra_context):
return super().as_sql(compiler, connection, function='MAX', **extra_context)
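        # (SQLite has no GREATEST function; its multi-argument MAX() is a
        # scalar maximum, hence the override above.)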
qs = Publisher.objects.annotate(
price_or_median=Greatest(Avg('book__rating', output_field=DecimalField()), Avg('book__price'))
).filter(price_or_median__gte=F('num_awards')).order_by('num_awards')
self.assertQuerysetEqual(
qs, [1, 3, 7, 9], lambda v: v.num_awards)
qs2 = Publisher.objects.annotate(
rating_or_num_awards=Greatest(Avg('book__rating'), F('num_awards'),
output_field=FloatField())
).filter(rating_or_num_awards__gt=F('num_awards')).order_by('num_awards')
self.assertQuerysetEqual(
qs2, [1, 3], lambda v: v.num_awards)
def test_arguments_must_be_expressions(self):
msg = 'QuerySet.aggregate() received non-expression(s): %s.'
with self.assertRaisesMessage(TypeError, msg % FloatField()):
Book.objects.aggregate(FloatField())
with self.assertRaisesMessage(TypeError, msg % True):
Book.objects.aggregate(is_book=True)
with self.assertRaisesMessage(TypeError, msg % ', '.join([str(FloatField()), 'True'])):
Book.objects.aggregate(FloatField(), Avg('price'), is_book=True)
|
a1a1be7ef27e32445fdd9b5ff1bf506e11f76604a07e5578a315e91bfd996267 | import datetime
from decimal import Decimal
from django.db.models import Case, Count, F, Q, Sum, When
from django.test import TestCase
from .models import Author, Book, Publisher
class FilteredAggregateTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.a1 = Author.objects.create(name='test', age=40)
cls.a2 = Author.objects.create(name='test2', age=60)
cls.a3 = Author.objects.create(name='test3', age=100)
cls.p1 = Publisher.objects.create(name='Apress', num_awards=3, duration=datetime.timedelta(days=1))
cls.b1 = Book.objects.create(
isbn='159059725', name='The Definitive Guide to Django: Web Development Done Right',
pages=447, rating=4.5, price=Decimal('30.00'), contact=cls.a1, publisher=cls.p1,
pubdate=datetime.date(2007, 12, 6),
)
cls.b2 = Book.objects.create(
isbn='067232959', name='Sams Teach Yourself Django in 24 Hours',
pages=528, rating=3.0, price=Decimal('23.09'), contact=cls.a2, publisher=cls.p1,
pubdate=datetime.date(2008, 3, 3),
)
cls.b3 = Book.objects.create(
isbn='159059996', name='Practical Django Projects',
pages=600, rating=4.5, price=Decimal('29.69'), contact=cls.a3, publisher=cls.p1,
pubdate=datetime.date(2008, 6, 23),
)
cls.a1.friends.add(cls.a2)
cls.a1.friends.add(cls.a3)
cls.b1.authors.add(cls.a1)
cls.b1.authors.add(cls.a3)
cls.b2.authors.add(cls.a2)
cls.b3.authors.add(cls.a3)
def test_filtered_aggregates(self):
agg = Sum('age', filter=Q(name__startswith='test'))
self.assertEqual(Author.objects.aggregate(age=agg)['age'], 200)
def test_double_filtered_aggregates(self):
agg = Sum('age', filter=Q(Q(name='test2') & ~Q(name='test')))
self.assertEqual(Author.objects.aggregate(age=agg)['age'], 60)
def test_excluded_aggregates(self):
agg = Sum('age', filter=~Q(name='test2'))
self.assertEqual(Author.objects.aggregate(age=agg)['age'], 140)
def test_related_aggregates_m2m(self):
agg = Sum('friends__age', filter=~Q(friends__name='test'))
self.assertEqual(Author.objects.filter(name='test').aggregate(age=agg)['age'], 160)
def test_related_aggregates_m2m_and_fk(self):
q = Q(friends__book__publisher__name='Apress') & ~Q(friends__name='test3')
agg = Sum('friends__book__pages', filter=q)
self.assertEqual(Author.objects.filter(name='test').aggregate(pages=agg)['pages'], 528)
def test_plain_annotate(self):
agg = Sum('book__pages', filter=Q(book__rating__gt=3))
qs = Author.objects.annotate(pages=agg).order_by('pk')
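        # a1's only qualifying book is b1 (447 pages), a2's b2 is filtered out
        # by its 3.0 rating, and a3 wrote both b1 and b3 (447 + 600 = 1047).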
self.assertSequenceEqual([a.pages for a in qs], [447, None, 1047])
def test_filtered_aggregate_on_annotate(self):
pages_annotate = Sum('book__pages', filter=Q(book__rating__gt=3))
age_agg = Sum('age', filter=Q(total_pages__gte=400))
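        # Only a1 (447 pages) and a3 (1047 pages) clear the 400-page bar, so
        # summed_age is 40 + 100 = 140.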
aggregated = Author.objects.annotate(total_pages=pages_annotate).aggregate(summed_age=age_agg)
self.assertEqual(aggregated, {'summed_age': 140})
def test_case_aggregate(self):
agg = Sum(
Case(When(friends__age=40, then=F('friends__age'))),
filter=Q(friends__name__startswith='test'),
)
self.assertEqual(Author.objects.aggregate(age=agg)['age'], 80)
def test_sum_star_exception(self):
msg = 'Star cannot be used with filter. Please specify a field.'
with self.assertRaisesMessage(ValueError, msg):
Count('*', filter=Q(age=40))
def test_filtered_reused_subquery(self):
qs = Author.objects.annotate(
older_friends_count=Count('friends', filter=Q(friends__age__gt=F('age'))),
).filter(
older_friends_count__gte=2,
)
self.assertEqual(qs.get(pk__in=qs.values('pk')), self.a1)
|
7f4d6982e2c10a96083404dad3a5ade6d4285f9bd90b2c996c5b4a214e708705 | import os
import unittest
import warnings
from io import StringIO
from unittest import mock
from django.conf import settings
from django.contrib.staticfiles.finders import get_finder, get_finders
from django.contrib.staticfiles.storage import staticfiles_storage
from django.core.exceptions import ImproperlyConfigured
from django.core.files.storage import default_storage
from django.db import connection, connections, models, router
from django.forms import EmailField, IntegerField
from django.http import HttpResponse
from django.template.loader import render_to_string
from django.test import (
SimpleTestCase, TestCase, TransactionTestCase, skipIfDBFeature,
skipUnlessDBFeature,
)
from django.test.html import HTMLParseError, parse_html
from django.test.utils import (
CaptureQueriesContext, TestContextDecorator, isolate_apps,
override_settings, setup_test_environment,
)
from django.urls import NoReverseMatch, path, reverse
from .models import Car, Person, PossessedCar
from .views import empty_response
class SkippingTestCase(SimpleTestCase):
def _assert_skipping(self, func, expected_exc, msg=None):
try:
if msg is not None:
self.assertRaisesMessage(expected_exc, msg, func)
else:
self.assertRaises(expected_exc, func)
except unittest.SkipTest:
self.fail('%s should not result in a skipped test.' % func.__name__)
def test_skip_unless_db_feature(self):
"""
Testing the django.test.skipUnlessDBFeature decorator.
"""
        # Total hack, but it works: we just want an attribute that's always true.
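        # (Roughly speaking, the decorator checks
        # getattr(connection.features, <name>, False), and "__class__" exists
        # and is truthy on every features instance.)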
@skipUnlessDBFeature("__class__")
def test_func():
raise ValueError
@skipUnlessDBFeature("notprovided")
def test_func2():
raise ValueError
@skipUnlessDBFeature("__class__", "__class__")
def test_func3():
raise ValueError
@skipUnlessDBFeature("__class__", "notprovided")
def test_func4():
raise ValueError
self._assert_skipping(test_func, ValueError)
self._assert_skipping(test_func2, unittest.SkipTest)
self._assert_skipping(test_func3, ValueError)
self._assert_skipping(test_func4, unittest.SkipTest)
class SkipTestCase(SimpleTestCase):
@skipUnlessDBFeature('missing')
def test_foo(self):
pass
self._assert_skipping(
SkipTestCase('test_foo').test_foo,
ValueError,
"skipUnlessDBFeature cannot be used on test_foo (test_utils.tests."
"SkippingTestCase.test_skip_unless_db_feature.<locals>.SkipTestCase) "
"as SkippingTestCase.test_skip_unless_db_feature.<locals>.SkipTestCase "
"doesn't allow queries against the 'default' database."
)
def test_skip_if_db_feature(self):
"""
Testing the django.test.skipIfDBFeature decorator.
"""
@skipIfDBFeature("__class__")
def test_func():
raise ValueError
@skipIfDBFeature("notprovided")
def test_func2():
raise ValueError
@skipIfDBFeature("__class__", "__class__")
def test_func3():
raise ValueError
@skipIfDBFeature("__class__", "notprovided")
def test_func4():
raise ValueError
@skipIfDBFeature("notprovided", "notprovided")
def test_func5():
raise ValueError
self._assert_skipping(test_func, unittest.SkipTest)
self._assert_skipping(test_func2, ValueError)
self._assert_skipping(test_func3, unittest.SkipTest)
self._assert_skipping(test_func4, unittest.SkipTest)
self._assert_skipping(test_func5, ValueError)
class SkipTestCase(SimpleTestCase):
@skipIfDBFeature('missing')
def test_foo(self):
pass
self._assert_skipping(
SkipTestCase('test_foo').test_foo,
ValueError,
"skipIfDBFeature cannot be used on test_foo (test_utils.tests."
"SkippingTestCase.test_skip_if_db_feature.<locals>.SkipTestCase) "
"as SkippingTestCase.test_skip_if_db_feature.<locals>.SkipTestCase "
"doesn't allow queries against the 'default' database."
)
class SkippingClassTestCase(TestCase):
def test_skip_class_unless_db_feature(self):
@skipUnlessDBFeature("__class__")
class NotSkippedTests(TestCase):
def test_dummy(self):
return
@skipUnlessDBFeature("missing")
@skipIfDBFeature("__class__")
class SkippedTests(TestCase):
def test_will_be_skipped(self):
self.fail("We should never arrive here.")
@skipIfDBFeature("__dict__")
class SkippedTestsSubclass(SkippedTests):
pass
test_suite = unittest.TestSuite()
test_suite.addTest(NotSkippedTests('test_dummy'))
try:
test_suite.addTest(SkippedTests('test_will_be_skipped'))
test_suite.addTest(SkippedTestsSubclass('test_will_be_skipped'))
except unittest.SkipTest:
self.fail('SkipTest should not be raised here.')
result = unittest.TextTestRunner(stream=StringIO()).run(test_suite)
self.assertEqual(result.testsRun, 3)
self.assertEqual(len(result.skipped), 2)
self.assertEqual(result.skipped[0][1], 'Database has feature(s) __class__')
self.assertEqual(result.skipped[1][1], 'Database has feature(s) __class__')
def test_missing_default_databases(self):
@skipIfDBFeature('missing')
class MissingDatabases(SimpleTestCase):
def test_assertion_error(self):
pass
suite = unittest.TestSuite()
try:
suite.addTest(MissingDatabases('test_assertion_error'))
except unittest.SkipTest:
self.fail("SkipTest should not be raised at this stage")
runner = unittest.TextTestRunner(stream=StringIO())
msg = (
"skipIfDBFeature cannot be used on <class 'test_utils.tests."
"SkippingClassTestCase.test_missing_default_databases.<locals>."
"MissingDatabases'> as it doesn't allow queries against the "
"'default' database."
)
with self.assertRaisesMessage(ValueError, msg):
runner.run(suite)
@override_settings(ROOT_URLCONF='test_utils.urls')
class AssertNumQueriesTests(TestCase):
def test_assert_num_queries(self):
def test_func():
raise ValueError
with self.assertRaises(ValueError):
self.assertNumQueries(2, test_func)
def test_assert_num_queries_with_client(self):
person = Person.objects.create(name='test')
self.assertNumQueries(
1,
self.client.get,
"/test_utils/get_person/%s/" % person.pk
)
self.assertNumQueries(
1,
self.client.get,
"/test_utils/get_person/%s/" % person.pk
)
def test_func():
self.client.get("/test_utils/get_person/%s/" % person.pk)
self.client.get("/test_utils/get_person/%s/" % person.pk)
self.assertNumQueries(2, test_func)
@unittest.skipUnless(
connection.vendor != 'sqlite' or not connection.is_in_memory_db(),
'For SQLite in-memory tests, closing the connection destroys the database.'
)
class AssertNumQueriesUponConnectionTests(TransactionTestCase):
available_apps = []
def test_ignores_connection_configuration_queries(self):
real_ensure_connection = connection.ensure_connection
connection.close()
def make_configuration_query():
is_opening_connection = connection.connection is None
real_ensure_connection()
if is_opening_connection:
# Avoid infinite recursion. Creating a cursor calls
# ensure_connection() which is currently mocked by this method.
connection.cursor().execute('SELECT 1' + connection.features.bare_select_suffix)
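                # (bare_select_suffix is, e.g., " FROM DUAL" on Oracle and an
                # empty string on most other backends.)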
ensure_connection = 'django.db.backends.base.base.BaseDatabaseWrapper.ensure_connection'
with mock.patch(ensure_connection, side_effect=make_configuration_query):
with self.assertNumQueries(1):
list(Car.objects.all())
class AssertQuerysetEqualTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.p1 = Person.objects.create(name='p1')
cls.p2 = Person.objects.create(name='p2')
def test_ordered(self):
self.assertQuerysetEqual(
Person.objects.all().order_by('name'),
[repr(self.p1), repr(self.p2)]
)
def test_unordered(self):
self.assertQuerysetEqual(
Person.objects.all().order_by('name'),
[repr(self.p2), repr(self.p1)],
ordered=False
)
def test_transform(self):
self.assertQuerysetEqual(
Person.objects.all().order_by('name'),
[self.p1.pk, self.p2.pk],
transform=lambda x: x.pk
)
def test_undefined_order(self):
# Using an unordered queryset with more than one ordered value
# is an error.
msg = 'Trying to compare non-ordered queryset against more than one ordered values'
with self.assertRaisesMessage(ValueError, msg):
self.assertQuerysetEqual(
Person.objects.all(),
[repr(self.p1), repr(self.p2)]
)
# No error for one value.
self.assertQuerysetEqual(
Person.objects.filter(name='p1'),
[repr(self.p1)]
)
def test_repeated_values(self):
"""
assertQuerysetEqual checks the number of appearance of each item
when used with option ordered=False.
"""
batmobile = Car.objects.create(name='Batmobile')
k2000 = Car.objects.create(name='K 2000')
PossessedCar.objects.bulk_create([
PossessedCar(car=batmobile, belongs_to=self.p1),
PossessedCar(car=batmobile, belongs_to=self.p1),
PossessedCar(car=k2000, belongs_to=self.p1),
PossessedCar(car=k2000, belongs_to=self.p1),
PossessedCar(car=k2000, belongs_to=self.p1),
PossessedCar(car=k2000, belongs_to=self.p1),
])
with self.assertRaises(AssertionError):
self.assertQuerysetEqual(
self.p1.cars.all(),
[repr(batmobile), repr(k2000)],
ordered=False
)
self.assertQuerysetEqual(
self.p1.cars.all(),
[repr(batmobile)] * 2 + [repr(k2000)] * 4,
ordered=False
)
@override_settings(ROOT_URLCONF='test_utils.urls')
class CaptureQueriesContextManagerTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.person_pk = str(Person.objects.create(name='test').pk)
def test_simple(self):
with CaptureQueriesContext(connection) as captured_queries:
Person.objects.get(pk=self.person_pk)
self.assertEqual(len(captured_queries), 1)
self.assertIn(self.person_pk, captured_queries[0]['sql'])
with CaptureQueriesContext(connection) as captured_queries:
pass
self.assertEqual(0, len(captured_queries))
def test_within(self):
with CaptureQueriesContext(connection) as captured_queries:
Person.objects.get(pk=self.person_pk)
self.assertEqual(len(captured_queries), 1)
self.assertIn(self.person_pk, captured_queries[0]['sql'])
def test_nested(self):
with CaptureQueriesContext(connection) as captured_queries:
Person.objects.count()
with CaptureQueriesContext(connection) as nested_captured_queries:
Person.objects.count()
self.assertEqual(1, len(nested_captured_queries))
self.assertEqual(2, len(captured_queries))
def test_failure(self):
with self.assertRaises(TypeError):
with CaptureQueriesContext(connection):
raise TypeError
def test_with_client(self):
with CaptureQueriesContext(connection) as captured_queries:
self.client.get("/test_utils/get_person/%s/" % self.person_pk)
self.assertEqual(len(captured_queries), 1)
self.assertIn(self.person_pk, captured_queries[0]['sql'])
with CaptureQueriesContext(connection) as captured_queries:
self.client.get("/test_utils/get_person/%s/" % self.person_pk)
self.assertEqual(len(captured_queries), 1)
self.assertIn(self.person_pk, captured_queries[0]['sql'])
with CaptureQueriesContext(connection) as captured_queries:
self.client.get("/test_utils/get_person/%s/" % self.person_pk)
self.client.get("/test_utils/get_person/%s/" % self.person_pk)
self.assertEqual(len(captured_queries), 2)
self.assertIn(self.person_pk, captured_queries[0]['sql'])
self.assertIn(self.person_pk, captured_queries[1]['sql'])
@override_settings(ROOT_URLCONF='test_utils.urls')
class AssertNumQueriesContextManagerTests(TestCase):
def test_simple(self):
with self.assertNumQueries(0):
pass
with self.assertNumQueries(1):
Person.objects.count()
with self.assertNumQueries(2):
Person.objects.count()
Person.objects.count()
def test_failure(self):
with self.assertRaises(AssertionError) as exc_info:
with self.assertNumQueries(2):
Person.objects.count()
exc_lines = str(exc_info.exception).split('\n')
self.assertEqual(exc_lines[0], '1 != 2 : 1 queries executed, 2 expected')
self.assertEqual(exc_lines[1], 'Captured queries were:')
self.assertTrue(exc_lines[2].startswith('1.')) # queries are numbered
with self.assertRaises(TypeError):
with self.assertNumQueries(4000):
raise TypeError
def test_with_client(self):
person = Person.objects.create(name="test")
with self.assertNumQueries(1):
self.client.get("/test_utils/get_person/%s/" % person.pk)
with self.assertNumQueries(1):
self.client.get("/test_utils/get_person/%s/" % person.pk)
with self.assertNumQueries(2):
self.client.get("/test_utils/get_person/%s/" % person.pk)
self.client.get("/test_utils/get_person/%s/" % person.pk)
@override_settings(ROOT_URLCONF='test_utils.urls')
class AssertTemplateUsedContextManagerTests(SimpleTestCase):
def test_usage(self):
with self.assertTemplateUsed('template_used/base.html'):
render_to_string('template_used/base.html')
with self.assertTemplateUsed(template_name='template_used/base.html'):
render_to_string('template_used/base.html')
with self.assertTemplateUsed('template_used/base.html'):
render_to_string('template_used/include.html')
with self.assertTemplateUsed('template_used/base.html'):
render_to_string('template_used/extends.html')
with self.assertTemplateUsed('template_used/base.html'):
render_to_string('template_used/base.html')
render_to_string('template_used/base.html')
def test_nested_usage(self):
with self.assertTemplateUsed('template_used/base.html'):
with self.assertTemplateUsed('template_used/include.html'):
render_to_string('template_used/include.html')
with self.assertTemplateUsed('template_used/extends.html'):
with self.assertTemplateUsed('template_used/base.html'):
render_to_string('template_used/extends.html')
with self.assertTemplateUsed('template_used/base.html'):
with self.assertTemplateUsed('template_used/alternative.html'):
render_to_string('template_used/alternative.html')
render_to_string('template_used/base.html')
with self.assertTemplateUsed('template_used/base.html'):
render_to_string('template_used/extends.html')
with self.assertTemplateNotUsed('template_used/base.html'):
render_to_string('template_used/alternative.html')
render_to_string('template_used/base.html')
def test_not_used(self):
with self.assertTemplateNotUsed('template_used/base.html'):
pass
with self.assertTemplateNotUsed('template_used/alternative.html'):
pass
def test_error_message(self):
msg = 'template_used/base.html was not rendered. No template was rendered.'
with self.assertRaisesMessage(AssertionError, msg):
with self.assertTemplateUsed('template_used/base.html'):
pass
with self.assertRaisesMessage(AssertionError, msg):
with self.assertTemplateUsed(template_name='template_used/base.html'):
pass
msg2 = (
'template_used/base.html was not rendered. Following templates '
'were rendered: template_used/alternative.html'
)
with self.assertRaisesMessage(AssertionError, msg2):
with self.assertTemplateUsed('template_used/base.html'):
render_to_string('template_used/alternative.html')
with self.assertRaisesMessage(AssertionError, 'No templates used to render the response'):
response = self.client.get('/test_utils/no_template_used/')
self.assertTemplateUsed(response, 'template_used/base.html')
def test_failure(self):
msg = 'response and/or template_name argument must be provided'
with self.assertRaisesMessage(TypeError, msg):
with self.assertTemplateUsed():
pass
msg = 'No templates used to render the response'
with self.assertRaisesMessage(AssertionError, msg):
with self.assertTemplateUsed(''):
pass
with self.assertRaisesMessage(AssertionError, msg):
with self.assertTemplateUsed(''):
render_to_string('template_used/base.html')
with self.assertRaisesMessage(AssertionError, msg):
with self.assertTemplateUsed(template_name=''):
pass
msg = (
'template_used/base.html was not rendered. Following '
'templates were rendered: template_used/alternative.html'
)
with self.assertRaisesMessage(AssertionError, msg):
with self.assertTemplateUsed('template_used/base.html'):
render_to_string('template_used/alternative.html')
def test_assert_used_on_http_response(self):
response = HttpResponse()
error_msg = (
'assertTemplateUsed() and assertTemplateNotUsed() are only '
'usable on responses fetched using the Django test Client.'
)
with self.assertRaisesMessage(ValueError, error_msg):
self.assertTemplateUsed(response, 'template.html')
with self.assertRaisesMessage(ValueError, error_msg):
self.assertTemplateNotUsed(response, 'template.html')
class HTMLEqualTests(SimpleTestCase):
def test_html_parser(self):
element = parse_html('<div><p>Hello</p></div>')
self.assertEqual(len(element.children), 1)
self.assertEqual(element.children[0].name, 'p')
self.assertEqual(element.children[0].children[0], 'Hello')
parse_html('<p>')
parse_html('<p attr>')
dom = parse_html('<p>foo')
self.assertEqual(len(dom.children), 1)
self.assertEqual(dom.name, 'p')
self.assertEqual(dom[0], 'foo')
def test_parse_html_in_script(self):
parse_html('<script>var a = "<p" + ">";</script>')
parse_html('''
<script>
var js_sha_link='<p>***</p>';
</script>
''')
# script content will be parsed to text
dom = parse_html('''
<script><p>foo</p> '</scr'+'ipt>' <span>bar</span></script>
''')
self.assertEqual(len(dom.children), 1)
self.assertEqual(dom.children[0], "<p>foo</p> '</scr'+'ipt>' <span>bar</span>")
def test_self_closing_tags(self):
self_closing_tags = (
'br', 'hr', 'input', 'img', 'meta', 'spacer', 'link', 'frame',
'base', 'col',
)
for tag in self_closing_tags:
dom = parse_html('<p>Hello <%s> world</p>' % tag)
self.assertEqual(len(dom.children), 3)
self.assertEqual(dom[0], 'Hello')
self.assertEqual(dom[1].name, tag)
self.assertEqual(dom[2], 'world')
dom = parse_html('<p>Hello <%s /> world</p>' % tag)
self.assertEqual(len(dom.children), 3)
self.assertEqual(dom[0], 'Hello')
self.assertEqual(dom[1].name, tag)
self.assertEqual(dom[2], 'world')
def test_simple_equal_html(self):
self.assertHTMLEqual('', '')
self.assertHTMLEqual('<p></p>', '<p></p>')
self.assertHTMLEqual('<p></p>', ' <p> </p> ')
self.assertHTMLEqual(
'<div><p>Hello</p></div>',
'<div><p>Hello</p></div>')
self.assertHTMLEqual(
'<div><p>Hello</p></div>',
'<div> <p>Hello</p> </div>')
self.assertHTMLEqual(
'<div>\n<p>Hello</p></div>',
'<div><p>Hello</p></div>\n')
self.assertHTMLEqual(
'<div><p>Hello\nWorld !</p></div>',
'<div><p>Hello World\n!</p></div>')
self.assertHTMLEqual(
'<div><p>Hello\nWorld !</p></div>',
'<div><p>Hello World\n!</p></div>')
self.assertHTMLEqual(
'<p>Hello World !</p>',
'<p>Hello World\n\n!</p>')
self.assertHTMLEqual('<p> </p>', '<p></p>')
self.assertHTMLEqual('<p/>', '<p></p>')
self.assertHTMLEqual('<p />', '<p></p>')
self.assertHTMLEqual('<input checked>', '<input checked="checked">')
self.assertHTMLEqual('<p>Hello', '<p> Hello')
self.assertHTMLEqual('<p>Hello</p>World', '<p>Hello</p> World')
def test_ignore_comments(self):
self.assertHTMLEqual(
'<div>Hello<!-- this is a comment --> World!</div>',
'<div>Hello World!</div>')
def test_unequal_html(self):
self.assertHTMLNotEqual('<p>Hello</p>', '<p>Hello!</p>')
self.assertHTMLNotEqual('<p>foobar</p>', '<p>foo bar</p>')
self.assertHTMLNotEqual('<p>foo bar</p>', '<p>foo bar</p>')
self.assertHTMLNotEqual('<p>foo nbsp</p>', '<p>foo </p>')
self.assertHTMLNotEqual('<p>foo #20</p>', '<p>foo </p>')
self.assertHTMLNotEqual(
'<p><span>Hello</span><span>World</span></p>',
'<p><span>Hello</span>World</p>')
self.assertHTMLNotEqual(
'<p><span>Hello</span>World</p>',
'<p><span>Hello</span><span>World</span></p>')
def test_attributes(self):
self.assertHTMLEqual(
'<input type="text" id="id_name" />',
'<input id="id_name" type="text" />')
self.assertHTMLEqual(
'''<input type='text' id="id_name" />''',
'<input id="id_name" type="text" />')
self.assertHTMLNotEqual(
'<input type="text" id="id_name" />',
'<input type="password" id="id_name" />')
def test_complex_examples(self):
self.assertHTMLEqual(
"""<tr><th><label for="id_first_name">First name:</label></th>
<td><input type="text" name="first_name" value="John" id="id_first_name" /></td></tr>
<tr><th><label for="id_last_name">Last name:</label></th>
<td><input type="text" id="id_last_name" name="last_name" value="Lennon" /></td></tr>
<tr><th><label for="id_birthday">Birthday:</label></th>
<td><input type="text" value="1940-10-9" name="birthday" id="id_birthday" /></td></tr>""",
"""
<tr><th>
<label for="id_first_name">First name:</label></th><td>
<input type="text" name="first_name" value="John" id="id_first_name" />
</td></tr>
<tr><th>
<label for="id_last_name">Last name:</label></th><td>
<input type="text" name="last_name" value="Lennon" id="id_last_name" />
</td></tr>
<tr><th>
<label for="id_birthday">Birthday:</label></th><td>
<input type="text" name="birthday" value="1940-10-9" id="id_birthday" />
</td></tr>
""")
self.assertHTMLEqual(
"""<!DOCTYPE html>
<html>
<head>
<link rel="stylesheet">
<title>Document</title>
<meta attribute="value">
</head>
<body>
<p>
This is a valid paragraph
<div> this is a div AFTER the p</div>
</body>
</html>""", """
<html>
<head>
<link rel="stylesheet">
<title>Document</title>
<meta attribute="value">
</head>
<body>
<p> This is a valid paragraph
<!-- browsers would close the p tag here -->
<div> this is a div AFTER the p</div>
</p> <!-- this is invalid HTML parsing, but it should make no
difference in most cases -->
</body>
</html>""")
def test_html_contain(self):
# equal html contains each other
dom1 = parse_html('<p>foo')
dom2 = parse_html('<p>foo</p>')
self.assertIn(dom1, dom2)
self.assertIn(dom2, dom1)
dom2 = parse_html('<div><p>foo</p></div>')
self.assertIn(dom1, dom2)
self.assertNotIn(dom2, dom1)
self.assertNotIn('<p>foo</p>', dom2)
self.assertIn('foo', dom2)
        # Containment also works when the compared fragments have multiple
        # root elements.
dom1 = parse_html('<p>foo</p><p>bar</p>')
dom2 = parse_html('<p>foo</p><p>bar</p>')
self.assertIn(dom1, dom2)
dom1 = parse_html('<p>foo</p>')
self.assertIn(dom1, dom2)
dom1 = parse_html('<p>bar</p>')
self.assertIn(dom1, dom2)
dom1 = parse_html('<div><p>foo</p><p>bar</p></div>')
self.assertIn(dom2, dom1)
def test_count(self):
# equal html contains each other one time
dom1 = parse_html('<p>foo')
dom2 = parse_html('<p>foo</p>')
self.assertEqual(dom1.count(dom2), 1)
self.assertEqual(dom2.count(dom1), 1)
dom2 = parse_html('<p>foo</p><p>bar</p>')
self.assertEqual(dom2.count(dom1), 1)
dom2 = parse_html('<p>foo foo</p><p>foo</p>')
self.assertEqual(dom2.count('foo'), 3)
dom2 = parse_html('<p class="bar">foo</p>')
self.assertEqual(dom2.count('bar'), 0)
self.assertEqual(dom2.count('class'), 0)
self.assertEqual(dom2.count('p'), 0)
self.assertEqual(dom2.count('o'), 2)
dom2 = parse_html('<p>foo</p><p>foo</p>')
self.assertEqual(dom2.count(dom1), 2)
dom2 = parse_html('<div><p>foo<input type=""></p><p>foo</p></div>')
self.assertEqual(dom2.count(dom1), 1)
dom2 = parse_html('<div><div><p>foo</p></div></div>')
self.assertEqual(dom2.count(dom1), 1)
dom2 = parse_html('<p>foo<p>foo</p></p>')
self.assertEqual(dom2.count(dom1), 1)
dom2 = parse_html('<p>foo<p>bar</p></p>')
self.assertEqual(dom2.count(dom1), 0)
# html with a root element contains the same html with no root element
dom1 = parse_html('<p>foo</p><p>bar</p>')
dom2 = parse_html('<div><p>foo</p><p>bar</p></div>')
self.assertEqual(dom2.count(dom1), 1)
def test_parsing_errors(self):
with self.assertRaises(AssertionError):
self.assertHTMLEqual('<p>', '')
with self.assertRaises(AssertionError):
self.assertHTMLEqual('', '<p>')
error_msg = (
"First argument is not valid HTML:\n"
"('Unexpected end tag `div` (Line 1, Column 6)', (1, 6))"
)
with self.assertRaisesMessage(AssertionError, error_msg):
self.assertHTMLEqual('< div></ div>', '<div></div>')
with self.assertRaises(HTMLParseError):
parse_html('</p>')
def test_contains_html(self):
response = HttpResponse('''<body>
This is a form: <form method="get">
<input type="text" name="Hello" />
</form></body>''')
self.assertNotContains(response, "<input name='Hello' type='text'>")
self.assertContains(response, '<form method="get">')
self.assertContains(response, "<input name='Hello' type='text'>", html=True)
self.assertNotContains(response, '<form method="get">', html=True)
invalid_response = HttpResponse('''<body <bad>>''')
with self.assertRaises(AssertionError):
self.assertContains(invalid_response, '<p></p>')
with self.assertRaises(AssertionError):
self.assertContains(response, '<p "whats" that>')
def test_unicode_handling(self):
response = HttpResponse('<p class="help">Some help text for the title (with unicode ŠĐĆŽćžšđ)</p>')
self.assertContains(
response,
'<p class="help">Some help text for the title (with unicode ŠĐĆŽćžšđ)</p>',
html=True
)
class JSONEqualTests(SimpleTestCase):
def test_simple_equal(self):
json1 = '{"attr1": "foo", "attr2":"baz"}'
json2 = '{"attr1": "foo", "attr2":"baz"}'
self.assertJSONEqual(json1, json2)
def test_simple_equal_unordered(self):
json1 = '{"attr1": "foo", "attr2":"baz"}'
json2 = '{"attr2":"baz", "attr1": "foo"}'
self.assertJSONEqual(json1, json2)
def test_simple_equal_raise(self):
json1 = '{"attr1": "foo", "attr2":"baz"}'
json2 = '{"attr2":"baz"}'
with self.assertRaises(AssertionError):
self.assertJSONEqual(json1, json2)
def test_equal_parsing_errors(self):
invalid_json = '{"attr1": "foo, "attr2":"baz"}'
valid_json = '{"attr1": "foo", "attr2":"baz"}'
with self.assertRaises(AssertionError):
self.assertJSONEqual(invalid_json, valid_json)
with self.assertRaises(AssertionError):
self.assertJSONEqual(valid_json, invalid_json)
def test_simple_not_equal(self):
json1 = '{"attr1": "foo", "attr2":"baz"}'
json2 = '{"attr2":"baz"}'
self.assertJSONNotEqual(json1, json2)
def test_simple_not_equal_raise(self):
json1 = '{"attr1": "foo", "attr2":"baz"}'
json2 = '{"attr1": "foo", "attr2":"baz"}'
with self.assertRaises(AssertionError):
self.assertJSONNotEqual(json1, json2)
def test_not_equal_parsing_errors(self):
invalid_json = '{"attr1": "foo, "attr2":"baz"}'
valid_json = '{"attr1": "foo", "attr2":"baz"}'
with self.assertRaises(AssertionError):
self.assertJSONNotEqual(invalid_json, valid_json)
with self.assertRaises(AssertionError):
self.assertJSONNotEqual(valid_json, invalid_json)
class XMLEqualTests(SimpleTestCase):
def test_simple_equal(self):
xml1 = "<elem attr1='a' attr2='b' />"
xml2 = "<elem attr1='a' attr2='b' />"
self.assertXMLEqual(xml1, xml2)
def test_simple_equal_unordered(self):
xml1 = "<elem attr1='a' attr2='b' />"
xml2 = "<elem attr2='b' attr1='a' />"
self.assertXMLEqual(xml1, xml2)
def test_simple_equal_raise(self):
xml1 = "<elem attr1='a' />"
xml2 = "<elem attr2='b' attr1='a' />"
with self.assertRaises(AssertionError):
self.assertXMLEqual(xml1, xml2)
def test_simple_equal_raises_message(self):
xml1 = "<elem attr1='a' />"
xml2 = "<elem attr2='b' attr1='a' />"
msg = '''{xml1} != {xml2}
- <elem attr1='a' />
+ <elem attr2='b' attr1='a' />
? ++++++++++
'''.format(xml1=repr(xml1), xml2=repr(xml2))
with self.assertRaisesMessage(AssertionError, msg):
self.assertXMLEqual(xml1, xml2)
def test_simple_not_equal(self):
xml1 = "<elem attr1='a' attr2='c' />"
xml2 = "<elem attr1='a' attr2='b' />"
self.assertXMLNotEqual(xml1, xml2)
def test_simple_not_equal_raise(self):
xml1 = "<elem attr1='a' attr2='b' />"
xml2 = "<elem attr2='b' attr1='a' />"
with self.assertRaises(AssertionError):
self.assertXMLNotEqual(xml1, xml2)
def test_parsing_errors(self):
xml_invalid = "<elem attr1='a attr2='b' />"
xml2 = "<elem attr2='b' attr1='a' />"
with self.assertRaises(AssertionError):
self.assertXMLNotEqual(xml_invalid, xml2)
def test_comment_root(self):
xml1 = "<?xml version='1.0'?><!-- comment1 --><elem attr1='a' attr2='b' />"
xml2 = "<?xml version='1.0'?><!-- comment2 --><elem attr2='b' attr1='a' />"
self.assertXMLEqual(xml1, xml2)
def test_simple_equal_with_leading_or_trailing_whitespace(self):
xml1 = "<elem>foo</elem> \t\n"
xml2 = " \t\n<elem>foo</elem>"
self.assertXMLEqual(xml1, xml2)
def test_simple_not_equal_with_whitespace_in_the_middle(self):
xml1 = "<elem>foo</elem><elem>bar</elem>"
xml2 = "<elem>foo</elem> <elem>bar</elem>"
self.assertXMLNotEqual(xml1, xml2)
class SkippingExtraTests(TestCase):
fixtures = ['should_not_be_loaded.json']
# HACK: This depends on internals of our TestCase subclasses
def __call__(self, result=None):
# Detect fixture loading by counting SQL queries, should be zero
with self.assertNumQueries(0):
super().__call__(result)
@unittest.skip("Fixture loading should not be performed for skipped tests.")
def test_fixtures_are_skipped(self):
pass
class AssertRaisesMsgTest(SimpleTestCase):
def test_assert_raises_message(self):
msg = "'Expected message' not found in 'Unexpected message'"
# context manager form of assertRaisesMessage()
with self.assertRaisesMessage(AssertionError, msg):
with self.assertRaisesMessage(ValueError, "Expected message"):
raise ValueError("Unexpected message")
# callable form
def func():
raise ValueError("Unexpected message")
with self.assertRaisesMessage(AssertionError, msg):
self.assertRaisesMessage(ValueError, "Expected message", func)
def test_special_re_chars(self):
"""assertRaisesMessage shouldn't interpret RE special chars."""
def func1():
raise ValueError("[.*x+]y?")
with self.assertRaisesMessage(ValueError, "[.*x+]y?"):
func1()
class AssertWarnsMessageTests(SimpleTestCase):
def test_context_manager(self):
with self.assertWarnsMessage(UserWarning, 'Expected message'):
warnings.warn('Expected message', UserWarning)
def test_context_manager_failure(self):
msg = "Expected message' not found in 'Unexpected message'"
with self.assertRaisesMessage(AssertionError, msg):
with self.assertWarnsMessage(UserWarning, 'Expected message'):
warnings.warn('Unexpected message', UserWarning)
def test_callable(self):
def func():
warnings.warn('Expected message', UserWarning)
self.assertWarnsMessage(UserWarning, 'Expected message', func)
def test_special_re_chars(self):
def func1():
warnings.warn('[.*x+]y?', UserWarning)
with self.assertWarnsMessage(UserWarning, '[.*x+]y?'):
func1()
class AssertFieldOutputTests(SimpleTestCase):
def test_assert_field_output(self):
error_invalid = ['Enter a valid email address.']
self.assertFieldOutput(EmailField, {'[email protected]': '[email protected]'}, {'aaa': error_invalid})
with self.assertRaises(AssertionError):
self.assertFieldOutput(EmailField, {'[email protected]': '[email protected]'}, {'aaa': error_invalid + ['Another error']})
with self.assertRaises(AssertionError):
self.assertFieldOutput(EmailField, {'[email protected]': 'Wrong output'}, {'aaa': error_invalid})
with self.assertRaises(AssertionError):
self.assertFieldOutput(
EmailField, {'[email protected]': '[email protected]'}, {'aaa': ['Come on, gimme some well formatted data, dude.']}
)
def test_custom_required_message(self):
class MyCustomField(IntegerField):
default_error_messages = {
'required': 'This is really required.',
}
self.assertFieldOutput(MyCustomField, {}, {}, empty_value=None)
class AssertURLEqualTests(SimpleTestCase):
def test_equal(self):
valid_tests = (
('http://example.com/?', 'http://example.com/'),
('http://example.com/?x=1&', 'http://example.com/?x=1'),
('http://example.com/?x=1&y=2', 'http://example.com/?y=2&x=1'),
('http://example.com/?x=1&y=2', 'http://example.com/?y=2&x=1'),
('http://example.com/?x=1&y=2&a=1&a=2', 'http://example.com/?a=1&a=2&y=2&x=1'),
('/path/to/?x=1&y=2&z=3', '/path/to/?z=3&y=2&x=1'),
('?x=1&y=2&z=3', '?z=3&y=2&x=1'),
)
for url1, url2 in valid_tests:
with self.subTest(url=url1):
self.assertURLEqual(url1, url2)
def test_not_equal(self):
invalid_tests = (
# Protocol must be the same.
('http://example.com/', 'https://example.com/'),
('http://example.com/?x=1&x=2', 'https://example.com/?x=2&x=1'),
('http://example.com/?x=1&y=bar&x=2', 'https://example.com/?y=bar&x=2&x=1'),
# Parameters of the same name must be in the same order.
('/path/to?a=1&a=2', '/path/to/?a=2&a=1')
)
for url1, url2 in invalid_tests:
with self.subTest(url=url1), self.assertRaises(AssertionError):
self.assertURLEqual(url1, url2)
def test_message(self):
msg = (
"Expected 'http://example.com/?x=1&x=2' to equal "
"'https://example.com/?x=2&x=1'"
)
with self.assertRaisesMessage(AssertionError, msg):
self.assertURLEqual('http://example.com/?x=1&x=2', 'https://example.com/?x=2&x=1')
def test_msg_prefix(self):
msg = (
"Prefix: Expected 'http://example.com/?x=1&x=2' to equal "
"'https://example.com/?x=2&x=1'"
)
with self.assertRaisesMessage(AssertionError, msg):
self.assertURLEqual(
'http://example.com/?x=1&x=2', 'https://example.com/?x=2&x=1',
msg_prefix='Prefix: ',
)
class FirstUrls:
urlpatterns = [path('first/', empty_response, name='first')]
class SecondUrls:
urlpatterns = [path('second/', empty_response, name='second')]
class SetupTestEnvironmentTests(SimpleTestCase):
def test_setup_test_environment_calling_more_than_once(self):
with self.assertRaisesMessage(RuntimeError, "setup_test_environment() was already called"):
setup_test_environment()
def test_allowed_hosts(self):
for type_ in (list, tuple):
with self.subTest(type_=type_):
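# list('*') yields ['*'] and tuple('*') yields ('*',); either form should be
# normalized by setup_test_environment(), which also appends 'testserver'.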
allowed_hosts = type_('*')
with mock.patch('django.test.utils._TestState') as x:
del x.saved_data
with self.settings(ALLOWED_HOSTS=allowed_hosts):
setup_test_environment()
self.assertEqual(settings.ALLOWED_HOSTS, ['*', 'testserver'])
class OverrideSettingsTests(SimpleTestCase):
# #21518 -- If neither override_settings nor a setting_changed receiver
# clears the URL cache between tests, then one of test_first or
# test_second will fail.
@override_settings(ROOT_URLCONF=FirstUrls)
def test_urlconf_first(self):
reverse('first')
@override_settings(ROOT_URLCONF=SecondUrls)
def test_urlconf_second(self):
reverse('second')
def test_urlconf_cache(self):
with self.assertRaises(NoReverseMatch):
reverse('first')
with self.assertRaises(NoReverseMatch):
reverse('second')
with override_settings(ROOT_URLCONF=FirstUrls):
self.client.get(reverse('first'))
with self.assertRaises(NoReverseMatch):
reverse('second')
with override_settings(ROOT_URLCONF=SecondUrls):
with self.assertRaises(NoReverseMatch):
reverse('first')
self.client.get(reverse('second'))
self.client.get(reverse('first'))
with self.assertRaises(NoReverseMatch):
reverse('second')
with self.assertRaises(NoReverseMatch):
reverse('first')
with self.assertRaises(NoReverseMatch):
reverse('second')
def test_override_media_root(self):
"""
Overriding the MEDIA_ROOT setting should be reflected in the
base_location attribute of django.core.files.storage.default_storage.
"""
self.assertEqual(default_storage.base_location, '')
with self.settings(MEDIA_ROOT='test_value'):
self.assertEqual(default_storage.base_location, 'test_value')
def test_override_media_url(self):
"""
Overriding the MEDIA_URL setting should be reflected in the
base_url attribute of django.core.files.storage.default_storage.
"""
self.assertEqual(default_storage.base_location, '')
with self.settings(MEDIA_URL='/test_value/'):
self.assertEqual(default_storage.base_url, '/test_value/')
def test_override_file_upload_permissions(self):
"""
Overriding the FILE_UPLOAD_PERMISSIONS setting should be reflected in
the file_permissions_mode attribute of
django.core.files.storage.default_storage.
"""
self.assertIsNone(default_storage.file_permissions_mode)
with self.settings(FILE_UPLOAD_PERMISSIONS=0o777):
self.assertEqual(default_storage.file_permissions_mode, 0o777)
def test_override_file_upload_directory_permissions(self):
"""
Overriding the FILE_UPLOAD_DIRECTORY_PERMISSIONS setting should be
reflected in the directory_permissions_mode attribute of
django.core.files.storage.default_storage.
"""
self.assertIsNone(default_storage.directory_permissions_mode)
with self.settings(FILE_UPLOAD_DIRECTORY_PERMISSIONS=0o777):
self.assertEqual(default_storage.directory_permissions_mode, 0o777)
def test_override_database_routers(self):
"""
Overriding DATABASE_ROUTERS should update the master router.
"""
test_routers = [object()]
with self.settings(DATABASE_ROUTERS=test_routers):
self.assertEqual(router.routers, test_routers)
def test_override_static_url(self):
"""
Overriding the STATIC_URL setting should be reflected in the
base_url attribute of
django.contrib.staticfiles.storage.staticfiles_storage.
"""
with self.settings(STATIC_URL='/test/'):
self.assertEqual(staticfiles_storage.base_url, '/test/')
def test_override_static_root(self):
"""
Overriding the STATIC_ROOT setting should be reflected in the
location attribute of
django.contrib.staticfiles.storage.staticfiles_storage.
"""
with self.settings(STATIC_ROOT='/tmp/test'):
self.assertEqual(staticfiles_storage.location, os.path.abspath('/tmp/test'))
def test_override_staticfiles_storage(self):
"""
Overriding the STATICFILES_STORAGE setting should be reflected in
the value of django.contrib.staticfiles.storage.staticfiles_storage.
"""
new_class = 'ManifestStaticFilesStorage'
new_storage = 'django.contrib.staticfiles.storage.' + new_class
with self.settings(STATICFILES_STORAGE=new_storage):
self.assertEqual(staticfiles_storage.__class__.__name__, new_class)
def test_override_staticfiles_finders(self):
"""
Overriding the STATICFILES_FINDERS setting should be reflected in
the return value of django.contrib.staticfiles.finders.get_finders.
"""
current = get_finders()
self.assertGreater(len(list(current)), 1)
finders = ['django.contrib.staticfiles.finders.FileSystemFinder']
with self.settings(STATICFILES_FINDERS=finders):
self.assertEqual(len(list(get_finders())), len(finders))
def test_override_staticfiles_dirs(self):
"""
Overriding the STATICFILES_DIRS setting should be reflected in
the locations attribute of the
django.contrib.staticfiles.finders.FileSystemFinder instance.
"""
finder = get_finder('django.contrib.staticfiles.finders.FileSystemFinder')
test_path = '/tmp/test'
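# FileSystemFinder.locations holds (prefix, root) pairs; a plain path in
# STATICFILES_DIRS gets an empty prefix, hence the ('', test_path) entry.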
expected_location = ('', test_path)
self.assertNotIn(expected_location, finder.locations)
with self.settings(STATICFILES_DIRS=[test_path]):
finder = get_finder('django.contrib.staticfiles.finders.FileSystemFinder')
self.assertIn(expected_location, finder.locations)
class TestBadSetUpTestData(TestCase):
"""
An exception in setUpTestData() shouldn't leak a transaction which would
cascade across the rest of the test suite.
"""
class MyException(Exception):
pass
@classmethod
def setUpClass(cls):
try:
super().setUpClass()
except cls.MyException:
cls._in_atomic_block = connection.in_atomic_block
@classmethod
def tearDownClass(cls):
# override to avoid a second cls._rollback_atomics() which would fail.
# Normal setUpClass() methods won't have exception handling so this
# method wouldn't typically be run.
pass
@classmethod
def setUpTestData(cls):
# Simulate a broken setUpTestData() method.
raise cls.MyException()
def test_failure_in_setUpTestData_should_rollback_transaction(self):
# setUpTestData() should call _rollback_atomics() so that the
# transaction doesn't leak.
self.assertFalse(self._in_atomic_block)
class DisallowedDatabaseQueriesTests(SimpleTestCase):
def test_disallowed_database_connections(self):
expected_message = (
"Database connections to 'default' are not allowed in SimpleTestCase "
"subclasses. Either subclass TestCase or TransactionTestCase to "
"ensure proper test isolation or add 'default' to "
"test_utils.tests.DisallowedDatabaseQueriesTests.databases to "
"silence this failure."
)
with self.assertRaisesMessage(AssertionError, expected_message):
connection.connect()
with self.assertRaisesMessage(AssertionError, expected_message):
connection.temporary_connection()
def test_disallowed_database_queries(self):
expected_message = (
"Database queries to 'default' are not allowed in SimpleTestCase "
"subclasses. Either subclass TestCase or TransactionTestCase to "
"ensure proper test isolation or add 'default' to "
"test_utils.tests.DisallowedDatabaseQueriesTests.databases to "
"silence this failure."
)
with self.assertRaisesMessage(AssertionError, expected_message):
Car.objects.first()
def test_disallowed_database_chunked_cursor_queries(self):
expected_message = (
"Database queries to 'default' are not allowed in SimpleTestCase "
"subclasses. Either subclass TestCase or TransactionTestCase to "
"ensure proper test isolation or add 'default' to "
"test_utils.tests.DisallowedDatabaseQueriesTests.databases to "
"silence this failure."
)
with self.assertRaisesMessage(AssertionError, expected_message):
next(Car.objects.iterator())
class AllowedDatabaseQueriesTests(SimpleTestCase):
databases = {'default'}
def test_allowed_database_queries(self):
Car.objects.first()
def test_allowed_database_chunked_cursor_queries(self):
next(Car.objects.iterator(), None)
class DatabaseAliasTests(SimpleTestCase):
def setUp(self):
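# The tests below overwrite the class-level 'databases' attribute, so
# register a cleanup that restores the original value after each test.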
self.addCleanup(setattr, self.__class__, 'databases', self.databases)
def test_no_close_match(self):
self.__class__.databases = {'void'}
message = (
"test_utils.tests.DatabaseAliasTests.databases refers to 'void' which is not defined "
"in settings.DATABASES."
)
with self.assertRaisesMessage(ImproperlyConfigured, message):
self._validate_databases()
def test_close_match(self):
self.__class__.databases = {'defualt'}
message = (
"test_utils.tests.DatabaseAliasTests.databases refers to 'defualt' which is not defined "
"in settings.DATABASES. Did you mean 'default'?"
)
with self.assertRaisesMessage(ImproperlyConfigured, message):
self._validate_databases()
def test_match(self):
self.__class__.databases = {'default', 'other'}
self.assertEqual(self._validate_databases(), frozenset({'default', 'other'}))
def test_all(self):
self.__class__.databases = '__all__'
self.assertEqual(self._validate_databases(), frozenset(connections))
@isolate_apps('test_utils', attr_name='class_apps')
class IsolatedAppsTests(SimpleTestCase):
def test_installed_apps(self):
self.assertEqual([app_config.label for app_config in self.class_apps.get_app_configs()], ['test_utils'])
def test_class_decoration(self):
class ClassDecoration(models.Model):
pass
self.assertEqual(ClassDecoration._meta.apps, self.class_apps)
@isolate_apps('test_utils', kwarg_name='method_apps')
def test_method_decoration(self, method_apps):
class MethodDecoration(models.Model):
pass
self.assertEqual(MethodDecoration._meta.apps, method_apps)
def test_context_manager(self):
with isolate_apps('test_utils') as context_apps:
class ContextManager(models.Model):
pass
self.assertEqual(ContextManager._meta.apps, context_apps)
@isolate_apps('test_utils', kwarg_name='method_apps')
def test_nested(self, method_apps):
class MethodDecoration(models.Model):
pass
with isolate_apps('test_utils') as context_apps:
class ContextManager(models.Model):
pass
with isolate_apps('test_utils') as nested_context_apps:
class NestedContextManager(models.Model):
pass
self.assertEqual(MethodDecoration._meta.apps, method_apps)
self.assertEqual(ContextManager._meta.apps, context_apps)
self.assertEqual(NestedContextManager._meta.apps, nested_context_apps)
class DoNothingDecorator(TestContextDecorator):
def enable(self):
pass
def disable(self):
pass
class TestContextDecoratorTests(SimpleTestCase):
@mock.patch.object(DoNothingDecorator, 'disable')
def test_exception_in_setup(self, mock_disable):
"""An exception is setUp() is reraised after disable() is called."""
class ExceptionInSetUp(unittest.TestCase):
def setUp(self):
raise NotImplementedError('reraised')
decorator = DoNothingDecorator()
decorated_test_class = decorator.__call__(ExceptionInSetUp)()
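# Calling the decorator with a test class returns a wrapped class; the
# assertions below check that disable() runs only once setUp() has raised.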
self.assertFalse(mock_disable.called)
with self.assertRaisesMessage(NotImplementedError, 'reraised'):
decorated_test_class.setUp()
self.assertTrue(mock_disable.called)
f35a3f4aa3114f1c088b836ec5ed9cc1a270b0f39eb0f805bd0be09188b87bb2
from django.urls import path
from . import views
urlpatterns = [
path('test_utils/get_person/<int:pk>/', views.get_person),
path('test_utils/no_template_used/', views.no_template_used),
]
1a91a333dbffa9c765f668cbf12f871dcd24648a8bbb5b2defb5163c30f3145a
import datetime
import os
import re
import unittest
from unittest import mock
from urllib.parse import parse_qsl, urljoin, urlparse
import pytz
from django.contrib.admin import AdminSite, ModelAdmin
from django.contrib.admin.helpers import ACTION_CHECKBOX_NAME
from django.contrib.admin.models import ADDITION, DELETION, LogEntry
from django.contrib.admin.options import TO_FIELD_VAR
from django.contrib.admin.templatetags.admin_urls import add_preserved_filters
from django.contrib.admin.tests import AdminSeleniumTestCase
from django.contrib.admin.utils import quote
from django.contrib.admin.views.main import IS_POPUP_VAR
from django.contrib.auth import REDIRECT_FIELD_NAME, get_permission_codename
from django.contrib.auth.models import Group, Permission, User
from django.contrib.contenttypes.models import ContentType
from django.core import mail
from django.core.checks import Error
from django.core.files import temp as tempfile
from django.forms.utils import ErrorList
from django.template.response import TemplateResponse
from django.test import (
TestCase, modify_settings, override_settings, skipUnlessDBFeature,
)
from django.test.utils import override_script_prefix
from django.urls import NoReverseMatch, resolve, reverse
from django.utils import formats, translation
from django.utils.cache import get_max_age
from django.utils.encoding import iri_to_uri
from django.utils.html import escape
from django.utils.http import urlencode
from . import customadmin
from .admin import CityAdmin, site, site2
from .models import (
Actor, AdminOrderedAdminMethod, AdminOrderedCallable, AdminOrderedField,
AdminOrderedModelMethod, Answer, Answer2, Article, BarAccount, Book,
Bookmark, Category, Chapter, ChapterXtra1, ChapterXtra2, Character, Child,
Choice, City, Collector, Color, ComplexSortedPerson, CoverLetter,
CustomArticle, CyclicOne, CyclicTwo, DooHickey, Employee, EmptyModel,
Fabric, FancyDoodad, FieldOverridePost, FilteredManager, FooAccount,
FoodDelivery, FunkyTag, Gallery, Grommet, Inquisition, Language, Link,
MainPrepopulated, Media, ModelWithStringPrimaryKey, OtherStory, Paper,
Parent, ParentWithDependentChildren, ParentWithUUIDPK, Person, Persona,
Picture, Pizza, Plot, PlotDetails, PluggableSearchPerson, Podcast, Post,
PrePopulatedPost, Promo, Question, ReadablePizza, ReadOnlyPizza,
Recommendation, Recommender, RelatedPrepopulated, RelatedWithUUIDPKModel,
Report, Restaurant, RowLevelChangePermissionModel, SecretHideout, Section,
ShortMessage, Simple, State, Story, SuperSecretHideout, SuperVillain,
Telegram, TitleTranslation, Topping, UnchangeableObject, UndeletableObject,
UnorderedObject, UserProxy, Villain, Vodcast, Whatsit, Widget, Worker,
WorkHour,
)
ERROR_MESSAGE = "Please enter the correct username and password \
for a staff account. Note that both fields may be case-sensitive."
MULTIPART_ENCTYPE = 'enctype="multipart/form-data"'
class AdminFieldExtractionMixin:
"""
Helper methods for extracting data from AdminForm.
"""
def get_admin_form_fields(self, response):
"""
Return a list of AdminFields for the AdminForm in the response.
"""
fields = []
for fieldset in response.context['adminform']:
for field_line in fieldset:
fields.extend(field_line)
return fields
def get_admin_readonly_fields(self, response):
"""
Return the readonly fields for the response's AdminForm.
"""
return [f for f in self.get_admin_form_fields(response) if f.is_readonly]
def get_admin_readonly_field(self, response, field_name):
"""
Return the readonly field for the given field_name.
"""
admin_readonly_fields = self.get_admin_readonly_fields(response)
for field in admin_readonly_fields:
if field.field['name'] == field_name:
return field
@override_settings(ROOT_URLCONF='admin_views.urls', USE_I18N=True, USE_L10N=False, LANGUAGE_CODE='en')
class AdminViewBasicTestCase(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
cls.s1 = Section.objects.create(name='Test section')
cls.a1 = Article.objects.create(
content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
)
cls.a2 = Article.objects.create(
content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
)
cls.a3 = Article.objects.create(
content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
)
cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
cls.color1 = Color.objects.create(value='Red', warm=True)
cls.color2 = Color.objects.create(value='Orange', warm=True)
cls.color3 = Color.objects.create(value='Blue', warm=False)
cls.color4 = Color.objects.create(value='Green', warm=False)
cls.fab1 = Fabric.objects.create(surface='x')
cls.fab2 = Fabric.objects.create(surface='y')
cls.fab3 = Fabric.objects.create(surface='plain')
cls.b1 = Book.objects.create(name='Book 1')
cls.b2 = Book.objects.create(name='Book 2')
cls.pro1 = Promo.objects.create(name='Promo 1', book=cls.b1)
cls.pro2 = Promo.objects.create(name='Promo 2', book=cls.b2)
cls.chap1 = Chapter.objects.create(title='Chapter 1', content='[ insert contents here ]', book=cls.b1)
cls.chap2 = Chapter.objects.create(title='Chapter 2', content='[ insert contents here ]', book=cls.b1)
cls.chap3 = Chapter.objects.create(title='Chapter 1', content='[ insert contents here ]', book=cls.b2)
cls.chap4 = Chapter.objects.create(title='Chapter 2', content='[ insert contents here ]', book=cls.b2)
cls.cx1 = ChapterXtra1.objects.create(chap=cls.chap1, xtra='ChapterXtra1 1')
cls.cx2 = ChapterXtra1.objects.create(chap=cls.chap3, xtra='ChapterXtra1 2')
Actor.objects.create(name='Palin', age=27)
# Post data for edit inline
cls.inline_post_data = {
"name": "Test section",
# inline data
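# TOTAL_FORMS/INITIAL_FORMS/MAX_NUM_FORMS are the management-form fields
# required by the article inline formset.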
"article_set-TOTAL_FORMS": "6",
"article_set-INITIAL_FORMS": "3",
"article_set-MAX_NUM_FORMS": "0",
"article_set-0-id": cls.a1.pk,
# there is no title in the database; give one here or the formset will fail.
"article_set-0-title": "Norske bostaver æøå skaper problemer",
"article_set-0-content": "<p>Middle content</p>",
"article_set-0-date_0": "2008-03-18",
"article_set-0-date_1": "11:54:58",
"article_set-0-section": cls.s1.pk,
"article_set-1-id": cls.a2.pk,
"article_set-1-title": "Need a title.",
"article_set-1-content": "<p>Oldest content</p>",
"article_set-1-date_0": "2000-03-18",
"article_set-1-date_1": "11:54:58",
"article_set-2-id": cls.a3.pk,
"article_set-2-title": "Need a title.",
"article_set-2-content": "<p>Newest content</p>",
"article_set-2-date_0": "2009-03-18",
"article_set-2-date_1": "11:54:58",
"article_set-3-id": "",
"article_set-3-title": "",
"article_set-3-content": "",
"article_set-3-date_0": "",
"article_set-3-date_1": "",
"article_set-4-id": "",
"article_set-4-title": "",
"article_set-4-content": "",
"article_set-4-date_0": "",
"article_set-4-date_1": "",
"article_set-5-id": "",
"article_set-5-title": "",
"article_set-5-content": "",
"article_set-5-date_0": "",
"article_set-5-date_1": "",
}
def setUp(self):
self.client.force_login(self.superuser)
def assertContentBefore(self, response, text1, text2, failing_msg=None):
"""
Testing utility asserting that text1 appears before text2 in response
content.
"""
self.assertEqual(response.status_code, 200)
self.assertLess(
response.content.index(text1.encode()),
response.content.index(text2.encode()),
(failing_msg or '') + '\nResponse:\n' + response.content.decode(response.charset)
)
class AdminViewBasicTest(AdminViewBasicTestCase):
def test_trailing_slash_required(self):
"""
If you leave off the trailing slash, the app should redirect and add it.
"""
add_url = reverse('admin:admin_views_article_add')
response = self.client.get(add_url[:-1])
self.assertRedirects(response, add_url, status_code=301)
def test_basic_add_GET(self):
"""
A smoke test to ensure GET on the add_view works.
"""
response = self.client.get(reverse('admin:admin_views_section_add'))
self.assertIsInstance(response, TemplateResponse)
self.assertEqual(response.status_code, 200)
def test_add_with_GET_args(self):
response = self.client.get(reverse('admin:admin_views_section_add'), {'name': 'My Section'})
self.assertContains(
response, 'value="My Section"',
msg_prefix="Couldn't find an input with the right value in the response"
)
def test_basic_edit_GET(self):
"""
A smoke test to ensure GET on the change_view works.
"""
response = self.client.get(reverse('admin:admin_views_section_change', args=(self.s1.pk,)))
self.assertIsInstance(response, TemplateResponse)
self.assertEqual(response.status_code, 200)
def test_basic_edit_GET_string_PK(self):
"""
GET on the change_view (when passing a string as the PK argument for a
model with an integer PK field) redirects to the index page with a
message saying the object doesn't exist.
"""
response = self.client.get(reverse('admin:admin_views_section_change', args=(quote("abc/<b>"),)), follow=True)
self.assertRedirects(response, reverse('admin:index'))
self.assertEqual(
[m.message for m in response.context['messages']],
["""section with ID "abc/<b>" doesn't exist. Perhaps it was deleted?"""]
)
def test_basic_edit_GET_old_url_redirect(self):
"""
The change URL changed in Django 1.9, but the old one still redirects.
"""
response = self.client.get(
reverse('admin:admin_views_section_change', args=(self.s1.pk,)).replace('change/', '')
)
self.assertRedirects(response, reverse('admin:admin_views_section_change', args=(self.s1.pk,)))
def test_basic_inheritance_GET_string_PK(self):
"""
GET on the change_view (for inherited models) redirects to the index
page with a message saying the object doesn't exist.
"""
response = self.client.get(reverse('admin:admin_views_supervillain_change', args=('abc',)), follow=True)
self.assertRedirects(response, reverse('admin:index'))
self.assertEqual(
[m.message for m in response.context['messages']],
["""super villain with ID "abc" doesn't exist. Perhaps it was deleted?"""]
)
def test_basic_add_POST(self):
"""
A smoke test to ensure POST on add_view works.
"""
post_data = {
"name": "Another Section",
# inline data
"article_set-TOTAL_FORMS": "3",
"article_set-INITIAL_FORMS": "0",
"article_set-MAX_NUM_FORMS": "0",
}
response = self.client.post(reverse('admin:admin_views_section_add'), post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_popup_add_POST(self):
"""
Ensure http response from a popup is properly escaped.
"""
post_data = {
'_popup': '1',
'title': 'title with a new\nline',
'content': 'some content',
'date_0': '2010-09-10',
'date_1': '14:55:39',
}
response = self.client.post(reverse('admin:admin_views_article_add'), post_data)
self.assertContains(response, 'title with a new\\nline')
def test_basic_edit_POST(self):
"""
A smoke test to ensure POST on edit_view works.
"""
url = reverse('admin:admin_views_section_change', args=(self.s1.pk,))
response = self.client.post(url, self.inline_post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_edit_save_as(self):
"""
Test "save as".
"""
post_data = self.inline_post_data.copy()
post_data.update({
'_saveasnew': 'Save+as+new',
"article_set-1-section": "1",
"article_set-2-section": "1",
"article_set-3-section": "1",
"article_set-4-section": "1",
"article_set-5-section": "1",
})
response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_edit_save_as_delete_inline(self):
"""
Should be able to "Save as new" while also deleting an inline.
"""
post_data = self.inline_post_data.copy()
post_data.update({
'_saveasnew': 'Save+as+new',
"article_set-1-section": "1",
"article_set-2-section": "1",
"article_set-2-DELETE": "1",
"article_set-3-section": "1",
})
response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), post_data)
self.assertEqual(response.status_code, 302)
# started with 3 articles, one was deleted.
self.assertEqual(Section.objects.latest('id').article_set.count(), 2)
def test_change_list_column_field_classes(self):
response = self.client.get(reverse('admin:admin_views_article_changelist'))
# callables display the callable name.
self.assertContains(response, 'column-callable_year')
self.assertContains(response, 'field-callable_year')
# lambdas display as "lambda" plus the index at which they appear in list_display.
self.assertContains(response, 'column-lambda8')
self.assertContains(response, 'field-lambda8')
def test_change_list_sorting_callable(self):
"""
Ensure we can sort on a list_display field that is a callable
(column 2 is callable_year in ArticleAdmin)
"""
response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': 2})
self.assertContentBefore(
response, 'Oldest content', 'Middle content',
"Results of sorting on callable are out of order."
)
self.assertContentBefore(
response, 'Middle content', 'Newest content',
"Results of sorting on callable are out of order."
)
def test_change_list_sorting_callable_query_expression(self):
"""
Query expressions may be used for admin_order_field. (column 9 is
order_by_expression in ArticleAdmin).
"""
response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '9'})
self.assertContentBefore(
response, 'Oldest content', 'Middle content',
'Results of sorting on callable are out of order.'
)
self.assertContentBefore(
response, 'Middle content', 'Newest content',
'Results of sorting on callable are out of order.'
)
def test_change_list_sorting_callable_query_expression_reverse(self):
response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '-9'})
self.assertContentBefore(
response, 'Middle content', 'Oldest content',
'Results of sorting on callable are out of order.'
)
self.assertContentBefore(
response, 'Newest content', 'Middle content',
'Results of sorting on callable are out of order.'
)
def test_change_list_sorting_model(self):
"""
Ensure we can sort on a list_display field that is a Model method
(column 3 is 'model_year' in ArticleAdmin)
"""
response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '-3'})
self.assertContentBefore(
response, 'Newest content', 'Middle content',
"Results of sorting on Model method are out of order."
)
self.assertContentBefore(
response, 'Middle content', 'Oldest content',
"Results of sorting on Model method are out of order."
)
def test_change_list_sorting_model_admin(self):
"""
Ensure we can sort on a list_display field that is a ModelAdmin method
(column 4 is 'modeladmin_year' in ArticleAdmin)
"""
response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '4'})
self.assertContentBefore(
response, 'Oldest content', 'Middle content',
"Results of sorting on ModelAdmin method are out of order."
)
self.assertContentBefore(
response, 'Middle content', 'Newest content',
"Results of sorting on ModelAdmin method are out of order."
)
def test_change_list_sorting_model_admin_reverse(self):
"""
Ensure we can sort on a list_display field that is a ModelAdmin
method in reverse order (i.e. admin_order_field uses the '-' prefix)
(column 6 is 'model_year_reversed' in ArticleAdmin)
"""
response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '6'})
self.assertContentBefore(
response, '2009', '2008',
"Results of sorting on ModelAdmin method are out of order."
)
self.assertContentBefore(
response, '2008', '2000',
"Results of sorting on ModelAdmin method are out of order."
)
# Let's make sure the ordering is right and that we don't get a
# FieldError when we change to descending order
response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '-6'})
self.assertContentBefore(
response, '2000', '2008',
"Results of sorting on ModelAdmin method are out of order."
)
self.assertContentBefore(
response, '2008', '2009',
"Results of sorting on ModelAdmin method are out of order."
)
def test_change_list_sorting_multiple(self):
p1 = Person.objects.create(name="Chris", gender=1, alive=True)
p2 = Person.objects.create(name="Chris", gender=2, alive=True)
p3 = Person.objects.create(name="Bob", gender=1, alive=True)
link1 = reverse('admin:admin_views_person_change', args=(p1.pk,))
link2 = reverse('admin:admin_views_person_change', args=(p2.pk,))
link3 = reverse('admin:admin_views_person_change', args=(p3.pk,))
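# The changelist 'o' parameter is a dot-separated list of list_display
# column indices (the action checkbox is column 0); a '-' prefix sorts
# that column in descending order.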
# Sort by name, gender
response = self.client.get(reverse('admin:admin_views_person_changelist'), {'o': '1.2'})
self.assertContentBefore(response, link3, link1)
self.assertContentBefore(response, link1, link2)
# Sort by gender descending, name
response = self.client.get(reverse('admin:admin_views_person_changelist'), {'o': '-2.1'})
self.assertContentBefore(response, link2, link3)
self.assertContentBefore(response, link3, link1)
def test_change_list_sorting_preserve_queryset_ordering(self):
"""
If no ordering is defined in `ModelAdmin.ordering` or in the query
string, then the underlying order of the queryset should not be
changed, even if it is defined in `ModelAdmin.get_queryset()`.
Refs #11868, #7309.
"""
p1 = Person.objects.create(name="Amy", gender=1, alive=True, age=80)
p2 = Person.objects.create(name="Bob", gender=1, alive=True, age=70)
p3 = Person.objects.create(name="Chris", gender=2, alive=False, age=60)
link1 = reverse('admin:admin_views_person_change', args=(p1.pk,))
link2 = reverse('admin:admin_views_person_change', args=(p2.pk,))
link3 = reverse('admin:admin_views_person_change', args=(p3.pk,))
response = self.client.get(reverse('admin:admin_views_person_changelist'), {})
self.assertContentBefore(response, link3, link2)
self.assertContentBefore(response, link2, link1)
def test_change_list_sorting_model_meta(self):
# Test ordering on Model Meta is respected
l1 = Language.objects.create(iso='ur', name='Urdu')
l2 = Language.objects.create(iso='ar', name='Arabic')
link1 = reverse('admin:admin_views_language_change', args=(quote(l1.pk),))
link2 = reverse('admin:admin_views_language_change', args=(quote(l2.pk),))
response = self.client.get(reverse('admin:admin_views_language_changelist'), {})
self.assertContentBefore(response, link2, link1)
# Test we can override with query string
response = self.client.get(reverse('admin:admin_views_language_changelist'), {'o': '-1'})
self.assertContentBefore(response, link1, link2)
def test_change_list_sorting_override_model_admin(self):
# Test ordering on Model Admin is respected, and overrides Model Meta
dt = datetime.datetime.now()
p1 = Podcast.objects.create(name="A", release_date=dt)
p2 = Podcast.objects.create(name="B", release_date=dt - datetime.timedelta(10))
link1 = reverse('admin:admin_views_podcast_change', args=(p1.pk,))
link2 = reverse('admin:admin_views_podcast_change', args=(p2.pk,))
response = self.client.get(reverse('admin:admin_views_podcast_changelist'), {})
self.assertContentBefore(response, link1, link2)
def test_multiple_sort_same_field(self):
# The changelist displays the correct columns if two columns correspond
# to the same ordering field.
dt = datetime.datetime.now()
p1 = Podcast.objects.create(name="A", release_date=dt)
p2 = Podcast.objects.create(name="B", release_date=dt - datetime.timedelta(10))
link1 = reverse('admin:admin_views_podcast_change', args=(quote(p1.pk),))
link2 = reverse('admin:admin_views_podcast_change', args=(quote(p2.pk),))
response = self.client.get(reverse('admin:admin_views_podcast_changelist'), {})
self.assertContentBefore(response, link1, link2)
p1 = ComplexSortedPerson.objects.create(name="Bob", age=10)
p2 = ComplexSortedPerson.objects.create(name="Amy", age=20)
link1 = reverse('admin:admin_views_complexsortedperson_change', args=(p1.pk,))
link2 = reverse('admin:admin_views_complexsortedperson_change', args=(p2.pk,))
response = self.client.get(reverse('admin:admin_views_complexsortedperson_changelist'), {})
# Should have 5 columns (including action checkbox col)
self.assertContains(response, '<th scope="col"', count=5)
self.assertContains(response, 'Name')
self.assertContains(response, 'Colored name')
# Check order
self.assertContentBefore(response, 'Name', 'Colored name')
# Check sorting - should be by name
self.assertContentBefore(response, link2, link1)
def test_sort_indicators_admin_order(self):
"""
The admin shows default sort indicators for all kinds of 'ordering'
fields: field names, methods on the ModelAdmin and the model itself, and
other callables. See #17252.
"""
models = [(AdminOrderedField, 'adminorderedfield'),
(AdminOrderedModelMethod, 'adminorderedmodelmethod'),
(AdminOrderedAdminMethod, 'adminorderedadminmethod'),
(AdminOrderedCallable, 'adminorderedcallable')]
for model, url in models:
model.objects.create(stuff='The Last Item', order=3)
model.objects.create(stuff='The First Item', order=1)
model.objects.create(stuff='The Middle Item', order=2)
response = self.client.get(reverse('admin:admin_views_%s_changelist' % url), {})
self.assertEqual(response.status_code, 200)
# Should have 3 columns including action checkbox col.
self.assertContains(response, '<th scope="col"', count=3, msg_prefix=url)
# Check if the correct column was selected. 2 is the index of the
# 'order' column in the model admin's 'list_display' with 0 being
# the implicit 'action_checkbox' and 1 being the column 'stuff'.
self.assertEqual(response.context['cl'].get_ordering_field_columns(), {2: 'asc'})
# Check order of records.
self.assertContentBefore(response, 'The First Item', 'The Middle Item')
self.assertContentBefore(response, 'The Middle Item', 'The Last Item')
def test_has_related_field_in_list_display_fk(self):
"""Joins shouldn't be performed for <FK>_id fields in list display."""
state = State.objects.create(name='Karnataka')
City.objects.create(state=state, name='Bangalore')
response = self.client.get(reverse('admin:admin_views_city_changelist'), {})
response.context['cl'].list_display = ['id', 'name', 'state']
self.assertIs(response.context['cl'].has_related_field_in_list_display(), True)
response.context['cl'].list_display = ['id', 'name', 'state_id']
self.assertIs(response.context['cl'].has_related_field_in_list_display(), False)
def test_has_related_field_in_list_display_o2o(self):
"""Joins shouldn't be performed for <O2O>_id fields in list display."""
media = Media.objects.create(name='Foo')
Vodcast.objects.create(media=media)
response = self.client.get(reverse('admin:admin_views_vodcast_changelist'), {})
response.context['cl'].list_display = ['media']
self.assertIs(response.context['cl'].has_related_field_in_list_display(), True)
response.context['cl'].list_display = ['media_id']
self.assertIs(response.context['cl'].has_related_field_in_list_display(), False)
def test_limited_filter(self):
"""Ensure admin changelist filters do not contain objects excluded via limit_choices_to.
This also tests relation-spanning filters (e.g. 'color__value').
"""
response = self.client.get(reverse('admin:admin_views_thing_changelist'))
self.assertContains(
response, '<div id="changelist-filter">',
msg_prefix="Expected filter not found in changelist view"
)
self.assertNotContains(
response, '<a href="?color__id__exact=3">Blue</a>',
msg_prefix="Changelist filter not correctly limited by limit_choices_to"
)
def test_relation_spanning_filters(self):
changelist_url = reverse('admin:admin_views_chapterxtra1_changelist')
response = self.client.get(changelist_url)
self.assertContains(response, '<div id="changelist-filter">')
filters = {
'chap__id__exact': {
'values': [c.id for c in Chapter.objects.all()],
'test': lambda obj, value: obj.chap.id == value,
},
'chap__title': {
'values': [c.title for c in Chapter.objects.all()],
'test': lambda obj, value: obj.chap.title == value,
},
'chap__book__id__exact': {
'values': [b.id for b in Book.objects.all()],
'test': lambda obj, value: obj.chap.book.id == value,
},
'chap__book__name': {
'values': [b.name for b in Book.objects.all()],
'test': lambda obj, value: obj.chap.book.name == value,
},
'chap__book__promo__id__exact': {
'values': [p.id for p in Promo.objects.all()],
'test': lambda obj, value: obj.chap.book.promo_set.filter(id=value).exists(),
},
'chap__book__promo__name': {
'values': [p.name for p in Promo.objects.all()],
'test': lambda obj, value: obj.chap.book.promo_set.filter(name=value).exists(),
},
# A forward relation (book) after a reverse relation (promo).
'guest_author__promo__book__id__exact': {
'values': [p.id for p in Book.objects.all()],
'test': lambda obj, value: obj.guest_author.promo_set.filter(book=value).exists(),
},
}
for filter_path, params in filters.items():
for value in params['values']:
query_string = urlencode({filter_path: value})
# ensure filter link exists
self.assertContains(response, '<a href="?%s"' % query_string)
# ensure link works
filtered_response = self.client.get('%s?%s' % (changelist_url, query_string))
self.assertEqual(filtered_response.status_code, 200)
# ensure changelist contains only valid objects
for obj in filtered_response.context['cl'].queryset.all():
self.assertTrue(params['test'](obj, value))
def test_incorrect_lookup_parameters(self):
"""Ensure incorrect lookup parameters are handled gracefully."""
changelist_url = reverse('admin:admin_views_thing_changelist')
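# Invalid lookups should redirect back to the changelist with the admin's
# ERROR_FLAG query parameter ('e=1') rather than raising an exception.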
response = self.client.get(changelist_url, {'notarealfield': '5'})
self.assertRedirects(response, '%s?e=1' % changelist_url)
# Spanning relationships through a nonexistent related object (Refs #16716)
response = self.client.get(changelist_url, {'notarealfield__whatever': '5'})
self.assertRedirects(response, '%s?e=1' % changelist_url)
response = self.client.get(changelist_url, {'color__id__exact': 'StringNotInteger!'})
self.assertRedirects(response, '%s?e=1' % changelist_url)
# Regression test for #18530
response = self.client.get(changelist_url, {'pub_date__gte': 'foo'})
self.assertRedirects(response, '%s?e=1' % changelist_url)
def test_isnull_lookups(self):
"""Ensure is_null is handled correctly."""
Article.objects.create(title="I Could Go Anywhere", content="Versatile", date=datetime.datetime.now())
changelist_url = reverse('admin:admin_views_article_changelist')
response = self.client.get(changelist_url)
self.assertContains(response, '4 articles')
response = self.client.get(changelist_url, {'section__isnull': 'false'})
self.assertContains(response, '3 articles')
response = self.client.get(changelist_url, {'section__isnull': '0'})
self.assertContains(response, '3 articles')
response = self.client.get(changelist_url, {'section__isnull': 'true'})
self.assertContains(response, '1 article')
response = self.client.get(changelist_url, {'section__isnull': '1'})
self.assertContains(response, '1 article')
def test_logout_and_password_change_URLs(self):
response = self.client.get(reverse('admin:admin_views_article_changelist'))
self.assertContains(response, '<a href="%s">' % reverse('admin:logout'))
self.assertContains(response, '<a href="%s">' % reverse('admin:password_change'))
def test_named_group_field_choices_change_list(self):
"""
Ensures the admin changelist shows correct values in the relevant column
for rows corresponding to instances of a model in which a named group
has been used in the choices option of a field.
"""
link1 = reverse('admin:admin_views_fabric_change', args=(self.fab1.pk,))
link2 = reverse('admin:admin_views_fabric_change', args=(self.fab2.pk,))
response = self.client.get(reverse('admin:admin_views_fabric_changelist'))
fail_msg = (
"Changelist table isn't showing the right human-readable values "
"set by a model field 'choices' option named group."
)
self.assertContains(response, '<a href="%s">Horizontal</a>' % link1, msg_prefix=fail_msg, html=True)
self.assertContains(response, '<a href="%s">Vertical</a>' % link2, msg_prefix=fail_msg, html=True)
def test_named_group_field_choices_filter(self):
"""
Ensures the filter UI shows correctly when at least one named group has
been used in the choices option of a model field.
"""
response = self.client.get(reverse('admin:admin_views_fabric_changelist'))
fail_msg = (
"Changelist filter isn't showing options contained inside a model "
"field 'choices' option named group."
)
self.assertContains(response, '<div id="changelist-filter">')
self.assertContains(
response, '<a href="?surface__exact=x" title="Horizontal">Horizontal</a>',
msg_prefix=fail_msg, html=True
)
self.assertContains(
response, '<a href="?surface__exact=y" title="Vertical">Vertical</a>',
msg_prefix=fail_msg, html=True
)
def test_change_list_null_boolean_display(self):
Post.objects.create(public=None)
response = self.client.get(reverse('admin:admin_views_post_changelist'))
self.assertContains(response, 'icon-unknown.svg')
def test_i18n_language_non_english_default(self):
"""
Check if the JavaScript i18n view returns an empty language catalog
if the default language is non-English but the selected language
is English. See #13388 and #3594 for more details.
"""
with self.settings(LANGUAGE_CODE='fr'), translation.override('en-us'):
response = self.client.get(reverse('admin:jsi18n'))
self.assertNotContains(response, 'Choisir une heure')
def test_i18n_language_non_english_fallback(self):
"""
Makes sure that the fallback language is still working properly
in cases where the selected language cannot be found.
"""
with self.settings(LANGUAGE_CODE='fr'), translation.override('none'):
response = self.client.get(reverse('admin:jsi18n'))
self.assertContains(response, 'Choisir une heure')
def test_jsi18n_with_context(self):
response = self.client.get(reverse('admin-extra-context:jsi18n'))
self.assertEqual(response.status_code, 200)
def test_L10N_deactivated(self):
"""
Check that when L10N is deactivated, the JavaScript i18n view doesn't
return localized date/time formats. Refs #14824.
"""
with self.settings(LANGUAGE_CODE='ru', USE_L10N=False), translation.override('none'):
response = self.client.get(reverse('admin:jsi18n'))
self.assertNotContains(response, '%d.%m.%Y %H:%M:%S')
self.assertContains(response, '%Y-%m-%d %H:%M:%S')
def test_disallowed_filtering(self):
with self.assertLogs('django.security.DisallowedModelAdminLookup', 'ERROR'):
response = self.client.get(
"%s?owner__email__startswith=fuzzy" % reverse('admin:admin_views_album_changelist')
)
self.assertEqual(response.status_code, 400)
# Filters are allowed if explicitly included in list_filter
response = self.client.get("%s?color__value__startswith=red" % reverse('admin:admin_views_thing_changelist'))
self.assertEqual(response.status_code, 200)
response = self.client.get("%s?color__value=red" % reverse('admin:admin_views_thing_changelist'))
self.assertEqual(response.status_code, 200)
# Filters should be allowed if they involve a local field without the
# need to whitelist them in list_filter or date_hierarchy.
response = self.client.get("%s?age__gt=30" % reverse('admin:admin_views_person_changelist'))
self.assertEqual(response.status_code, 200)
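# Employee appears to be a multi-table-inheritance child of Person (implied
# by the 'person_ptr' lookup below), so employee__person_ptr__exact spans
# the automatically created parent-link field.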
e1 = Employee.objects.create(name='Anonymous', gender=1, age=22, alive=True, code='123')
e2 = Employee.objects.create(name='Visitor', gender=2, age=19, alive=True, code='124')
WorkHour.objects.create(datum=datetime.datetime.now(), employee=e1)
WorkHour.objects.create(datum=datetime.datetime.now(), employee=e2)
response = self.client.get(reverse('admin:admin_views_workhour_changelist'))
self.assertContains(response, 'employee__person_ptr__exact')
response = self.client.get("%s?employee__person_ptr__exact=%d" % (
reverse('admin:admin_views_workhour_changelist'), e1.pk)
)
self.assertEqual(response.status_code, 200)
def test_disallowed_to_field(self):
url = reverse('admin:admin_views_section_changelist')
with self.assertLogs('django.security.DisallowedModelAdminToField', 'ERROR'):
response = self.client.get(url, {TO_FIELD_VAR: 'missing_field'})
self.assertEqual(response.status_code, 400)
# Specifying a field that is not referred by any other model registered
# to this admin site should raise an exception.
with self.assertLogs('django.security.DisallowedModelAdminToField', 'ERROR'):
response = self.client.get(reverse('admin:admin_views_section_changelist'), {TO_FIELD_VAR: 'name'})
self.assertEqual(response.status_code, 400)
# #23839 - Primary key should always be allowed, even if the referenced model isn't registered.
response = self.client.get(reverse('admin:admin_views_notreferenced_changelist'), {TO_FIELD_VAR: 'id'})
self.assertEqual(response.status_code, 200)
# #23915 - Specifying a field referenced by another model through an m2m should be allowed.
response = self.client.get(reverse('admin:admin_views_recipe_changelist'), {TO_FIELD_VAR: 'rname'})
self.assertEqual(response.status_code, 200)
# #23604, #23915 - Specifying a field referenced through a reverse m2m relationship should be allowed.
response = self.client.get(reverse('admin:admin_views_ingredient_changelist'), {TO_FIELD_VAR: 'iname'})
self.assertEqual(response.status_code, 200)
# #23329 - Specifying a field that is not referred by any other model directly registered
# to this admin site but registered through inheritance should be allowed.
response = self.client.get(reverse('admin:admin_views_referencedbyparent_changelist'), {TO_FIELD_VAR: 'name'})
self.assertEqual(response.status_code, 200)
# #23431 - Specifying a field that is only referred to by an inline of a registered
# model should be allowed.
response = self.client.get(reverse('admin:admin_views_referencedbyinline_changelist'), {TO_FIELD_VAR: 'name'})
self.assertEqual(response.status_code, 200)
# #25622 - Specifying a field of a model only referred by a generic
# relation should raise DisallowedModelAdminToField.
url = reverse('admin:admin_views_referencedbygenrel_changelist')
with self.assertLogs('django.security.DisallowedModelAdminToField', 'ERROR'):
response = self.client.get(url, {TO_FIELD_VAR: 'object_id'})
self.assertEqual(response.status_code, 400)
# We also want to prevent the add, change, and delete views from
# leaking a disallowed field value.
with self.assertLogs('django.security.DisallowedModelAdminToField', 'ERROR'):
response = self.client.post(reverse('admin:admin_views_section_add'), {TO_FIELD_VAR: 'name'})
self.assertEqual(response.status_code, 400)
section = Section.objects.create()
url = reverse('admin:admin_views_section_change', args=(section.pk,))
with self.assertLogs('django.security.DisallowedModelAdminToField', 'ERROR'):
response = self.client.post(url, {TO_FIELD_VAR: 'name'})
self.assertEqual(response.status_code, 400)
url = reverse('admin:admin_views_section_delete', args=(section.pk,))
with self.assertLogs('django.security.DisallowedModelAdminToField', 'ERROR'):
response = self.client.post(url, {TO_FIELD_VAR: 'name'})
self.assertEqual(response.status_code, 400)
def test_allowed_filtering_15103(self):
"""
Regression test for ticket 15103 - filtering on fields defined in a
ForeignKey 'limit_choices_to' should be allowed, otherwise raw_id_fields
can break.
"""
# Filters should be allowed if they are defined on a ForeignKey pointing to this model
url = "%s?leader__name=Palin&leader__age=27" % reverse('admin:admin_views_inquisition_changelist')
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_popup_dismiss_related(self):
"""
Regression test for ticket 20664 - ensure the pk is properly quoted.
"""
actor = Actor.objects.create(name="Palin", age=27)
response = self.client.get("%s?%s" % (reverse('admin:admin_views_actor_changelist'), IS_POPUP_VAR))
self.assertContains(response, 'data-popup-opener="%s"' % actor.pk)
def test_hide_change_password(self):
"""
Tests if the "change password" link in the admin is hidden if the User
does not have a usable password set.
(against 9bea85795705d015cdadc82c68b99196a8554f5c)
"""
user = User.objects.get(username='super')
user.set_unusable_password()
user.save()
self.client.force_login(user)
response = self.client.get(reverse('admin:index'))
self.assertNotContains(
response, reverse('admin:password_change'),
msg_prefix='The "change password" link should not be displayed if a user does not have a usable password.'
)
def test_change_view_with_show_delete_extra_context(self):
"""
The 'show_delete' context variable in the admin's change view controls
the display of the delete button.
"""
instance = UndeletableObject.objects.create(name='foo')
response = self.client.get(reverse('admin:admin_views_undeletableobject_change', args=(instance.pk,)))
self.assertNotContains(response, 'deletelink')
def test_change_view_logs_m2m_field_changes(self):
"""Changes to ManyToManyFields are included in the object's history."""
pizza = ReadablePizza.objects.create(name='Cheese')
cheese = Topping.objects.create(name='cheese')
post_data = {'name': pizza.name, 'toppings': [cheese.pk]}
response = self.client.post(reverse('admin:admin_views_readablepizza_change', args=(pizza.pk,)), post_data)
self.assertRedirects(response, reverse('admin:admin_views_readablepizza_changelist'))
pizza_ctype = ContentType.objects.get_for_model(ReadablePizza, for_concrete_model=False)
log = LogEntry.objects.filter(content_type=pizza_ctype, object_id=pizza.pk).first()
self.assertEqual(log.get_change_message(), 'Changed toppings.')
def test_allows_attributeerror_to_bubble_up(self):
"""
AttributeErrors are allowed to bubble when raised inside a change list
view. Requires a model to be created so there's something to display.
Refs: #16655, #18593, and #18747
"""
Simple.objects.create()
with self.assertRaises(AttributeError):
self.client.get(reverse('admin:admin_views_simple_changelist'))
def test_changelist_with_no_change_url(self):
"""
ModelAdmin.changelist_view shouldn't result in a NoReverseMatch if url
for change_view is removed from get_urls (#20934).
"""
o = UnchangeableObject.objects.create()
response = self.client.get(reverse('admin:admin_views_unchangeableobject_changelist'))
self.assertEqual(response.status_code, 200)
# Check the format of the shown object -- shouldn't contain a change link
self.assertContains(response, '<th class="field-__str__">%s</th>' % o, html=True)
def test_invalid_appindex_url(self):
"""
#21056 -- URL reversing shouldn't work for nonexistent apps.
"""
good_url = '/test_admin/admin/admin_views/'
confirm_good_url = reverse('admin:app_list',
kwargs={'app_label': 'admin_views'})
self.assertEqual(good_url, confirm_good_url)
with self.assertRaises(NoReverseMatch):
reverse('admin:app_list', kwargs={'app_label': 'this_should_fail'})
with self.assertRaises(NoReverseMatch):
reverse('admin:app_list', args=('admin_views2',))
def test_resolve_admin_views(self):
index_match = resolve('/test_admin/admin4/')
list_match = resolve('/test_admin/admin4/auth/user/')
self.assertIs(index_match.func.admin_site, customadmin.simple_site)
self.assertIsInstance(list_match.func.model_admin, customadmin.CustomPwdTemplateUserAdmin)
def test_adminsite_display_site_url(self):
"""
#13749 - Admin should display link to front-end site 'View site'
"""
url = reverse('admin:index')
response = self.client.get(url)
self.assertEqual(response.context['site_url'], '/my-site-url/')
self.assertContains(response, '<a href="/my-site-url/">View site</a>')
@override_settings(TIME_ZONE='America/Sao_Paulo', USE_TZ=True)
def test_date_hierarchy_timezone_dst(self):
# This datetime doesn't exist in this timezone due to DST.
date = pytz.timezone('America/Sao_Paulo').localize(datetime.datetime(2016, 10, 16, 15), is_dst=None)
q = Question.objects.create(question='Why?', expires=date)
Answer2.objects.create(question=q, answer='Because.')
response = self.client.get(reverse('admin:admin_views_answer2_changelist'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'question__expires__day=16')
self.assertContains(response, 'question__expires__month=10')
self.assertContains(response, 'question__expires__year=2016')
def test_sortable_by_columns_subset(self):
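        # Only the columns listed in the ModelAdmin's sortable_by (presumably set on
        # admin6's Article registration) should render as sortable headers.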
expected_sortable_fields = ('date', 'callable_year')
expected_not_sortable_fields = (
'content', 'model_year', 'modeladmin_year', 'model_year_reversed',
'section',
)
response = self.client.get(reverse('admin6:admin_views_article_changelist'))
for field_name in expected_sortable_fields:
self.assertContains(response, '<th scope="col" class="sortable column-%s">' % field_name)
for field_name in expected_not_sortable_fields:
self.assertContains(response, '<th scope="col" class="column-%s">' % field_name)
def test_get_sortable_by_columns_subset(self):
response = self.client.get(reverse('admin6:admin_views_actor_changelist'))
self.assertContains(response, '<th scope="col" class="sortable column-age">')
self.assertContains(response, '<th scope="col" class="column-name">')
def test_sortable_by_no_column(self):
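        # With (presumably) an empty sortable_by on admin6's Chapter registration, no
        # changelist column should render as sortable.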
expected_not_sortable_fields = ('title', 'book')
response = self.client.get(reverse('admin6:admin_views_chapter_changelist'))
for field_name in expected_not_sortable_fields:
self.assertContains(response, '<th scope="col" class="column-%s">' % field_name)
self.assertNotContains(response, '<th scope="col" class="sortable column')
def test_get_sortable_by_no_column(self):
response = self.client.get(reverse('admin6:admin_views_color_changelist'))
self.assertContains(response, '<th scope="col" class="column-value">')
self.assertNotContains(response, '<th scope="col" class="sortable column')
@override_settings(TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
# Put this app's and the shared tests templates dirs in DIRS to take precedence
# over the admin's templates dir.
'DIRS': [
os.path.join(os.path.dirname(__file__), 'templates'),
os.path.join(os.path.dirname(os.path.dirname(__file__)), 'templates'),
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
}])
class AdminCustomTemplateTests(AdminViewBasicTestCase):
def test_custom_model_admin_templates(self):
# Test custom change list template with custom extra context
response = self.client.get(reverse('admin:admin_views_customarticle_changelist'))
self.assertContains(response, "var hello = 'Hello!';")
self.assertTemplateUsed(response, 'custom_admin/change_list.html')
# Test custom add form template
response = self.client.get(reverse('admin:admin_views_customarticle_add'))
self.assertTemplateUsed(response, 'custom_admin/add_form.html')
# Add an article so we can test delete, change, and history views
post = self.client.post(reverse('admin:admin_views_customarticle_add'), {
'content': '<p>great article</p>',
'date_0': '2008-03-18',
'date_1': '10:54:39'
})
self.assertRedirects(post, reverse('admin:admin_views_customarticle_changelist'))
self.assertEqual(CustomArticle.objects.all().count(), 1)
article_pk = CustomArticle.objects.all()[0].pk
# Test custom delete, change, and object history templates
# Test custom change form template
response = self.client.get(reverse('admin:admin_views_customarticle_change', args=(article_pk,)))
self.assertTemplateUsed(response, 'custom_admin/change_form.html')
response = self.client.get(reverse('admin:admin_views_customarticle_delete', args=(article_pk,)))
self.assertTemplateUsed(response, 'custom_admin/delete_confirmation.html')
response = self.client.post(reverse('admin:admin_views_customarticle_changelist'), data={
'index': 0,
'action': ['delete_selected'],
'_selected_action': ['1'],
})
self.assertTemplateUsed(response, 'custom_admin/delete_selected_confirmation.html')
response = self.client.get(reverse('admin:admin_views_customarticle_history', args=(article_pk,)))
self.assertTemplateUsed(response, 'custom_admin/object_history.html')
# A custom popup response template may be specified by
# ModelAdmin.popup_response_template.
response = self.client.post(reverse('admin:admin_views_customarticle_add') + '?%s=1' % IS_POPUP_VAR, {
'content': '<p>great article</p>',
'date_0': '2008-03-18',
'date_1': '10:54:39',
IS_POPUP_VAR: '1'
})
self.assertEqual(response.template_name, 'custom_admin/popup_response.html')
def test_extended_bodyclass_template_change_form(self):
"""
The admin/change_form.html template uses block.super in the
bodyclass block.
"""
response = self.client.get(reverse('admin:admin_views_section_add'))
self.assertContains(response, 'bodyclass_consistency_check ')
def test_change_password_template(self):
user = User.objects.get(username='super')
response = self.client.get(reverse('admin:auth_user_password_change', args=(user.id,)))
# The auth/user/change_password.html template uses super in the
# bodyclass block.
self.assertContains(response, 'bodyclass_consistency_check ')
# When a site has multiple passwords in the browser's password manager,
        # a browser pop-up asks which user the new password is for. To prevent
# this, the username is added to the change password form.
self.assertContains(response, '<input type="text" name="username" value="super" style="display: none">')
def test_extended_bodyclass_template_index(self):
"""
The admin/index.html template uses block.super in the bodyclass block.
"""
response = self.client.get(reverse('admin:index'))
self.assertContains(response, 'bodyclass_consistency_check ')
def test_extended_bodyclass_change_list(self):
"""
        The admin/change_list.html template uses block.super
in the bodyclass block.
"""
response = self.client.get(reverse('admin:admin_views_article_changelist'))
self.assertContains(response, 'bodyclass_consistency_check ')
def test_extended_bodyclass_template_login(self):
"""
The admin/login.html template uses block.super in the
bodyclass block.
"""
self.client.logout()
response = self.client.get(reverse('admin:login'))
self.assertContains(response, 'bodyclass_consistency_check ')
def test_extended_bodyclass_template_delete_confirmation(self):
"""
The admin/delete_confirmation.html template uses
block.super in the bodyclass block.
"""
group = Group.objects.create(name="foogroup")
response = self.client.get(reverse('admin:auth_group_delete', args=(group.id,)))
self.assertContains(response, 'bodyclass_consistency_check ')
def test_extended_bodyclass_template_delete_selected_confirmation(self):
"""
The admin/delete_selected_confirmation.html template uses
block.super in bodyclass block.
"""
group = Group.objects.create(name="foogroup")
post_data = {
'action': 'delete_selected',
'selected_across': '0',
'index': '0',
'_selected_action': group.id
}
response = self.client.post(reverse('admin:auth_group_changelist'), post_data)
self.assertEqual(response.context['site_header'], 'Django administration')
self.assertContains(response, 'bodyclass_consistency_check ')
def test_filter_with_custom_template(self):
"""
A custom template can be used to render an admin filter.
"""
response = self.client.get(reverse('admin:admin_views_color2_changelist'))
self.assertTemplateUsed(response, 'custom_filter_template.html')
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminViewFormUrlTest(TestCase):
current_app = "admin3"
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
cls.s1 = Section.objects.create(name='Test section')
cls.a1 = Article.objects.create(
content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
)
cls.a2 = Article.objects.create(
content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
)
cls.a3 = Article.objects.create(
content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
)
cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
def setUp(self):
self.client.force_login(self.superuser)
def test_change_form_URL_has_correct_value(self):
"""
change_view has form_url in response.context
"""
response = self.client.get(
reverse('admin:admin_views_section_change', args=(self.s1.pk,), current_app=self.current_app)
)
self.assertIn('form_url', response.context, msg='form_url not present in response.context')
self.assertEqual(response.context['form_url'], 'pony')
def test_initial_data_can_be_overridden(self):
"""
The behavior for setting initial form data can be overridden in the
ModelAdmin class. Usually, the initial value is set via the GET params.
"""
response = self.client.get(
reverse('admin:admin_views_restaurant_add', current_app=self.current_app),
{'name': 'test_value'}
)
        # this would be the usual behavior
self.assertNotContains(response, 'value="test_value"')
        # this is the overridden behavior
self.assertContains(response, 'value="overridden_value"')
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminJavaScriptTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
def setUp(self):
self.client.force_login(self.superuser)
def test_js_minified_only_if_debug_is_false(self):
"""
The minified versions of the JS files are only used when DEBUG is False.
"""
with override_settings(DEBUG=False):
response = self.client.get(reverse('admin:admin_views_section_add'))
self.assertNotContains(response, 'vendor/jquery/jquery.js')
self.assertContains(response, 'vendor/jquery/jquery.min.js')
self.assertNotContains(response, 'prepopulate.js')
self.assertContains(response, 'prepopulate.min.js')
self.assertNotContains(response, 'actions.js')
self.assertContains(response, 'actions.min.js')
self.assertNotContains(response, 'collapse.js')
self.assertContains(response, 'collapse.min.js')
self.assertNotContains(response, 'inlines.js')
self.assertContains(response, 'inlines.min.js')
with override_settings(DEBUG=True):
response = self.client.get(reverse('admin:admin_views_section_add'))
self.assertContains(response, 'vendor/jquery/jquery.js')
self.assertNotContains(response, 'vendor/jquery/jquery.min.js')
self.assertContains(response, 'prepopulate.js')
self.assertNotContains(response, 'prepopulate.min.js')
self.assertContains(response, 'actions.js')
self.assertNotContains(response, 'actions.min.js')
self.assertContains(response, 'collapse.js')
self.assertNotContains(response, 'collapse.min.js')
self.assertContains(response, 'inlines.js')
self.assertNotContains(response, 'inlines.min.js')
@override_settings(ROOT_URLCONF='admin_views.urls')
class SaveAsTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True)
def setUp(self):
self.client.force_login(self.superuser)
def test_save_as_duplication(self):
"""'save as' creates a new person"""
post_data = {'_saveasnew': '', 'name': 'John M', 'gender': 1, 'age': 42}
response = self.client.post(reverse('admin:admin_views_person_change', args=(self.per1.pk,)), post_data)
self.assertEqual(len(Person.objects.filter(name='John M')), 1)
self.assertEqual(len(Person.objects.filter(id=self.per1.pk)), 1)
new_person = Person.objects.latest('id')
self.assertRedirects(response, reverse('admin:admin_views_person_change', args=(new_person.pk,)))
def test_save_as_continue_false(self):
"""
Saving a new object using "Save as new" redirects to the changelist
instead of the change view when ModelAdmin.save_as_continue=False.
"""
post_data = {'_saveasnew': '', 'name': 'John M', 'gender': 1, 'age': 42}
url = reverse('admin:admin_views_person_change', args=(self.per1.pk,), current_app=site2.name)
response = self.client.post(url, post_data)
self.assertEqual(len(Person.objects.filter(name='John M')), 1)
self.assertEqual(len(Person.objects.filter(id=self.per1.pk)), 1)
self.assertRedirects(response, reverse('admin:admin_views_person_changelist', current_app=site2.name))
def test_save_as_new_with_validation_errors(self):
"""
When you click "Save as new" and have a validation error,
you only see the "Save as new" button and not the other save buttons,
and that only the "Save as" button is visible.
"""
response = self.client.post(reverse('admin:admin_views_person_change', args=(self.per1.pk,)), {
'_saveasnew': '',
'gender': 'invalid',
'_addanother': 'fail',
})
self.assertContains(response, 'Please correct the errors below.')
self.assertFalse(response.context['show_save_and_add_another'])
self.assertFalse(response.context['show_save_and_continue'])
self.assertTrue(response.context['show_save_as_new'])
def test_save_as_new_with_validation_errors_with_inlines(self):
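        # An invalid parent field ('_invalid') with valid inline data should redisplay
        # the form showing only the "Save as new" button.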
parent = Parent.objects.create(name='Father')
child = Child.objects.create(parent=parent, name='Child')
response = self.client.post(reverse('admin:admin_views_parent_change', args=(parent.pk,)), {
'_saveasnew': 'Save as new',
'child_set-0-parent': parent.pk,
'child_set-0-id': child.pk,
'child_set-0-name': 'Child',
'child_set-INITIAL_FORMS': 1,
'child_set-MAX_NUM_FORMS': 1000,
'child_set-MIN_NUM_FORMS': 0,
'child_set-TOTAL_FORMS': 4,
'name': '_invalid',
})
self.assertContains(response, 'Please correct the error below.')
self.assertFalse(response.context['show_save_and_add_another'])
self.assertFalse(response.context['show_save_and_continue'])
self.assertTrue(response.context['show_save_as_new'])
def test_save_as_new_with_inlines_with_validation_errors(self):
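        # Valid parent data but an invalid inline ('_invalid' child name) should also
        # redisplay the form with only the "Save as new" button.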
parent = Parent.objects.create(name='Father')
child = Child.objects.create(parent=parent, name='Child')
response = self.client.post(reverse('admin:admin_views_parent_change', args=(parent.pk,)), {
'_saveasnew': 'Save as new',
'child_set-0-parent': parent.pk,
'child_set-0-id': child.pk,
'child_set-0-name': '_invalid',
'child_set-INITIAL_FORMS': 1,
'child_set-MAX_NUM_FORMS': 1000,
'child_set-MIN_NUM_FORMS': 0,
'child_set-TOTAL_FORMS': 4,
'name': 'Father',
})
self.assertContains(response, 'Please correct the error below.')
self.assertFalse(response.context['show_save_and_add_another'])
self.assertFalse(response.context['show_save_and_continue'])
self.assertTrue(response.context['show_save_as_new'])
@override_settings(ROOT_URLCONF='admin_views.urls')
class CustomModelAdminTest(AdminViewBasicTestCase):
def test_custom_admin_site_login_form(self):
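        # admin2 is presumably configured with a custom login form; a failed login
        # should render the form's custom error message and its Media CSS.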
self.client.logout()
response = self.client.get(reverse('admin2:index'), follow=True)
self.assertIsInstance(response, TemplateResponse)
self.assertEqual(response.status_code, 200)
login = self.client.post(reverse('admin2:login'), {
REDIRECT_FIELD_NAME: reverse('admin2:index'),
'username': 'customform',
'password': 'secret',
}, follow=True)
self.assertIsInstance(login, TemplateResponse)
self.assertEqual(login.status_code, 200)
self.assertContains(login, 'custom form error')
self.assertContains(login, 'path/to/media.css')
def test_custom_admin_site_login_template(self):
self.client.logout()
response = self.client.get(reverse('admin2:index'), follow=True)
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, 'custom_admin/login.html')
self.assertContains(response, 'Hello from a custom login template')
def test_custom_admin_site_logout_template(self):
response = self.client.get(reverse('admin2:logout'))
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, 'custom_admin/logout.html')
self.assertContains(response, 'Hello from a custom logout template')
def test_custom_admin_site_index_view_and_template(self):
response = self.client.get(reverse('admin2:index'))
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, 'custom_admin/index.html')
self.assertContains(response, 'Hello from a custom index template *bar*')
def test_custom_admin_site_app_index_view_and_template(self):
response = self.client.get(reverse('admin2:app_list', args=('admin_views',)))
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, 'custom_admin/app_index.html')
self.assertContains(response, 'Hello from a custom app_index template')
def test_custom_admin_site_password_change_template(self):
response = self.client.get(reverse('admin2:password_change'))
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, 'custom_admin/password_change_form.html')
self.assertContains(response, 'Hello from a custom password change form template')
def test_custom_admin_site_password_change_with_extra_context(self):
response = self.client.get(reverse('admin2:password_change'))
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, 'custom_admin/password_change_form.html')
self.assertContains(response, 'eggs')
def test_custom_admin_site_password_change_done_template(self):
response = self.client.get(reverse('admin2:password_change_done'))
self.assertIsInstance(response, TemplateResponse)
self.assertTemplateUsed(response, 'custom_admin/password_change_done.html')
self.assertContains(response, 'Hello from a custom password change done template')
def test_custom_admin_site_view(self):
self.client.force_login(self.superuser)
response = self.client.get(reverse('admin2:my_view'))
self.assertEqual(response.content, b"Django is a magical pony!")
def test_pwd_change_custom_template(self):
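        # admin4's user admin (customadmin.CustomPwdTemplateUserAdmin, see
        # test_resolve_admin_views) uses a custom change-password template; the view
        # should still respond with 200.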
self.client.force_login(self.superuser)
su = User.objects.get(username='super')
response = self.client.get(reverse('admin4:auth_user_password_change', args=(su.pk,)))
self.assertEqual(response.status_code, 200)
def get_perm(Model, codename):
"""Return the permission object, for the Model"""
ct = ContentType.objects.get_for_model(Model, for_concrete_model=False)
return Permission.objects.get(content_type=ct, codename=codename)
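# Example usage (mirrors the permission setup in setUpTestData below); grants the
# 'view' permission on Article to a user:
#   user.user_permissions.add(get_perm(Article, get_permission_codename('view', Article._meta)))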
@override_settings(
ROOT_URLCONF='admin_views.urls',
# Test with the admin's documented list of required context processors.
TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
}],
)
class AdminViewPermissionsTest(TestCase):
"""Tests for Admin Views Permissions."""
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
cls.viewuser = User.objects.create_user(username='viewuser', password='secret', is_staff=True)
cls.adduser = User.objects.create_user(username='adduser', password='secret', is_staff=True)
cls.changeuser = User.objects.create_user(username='changeuser', password='secret', is_staff=True)
cls.deleteuser = User.objects.create_user(username='deleteuser', password='secret', is_staff=True)
cls.joepublicuser = User.objects.create_user(username='joepublic', password='secret')
cls.nostaffuser = User.objects.create_user(username='nostaff', password='secret')
cls.s1 = Section.objects.create(name='Test section')
cls.a1 = Article.objects.create(
content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1,
another_section=cls.s1,
)
cls.a2 = Article.objects.create(
content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
)
cls.a3 = Article.objects.create(
content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
)
cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
        # Set up permissions for the users who can view, add, change, and delete.
opts = Article._meta
# User who can view Articles
cls.viewuser.user_permissions.add(get_perm(Article, get_permission_codename('view', opts)))
# User who can add Articles
cls.adduser.user_permissions.add(get_perm(Article, get_permission_codename('add', opts)))
# User who can change Articles
cls.changeuser.user_permissions.add(get_perm(Article, get_permission_codename('change', opts)))
cls.nostaffuser.user_permissions.add(get_perm(Article, get_permission_codename('change', opts)))
# User who can delete Articles
cls.deleteuser.user_permissions.add(get_perm(Article, get_permission_codename('delete', opts)))
cls.deleteuser.user_permissions.add(get_perm(Section, get_permission_codename('delete', Section._meta)))
# login POST dicts
cls.index_url = reverse('admin:index')
cls.super_login = {
REDIRECT_FIELD_NAME: cls.index_url,
'username': 'super',
'password': 'secret',
}
cls.super_email_login = {
REDIRECT_FIELD_NAME: cls.index_url,
'username': '[email protected]',
'password': 'secret',
}
cls.super_email_bad_login = {
REDIRECT_FIELD_NAME: cls.index_url,
'username': '[email protected]',
'password': 'notsecret',
}
cls.adduser_login = {
REDIRECT_FIELD_NAME: cls.index_url,
'username': 'adduser',
'password': 'secret',
}
cls.changeuser_login = {
REDIRECT_FIELD_NAME: cls.index_url,
'username': 'changeuser',
'password': 'secret',
}
cls.deleteuser_login = {
REDIRECT_FIELD_NAME: cls.index_url,
'username': 'deleteuser',
'password': 'secret',
}
cls.nostaff_login = {
REDIRECT_FIELD_NAME: reverse('has_permission_admin:index'),
'username': 'nostaff',
'password': 'secret',
}
cls.joepublic_login = {
REDIRECT_FIELD_NAME: cls.index_url,
'username': 'joepublic',
'password': 'secret',
}
cls.viewuser_login = {
REDIRECT_FIELD_NAME: cls.index_url,
'username': 'viewuser',
'password': 'secret',
}
cls.no_username_login = {
REDIRECT_FIELD_NAME: cls.index_url,
'password': 'secret',
}
def test_login(self):
"""
Make sure only staff members can log in.
        Successful posts to the login page will redirect to the original URL.
Unsuccessful attempts will continue to render the login page with
a 200 status code.
"""
login_url = '%s?next=%s' % (reverse('admin:login'), reverse('admin:index'))
# Super User
response = self.client.get(self.index_url)
self.assertRedirects(response, login_url)
login = self.client.post(login_url, self.super_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
self.client.get(reverse('admin:logout'))
# Test if user enters email address
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.super_email_login)
self.assertContains(login, ERROR_MESSAGE)
# only correct passwords get a username hint
login = self.client.post(login_url, self.super_email_bad_login)
self.assertContains(login, ERROR_MESSAGE)
new_user = User(username='jondoe', password='secret', email='[email protected]')
new_user.save()
        # Ensure a login attempt doesn't return a 500 when multiple users share the same email address.
login = self.client.post(login_url, self.super_email_login)
self.assertContains(login, ERROR_MESSAGE)
# View User
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.viewuser_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
self.client.get(reverse('admin:logout'))
# Add User
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.adduser_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
self.client.get(reverse('admin:logout'))
# Change User
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.changeuser_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
self.client.get(reverse('admin:logout'))
# Delete User
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.deleteuser_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
self.client.get(reverse('admin:logout'))
        # A regular user should not be able to log in.
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.joepublic_login)
self.assertEqual(login.status_code, 200)
self.assertContains(login, ERROR_MESSAGE)
# Requests without username should not return 500 errors.
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
login = self.client.post(login_url, self.no_username_login)
self.assertEqual(login.status_code, 200)
self.assertFormError(login, 'form', 'username', ['This field is required.'])
def test_login_redirect_for_direct_get(self):
"""
Login redirect should be to the admin index page when going directly to
/admin/login/.
"""
response = self.client.get(reverse('admin:login'))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context[REDIRECT_FIELD_NAME], reverse('admin:index'))
def test_login_has_permission(self):
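        # has_permission_admin presumably overrides AdminSite.has_permission() so that a
        # non-staff user with permissions (nostaff) can log in, while a permissionless
        # user sees a "permission denied" message.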
        # A regular user should not be able to log in.
response = self.client.get(reverse('has_permission_admin:index'))
self.assertEqual(response.status_code, 302)
login = self.client.post(reverse('has_permission_admin:login'), self.joepublic_login)
self.assertEqual(login.status_code, 200)
self.assertContains(login, 'permission denied')
        # A user with permissions should be able to log in.
response = self.client.get(reverse('has_permission_admin:index'))
self.assertEqual(response.status_code, 302)
login = self.client.post(reverse('has_permission_admin:login'), self.nostaff_login)
self.assertRedirects(login, reverse('has_permission_admin:index'))
self.assertFalse(login.context)
self.client.get(reverse('has_permission_admin:logout'))
        # Staff users should be able to log in.
response = self.client.get(reverse('has_permission_admin:index'))
self.assertEqual(response.status_code, 302)
login = self.client.post(reverse('has_permission_admin:login'), {
REDIRECT_FIELD_NAME: reverse('has_permission_admin:index'),
'username': 'deleteuser',
'password': 'secret',
})
self.assertRedirects(login, reverse('has_permission_admin:index'))
self.assertFalse(login.context)
self.client.get(reverse('has_permission_admin:logout'))
def test_login_successfully_redirects_to_original_URL(self):
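        # When the next parameter carries a query string, a successful login should
        # redirect back to that exact URL, query string included.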
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
query_string = 'the-answer=42'
redirect_url = '%s?%s' % (self.index_url, query_string)
new_next = {REDIRECT_FIELD_NAME: redirect_url}
post_data = self.super_login.copy()
post_data.pop(REDIRECT_FIELD_NAME)
login = self.client.post(
'%s?%s' % (reverse('admin:login'), urlencode(new_next)),
post_data)
self.assertRedirects(login, redirect_url)
def test_double_login_is_not_allowed(self):
"""Regression test for #19327"""
login_url = '%s?next=%s' % (reverse('admin:login'), reverse('admin:index'))
response = self.client.get(self.index_url)
self.assertEqual(response.status_code, 302)
# Establish a valid admin session
login = self.client.post(login_url, self.super_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
# Logging in with non-admin user fails
login = self.client.post(login_url, self.joepublic_login)
self.assertEqual(login.status_code, 200)
self.assertContains(login, ERROR_MESSAGE)
# Establish a valid admin session
login = self.client.post(login_url, self.super_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
# Logging in with admin user while already logged in
login = self.client.post(login_url, self.super_login)
self.assertRedirects(login, self.index_url)
self.assertFalse(login.context)
self.client.get(reverse('admin:logout'))
def test_login_page_notice_for_non_staff_users(self):
"""
A logged-in non-staff user trying to access the admin index should be
presented with the login page and a hint indicating that the current
user doesn't have access to it.
"""
hint_template = 'You are authenticated as {}'
# Anonymous user should not be shown the hint
response = self.client.get(self.index_url, follow=True)
self.assertContains(response, 'login-form')
self.assertNotContains(response, hint_template.format(''), status_code=200)
# Non-staff user should be shown the hint
self.client.force_login(self.nostaffuser)
response = self.client.get(self.index_url, follow=True)
self.assertContains(response, 'login-form')
self.assertContains(response, hint_template.format(self.nostaffuser.username), status_code=200)
def test_add_view(self):
"""Test add view restricts access and actually adds items."""
add_dict = {
'title': 'Døm ikke',
'content': '<p>great article</p>',
'date_0': '2008-03-18', 'date_1': '10:54:39',
'section': self.s1.pk,
}
# Change User should not have access to add articles
self.client.force_login(self.changeuser)
# make sure the view removes test cookie
self.assertIs(self.client.session.test_cookie_worked(), False)
response = self.client.get(reverse('admin:admin_views_article_add'))
self.assertEqual(response.status_code, 403)
# Try POST just to make sure
post = self.client.post(reverse('admin:admin_views_article_add'), add_dict)
self.assertEqual(post.status_code, 403)
self.assertEqual(Article.objects.count(), 3)
self.client.get(reverse('admin:logout'))
# View User should not have access to add articles
self.client.force_login(self.viewuser)
response = self.client.get(reverse('admin:admin_views_article_add'))
self.assertEqual(response.status_code, 403)
# Try POST just to make sure
post = self.client.post(reverse('admin:admin_views_article_add'), add_dict)
self.assertEqual(post.status_code, 403)
self.assertEqual(Article.objects.count(), 3)
# Now give the user permission to add but not change.
self.viewuser.user_permissions.add(get_perm(Article, get_permission_codename('add', Article._meta)))
response = self.client.get(reverse('admin:admin_views_article_add'))
self.assertContains(response, '<input type="submit" value="Save and view" name="_continue">')
post = self.client.post(reverse('admin:admin_views_article_add'), add_dict, follow=False)
self.assertEqual(post.status_code, 302)
self.assertEqual(Article.objects.count(), 4)
article = Article.objects.latest('pk')
response = self.client.get(reverse('admin:admin_views_article_change', args=(article.pk,)))
self.assertContains(response, '<li class="success">The article "Døm ikke" was added successfully.</li>')
article.delete()
self.client.get(reverse('admin:logout'))
        # The add user may log in and POST to the add view, then is redirected to the admin index.
self.client.force_login(self.adduser)
addpage = self.client.get(reverse('admin:admin_views_article_add'))
change_list_link = '› <a href="%s">Articles</a>' % reverse('admin:admin_views_article_changelist')
self.assertNotContains(
addpage, change_list_link,
msg_prefix='User restricted to add permission is given link to change list view in breadcrumbs.'
)
post = self.client.post(reverse('admin:admin_views_article_add'), add_dict)
self.assertRedirects(post, self.index_url)
self.assertEqual(Article.objects.count(), 4)
self.assertEqual(len(mail.outbox), 2)
self.assertEqual(mail.outbox[0].subject, 'Greetings from a created object')
self.client.get(reverse('admin:logout'))
# The addition was logged correctly
addition_log = LogEntry.objects.all()[0]
new_article = Article.objects.last()
article_ct = ContentType.objects.get_for_model(Article)
self.assertEqual(addition_log.user_id, self.adduser.pk)
self.assertEqual(addition_log.content_type_id, article_ct.pk)
self.assertEqual(addition_log.object_id, str(new_article.pk))
self.assertEqual(addition_log.object_repr, "Døm ikke")
self.assertEqual(addition_log.action_flag, ADDITION)
self.assertEqual(addition_log.get_change_message(), "Added.")
        # The superuser can add too, but is redirected to the change list view.
self.client.force_login(self.superuser)
addpage = self.client.get(reverse('admin:admin_views_article_add'))
self.assertContains(
addpage, change_list_link,
msg_prefix='Unrestricted user is not given link to change list view in breadcrumbs.'
)
post = self.client.post(reverse('admin:admin_views_article_add'), add_dict)
self.assertRedirects(post, reverse('admin:admin_views_article_changelist'))
self.assertEqual(Article.objects.count(), 5)
self.client.get(reverse('admin:logout'))
        # Refs #8509 -- if a normal user is already logged in, it's possible to
        # log in again as the superuser without error.
self.client.force_login(self.joepublicuser)
        # Make sure that if the user's session expires, the data still persists.
self.client.force_login(self.superuser)
# make sure the view removes test cookie
self.assertIs(self.client.session.test_cookie_worked(), False)
@mock.patch('django.contrib.admin.options.InlineModelAdmin.has_change_permission')
def test_add_view_with_view_only_inlines(self, has_change_permission):
"""User with add permission to a section but view-only for inlines."""
self.viewuser.user_permissions.add(get_perm(Section, get_permission_codename('add', Section._meta)))
self.client.force_login(self.viewuser)
# Valid POST creates a new section.
data = {
'name': 'New obj',
'article_set-TOTAL_FORMS': 0,
'article_set-INITIAL_FORMS': 0,
}
response = self.client.post(reverse('admin:admin_views_section_add'), data)
self.assertRedirects(response, reverse('admin:index'))
self.assertEqual(Section.objects.latest('id').name, data['name'])
# InlineModelAdmin.has_change_permission()'s obj argument is always
# None during object add.
self.assertEqual([obj for (request, obj), _ in has_change_permission.call_args_list], [None, None])
def test_change_view(self):
"""Change view should restrict access and allow users to edit items."""
change_dict = {
'title': 'Ikke fordømt',
'content': '<p>edited article</p>',
'date_0': '2008-03-18', 'date_1': '10:54:39',
'section': self.s1.pk,
}
article_change_url = reverse('admin:admin_views_article_change', args=(self.a1.pk,))
article_changelist_url = reverse('admin:admin_views_article_changelist')
        # The add user should not be able to view the list of articles or change any of them.
self.client.force_login(self.adduser)
response = self.client.get(article_changelist_url)
self.assertEqual(response.status_code, 403)
response = self.client.get(article_change_url)
self.assertEqual(response.status_code, 403)
post = self.client.post(article_change_url, change_dict)
self.assertEqual(post.status_code, 403)
self.client.get(reverse('admin:logout'))
        # The view user should be able to view articles but not change any of them
        # (the POST can be sent, but no modification occurs).
self.client.force_login(self.viewuser)
response = self.client.get(article_changelist_url)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['title'], 'Select article to view')
response = self.client.get(article_change_url)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['title'], 'View article')
self.assertContains(response, '<label>Extra form field:</label>')
self.assertContains(response, '<a href="/test_admin/admin/admin_views/article/" class="closelink">Close</a>')
post = self.client.post(article_change_url, change_dict)
self.assertEqual(post.status_code, 302)
self.assertEqual(Article.objects.get(pk=self.a1.pk).content, '<p>Middle content</p>')
self.client.get(reverse('admin:logout'))
# change user can view all items and edit them
self.client.force_login(self.changeuser)
response = self.client.get(article_changelist_url)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['title'], 'Select article to change')
response = self.client.get(article_change_url)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['title'], 'Change article')
post = self.client.post(article_change_url, change_dict)
self.assertRedirects(post, article_changelist_url)
self.assertEqual(Article.objects.get(pk=self.a1.pk).content, '<p>edited article</p>')
        # One error in the form should produce the singular error message; multiple errors, the plural one.
change_dict['title'] = ''
post = self.client.post(article_change_url, change_dict)
self.assertContains(
post, 'Please correct the error below.',
msg_prefix='Singular error message not found in response to post with one error'
)
change_dict['content'] = ''
post = self.client.post(article_change_url, change_dict)
self.assertContains(
post, 'Please correct the errors below.',
msg_prefix='Plural error message not found in response to post with multiple errors'
)
self.client.get(reverse('admin:logout'))
# Test redirection when using row-level change permissions. Refs #11513.
r1 = RowLevelChangePermissionModel.objects.create(id=1, name="odd id")
r2 = RowLevelChangePermissionModel.objects.create(id=2, name="even id")
r3 = RowLevelChangePermissionModel.objects.create(id=3, name='odd id mult 3')
r6 = RowLevelChangePermissionModel.objects.create(id=6, name='even id mult 3')
change_url_1 = reverse('admin:admin_views_rowlevelchangepermissionmodel_change', args=(r1.pk,))
change_url_2 = reverse('admin:admin_views_rowlevelchangepermissionmodel_change', args=(r2.pk,))
change_url_3 = reverse('admin:admin_views_rowlevelchangepermissionmodel_change', args=(r3.pk,))
change_url_6 = reverse('admin:admin_views_rowlevelchangepermissionmodel_change', args=(r6.pk,))
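        # As asserted below: pk 1 (odd) is denied outright, pk 2 and pk 6 (even) can be
        # changed, and pk 3 can be viewed but its POST redirects without saving.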
logins = [self.superuser, self.viewuser, self.adduser, self.changeuser, self.deleteuser]
for login_user in logins:
with self.subTest(login_user.username):
self.client.force_login(login_user)
response = self.client.get(change_url_1)
self.assertEqual(response.status_code, 403)
response = self.client.post(change_url_1, {'name': 'changed'})
self.assertEqual(RowLevelChangePermissionModel.objects.get(id=1).name, 'odd id')
self.assertEqual(response.status_code, 403)
response = self.client.get(change_url_2)
self.assertEqual(response.status_code, 200)
response = self.client.post(change_url_2, {'name': 'changed'})
self.assertEqual(RowLevelChangePermissionModel.objects.get(id=2).name, 'changed')
self.assertRedirects(response, self.index_url)
response = self.client.get(change_url_3)
self.assertEqual(response.status_code, 200)
response = self.client.post(change_url_3, {'name': 'changed'})
self.assertRedirects(response, self.index_url)
self.assertEqual(RowLevelChangePermissionModel.objects.get(id=3).name, 'odd id mult 3')
response = self.client.get(change_url_6)
self.assertEqual(response.status_code, 200)
response = self.client.post(change_url_6, {'name': 'changed'})
self.assertEqual(RowLevelChangePermissionModel.objects.get(id=6).name, 'changed')
self.assertRedirects(response, self.index_url)
self.client.get(reverse('admin:logout'))
for login_user in [self.joepublicuser, self.nostaffuser]:
with self.subTest(login_user.username):
self.client.force_login(login_user)
response = self.client.get(change_url_1, follow=True)
self.assertContains(response, 'login-form')
response = self.client.post(change_url_1, {'name': 'changed'}, follow=True)
self.assertEqual(RowLevelChangePermissionModel.objects.get(id=1).name, 'odd id')
self.assertContains(response, 'login-form')
response = self.client.get(change_url_2, follow=True)
self.assertContains(response, 'login-form')
response = self.client.post(change_url_2, {'name': 'changed again'}, follow=True)
self.assertEqual(RowLevelChangePermissionModel.objects.get(id=2).name, 'changed')
self.assertContains(response, 'login-form')
self.client.get(reverse('admin:logout'))
def test_change_view_without_object_change_permission(self):
"""
The object should be read-only if the user has permission to view it
and change objects of that type but not to change the current object.
"""
change_url = reverse('admin9:admin_views_article_change', args=(self.a1.pk,))
self.client.force_login(self.viewuser)
response = self.client.get(change_url)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['title'], 'View article')
self.assertContains(response, '<a href="/test_admin/admin9/admin_views/article/" class="closelink">Close</a>')
def test_change_view_post_without_object_change_permission(self):
"""A POST redirects to changelist without modifications."""
change_dict = {
'title': 'Ikke fordømt',
'content': '<p>edited article</p>',
'date_0': '2008-03-18', 'date_1': '10:54:39',
'section': self.s1.pk,
}
change_url = reverse('admin10:admin_views_article_change', args=(self.a1.pk,))
changelist_url = reverse('admin10:admin_views_article_changelist')
self.client.force_login(self.viewuser)
response = self.client.post(change_url, change_dict)
self.assertRedirects(response, changelist_url)
self.assertEqual(Article.objects.get(pk=self.a1.pk).content, '<p>Middle content</p>')
def test_change_view_save_as_new(self):
"""
'Save as new' should raise PermissionDenied for users without the 'add'
permission.
"""
change_dict_save_as_new = {
'_saveasnew': 'Save as new',
'title': 'Ikke fordømt',
'content': '<p>edited article</p>',
'date_0': '2008-03-18', 'date_1': '10:54:39',
'section': self.s1.pk,
}
article_change_url = reverse('admin:admin_views_article_change', args=(self.a1.pk,))
# Add user can perform "Save as new".
article_count = Article.objects.count()
self.client.force_login(self.adduser)
post = self.client.post(article_change_url, change_dict_save_as_new)
self.assertRedirects(post, self.index_url)
self.assertEqual(Article.objects.count(), article_count + 1)
self.client.logout()
# Change user cannot perform "Save as new" (no 'add' permission).
article_count = Article.objects.count()
self.client.force_login(self.changeuser)
post = self.client.post(article_change_url, change_dict_save_as_new)
self.assertEqual(post.status_code, 403)
self.assertEqual(Article.objects.count(), article_count)
# User with both add and change permissions should be redirected to the
# change page for the newly created object.
article_count = Article.objects.count()
self.client.force_login(self.superuser)
post = self.client.post(article_change_url, change_dict_save_as_new)
self.assertEqual(Article.objects.count(), article_count + 1)
new_article = Article.objects.latest('id')
self.assertRedirects(post, reverse('admin:admin_views_article_change', args=(new_article.pk,)))
def test_change_view_with_view_only_inlines(self):
"""
User with change permission to a section but view-only for inlines.
"""
self.viewuser.user_permissions.add(get_perm(Section, get_permission_codename('change', Section._meta)))
self.client.force_login(self.viewuser)
# GET shows inlines.
response = self.client.get(reverse('admin:admin_views_section_change', args=(self.s1.pk,)))
self.assertEqual(len(response.context['inline_admin_formsets']), 1)
formset = response.context['inline_admin_formsets'][0]
self.assertEqual(len(formset.forms), 3)
# Valid POST changes the name.
data = {
'name': 'Can edit name with view-only inlines',
'article_set-TOTAL_FORMS': 3,
'article_set-INITIAL_FORMS': 3
}
response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), data)
self.assertRedirects(response, reverse('admin:admin_views_section_changelist'))
self.assertEqual(Section.objects.get(pk=self.s1.pk).name, data['name'])
# Invalid POST reshows inlines.
del data['name']
response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), data)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.context['inline_admin_formsets']), 1)
formset = response.context['inline_admin_formsets'][0]
self.assertEqual(len(formset.forms), 3)
def test_change_view_with_view_and_add_inlines(self):
"""User has view and add permissions on the inline model."""
self.viewuser.user_permissions.add(get_perm(Section, get_permission_codename('change', Section._meta)))
self.viewuser.user_permissions.add(get_perm(Article, get_permission_codename('add', Article._meta)))
self.client.force_login(self.viewuser)
# GET shows inlines.
response = self.client.get(reverse('admin:admin_views_section_change', args=(self.s1.pk,)))
self.assertEqual(len(response.context['inline_admin_formsets']), 1)
formset = response.context['inline_admin_formsets'][0]
self.assertEqual(len(formset.forms), 6)
# Valid POST creates a new article.
data = {
'name': 'Can edit name with view-only inlines',
'article_set-TOTAL_FORMS': 6,
'article_set-INITIAL_FORMS': 3,
'article_set-3-id': [''],
'article_set-3-title': ['A title'],
'article_set-3-content': ['Added content'],
'article_set-3-date_0': ['2008-3-18'],
'article_set-3-date_1': ['11:54:58'],
'article_set-3-section': [str(self.s1.pk)],
}
response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), data)
self.assertRedirects(response, reverse('admin:admin_views_section_changelist'))
self.assertEqual(Section.objects.get(pk=self.s1.pk).name, data['name'])
self.assertEqual(Article.objects.count(), 4)
# Invalid POST reshows inlines.
del data['name']
response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), data)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.context['inline_admin_formsets']), 1)
formset = response.context['inline_admin_formsets'][0]
self.assertEqual(len(formset.forms), 6)
def test_change_view_with_view_and_delete_inlines(self):
"""User has view and delete permissions on the inline model."""
self.viewuser.user_permissions.add(get_perm(Section, get_permission_codename('change', Section._meta)))
self.client.force_login(self.viewuser)
data = {
'name': 'Name is required.',
'article_set-TOTAL_FORMS': 6,
'article_set-INITIAL_FORMS': 3,
'article_set-0-id': [str(self.a1.pk)],
'article_set-0-DELETE': ['on'],
}
# Inline POST details are ignored without delete permission.
response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), data)
self.assertRedirects(response, reverse('admin:admin_views_section_changelist'))
self.assertEqual(Article.objects.count(), 3)
# Deletion successful when delete permission is added.
self.viewuser.user_permissions.add(get_perm(Article, get_permission_codename('delete', Article._meta)))
data = {
'name': 'Name is required.',
'article_set-TOTAL_FORMS': 6,
'article_set-INITIAL_FORMS': 3,
'article_set-0-id': [str(self.a1.pk)],
'article_set-0-DELETE': ['on'],
}
response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), data)
self.assertRedirects(response, reverse('admin:admin_views_section_changelist'))
self.assertEqual(Article.objects.count(), 2)
def test_delete_view(self):
"""Delete view should restrict access and actually delete items."""
delete_dict = {'post': 'yes'}
delete_url = reverse('admin:admin_views_article_delete', args=(self.a1.pk,))
# add user should not be able to delete articles
self.client.force_login(self.adduser)
response = self.client.get(delete_url)
self.assertEqual(response.status_code, 403)
post = self.client.post(delete_url, delete_dict)
self.assertEqual(post.status_code, 403)
self.assertEqual(Article.objects.count(), 3)
self.client.logout()
# view user should not be able to delete articles
self.client.force_login(self.viewuser)
response = self.client.get(delete_url)
self.assertEqual(response.status_code, 403)
post = self.client.post(delete_url, delete_dict)
self.assertEqual(post.status_code, 403)
self.assertEqual(Article.objects.count(), 3)
self.client.logout()
# Delete user can delete
self.client.force_login(self.deleteuser)
response = self.client.get(reverse('admin:admin_views_section_delete', args=(self.s1.pk,)))
self.assertContains(response, "<h2>Summary</h2>")
self.assertContains(response, "<li>Articles: 3</li>")
# test response contains link to related Article
self.assertContains(response, "admin_views/article/%s/" % self.a1.pk)
response = self.client.get(delete_url)
self.assertContains(response, "admin_views/article/%s/" % self.a1.pk)
self.assertContains(response, "<h2>Summary</h2>")
self.assertContains(response, "<li>Articles: 1</li>")
self.assertEqual(response.status_code, 200)
post = self.client.post(delete_url, delete_dict)
self.assertRedirects(post, self.index_url)
self.assertEqual(Article.objects.count(), 2)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, 'Greetings from a deleted object')
article_ct = ContentType.objects.get_for_model(Article)
logged = LogEntry.objects.get(content_type=article_ct, action_flag=DELETION)
self.assertEqual(logged.object_id, str(self.a1.pk))
def test_delete_view_with_no_default_permissions(self):
"""
The delete view allows users to delete collected objects without a
'delete' permission (ReadOnlyPizza.Meta.default_permissions is empty).
"""
pizza = ReadOnlyPizza.objects.create(name='Double Cheese')
delete_url = reverse('admin:admin_views_readonlypizza_delete', args=(pizza.pk,))
self.client.force_login(self.adduser)
response = self.client.get(delete_url)
self.assertContains(response, 'admin_views/readonlypizza/%s/' % pizza.pk)
self.assertContains(response, '<h2>Summary</h2>')
self.assertContains(response, '<li>Read only pizzas: 1</li>')
self.assertEqual(response.status_code, 200)
post = self.client.post(delete_url, {'post': 'yes'})
self.assertRedirects(post, reverse('admin:admin_views_readonlypizza_changelist'))
self.assertEqual(ReadOnlyPizza.objects.count(), 0)
def test_delete_view_nonexistent_obj(self):
self.client.force_login(self.deleteuser)
url = reverse('admin:admin_views_article_delete', args=('nonexistent',))
response = self.client.get(url, follow=True)
self.assertRedirects(response, reverse('admin:index'))
self.assertEqual(
[m.message for m in response.context['messages']],
["""article with ID "nonexistent" doesn't exist. Perhaps it was deleted?"""]
)
def test_history_view(self):
"""History view should restrict access."""
        # The add user should not have access to an article's history view.
self.client.force_login(self.adduser)
response = self.client.get(reverse('admin:admin_views_article_history', args=(self.a1.pk,)))
self.assertEqual(response.status_code, 403)
self.client.get(reverse('admin:logout'))
        # The view user can access the history view.
self.client.force_login(self.viewuser)
response = self.client.get(reverse('admin:admin_views_article_history', args=(self.a1.pk,)))
self.assertEqual(response.status_code, 200)
self.client.get(reverse('admin:logout'))
        # The change user can also access the history view.
self.client.force_login(self.changeuser)
response = self.client.get(reverse('admin:admin_views_article_history', args=(self.a1.pk,)))
self.assertEqual(response.status_code, 200)
# Test redirection when using row-level change permissions. Refs #11513.
rl1 = RowLevelChangePermissionModel.objects.create(name="odd id")
rl2 = RowLevelChangePermissionModel.objects.create(name="even id")
logins = [self.superuser, self.viewuser, self.adduser, self.changeuser, self.deleteuser]
for login_user in logins:
with self.subTest(login_user.username):
self.client.force_login(login_user)
url = reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl1.pk,))
response = self.client.get(url)
self.assertEqual(response.status_code, 403)
url = reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl2.pk,))
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
self.client.get(reverse('admin:logout'))
for login_user in [self.joepublicuser, self.nostaffuser]:
with self.subTest(login_user.username):
self.client.force_login(login_user)
url = reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl1.pk,))
response = self.client.get(url, follow=True)
self.assertContains(response, 'login-form')
url = reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl2.pk,))
response = self.client.get(url, follow=True)
self.assertContains(response, 'login-form')
self.client.get(reverse('admin:logout'))
def test_history_view_bad_url(self):
self.client.force_login(self.changeuser)
response = self.client.get(reverse('admin:admin_views_article_history', args=('foo',)), follow=True)
self.assertRedirects(response, reverse('admin:index'))
self.assertEqual(
[m.message for m in response.context['messages']],
["""article with ID "foo" doesn't exist. Perhaps it was deleted?"""]
)
def test_conditionally_show_add_section_link(self):
"""
The foreign key widget should only show the "add related" button if the
user has permission to add that related item.
"""
self.client.force_login(self.adduser)
# The user can't add sections yet, so they shouldn't see the "add section" link.
url = reverse('admin:admin_views_article_add')
add_link_text = 'add_id_section'
response = self.client.get(url)
self.assertNotContains(response, add_link_text)
# Allow the user to add sections too. Now they can see the "add section" link.
user = User.objects.get(username='adduser')
perm = get_perm(Section, get_permission_codename('add', Section._meta))
user.user_permissions.add(perm)
response = self.client.get(url)
self.assertContains(response, add_link_text)
def test_conditionally_show_change_section_link(self):
"""
The foreign key widget should only show the "change related" button if
the user has permission to change that related item.
"""
def get_change_related(response):
return response.context['adminform'].form.fields['section'].widget.can_change_related
self.client.force_login(self.adduser)
# The user can't change sections yet, so they shouldn't see the "change section" link.
url = reverse('admin:admin_views_article_add')
change_link_text = 'change_id_section'
response = self.client.get(url)
self.assertFalse(get_change_related(response))
self.assertNotContains(response, change_link_text)
# Allow the user to change sections too. Now they can see the "change section" link.
user = User.objects.get(username='adduser')
perm = get_perm(Section, get_permission_codename('change', Section._meta))
user.user_permissions.add(perm)
response = self.client.get(url)
self.assertTrue(get_change_related(response))
self.assertContains(response, change_link_text)
def test_conditionally_show_delete_section_link(self):
"""
The foreign key widget should only show the "delete related" button if
the user has permission to delete that related item.
"""
def get_delete_related(response):
return response.context['adminform'].form.fields['sub_section'].widget.can_delete_related
self.client.force_login(self.adduser)
# The user can't delete sections yet, so they shouldn't see the "delete section" link.
url = reverse('admin:admin_views_article_add')
delete_link_text = 'delete_id_sub_section'
response = self.client.get(url)
self.assertFalse(get_delete_related(response))
self.assertNotContains(response, delete_link_text)
# Allow the user to delete sections too. Now they can see the "delete section" link.
user = User.objects.get(username='adduser')
perm = get_perm(Section, get_permission_codename('delete', Section._meta))
user.user_permissions.add(perm)
response = self.client.get(url)
self.assertTrue(get_delete_related(response))
self.assertContains(response, delete_link_text)
def test_disabled_permissions_when_logged_in(self):
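        # Deactivating the logged-in superuser should immediately lock them out of the
        # admin and of the (presumably staff-protected) secure_view.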
self.client.force_login(self.superuser)
superuser = User.objects.get(username='super')
superuser.is_active = False
superuser.save()
response = self.client.get(self.index_url, follow=True)
self.assertContains(response, 'id="login-form"')
self.assertNotContains(response, 'Log out')
response = self.client.get(reverse('secure_view'), follow=True)
self.assertContains(response, 'id="login-form"')
def test_disabled_staff_permissions_when_logged_in(self):
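        # Removing is_staff from the logged-in superuser should likewise lock them out.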
self.client.force_login(self.superuser)
superuser = User.objects.get(username='super')
superuser.is_staff = False
superuser.save()
response = self.client.get(self.index_url, follow=True)
self.assertContains(response, 'id="login-form"')
self.assertNotContains(response, 'Log out')
response = self.client.get(reverse('secure_view'), follow=True)
self.assertContains(response, 'id="login-form"')
def test_app_list_permissions(self):
"""
If a user has no module perms, the app list returns a 404.
"""
opts = Article._meta
change_user = User.objects.get(username='changeuser')
permission = get_perm(Article, get_permission_codename('change', opts))
self.client.force_login(self.changeuser)
# the user has no module permissions
change_user.user_permissions.remove(permission)
response = self.client.get(reverse('admin:app_list', args=('admin_views',)))
self.assertEqual(response.status_code, 404)
# the user now has module permissions
change_user.user_permissions.add(permission)
response = self.client.get(reverse('admin:app_list', args=('admin_views',)))
self.assertEqual(response.status_code, 200)
def test_shortcut_view_only_available_to_staff(self):
"""
Only admin users should be able to use the admin shortcut view.
"""
model_ctype = ContentType.objects.get_for_model(ModelWithStringPrimaryKey)
obj = ModelWithStringPrimaryKey.objects.create(string_pk='foo')
shortcut_url = reverse('admin:view_on_site', args=(model_ctype.pk, obj.pk))
# Not logged in: we should see the login page.
response = self.client.get(shortcut_url, follow=True)
self.assertTemplateUsed(response, 'admin/login.html')
# Logged in? Redirect.
self.client.force_login(self.superuser)
response = self.client.get(shortcut_url, follow=False)
# Can't use self.assertRedirects() because User.get_absolute_url() is silly.
self.assertEqual(response.status_code, 302)
# The domain may differ depending on whether the contrib.sites tests also run.
self.assertRegex(response.url, 'http://(testserver|example.com)/dummy/foo/')
def test_has_module_permission(self):
"""
has_module_permission() returns True for all users who
have any permission for that module (view, add, change, or delete), so
that the module is displayed on the admin index page.
"""
self.client.force_login(self.superuser)
response = self.client.get(self.index_url)
self.assertContains(response, 'admin_views')
self.assertContains(response, 'Articles')
self.client.logout()
self.client.force_login(self.viewuser)
response = self.client.get(self.index_url)
self.assertContains(response, 'admin_views')
self.assertContains(response, 'Articles')
self.client.logout()
self.client.force_login(self.adduser)
response = self.client.get(self.index_url)
self.assertContains(response, 'admin_views')
self.assertContains(response, 'Articles')
self.client.logout()
self.client.force_login(self.changeuser)
response = self.client.get(self.index_url)
self.assertContains(response, 'admin_views')
self.assertContains(response, 'Articles')
self.client.logout()
self.client.force_login(self.deleteuser)
response = self.client.get(self.index_url)
self.assertContains(response, 'admin_views')
self.assertContains(response, 'Articles')
def test_overriding_has_module_permission(self):
"""
If has_module_permission() always returns False, the module shouldn't
be displayed on the admin index page for any users.
"""
articles = Article._meta.verbose_name_plural.title()
sections = Section._meta.verbose_name_plural.title()
index_url = reverse('admin7:index')
self.client.force_login(self.superuser)
response = self.client.get(index_url)
self.assertContains(response, sections)
self.assertNotContains(response, articles)
self.client.logout()
self.client.force_login(self.viewuser)
response = self.client.get(index_url)
self.assertNotContains(response, 'admin_views')
self.assertNotContains(response, articles)
self.client.logout()
self.client.force_login(self.adduser)
response = self.client.get(index_url)
self.assertNotContains(response, 'admin_views')
self.assertNotContains(response, articles)
self.client.logout()
self.client.force_login(self.changeuser)
response = self.client.get(index_url)
self.assertNotContains(response, 'admin_views')
self.assertNotContains(response, articles)
self.client.logout()
self.client.force_login(self.deleteuser)
response = self.client.get(index_url)
self.assertNotContains(response, articles)
# The app list displays Sections but not Articles, as the latter's
# ModelAdmin.has_module_permission() returns False.
self.client.force_login(self.superuser)
response = self.client.get(reverse('admin7:app_list', args=('admin_views',)))
self.assertContains(response, sections)
self.assertNotContains(response, articles)
def test_post_save_message_no_forbidden_links_visible(self):
"""
Post-save message shouldn't contain a link to the change form if the
user doesn't have the change permission.
"""
self.client.force_login(self.adduser)
# Emulate Article creation for user with add-only permission.
post_data = {
"title": "Fun & games",
"content": "Some content",
"date_0": "2015-10-31",
"date_1": "16:35:00",
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_article_add'), post_data, follow=True)
self.assertContains(
response,
'<li class="success">The article "Fun & games" was added successfully.</li>',
html=True
)
@override_settings(
ROOT_URLCONF='admin_views.urls',
TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
}],
)
class AdminViewProxyModelPermissionsTests(TestCase):
"""Tests for proxy models permissions in the admin."""
@classmethod
def setUpTestData(cls):
cls.viewuser = User.objects.create_user(username='viewuser', password='secret', is_staff=True)
cls.adduser = User.objects.create_user(username='adduser', password='secret', is_staff=True)
cls.changeuser = User.objects.create_user(username='changeuser', password='secret', is_staff=True)
cls.deleteuser = User.objects.create_user(username='deleteuser', password='secret', is_staff=True)
# Setup permissions.
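# Permissions are granted on UserProxy itself; proxy models carry their own
# permissions, and those are what the admin checks for the proxy's views.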
opts = UserProxy._meta
cls.viewuser.user_permissions.add(get_perm(UserProxy, get_permission_codename('view', opts)))
cls.adduser.user_permissions.add(get_perm(UserProxy, get_permission_codename('add', opts)))
cls.changeuser.user_permissions.add(get_perm(UserProxy, get_permission_codename('change', opts)))
cls.deleteuser.user_permissions.add(get_perm(UserProxy, get_permission_codename('delete', opts)))
# UserProxy instances.
cls.user_proxy = UserProxy.objects.create(username='user_proxy', password='secret')
def test_add(self):
self.client.force_login(self.adduser)
url = reverse('admin:admin_views_userproxy_add')
data = {
'username': 'can_add',
'password': 'secret',
'date_joined_0': '2019-01-15',
'date_joined_1': '16:59:10',
}
response = self.client.post(url, data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertTrue(UserProxy.objects.filter(username='can_add').exists())
def test_view(self):
self.client.force_login(self.viewuser)
response = self.client.get(reverse('admin:admin_views_userproxy_changelist'))
self.assertContains(response, '<h1>Select user proxy to view</h1>')
self.assertEqual(response.status_code, 200)
response = self.client.get(reverse('admin:admin_views_userproxy_change', args=(self.user_proxy.pk,)))
self.assertContains(response, '<h1>View user proxy</h1>')
self.assertContains(response, '<div class="readonly">user_proxy</div>')
def test_change(self):
self.client.force_login(self.changeuser)
data = {
'password': self.user_proxy.password,
'username': self.user_proxy.username,
'date_joined_0': self.user_proxy.date_joined.strftime('%Y-%m-%d'),
'date_joined_1': self.user_proxy.date_joined.strftime('%H:%M:%S'),
'first_name': 'first_name',
}
url = reverse('admin:admin_views_userproxy_change', args=(self.user_proxy.pk,))
response = self.client.post(url, data)
self.assertRedirects(response, reverse('admin:admin_views_userproxy_changelist'))
self.assertEqual(UserProxy.objects.get(pk=self.user_proxy.pk).first_name, 'first_name')
def test_delete(self):
self.client.force_login(self.deleteuser)
url = reverse('admin:admin_views_userproxy_delete', args=(self.user_proxy.pk,))
response = self.client.post(url, {'post': 'yes'}, follow=True)
self.assertEqual(response.status_code, 200)
self.assertFalse(UserProxy.objects.filter(pk=self.user_proxy.pk).exists())
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminViewsNoUrlTest(TestCase):
"""Regression test for #17333"""
@classmethod
def setUpTestData(cls):
# User who can change Reports
cls.changeuser = User.objects.create_user(username='changeuser', password='secret', is_staff=True)
cls.changeuser.user_permissions.add(get_perm(Report, get_permission_codename('change', Report._meta)))
def test_no_standard_modeladmin_urls(self):
"""Admin index views don't break when user's ModelAdmin removes standard urls"""
self.client.force_login(self.changeuser)
r = self.client.get(reverse('admin:index'))
# we shouldn't get a 500 error caused by a NoReverseMatch
self.assertEqual(r.status_code, 200)
self.client.get(reverse('admin:logout'))
@skipUnlessDBFeature('can_defer_constraint_checks')
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminViewDeletedObjectsTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
cls.deleteuser = User.objects.create_user(username='deleteuser', password='secret', is_staff=True)
cls.s1 = Section.objects.create(name='Test section')
cls.a1 = Article.objects.create(
content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
)
cls.a2 = Article.objects.create(
content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
)
cls.a3 = Article.objects.create(
content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
)
cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
cls.v1 = Villain.objects.create(name='Adam')
cls.v2 = Villain.objects.create(name='Sue')
cls.sv1 = SuperVillain.objects.create(name='Bob')
cls.pl1 = Plot.objects.create(name='World Domination', team_leader=cls.v1, contact=cls.v2)
cls.pl2 = Plot.objects.create(name='World Peace', team_leader=cls.v2, contact=cls.v2)
cls.pl3 = Plot.objects.create(name='Corn Conspiracy', team_leader=cls.v1, contact=cls.v1)
cls.pd1 = PlotDetails.objects.create(details='almost finished', plot=cls.pl1)
cls.sh1 = SecretHideout.objects.create(location='underground bunker', villain=cls.v1)
cls.sh2 = SecretHideout.objects.create(location='floating castle', villain=cls.sv1)
cls.ssh1 = SuperSecretHideout.objects.create(location='super floating castle!', supervillain=cls.sv1)
cls.cy1 = CyclicOne.objects.create(name='I am recursive', two_id=1)
cls.cy2 = CyclicTwo.objects.create(name='I am recursive too', one_id=1)
def setUp(self):
self.client.force_login(self.superuser)
def test_nesting(self):
"""
Objects should be nested to display the relationships that
cause them to be scheduled for deletion.
"""
pattern = re.compile(
r'<li>Plot: <a href="%s">World Domination</a>\s*<ul>\s*'
r'<li>Plot details: <a href="%s">almost finished</a>' % (
reverse('admin:admin_views_plot_change', args=(self.pl1.pk,)),
reverse('admin:admin_views_plotdetails_change', args=(self.pd1.pk,)),
)
)
response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v1.pk,)))
self.assertRegex(response.content.decode(), pattern)
def test_cyclic(self):
"""
Cyclic relationships should still cause each object to only be
listed once.
"""
one = '<li>Cyclic one: <a href="%s">I am recursive</a>' % (
reverse('admin:admin_views_cyclicone_change', args=(self.cy1.pk,)),
)
two = '<li>Cyclic two: <a href="%s">I am recursive too</a>' % (
reverse('admin:admin_views_cyclictwo_change', args=(self.cy2.pk,)),
)
response = self.client.get(reverse('admin:admin_views_cyclicone_delete', args=(self.cy1.pk,)))
self.assertContains(response, one, 1)
self.assertContains(response, two, 1)
def test_perms_needed(self):
self.client.logout()
delete_user = User.objects.get(username='deleteuser')
delete_user.user_permissions.add(get_perm(Plot, get_permission_codename('delete', Plot._meta)))
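# The user may delete Plots but not the PlotDetails that deletion would
# cascade to, so the confirmation page must list 'plot details' as a
# missing permission.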
self.client.force_login(self.deleteuser)
response = self.client.get(reverse('admin:admin_views_plot_delete', args=(self.pl1.pk,)))
self.assertContains(response, "your account doesn't have permission to delete the following types of objects")
self.assertContains(response, "<li>plot details</li>")
def test_protected(self):
q = Question.objects.create(question="Why?")
a1 = Answer.objects.create(question=q, answer="Because.")
a2 = Answer.objects.create(question=q, answer="Yes.")
response = self.client.get(reverse('admin:admin_views_question_delete', args=(q.pk,)))
self.assertContains(response, "would require deleting the following protected related objects")
self.assertContains(
response,
'<li>Answer: <a href="%s">Because.</a></li>' % reverse('admin:admin_views_answer_change', args=(a1.pk,))
)
self.assertContains(
response,
'<li>Answer: <a href="%s">Yes.</a></li>' % reverse('admin:admin_views_answer_change', args=(a2.pk,))
)
def test_post_delete_protected(self):
"""
A POST request to delete protected objects should display the page
which says the deletion is prohibited.
"""
q = Question.objects.create(question='Why?')
Answer.objects.create(question=q, answer='Because.')
response = self.client.post(reverse('admin:admin_views_question_delete', args=(q.pk,)), {'post': 'yes'})
self.assertEqual(Question.objects.count(), 1)
self.assertContains(response, "would require deleting the following protected related objects")
def test_not_registered(self):
should_contain = """<li>Secret hideout: underground bunker"""
response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v1.pk,)))
self.assertContains(response, should_contain, 1)
def test_multiple_fkeys_to_same_model(self):
"""
If a deleted object has two relationships from another model,
both of those should be followed in looking for related
objects to delete.
"""
should_contain = '<li>Plot: <a href="%s">World Domination</a>' % reverse(
'admin:admin_views_plot_change', args=(self.pl1.pk,)
)
response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v1.pk,)))
self.assertContains(response, should_contain)
response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v2.pk,)))
self.assertContains(response, should_contain)
def test_multiple_fkeys_to_same_instance(self):
"""
If a deleted object has two relationships pointing to it from
another object, the other object should still only be listed
once.
"""
should_contain = '<li>Plot: <a href="%s">World Peace</a></li>' % reverse(
'admin:admin_views_plot_change', args=(self.pl2.pk,)
)
response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v2.pk,)))
self.assertContains(response, should_contain, 1)
def test_inheritance(self):
"""
In the case of an inherited model, if either the child or
parent-model instance is deleted, both instances are listed
for deletion, as well as any relationships they have.
"""
should_contain = [
'<li>Villain: <a href="%s">Bob</a>' % reverse('admin:admin_views_villain_change', args=(self.sv1.pk,)),
'<li>Super villain: <a href="%s">Bob</a>' % reverse(
'admin:admin_views_supervillain_change', args=(self.sv1.pk,)
),
'<li>Secret hideout: floating castle',
'<li>Super secret hideout: super floating castle!',
]
response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.sv1.pk,)))
for should in should_contain:
self.assertContains(response, should, 1)
response = self.client.get(reverse('admin:admin_views_supervillain_delete', args=(self.sv1.pk,)))
for should in should_contain:
self.assertContains(response, should, 1)
def test_generic_relations(self):
"""
If a deleted object has GenericForeignKeys pointing to it,
those objects should be listed for deletion.
"""
plot = self.pl3
tag = FunkyTag.objects.create(content_object=plot, name='hott')
should_contain = '<li>Funky tag: <a href="%s">hott' % reverse(
'admin:admin_views_funkytag_change', args=(tag.id,))
response = self.client.get(reverse('admin:admin_views_plot_delete', args=(plot.pk,)))
self.assertContains(response, should_contain)
def test_generic_relations_with_related_query_name(self):
"""
If a deleted object has GenericForeignKey with
GenericRelation(related_query_name='...') pointing to it, those objects
should be listed for deletion.
"""
bookmark = Bookmark.objects.create(name='djangoproject')
tag = FunkyTag.objects.create(content_object=bookmark, name='django')
tag_url = reverse('admin:admin_views_funkytag_change', args=(tag.id,))
should_contain = '<li>Funky tag: <a href="%s">django' % tag_url
response = self.client.get(reverse('admin:admin_views_bookmark_delete', args=(bookmark.pk,)))
self.assertContains(response, should_contain)
def test_delete_view_uses_get_deleted_objects(self):
"""The delete view uses ModelAdmin.get_deleted_objects()."""
book = Book.objects.create(name='Test Book')
response = self.client.get(reverse('admin2:admin_views_book_delete', args=(book.pk,)))
# BookAdmin.get_deleted_objects() returns custom text.
self.assertContains(response, 'a deletable object')
@override_settings(ROOT_URLCONF='admin_views.urls')
class TestGenericRelations(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
cls.v1 = Villain.objects.create(name='Adam')
cls.pl3 = Plot.objects.create(name='Corn Conspiracy', team_leader=cls.v1, contact=cls.v1)
def setUp(self):
self.client.force_login(self.superuser)
def test_generic_content_object_in_list_display(self):
FunkyTag.objects.create(content_object=self.pl3, name='hott')
response = self.client.get(reverse('admin:admin_views_funkytag_changelist'))
self.assertContains(response, "%s</td>" % self.pl3)
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminViewStringPrimaryKeyTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
cls.s1 = Section.objects.create(name='Test section')
cls.a1 = Article.objects.create(
content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
)
cls.a2 = Article.objects.create(
content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
)
cls.a3 = Article.objects.create(
content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
)
cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
cls.pk = (
"abcdefghijklmnopqrstuvwxyz ABCDEFGHIJKLMNOPQRSTUVWXYZ 1234567890 "
r"""-_.!~*'() ;/?:@&=+$, <>#%" {}|\^[]`"""
)
cls.m1 = ModelWithStringPrimaryKey.objects.create(string_pk=cls.pk)
content_type_pk = ContentType.objects.get_for_model(ModelWithStringPrimaryKey).pk
user_pk = cls.superuser.pk
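# action_flag=2 is admin.models.CHANGE; the string pk doubles as both
# object_id and object_repr here.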
LogEntry.objects.log_action(user_pk, content_type_pk, cls.pk, cls.pk, 2, change_message='Changed something')
def setUp(self):
self.client.force_login(self.superuser)
def test_get_history_view(self):
"""
Retrieving the history for an object using the urlencoded form of its
primary key should work.
Refs #12349, #18550.
"""
response = self.client.get(reverse('admin:admin_views_modelwithstringprimarykey_history', args=(self.pk,)))
self.assertContains(response, escape(self.pk))
self.assertContains(response, 'Changed something')
self.assertEqual(response.status_code, 200)
def test_get_change_view(self):
"Retrieving the object using urlencoded form of primary key should work"
response = self.client.get(reverse('admin:admin_views_modelwithstringprimarykey_change', args=(self.pk,)))
self.assertContains(response, escape(self.pk))
self.assertEqual(response.status_code, 200)
def test_changelist_to_changeform_link(self):
"Link to the changeform of the object in changelist should use reverse() and be quoted -- #18072"
response = self.client.get(reverse('admin:admin_views_modelwithstringprimarykey_changelist'))
# this URL now comes through reverse(), thus url quoting and iri_to_uri encoding
pk_final_url = escape(iri_to_uri(quote(self.pk)))
change_url = reverse(
'admin:admin_views_modelwithstringprimarykey_change', args=('__fk__',)
).replace('__fk__', pk_final_url)
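# reverse() is called with a '__fk__' placeholder and the already-quoted pk
# is substituted afterwards, so the expected URL uses the admin's
# quote()/iri_to_uri() encoding rather than reverse()'s handling of the raw pk.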
should_contain = '<th class="field-__str__"><a href="%s">%s</a></th>' % (change_url, escape(self.pk))
self.assertContains(response, should_contain)
def test_recentactions_link(self):
"The link from the recent actions list referring to the changeform of the object should be quoted"
response = self.client.get(reverse('admin:index'))
link = reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(self.pk),))
should_contain = """<a href="%s">%s</a>""" % (escape(link), escape(self.pk))
self.assertContains(response, should_contain)
def test_deleteconfirmation_link(self):
"The link from the delete confirmation page referring back to the changeform of the object should be quoted"
url = reverse('admin:admin_views_modelwithstringprimarykey_delete', args=(quote(self.pk),))
response = self.client.get(url)
# this URL now comes through reverse(), thus url quoting and iri_to_uri encoding
change_url = reverse(
'admin:admin_views_modelwithstringprimarykey_change', args=('__fk__',)
).replace('__fk__', escape(iri_to_uri(quote(self.pk))))
should_contain = '<a href="%s">%s</a>' % (change_url, escape(self.pk))
self.assertContains(response, should_contain)
def test_url_conflicts_with_add(self):
"A model with a primary key that ends with add or is `add` should be visible"
add_model = ModelWithStringPrimaryKey.objects.create(pk="i have something to add")
add_model.save()
response = self.client.get(
reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(add_model.pk),))
)
should_contain = """<h1>Change model with string primary key</h1>"""
self.assertContains(response, should_contain)
add_model2 = ModelWithStringPrimaryKey.objects.create(pk="add")
add_url = reverse('admin:admin_views_modelwithstringprimarykey_add')
change_url = reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(add_model2.pk),))
self.assertNotEqual(add_url, change_url)
def test_url_conflicts_with_delete(self):
"A model with a primary key that ends with delete should be visible"
delete_model = ModelWithStringPrimaryKey(pk="delete")
delete_model.save()
response = self.client.get(
reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(delete_model.pk),))
)
should_contain = """<h1>Change model with string primary key</h1>"""
self.assertContains(response, should_contain)
def test_url_conflicts_with_history(self):
"A model with a primary key that ends with history should be visible"
history_model = ModelWithStringPrimaryKey(pk="history")
history_model.save()
response = self.client.get(
reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(history_model.pk),))
)
should_contain = """<h1>Change model with string primary key</h1>"""
self.assertContains(response, should_contain)
def test_shortcut_view_with_escaping(self):
"'View on site should' work properly with char fields"
model = ModelWithStringPrimaryKey(pk='abc_123')
model.save()
response = self.client.get(
reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(model.pk),))
)
should_contain = '/%s/" class="viewsitelink">' % model.pk
self.assertContains(response, should_contain)
def test_change_view_history_link(self):
"""Object history button link should work and contain the pk value quoted."""
url = reverse(
'admin:%s_modelwithstringprimarykey_change' % ModelWithStringPrimaryKey._meta.app_label,
args=(quote(self.pk),)
)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
expected_link = reverse(
'admin:%s_modelwithstringprimarykey_history' % ModelWithStringPrimaryKey._meta.app_label,
args=(quote(self.pk),)
)
self.assertContains(response, '<a href="%s" class="historylink"' % escape(expected_link))
def test_redirect_on_add_view_continue_button(self):
"""As soon as an object is added using "Save and continue editing"
button, the user should be redirected to the object's change_view.
In case primary key is a string containing some special characters
like slash or underscore, these characters must be escaped (see #22266)
"""
response = self.client.post(
reverse('admin:admin_views_modelwithstringprimarykey_add'),
{
'string_pk': '123/history',
"_continue": "1", # Save and continue editing
}
)
self.assertEqual(response.status_code, 302) # temporary redirect
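# admin.utils.quote() escapes '/' as '_2F', hence '123_2Fhistory' in the
# redirect target.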
self.assertIn('/123_2Fhistory/', response['location']) # PK is quoted
@override_settings(ROOT_URLCONF='admin_views.urls')
class SecureViewTests(TestCase):
"""
Test behavior of a view protected by the staff_member_required decorator.
"""
def test_secure_view_shows_login_if_not_logged_in(self):
secure_url = reverse('secure_view')
response = self.client.get(secure_url)
self.assertRedirects(response, '%s?next=%s' % (reverse('admin:login'), secure_url))
response = self.client.get(secure_url, follow=True)
self.assertTemplateUsed(response, 'admin/login.html')
self.assertEqual(response.context[REDIRECT_FIELD_NAME], secure_url)
def test_staff_member_required_decorator_works_with_argument(self):
"""
The staff_member_required decorator works with an argument
(redirect_field_name).
"""
secure_url = '/test_admin/admin/secure-view2/'
response = self.client.get(secure_url)
self.assertRedirects(response, '%s?myfield=%s' % (reverse('admin:login'), secure_url))
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminViewUnicodeTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
cls.b1 = Book.objects.create(name='Lærdommer')
cls.p1 = Promo.objects.create(name='<Promo for Lærdommer>', book=cls.b1)
cls.chap1 = Chapter.objects.create(
title='Norske bostaver æøå skaper problemer', content='<p>Svært frustrerende med UnicodeDecodeErro</p>',
book=cls.b1
)
cls.chap2 = Chapter.objects.create(
title='Kjærlighet', content='<p>La kjærligheten til de lidende seire.</p>', book=cls.b1)
cls.chap3 = Chapter.objects.create(title='Kjærlighet', content='<p>Noe innhold</p>', book=cls.b1)
cls.chap4 = ChapterXtra1.objects.create(chap=cls.chap1, xtra='<Xtra(1) Norske bostaver æøå skaper problemer>')
cls.chap5 = ChapterXtra1.objects.create(chap=cls.chap2, xtra='<Xtra(1) Kjærlighet>')
cls.chap6 = ChapterXtra1.objects.create(chap=cls.chap3, xtra='<Xtra(1) Kjærlighet>')
cls.chap7 = ChapterXtra2.objects.create(chap=cls.chap1, xtra='<Xtra(2) Norske bostaver æøå skaper problemer>')
cls.chap8 = ChapterXtra2.objects.create(chap=cls.chap2, xtra='<Xtra(2) Kjærlighet>')
cls.chap9 = ChapterXtra2.objects.create(chap=cls.chap3, xtra='<Xtra(2) Kjærlighet>')
def setUp(self):
self.client.force_login(self.superuser)
def test_unicode_edit(self):
"""
POST on edit_view handles non-ASCII characters.
"""
post_data = {
"name": "Test lærdommer",
# inline data
"chapter_set-TOTAL_FORMS": "6",
"chapter_set-INITIAL_FORMS": "3",
"chapter_set-MAX_NUM_FORMS": "0",
"chapter_set-0-id": self.chap1.pk,
"chapter_set-0-title": "Norske bostaver æøå skaper problemer",
"chapter_set-0-content": "<p>Svært frustrerende med UnicodeDecodeError</p>",
"chapter_set-1-id": self.chap2.id,
"chapter_set-1-title": "Kjærlighet.",
"chapter_set-1-content": "<p>La kjærligheten til de lidende seire.</p>",
"chapter_set-2-id": self.chap3.id,
"chapter_set-2-title": "Need a title.",
"chapter_set-2-content": "<p>Newest content</p>",
"chapter_set-3-id": "",
"chapter_set-3-title": "",
"chapter_set-3-content": "",
"chapter_set-4-id": "",
"chapter_set-4-title": "",
"chapter_set-4-content": "",
"chapter_set-5-id": "",
"chapter_set-5-title": "",
"chapter_set-5-content": "",
}
response = self.client.post(reverse('admin:admin_views_book_change', args=(self.b1.pk,)), post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_unicode_delete(self):
"""
The delete_view handles non-ASCII characters
"""
delete_dict = {'post': 'yes'}
delete_url = reverse('admin:admin_views_book_delete', args=(self.b1.pk,))
response = self.client.get(delete_url)
self.assertEqual(response.status_code, 200)
response = self.client.post(delete_url, delete_dict)
self.assertRedirects(response, reverse('admin:admin_views_book_changelist'))
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminViewListEditable(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
cls.s1 = Section.objects.create(name='Test section')
cls.a1 = Article.objects.create(
content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
)
cls.a2 = Article.objects.create(
content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
)
cls.a3 = Article.objects.create(
content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
)
cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True)
cls.per2 = Person.objects.create(name='Grace Hopper', gender=1, alive=False)
cls.per3 = Person.objects.create(name='Guido van Rossum', gender=1, alive=True)
def setUp(self):
self.client.force_login(self.superuser)
def test_inheritance(self):
Podcast.objects.create(name="This Week in Django", release_date=datetime.date.today())
response = self.client.get(reverse('admin:admin_views_podcast_changelist'))
self.assertEqual(response.status_code, 200)
def test_inheritance_2(self):
Vodcast.objects.create(name="This Week in Django", released=True)
response = self.client.get(reverse('admin:admin_views_vodcast_changelist'))
self.assertEqual(response.status_code, 200)
def test_custom_pk(self):
Language.objects.create(iso='en', name='English', english_name='English')
response = self.client.get(reverse('admin:admin_views_language_changelist'))
self.assertEqual(response.status_code, 200)
def test_changelist_input_html(self):
response = self.client.get(reverse('admin:admin_views_person_changelist'))
# 2 inputs per object (the field and the hidden id field) = 6
# 4 management hidden fields = 4
# 4 action inputs (3 regular checkboxes, 1 checkbox to select all)
# main form submit button = 1
# search field and search submit button = 2
# CSRF field = 1
# field to track 'select all' across paginated views = 1
# 6 + 4 + 4 + 1 + 2 + 1 + 1 = 19 inputs
self.assertContains(response, "<input", count=19)
# 1 select per object (3) + 1 action select = 4 selects
self.assertContains(response, "<select", count=4)
def test_post_messages(self):
# Ticket 12707: Saving list editable fields shouldn't show admin
# action warnings.
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": "%s" % self.per1.pk,
"form-1-gender": "2",
"form-1-id": "%s" % self.per2.pk,
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": "%s" % self.per3.pk,
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_person_changelist'),
data, follow=True)
self.assertEqual(len(response.context['messages']), 1)
def test_post_submission(self):
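# Unchecked checkboxes aren't submitted by browsers, so omitting
# 'form-0-alive' from the POST data saves John Mauchly as not alive.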
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": "%s" % self.per1.pk,
"form-1-gender": "2",
"form-1-id": "%s" % self.per2.pk,
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": "%s" % self.per3.pk,
"_save": "Save",
}
self.client.post(reverse('admin:admin_views_person_changelist'), data)
self.assertIs(Person.objects.get(name="John Mauchly").alive, False)
self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 2)
# test a filtered page
data = {
"form-TOTAL_FORMS": "2",
"form-INITIAL_FORMS": "2",
"form-MAX_NUM_FORMS": "0",
"form-0-id": "%s" % self.per1.pk,
"form-0-gender": "1",
"form-0-alive": "checked",
"form-1-id": "%s" % self.per3.pk,
"form-1-gender": "1",
"form-1-alive": "checked",
"_save": "Save",
}
self.client.post(reverse('admin:admin_views_person_changelist') + '?gender__exact=1', data)
self.assertIs(Person.objects.get(name="John Mauchly").alive, True)
# test a searched page
data = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "1",
"form-MAX_NUM_FORMS": "0",
"form-0-id": "%s" % self.per1.pk,
"form-0-gender": "1",
"_save": "Save",
}
self.client.post(reverse('admin:admin_views_person_changelist') + '?q=john', data)
self.assertIs(Person.objects.get(name="John Mauchly").alive, False)
def test_non_field_errors(self):
"""
Non-field errors are displayed for each of the forms in the
changelist's formset.
"""
fd1 = FoodDelivery.objects.create(reference='123', driver='bill', restaurant='thai')
fd2 = FoodDelivery.objects.create(reference='456', driver='bill', restaurant='india')
fd3 = FoodDelivery.objects.create(reference='789', driver='bill', restaurant='pizza')
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(fd1.id),
"form-0-reference": "123",
"form-0-driver": "bill",
"form-0-restaurant": "thai",
# Same data as above: Forbidden because of unique_together!
"form-1-id": str(fd2.id),
"form-1-reference": "456",
"form-1-driver": "bill",
"form-1-restaurant": "thai",
"form-2-id": str(fd3.id),
"form-2-reference": "789",
"form-2-driver": "bill",
"form-2-restaurant": "pizza",
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_fooddelivery_changelist'), data)
self.assertContains(
response,
'<tr><td colspan="4"><ul class="errorlist nonfield"><li>Food delivery '
'with this Driver and Restaurant already exists.</li></ul></td></tr>',
1,
html=True
)
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-id": str(fd1.id),
"form-0-reference": "123",
"form-0-driver": "bill",
"form-0-restaurant": "thai",
# Same data as above: Forbidden because of unique_together!
"form-1-id": str(fd2.id),
"form-1-reference": "456",
"form-1-driver": "bill",
"form-1-restaurant": "thai",
# Same data also.
"form-2-id": str(fd3.id),
"form-2-reference": "789",
"form-2-driver": "bill",
"form-2-restaurant": "thai",
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_fooddelivery_changelist'), data)
self.assertContains(
response,
'<tr><td colspan="4"><ul class="errorlist nonfield"><li>Food delivery '
'with this Driver and Restaurant already exists.</li></ul></td></tr>',
2,
html=True
)
def test_non_form_errors(self):
# test if non-form errors are handled; ticket #12716
data = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "1",
"form-MAX_NUM_FORMS": "0",
"form-0-id": "%s" % self.per2.pk,
"form-0-alive": "1",
"form-0-gender": "2",
# The form processing understands this as a list_editable "Save"
# and not an action "Go".
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_person_changelist'), data)
self.assertContains(response, "Grace is not a Zombie")
def test_non_form_errors_is_errorlist(self):
# test if non-form errors are correctly handled; ticket #12878
data = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "1",
"form-MAX_NUM_FORMS": "0",
"form-0-id": "%s" % self.per2.pk,
"form-0-alive": "1",
"form-0-gender": "2",
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_person_changelist'), data)
non_form_errors = response.context['cl'].formset.non_form_errors()
self.assertIsInstance(non_form_errors, ErrorList)
self.assertEqual(str(non_form_errors), str(ErrorList(["Grace is not a Zombie"])))
def test_list_editable_ordering(self):
collector = Collector.objects.create(id=1, name="Frederick Clegg")
Category.objects.create(id=1, order=1, collector=collector)
Category.objects.create(id=2, order=2, collector=collector)
Category.objects.create(id=3, order=0, collector=collector)
Category.objects.create(id=4, order=0, collector=collector)
# NB: The order values must be changed so that the items are reordered.
data = {
"form-TOTAL_FORMS": "4",
"form-INITIAL_FORMS": "4",
"form-MAX_NUM_FORMS": "0",
"form-0-order": "14",
"form-0-id": "1",
"form-0-collector": "1",
"form-1-order": "13",
"form-1-id": "2",
"form-1-collector": "1",
"form-2-order": "1",
"form-2-id": "3",
"form-2-collector": "1",
"form-3-order": "0",
"form-3-id": "4",
"form-3-collector": "1",
# The form processing understands this as a list_editable "Save"
# and not an action "Go".
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_category_changelist'), data)
# Successful post will redirect
self.assertEqual(response.status_code, 302)
# The order values have been applied to the right objects
self.assertEqual(Category.objects.get(id=1).order, 14)
self.assertEqual(Category.objects.get(id=2).order, 13)
self.assertEqual(Category.objects.get(id=3).order, 1)
self.assertEqual(Category.objects.get(id=4).order, 0)
def test_list_editable_pagination(self):
"""
Pagination works for list_editable items.
"""
UnorderedObject.objects.create(id=1, name='Unordered object #1')
UnorderedObject.objects.create(id=2, name='Unordered object #2')
UnorderedObject.objects.create(id=3, name='Unordered object #3')
response = self.client.get(reverse('admin:admin_views_unorderedobject_changelist'))
self.assertContains(response, 'Unordered object #3')
self.assertContains(response, 'Unordered object #2')
self.assertNotContains(response, 'Unordered object #1')
response = self.client.get(reverse('admin:admin_views_unorderedobject_changelist') + '?p=1')
self.assertNotContains(response, 'Unordered object #3')
self.assertNotContains(response, 'Unordered object #2')
self.assertContains(response, 'Unordered object #1')
def test_list_editable_action_submit(self):
# List editable changes should not be executed if the action "Go" button is
# used to submit the form.
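# 'index' is the name of the actions form's "Go" submit button; posting it
# without '_save' makes the admin run the selected action rather than save
# the list_editable changes.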
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": "1",
"form-1-gender": "2",
"form-1-id": "2",
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": "3",
"index": "0",
"_selected_action": ['3'],
"action": ['', 'delete_selected'],
}
self.client.post(reverse('admin:admin_views_person_changelist'), data)
self.assertIs(Person.objects.get(name="John Mauchly").alive, True)
self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 1)
def test_list_editable_action_choices(self):
# List editable changes should be executed if the "Save" button is
# used to submit the form - any action choices should be ignored.
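# When '_save' is present, the admin skips action handling entirely, so the
# 'delete_selected' choice below is ignored and the edits are saved.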
data = {
"form-TOTAL_FORMS": "3",
"form-INITIAL_FORMS": "3",
"form-MAX_NUM_FORMS": "0",
"form-0-gender": "1",
"form-0-id": "%s" % self.per1.pk,
"form-1-gender": "2",
"form-1-id": "%s" % self.per2.pk,
"form-2-alive": "checked",
"form-2-gender": "1",
"form-2-id": "%s" % self.per3.pk,
"_save": "Save",
"_selected_action": ['1'],
"action": ['', 'delete_selected'],
}
self.client.post(reverse('admin:admin_views_person_changelist'), data)
self.assertIs(Person.objects.get(name="John Mauchly").alive, False)
self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 2)
def test_list_editable_popup(self):
"""
Fields should not be list-editable in popups.
"""
response = self.client.get(reverse('admin:admin_views_person_changelist'))
self.assertNotEqual(response.context['cl'].list_editable, ())
response = self.client.get(reverse('admin:admin_views_person_changelist') + '?%s' % IS_POPUP_VAR)
self.assertEqual(response.context['cl'].list_editable, ())
def test_pk_hidden_fields(self):
"""
Hidden pk fields aren't displayed in the table body; their
corresponding human-readable value is displayed instead. The hidden pk
fields are rendered separately (outside the table) and only once.
"""
story1 = Story.objects.create(title='The adventures of Guido', content='Once upon a time in Djangoland...')
story2 = Story.objects.create(
title='Crouching Tiger, Hidden Python',
content='The Python was sneaking into...',
)
response = self.client.get(reverse('admin:admin_views_story_changelist'))
# Only one hidden field per object, rendered outside the table.
self.assertContains(response, 'id="id_form-0-id"', 1)
self.assertContains(response, 'id="id_form-1-id"', 1)
self.assertContains(
response,
'<div class="hiddenfields">\n'
'<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id">'
'<input type="hidden" name="form-1-id" value="%d" id="id_form-1-id">\n</div>'
% (story2.id, story1.id),
html=True
)
self.assertContains(response, '<td class="field-id">%d</td>' % story1.id, 1)
self.assertContains(response, '<td class="field-id">%d</td>' % story2.id, 1)
def test_pk_hidden_fields_with_list_display_links(self):
""" Similarly as test_pk_hidden_fields, but when the hidden pk fields are
referenced in list_display_links.
Refs #12475.
"""
story1 = OtherStory.objects.create(
title='The adventures of Guido',
content='Once upon a time in Djangoland...',
)
story2 = OtherStory.objects.create(
title='Crouching Tiger, Hidden Python',
content='The Python was sneaking into...',
)
link1 = reverse('admin:admin_views_otherstory_change', args=(story1.pk,))
link2 = reverse('admin:admin_views_otherstory_change', args=(story2.pk,))
response = self.client.get(reverse('admin:admin_views_otherstory_changelist'))
# Only one hidden field per object, rendered outside the table.
self.assertContains(response, 'id="id_form-0-id"', 1)
self.assertContains(response, 'id="id_form-1-id"', 1)
self.assertContains(
response,
'<div class="hiddenfields">\n'
'<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id">'
'<input type="hidden" name="form-1-id" value="%d" id="id_form-1-id">\n</div>'
% (story2.id, story1.id),
html=True
)
self.assertContains(response, '<th class="field-id"><a href="%s">%d</a></th>' % (link1, story1.id), 1)
self.assertContains(response, '<th class="field-id"><a href="%s">%d</a></th>' % (link2, story2.id), 1)
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminSearchTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
cls.joepublicuser = User.objects.create_user(username='joepublic', password='secret')
cls.s1 = Section.objects.create(name='Test section')
cls.a1 = Article.objects.create(
content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
)
cls.a2 = Article.objects.create(
content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
)
cls.a3 = Article.objects.create(
content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
)
cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True)
cls.per2 = Person.objects.create(name='Grace Hopper', gender=1, alive=False)
cls.per3 = Person.objects.create(name='Guido van Rossum', gender=1, alive=True)
cls.t1 = Recommender.objects.create()
cls.t2 = Recommendation.objects.create(the_recommender=cls.t1)
cls.t3 = Recommender.objects.create()
cls.t4 = Recommendation.objects.create(the_recommender=cls.t3)
cls.tt1 = TitleTranslation.objects.create(title=cls.t1, text='Bar')
cls.tt2 = TitleTranslation.objects.create(title=cls.t2, text='Foo')
cls.tt3 = TitleTranslation.objects.create(title=cls.t3, text='Few')
cls.tt4 = TitleTranslation.objects.create(title=cls.t4, text='Bas')
def setUp(self):
self.client.force_login(self.superuser)
def test_search_on_sibling_models(self):
"A search that mentions sibling models"
response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=bar')
# confirm the search returned 1 object
self.assertContains(response, "\n1 recommendation\n")
def test_with_fk_to_field(self):
"""
The to_field GET parameter is preserved when a search is performed.
Refs #10918.
"""
response = self.client.get(reverse('admin:auth_user_changelist') + '?q=joe&%s=id' % TO_FIELD_VAR)
self.assertContains(response, "\n1 user\n")
self.assertContains(response, '<input type="hidden" name="%s" value="id">' % TO_FIELD_VAR, html=True)
def test_exact_matches(self):
response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=bar')
# confirm the search returned one object
self.assertContains(response, "\n1 recommendation\n")
response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=ba')
# confirm the search returned zero objects
self.assertContains(response, "\n0 recommendations\n")
def test_beginning_matches(self):
response = self.client.get(reverse('admin:admin_views_person_changelist') + '?q=Gui')
# confirm the search returned one object
self.assertContains(response, "\n1 person\n")
self.assertContains(response, "Guido")
response = self.client.get(reverse('admin:admin_views_person_changelist') + '?q=uido')
# confirm the search returned zero objects
self.assertContains(response, "\n0 persons\n")
self.assertNotContains(response, "Guido")
def test_pluggable_search(self):
PluggableSearchPerson.objects.create(name="Bob", age=10)
PluggableSearchPerson.objects.create(name="Amy", age=20)
response = self.client.get(reverse('admin:admin_views_pluggablesearchperson_changelist') + '?q=Bob')
# confirm the search returned one object
self.assertContains(response, "\n1 pluggable search person\n")
self.assertContains(response, "Bob")
response = self.client.get(reverse('admin:admin_views_pluggablesearchperson_changelist') + '?q=20')
# confirm the search returned one object
self.assertContains(response, "\n1 pluggable search person\n")
self.assertContains(response, "Amy")
def test_reset_link(self):
"""
Test presence of reset link in search bar ("1 result (_x total_)").
"""
# 1 query for session + 1 for fetching user
# + 1 for filtered result + 1 for filtered count
# + 1 for total count
with self.assertNumQueries(5):
response = self.client.get(reverse('admin:admin_views_person_changelist') + '?q=Gui')
self.assertContains(
response,
"""<span class="small quiet">1 result (<a href="?">3 total</a>)</span>""",
html=True
)
def test_no_total_count(self):
"""
#8408 -- "Show all" should be displayed instead of the total count if
ModelAdmin.show_full_result_count is False.
"""
# 1 query for session + 1 for fetching user
# + 1 for filtered result + 1 for filtered count
with self.assertNumQueries(4):
response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=bar')
self.assertContains(
response,
"""<span class="small quiet">1 result (<a href="?">Show all</a>)</span>""",
html=True
)
self.assertTrue(response.context['cl'].show_admin_actions)
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminInheritedInlinesTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
def setUp(self):
self.client.force_login(self.superuser)
def test_inline(self):
"""
Inline models which inherit from a common parent are correctly handled.
"""
foo_user = "foo username"
bar_user = "bar username"
name_re = re.compile(b'name="(.*?)"')
# test the add case
response = self.client.get(reverse('admin:admin_views_persona_add'))
names = name_re.findall(response.content)
# make sure we have no duplicate HTML names
self.assertEqual(len(names), len(set(names)))
# test the add case
post_data = {
"name": "Test Name",
# inline data
"accounts-TOTAL_FORMS": "1",
"accounts-INITIAL_FORMS": "0",
"accounts-MAX_NUM_FORMS": "0",
"accounts-0-username": foo_user,
"accounts-2-TOTAL_FORMS": "1",
"accounts-2-INITIAL_FORMS": "0",
"accounts-2-MAX_NUM_FORMS": "0",
"accounts-2-0-username": bar_user,
}
response = self.client.post(reverse('admin:admin_views_persona_add'), post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
self.assertEqual(Persona.objects.count(), 1)
self.assertEqual(FooAccount.objects.count(), 1)
self.assertEqual(BarAccount.objects.count(), 1)
self.assertEqual(FooAccount.objects.all()[0].username, foo_user)
self.assertEqual(BarAccount.objects.all()[0].username, bar_user)
self.assertEqual(Persona.objects.all()[0].accounts.count(), 2)
persona_id = Persona.objects.all()[0].id
foo_id = FooAccount.objects.all()[0].id
bar_id = BarAccount.objects.all()[0].id
# test the edit case
response = self.client.get(reverse('admin:admin_views_persona_change', args=(persona_id,)))
names = name_re.findall(response.content)
# make sure we have no duplicate HTML names
self.assertEqual(len(names), len(set(names)))
post_data = {
"name": "Test Name",
"accounts-TOTAL_FORMS": "2",
"accounts-INITIAL_FORMS": "1",
"accounts-MAX_NUM_FORMS": "0",
"accounts-0-username": "%s-1" % foo_user,
"accounts-0-account_ptr": str(foo_id),
"accounts-0-persona": str(persona_id),
"accounts-2-TOTAL_FORMS": "2",
"accounts-2-INITIAL_FORMS": "1",
"accounts-2-MAX_NUM_FORMS": "0",
"accounts-2-0-username": "%s-1" % bar_user,
"accounts-2-0-account_ptr": str(bar_id),
"accounts-2-0-persona": str(persona_id),
}
response = self.client.post(reverse('admin:admin_views_persona_change', args=(persona_id,)), post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Persona.objects.count(), 1)
self.assertEqual(FooAccount.objects.count(), 1)
self.assertEqual(BarAccount.objects.count(), 1)
self.assertEqual(FooAccount.objects.all()[0].username, "%s-1" % foo_user)
self.assertEqual(BarAccount.objects.all()[0].username, "%s-1" % bar_user)
self.assertEqual(Persona.objects.all()[0].accounts.count(), 2)
@override_settings(ROOT_URLCONF='admin_views.urls')
class TestCustomChangeList(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
def setUp(self):
self.client.force_login(self.superuser)
def test_custom_changelist(self):
"""
Validate that a custom ChangeList class can be used (#9749)
"""
# Insert some data
post_data = {"name": "First Gadget"}
response = self.client.post(reverse('admin:admin_views_gadget_add'), post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
# Hit the page once to get messages out of the queue message list
response = self.client.get(reverse('admin:admin_views_gadget_changelist'))
# Data is still not visible on the page
response = self.client.get(reverse('admin:admin_views_gadget_changelist'))
self.assertEqual(response.status_code, 200)
self.assertNotContains(response, 'First Gadget')
@override_settings(ROOT_URLCONF='admin_views.urls')
class TestInlineNotEditable(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
def setUp(self):
self.client.force_login(self.superuser)
def test_GET_parent_add(self):
"""
A GET on the parent add view with a non-editable inline doesn't raise an error.
"""
response = self.client.get(reverse('admin:admin_views_parent_add'))
self.assertEqual(response.status_code, 200)
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminCustomQuerysetTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
cls.pks = [EmptyModel.objects.create().id for i in range(3)]
def setUp(self):
self.client.force_login(self.superuser)
self.super_login = {
REDIRECT_FIELD_NAME: reverse('admin:index'),
'username': 'super',
'password': 'secret',
}
def test_changelist_view(self):
response = self.client.get(reverse('admin:admin_views_emptymodel_changelist'))
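# EmptyModelAdmin presumably restricts get_queryset() to pk > 1, so only the
# later objects should be rendered in the changelist.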
for i in self.pks:
if i > 1:
self.assertContains(response, 'Primary key = %s' % i)
else:
self.assertNotContains(response, 'Primary key = %s' % i)
def test_changelist_view_count_queries(self):
# create 2 Person objects
Person.objects.create(name='person1', gender=1)
Person.objects.create(name='person2', gender=2)
changelist_url = reverse('admin:admin_views_person_changelist')
# 5 queries are expected: 1 for the session, 1 for the user,
# 2 for the counts and 1 for the objects on the page
with self.assertNumQueries(5):
resp = self.client.get(changelist_url)
self.assertEqual(resp.context['selection_note'], '0 of 2 selected')
self.assertEqual(resp.context['selection_note_all'], 'All 2 selected')
with self.assertNumQueries(5):
extra = {'q': 'not_in_name'}
resp = self.client.get(changelist_url, extra)
self.assertEqual(resp.context['selection_note'], '0 of 0 selected')
self.assertEqual(resp.context['selection_note_all'], 'All 0 selected')
with self.assertNumQueries(5):
extra = {'q': 'person'}
resp = self.client.get(changelist_url, extra)
self.assertEqual(resp.context['selection_note'], '0 of 2 selected')
self.assertEqual(resp.context['selection_note_all'], 'All 2 selected')
with self.assertNumQueries(5):
extra = {'gender__exact': '1'}
resp = self.client.get(changelist_url, extra)
self.assertEqual(resp.context['selection_note'], '0 of 1 selected')
self.assertEqual(resp.context['selection_note_all'], '1 selected')
def test_change_view(self):
for i in self.pks:
url = reverse('admin:admin_views_emptymodel_change', args=(i,))
response = self.client.get(url, follow=True)
if i > 1:
self.assertEqual(response.status_code, 200)
else:
self.assertRedirects(response, reverse('admin:index'))
self.assertEqual(
[m.message for m in response.context['messages']],
["""empty model with ID "1" doesn't exist. Perhaps it was deleted?"""]
)
def test_add_model_modeladmin_defer_qs(self):
# Test for #14529. defer() is used in ModelAdmin.get_queryset()
# model has __str__ method
self.assertEqual(CoverLetter.objects.count(), 0)
# Emulate model instance creation via the admin
post_data = {
"author": "Candidate, Best",
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_coverletter_add'), post_data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(CoverLetter.objects.count(), 1)
# Message should contain non-ugly model verbose name
pk = CoverLetter.objects.all()[0].pk
self.assertContains(
response,
'<li class="success">The cover letter "<a href="%s">'
'Candidate, Best</a>" was added successfully.</li>' %
reverse('admin:admin_views_coverletter_change', args=(pk,)), html=True
)
# model has no __str__ method
self.assertEqual(ShortMessage.objects.count(), 0)
# Emulate model instance creation via the admin
post_data = {
"content": "What's this SMS thing?",
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_shortmessage_add'), post_data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(ShortMessage.objects.count(), 1)
# Message should contain non-ugly model verbose name
sm = ShortMessage.objects.all()[0]
self.assertContains(
response,
'<li class="success">The short message "<a href="%s">'
'%s</a>" was added successfully.</li>' %
(reverse('admin:admin_views_shortmessage_change', args=(sm.pk,)), sm), html=True
)
def test_add_model_modeladmin_only_qs(self):
# Test for #14529. only() is used in ModelAdmin.get_queryset()
# model has __str__ method
self.assertEqual(Telegram.objects.count(), 0)
# Emulate model instance creation via the admin
post_data = {
"title": "Urgent telegram",
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_telegram_add'), post_data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(Telegram.objects.count(), 1)
# Message should contain non-ugly model verbose name
pk = Telegram.objects.all()[0].pk
self.assertContains(
response,
'<li class="success">The telegram "<a href="%s">'
'Urgent telegram</a>" was added successfully.</li>' %
reverse('admin:admin_views_telegram_change', args=(pk,)), html=True
)
# model has no __str__ method
self.assertEqual(Paper.objects.count(), 0)
# Emulate model instance creation via the admin
post_data = {
"title": "My Modified Paper Title",
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_paper_add'), post_data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(Paper.objects.count(), 1)
# Message should contain non-ugly model verbose name
p = Paper.objects.all()[0]
self.assertContains(
response,
'<li class="success">The paper "<a href="%s">'
'%s</a>" was added successfully.</li>' %
(reverse('admin:admin_views_paper_change', args=(p.pk,)), p), html=True
)
def test_edit_model_modeladmin_defer_qs(self):
# Test for #14529. defer() is used in ModelAdmin.get_queryset()
# model has __str__ method
cl = CoverLetter.objects.create(author="John Doe")
self.assertEqual(CoverLetter.objects.count(), 1)
response = self.client.get(reverse('admin:admin_views_coverletter_change', args=(cl.pk,)))
self.assertEqual(response.status_code, 200)
# Emulate model instance edit via the admin
post_data = {
"author": "John Doe II",
"_save": "Save",
}
url = reverse('admin:admin_views_coverletter_change', args=(cl.pk,))
response = self.client.post(url, post_data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(CoverLetter.objects.count(), 1)
# Message should contain non-ugly model verbose name. Instance
# representation is set by model's __str__()
self.assertContains(
response,
'<li class="success">The cover letter "<a href="%s">'
'John Doe II</a>" was changed successfully.</li>' %
reverse('admin:admin_views_coverletter_change', args=(cl.pk,)), html=True
)
# model has no __str__ method
sm = ShortMessage.objects.create(content="This is expensive")
self.assertEqual(ShortMessage.objects.count(), 1)
response = self.client.get(reverse('admin:admin_views_shortmessage_change', args=(sm.pk,)))
self.assertEqual(response.status_code, 200)
# Emulate model instance edit via the admin
post_data = {
"content": "Too expensive",
"_save": "Save",
}
url = reverse('admin:admin_views_shortmessage_change', args=(sm.pk,))
response = self.client.post(url, post_data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(ShortMessage.objects.count(), 1)
# Message should contain non-ugly model verbose name. The ugly(!)
# instance representation is set by __str__().
self.assertContains(
response,
'<li class="success">The short message "<a href="%s">'
'%s</a>" was changed successfully.</li>' %
(reverse('admin:admin_views_shortmessage_change', args=(sm.pk,)), sm), html=True
)
def test_edit_model_modeladmin_only_qs(self):
# Test for #14529. only() is used in ModelAdmin.get_queryset()
# model has __str__ method
t = Telegram.objects.create(title="First Telegram")
self.assertEqual(Telegram.objects.count(), 1)
response = self.client.get(reverse('admin:admin_views_telegram_change', args=(t.pk,)))
self.assertEqual(response.status_code, 200)
# Emulate model instance edit via the admin
post_data = {
"title": "Telegram without typo",
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_telegram_change', args=(t.pk,)), post_data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(Telegram.objects.count(), 1)
# Message should contain non-ugly model verbose name. The instance
# representation is set by model's __str__()
self.assertContains(
response,
'<li class="success">The telegram "<a href="%s">'
'Telegram without typo</a>" was changed successfully.</li>' %
reverse('admin:admin_views_telegram_change', args=(t.pk,)), html=True
)
# model has no __str__ method
p = Paper.objects.create(title="My Paper Title")
self.assertEqual(Paper.objects.count(), 1)
response = self.client.get(reverse('admin:admin_views_paper_change', args=(p.pk,)))
self.assertEqual(response.status_code, 200)
# Emulate model instance edit via the admin
post_data = {
"title": "My Modified Paper Title",
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_paper_change', args=(p.pk,)), post_data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(Paper.objects.count(), 1)
# Message should contain non-ugly model verbose name. The ugly(!)
# instance representation is set by __str__().
self.assertContains(
response,
'<li class="success">The paper "<a href="%s">'
'%s</a>" was changed successfully.</li>' %
(reverse('admin:admin_views_paper_change', args=(p.pk,)), p), html=True
)
def test_history_view_custom_qs(self):
"""
Custom querysets are considered for the admin history view.
"""
self.client.post(reverse('admin:login'), self.super_login)
FilteredManager.objects.create(pk=1)
FilteredManager.objects.create(pk=2)
response = self.client.get(reverse('admin:admin_views_filteredmanager_changelist'))
self.assertContains(response, "PK=1")
self.assertContains(response, "PK=2")
self.assertEqual(
self.client.get(reverse('admin:admin_views_filteredmanager_history', args=(1,))).status_code, 200
)
self.assertEqual(
self.client.get(reverse('admin:admin_views_filteredmanager_history', args=(2,))).status_code, 200
)
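# The #14529 tests above target ModelAdmins whose get_queryset() returns a
# queryset built with defer() or only(), so the add/change views and their
# success messages must cope with deferred model instances. A minimal sketch
# of such an override, assuming a hypothetical admin class (illustrative,
# not the suite's actual admin registrations):
#
#     from django.contrib import admin
#
#     class TelegramAdmin(admin.ModelAdmin):
#         def get_queryset(self, request):
#             # only() limits the immediate SELECT to the named fields (plus
#             # the primary key); other fields are loaded lazily on access.
#             # defer() is the complement: it names the fields to postpone.
#             return super().get_queryset(request).only('title')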
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminInlineFileUploadTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
file1 = tempfile.NamedTemporaryFile(suffix=".file1")
file1.write(b'a' * (2 ** 21))
filename = file1.name
file1.close()
cls.gallery = Gallery.objects.create(name='Test Gallery')
cls.picture = Picture.objects.create(
name='Test Picture',
image=filename,
gallery=cls.gallery,
)
def setUp(self):
self.client.force_login(self.superuser)
def test_form_has_multipart_enctype(self):
response = self.client.get(
reverse('admin:admin_views_gallery_change', args=(self.gallery.id,))
)
self.assertIs(response.context['has_file_field'], True)
self.assertContains(response, MULTIPART_ENCTYPE)
def test_inline_file_upload_edit_validation_error_post(self):
"""
Inline file uploads correctly display prior data (#10002).
"""
post_data = {
"name": "Test Gallery",
"pictures-TOTAL_FORMS": "2",
"pictures-INITIAL_FORMS": "1",
"pictures-MAX_NUM_FORMS": "0",
"pictures-0-id": str(self.picture.id),
"pictures-0-gallery": str(self.gallery.id),
"pictures-0-name": "Test Picture",
"pictures-0-image": "",
"pictures-1-id": "",
"pictures-1-gallery": str(self.gallery.id),
"pictures-1-name": "Test Picture 2",
"pictures-1-image": "",
}
response = self.client.post(
reverse('admin:admin_views_gallery_change', args=(self.gallery.id,)), post_data
)
self.assertContains(response, b"Currently")
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminInlineTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
cls.collector = Collector.objects.create(pk=1, name='John Fowles')
def setUp(self):
self.post_data = {
"name": "Test Name",
"widget_set-TOTAL_FORMS": "3",
"widget_set-INITIAL_FORMS": "0",
"widget_set-MAX_NUM_FORMS": "0",
"widget_set-0-id": "",
"widget_set-0-owner": "1",
"widget_set-0-name": "",
"widget_set-1-id": "",
"widget_set-1-owner": "1",
"widget_set-1-name": "",
"widget_set-2-id": "",
"widget_set-2-owner": "1",
"widget_set-2-name": "",
"doohickey_set-TOTAL_FORMS": "3",
"doohickey_set-INITIAL_FORMS": "0",
"doohickey_set-MAX_NUM_FORMS": "0",
"doohickey_set-0-owner": "1",
"doohickey_set-0-code": "",
"doohickey_set-0-name": "",
"doohickey_set-1-owner": "1",
"doohickey_set-1-code": "",
"doohickey_set-1-name": "",
"doohickey_set-2-owner": "1",
"doohickey_set-2-code": "",
"doohickey_set-2-name": "",
"grommet_set-TOTAL_FORMS": "3",
"grommet_set-INITIAL_FORMS": "0",
"grommet_set-MAX_NUM_FORMS": "0",
"grommet_set-0-code": "",
"grommet_set-0-owner": "1",
"grommet_set-0-name": "",
"grommet_set-1-code": "",
"grommet_set-1-owner": "1",
"grommet_set-1-name": "",
"grommet_set-2-code": "",
"grommet_set-2-owner": "1",
"grommet_set-2-name": "",
"whatsit_set-TOTAL_FORMS": "3",
"whatsit_set-INITIAL_FORMS": "0",
"whatsit_set-MAX_NUM_FORMS": "0",
"whatsit_set-0-owner": "1",
"whatsit_set-0-index": "",
"whatsit_set-0-name": "",
"whatsit_set-1-owner": "1",
"whatsit_set-1-index": "",
"whatsit_set-1-name": "",
"whatsit_set-2-owner": "1",
"whatsit_set-2-index": "",
"whatsit_set-2-name": "",
"fancydoodad_set-TOTAL_FORMS": "3",
"fancydoodad_set-INITIAL_FORMS": "0",
"fancydoodad_set-MAX_NUM_FORMS": "0",
"fancydoodad_set-0-doodad_ptr": "",
"fancydoodad_set-0-owner": "1",
"fancydoodad_set-0-name": "",
"fancydoodad_set-0-expensive": "on",
"fancydoodad_set-1-doodad_ptr": "",
"fancydoodad_set-1-owner": "1",
"fancydoodad_set-1-name": "",
"fancydoodad_set-1-expensive": "on",
"fancydoodad_set-2-doodad_ptr": "",
"fancydoodad_set-2-owner": "1",
"fancydoodad_set-2-name": "",
"fancydoodad_set-2-expensive": "on",
"category_set-TOTAL_FORMS": "3",
"category_set-INITIAL_FORMS": "0",
"category_set-MAX_NUM_FORMS": "0",
"category_set-0-order": "",
"category_set-0-id": "",
"category_set-0-collector": "1",
"category_set-1-order": "",
"category_set-1-id": "",
"category_set-1-collector": "1",
"category_set-2-order": "",
"category_set-2-id": "",
"category_set-2-collector": "1",
}
self.client.force_login(self.superuser)
def test_simple_inline(self):
"A simple model can be saved as inlines"
# First add a new inline
self.post_data['widget_set-0-name'] = "Widget 1"
collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Widget.objects.count(), 1)
self.assertEqual(Widget.objects.all()[0].name, "Widget 1")
widget_id = Widget.objects.all()[0].id
# The PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="widget_set-0-id"')
# No file or image fields, no enctype on the forms
self.assertIs(response.context['has_file_field'], False)
self.assertNotContains(response, MULTIPART_ENCTYPE)
# Now resave that inline
self.post_data['widget_set-INITIAL_FORMS'] = "1"
self.post_data['widget_set-0-id'] = str(widget_id)
self.post_data['widget_set-0-name'] = "Widget 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Widget.objects.count(), 1)
self.assertEqual(Widget.objects.all()[0].name, "Widget 1")
# Now modify that inline
self.post_data['widget_set-INITIAL_FORMS'] = "1"
self.post_data['widget_set-0-id'] = str(widget_id)
self.post_data['widget_set-0-name'] = "Widget 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Widget.objects.count(), 1)
self.assertEqual(Widget.objects.all()[0].name, "Widget 1 Updated")
def test_simple_inline_permissions(self):
"""
Inline objects can't be added or changed without the corresponding
add/change permission for the inline object.
"""
# User with view permission for Collectors and Widgets only.
permissionuser = User.objects.create_user(
username='permissionuser', password='secret',
email='[email protected]', is_staff=True,
)
permissionuser.user_permissions.add(get_perm(Collector, get_permission_codename('view', Collector._meta)))
permissionuser.user_permissions.add(get_perm(Widget, get_permission_codename('view', Widget._meta)))
self.client.force_login(permissionuser)
# Without add permission, a new inline can't be added.
self.post_data['widget_set-0-name'] = 'Widget 1'
collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Widget.objects.count(), 0)
# But after adding the add permission it can.
permissionuser.user_permissions.add(get_perm(Widget, get_permission_codename('add', Widget._meta)))
self.post_data['widget_set-0-name'] = "Widget 1"
collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Widget.objects.count(), 1)
self.assertEqual(Widget.objects.first().name, 'Widget 1')
widget_id = Widget.objects.first().id
# Without the change permission, a POST doesn't change the object.
self.post_data['widget_set-INITIAL_FORMS'] = '1'
self.post_data['widget_set-0-id'] = str(widget_id)
self.post_data['widget_set-0-name'] = 'Widget 1 Updated'
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Widget.objects.count(), 1)
self.assertEqual(Widget.objects.first().name, 'Widget 1')
# Now adding the change permission and editing works.
permissionuser.user_permissions.remove(get_perm(Widget, get_permission_codename('add', Widget._meta)))
permissionuser.user_permissions.add(get_perm(Widget, get_permission_codename('change', Widget._meta)))
self.post_data['widget_set-INITIAL_FORMS'] = '1'
self.post_data['widget_set-0-id'] = str(widget_id)
self.post_data['widget_set-0-name'] = 'Widget 1 Updated'
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Widget.objects.count(), 1)
self.assertEqual(Widget.objects.first().name, 'Widget 1 Updated')
def test_explicit_autofield_inline(self):
"A model with an explicit autofield primary key can be saved as inlines. Regression for #8093"
# First add a new inline
self.post_data['grommet_set-0-name'] = "Grommet 1"
collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Grommet.objects.count(), 1)
self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1")
# The PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="grommet_set-0-code"')
# Now resave that inline
self.post_data['grommet_set-INITIAL_FORMS'] = "1"
self.post_data['grommet_set-0-code'] = str(Grommet.objects.all()[0].code)
self.post_data['grommet_set-0-name'] = "Grommet 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Grommet.objects.count(), 1)
self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1")
# Now modify that inline
self.post_data['grommet_set-INITIAL_FORMS'] = "1"
self.post_data['grommet_set-0-code'] = str(Grommet.objects.all()[0].code)
self.post_data['grommet_set-0-name'] = "Grommet 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Grommet.objects.count(), 1)
self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1 Updated")
def test_char_pk_inline(self):
"A model with a character PK can be saved as inlines. Regression for #10992"
# First add a new inline
self.post_data['doohickey_set-0-code'] = "DH1"
self.post_data['doohickey_set-0-name'] = "Doohickey 1"
collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(DooHickey.objects.count(), 1)
self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1")
# The PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="doohickey_set-0-code"')
# Now resave that inline
self.post_data['doohickey_set-INITIAL_FORMS'] = "1"
self.post_data['doohickey_set-0-code'] = "DH1"
self.post_data['doohickey_set-0-name'] = "Doohickey 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(DooHickey.objects.count(), 1)
self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1")
# Now modify that inline
self.post_data['doohickey_set-INITIAL_FORMS'] = "1"
self.post_data['doohickey_set-0-code'] = "DH1"
self.post_data['doohickey_set-0-name'] = "Doohickey 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(DooHickey.objects.count(), 1)
self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1 Updated")
def test_integer_pk_inline(self):
"A model with an integer PK can be saved as inlines. Regression for #10992"
# First add a new inline
self.post_data['whatsit_set-0-index'] = "42"
self.post_data['whatsit_set-0-name'] = "Whatsit 1"
collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Whatsit.objects.count(), 1)
self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1")
# The PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="whatsit_set-0-index"')
# Now resave that inline
self.post_data['whatsit_set-INITIAL_FORMS'] = "1"
self.post_data['whatsit_set-0-index'] = "42"
self.post_data['whatsit_set-0-name'] = "Whatsit 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Whatsit.objects.count(), 1)
self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1")
# Now modify that inline
self.post_data['whatsit_set-INITIAL_FORMS'] = "1"
self.post_data['whatsit_set-0-index'] = "42"
self.post_data['whatsit_set-0-name'] = "Whatsit 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Whatsit.objects.count(), 1)
self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1 Updated")
def test_inherited_inline(self):
"An inherited model can be saved as inlines. Regression for #11042"
# First add a new inline
self.post_data['fancydoodad_set-0-name'] = "Fancy Doodad 1"
collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(FancyDoodad.objects.count(), 1)
self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1")
doodad_pk = FancyDoodad.objects.all()[0].pk
# The PK link exists on the rendered form
response = self.client.get(collector_url)
self.assertContains(response, 'name="fancydoodad_set-0-doodad_ptr"')
# Now resave that inline
self.post_data['fancydoodad_set-INITIAL_FORMS'] = "1"
self.post_data['fancydoodad_set-0-doodad_ptr'] = str(doodad_pk)
self.post_data['fancydoodad_set-0-name'] = "Fancy Doodad 1"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(FancyDoodad.objects.count(), 1)
self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1")
# Now modify that inline
self.post_data['fancydoodad_set-INITIAL_FORMS'] = "1"
self.post_data['fancydoodad_set-0-doodad_ptr'] = str(doodad_pk)
self.post_data['fancydoodad_set-0-name'] = "Fancy Doodad 1 Updated"
response = self.client.post(collector_url, self.post_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(FancyDoodad.objects.count(), 1)
self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1 Updated")
def test_ordered_inline(self):
"""
An inline with an editable ordering field is updated correctly.
"""
# Create some objects with an initial ordering
Category.objects.create(id=1, order=1, collector=self.collector)
Category.objects.create(id=2, order=2, collector=self.collector)
Category.objects.create(id=3, order=0, collector=self.collector)
Category.objects.create(id=4, order=0, collector=self.collector)
# NB: The order values must be changed so that the items are reordered.
self.post_data.update({
"name": "Frederick Clegg",
"category_set-TOTAL_FORMS": "7",
"category_set-INITIAL_FORMS": "4",
"category_set-MAX_NUM_FORMS": "0",
"category_set-0-order": "14",
"category_set-0-id": "1",
"category_set-0-collector": "1",
"category_set-1-order": "13",
"category_set-1-id": "2",
"category_set-1-collector": "1",
"category_set-2-order": "1",
"category_set-2-id": "3",
"category_set-2-collector": "1",
"category_set-3-order": "0",
"category_set-3-id": "4",
"category_set-3-collector": "1",
"category_set-4-order": "",
"category_set-4-id": "",
"category_set-4-collector": "1",
"category_set-5-order": "",
"category_set-5-id": "",
"category_set-5-collector": "1",
"category_set-6-order": "",
"category_set-6-id": "",
"category_set-6-collector": "1",
})
collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
response = self.client.post(collector_url, self.post_data)
# Successful post will redirect
self.assertEqual(response.status_code, 302)
# The order values have been applied to the right objects
self.assertEqual(self.collector.category_set.count(), 4)
self.assertEqual(Category.objects.get(id=1).order, 14)
self.assertEqual(Category.objects.get(id=2).order, 13)
self.assertEqual(Category.objects.get(id=3).order, 1)
self.assertEqual(Category.objects.get(id=4).order, 0)
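# Every inline formset in the POST payloads above needs its management form
# (<prefix>-TOTAL_FORMS, <prefix>-INITIAL_FORMS, <prefix>-MAX_NUM_FORMS)
# alongside the per-form fields named <prefix>-<index>-<field>. A helper that
# builds such a payload could look like this (illustrative sketch, not part
# of the test suite):
#
#     def inline_post_data(prefix, forms, initial=0):
#         data = {
#             '%s-TOTAL_FORMS' % prefix: str(len(forms)),
#             '%s-INITIAL_FORMS' % prefix: str(initial),
#             '%s-MAX_NUM_FORMS' % prefix: '0',
#         }
#         for index, form in enumerate(forms):
#             for field, value in form.items():
#                 data['%s-%d-%s' % (prefix, index, field)] = value
#         return data
#
#     # e.g. inline_post_data('widget_set', [{'id': '', 'owner': '1', 'name': ''}] * 3)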
@override_settings(ROOT_URLCONF='admin_views.urls')
class NeverCacheTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
cls.s1 = Section.objects.create(name='Test section')
def setUp(self):
self.client.force_login(self.superuser)
def test_admin_index(self):
"Check the never-cache status of the main index"
response = self.client.get(reverse('admin:index'))
self.assertEqual(get_max_age(response), 0)
def test_app_index(self):
"Check the never-cache status of an application index"
response = self.client.get(reverse('admin:app_list', args=('admin_views',)))
self.assertEqual(get_max_age(response), 0)
def test_model_index(self):
"Check the never-cache status of a model index"
response = self.client.get(reverse('admin:admin_views_fabric_changelist'))
self.assertEqual(get_max_age(response), 0)
def test_model_add(self):
"Check the never-cache status of a model add page"
response = self.client.get(reverse('admin:admin_views_fabric_add'))
self.assertEqual(get_max_age(response), 0)
def test_model_view(self):
"Check the never-cache status of a model edit page"
response = self.client.get(reverse('admin:admin_views_section_change', args=(self.s1.pk,)))
self.assertEqual(get_max_age(response), 0)
def test_model_history(self):
"Check the never-cache status of a model history page"
response = self.client.get(reverse('admin:admin_views_section_history', args=(self.s1.pk,)))
self.assertEqual(get_max_age(response), 0)
def test_model_delete(self):
"Check the never-cache status of a model delete page"
response = self.client.get(reverse('admin:admin_views_section_delete', args=(self.s1.pk,)))
self.assertEqual(get_max_age(response), 0)
def test_login(self):
"Check the never-cache status of login views"
self.client.logout()
response = self.client.get(reverse('admin:index'))
self.assertEqual(get_max_age(response), 0)
def test_logout(self):
"Check the never-cache status of logout view"
response = self.client.get(reverse('admin:logout'))
self.assertEqual(get_max_age(response), 0)
def test_password_change(self):
"Check the never-cache status of the password change view"
self.client.logout()
response = self.client.get(reverse('admin:password_change'))
self.assertIsNone(get_max_age(response))
def test_password_change_done(self):
"Check the never-cache status of the password change done view"
response = self.client.get(reverse('admin:password_change_done'))
self.assertIsNone(get_max_age(response))
def test_JS_i18n(self):
"Check the never-cache status of the JavaScript i18n view"
response = self.client.get(reverse('admin:jsi18n'))
self.assertIsNone(get_max_age(response))
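# These checks use django.utils.cache.get_max_age(), which returns the
# max-age value parsed from the response's Cache-Control header, or None if
# the directive is absent. The admin site wraps its views in never_cache,
# which is why the admin pages above report a max-age of 0. A minimal sketch
# of the same behaviour on a plain view (illustrative):
#
#     from django.http import HttpResponse
#     from django.utils.cache import get_max_age
#     from django.views.decorators.cache import never_cache
#
#     @never_cache
#     def ping(request):
#         return HttpResponse('pong')
#
#     # get_max_age(ping(request)) == 0, because never_cache sets
#     # Cache-Control: max-age=0 (among other directives).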
@override_settings(ROOT_URLCONF='admin_views.urls')
class PrePopulatedTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
def setUp(self):
self.client.force_login(self.superuser)
def test_prepopulated_on(self):
response = self.client.get(reverse('admin:admin_views_prepopulatedpost_add'))
self.assertContains(response, ""id": "#id_slug"")
self.assertContains(response, ""dependency_ids": ["#id_title"]")
self.assertContains(response, ""id": "#id_prepopulatedsubpost_set-0-subslug"")
def test_prepopulated_off(self):
response = self.client.get(reverse('admin:admin_views_prepopulatedpost_change', args=(self.p1.pk,)))
self.assertContains(response, "A Long Title")
self.assertNotContains(response, ""id": "#id_slug"")
self.assertNotContains(response, ""dependency_ids": ["#id_title"]")
self.assertNotContains(
response,
""id": "#id_prepopulatedsubpost_set-0-subslug""
)
@override_settings(USE_THOUSAND_SEPARATOR=True, USE_L10N=True)
def test_prepopulated_maxlength_localized(self):
"""
Regression test for #15938: if USE_THOUSAND_SEPARATOR is set, make sure
that maxLength (in the JavaScript) is rendered without separators.
"""
response = self.client.get(reverse('admin:admin_views_prepopulatedpostlargeslug_add'))
self.assertContains(response, ""maxLength": 1000") # instead of 1,000
def test_view_only_add_form(self):
"""
PrePopulatedPostReadOnlyAdmin.prepopulated_fields includes 'slug'
which is present in the add view, even if
ModelAdmin.has_change_permission() returns False.
"""
response = self.client.get(reverse('admin7:admin_views_prepopulatedpost_add'))
self.assertContains(response, 'data-prepopulated-fields=')
self.assertContains(response, '"id": "#id_slug"')
def test_view_only_change_form(self):
"""
PrePopulatedPostReadOnlyAdmin.prepopulated_fields includes 'slug'. That
doesn't break a view-only change view.
"""
response = self.client.get(reverse('admin7:admin_views_prepopulatedpost_change', args=(self.p1.pk,)))
self.assertContains(response, 'data-prepopulated-fields="[]"')
self.assertContains(response, '<div class="readonly">%s</div>' % self.p1.slug)
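# prepopulated_fields is the ModelAdmin option under test: the add/change
# form emits a data-prepopulated-fields attribute containing JSON with "id"
# and "dependency_ids" entries, which the admin's prepopulation JavaScript
# uses to fill the target field from its source fields as the user types.
# A minimal sketch of such an admin (illustrative, not the suite's actual
# admin.py):
#
#     from django.contrib import admin
#
#     class PrePopulatedPostAdmin(admin.ModelAdmin):
#         # The slug is derived from the title as the user types.
#         prepopulated_fields = {'slug': ('title',)}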
@override_settings(ROOT_URLCONF='admin_views.urls')
class SeleniumTests(AdminSeleniumTestCase):
available_apps = ['admin_views'] + AdminSeleniumTestCase.available_apps
def setUp(self):
self.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
self.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
def test_prepopulated_fields(self):
"""
The JavaScript-automated prepopulated fields work with the main form
and with stacked and tabular inlines.
Refs #13068, #9264, #9983, #9784.
"""
self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
self.selenium.get(self.live_server_url + reverse('admin:admin_views_mainprepopulated_add'))
self.wait_for('.select2')
# Main form ----------------------------------------------------------
self.selenium.find_element_by_id('id_pubdate').send_keys('2012-02-18')
self.get_select_option('#id_status', 'option two').click()
self.selenium.find_element_by_id('id_name').send_keys(' this is the mAin nÀMë and it\'s awεšomeııı')
slug1 = self.selenium.find_element_by_id('id_slug1').get_attribute('value')
slug2 = self.selenium.find_element_by_id('id_slug2').get_attribute('value')
slug3 = self.selenium.find_element_by_id('id_slug3').get_attribute('value')
self.assertEqual(slug1, 'main-name-and-its-awesomeiii-2012-02-18')
self.assertEqual(slug2, 'option-two-main-name-and-its-awesomeiii')
self.assertEqual(slug3, 'this-is-the-main-n\xe0m\xeb-and-its-aw\u03b5\u0161ome\u0131\u0131\u0131')
# Stacked inlines ----------------------------------------------------
# Initial inline
self.selenium.find_element_by_id('id_relatedprepopulated_set-0-pubdate').send_keys('2011-12-17')
self.get_select_option('#id_relatedprepopulated_set-0-status', 'option one').click()
self.selenium.find_element_by_id('id_relatedprepopulated_set-0-name').send_keys(
' here is a sŤāÇkeð inline ! '
)
slug1 = self.selenium.find_element_by_id('id_relatedprepopulated_set-0-slug1').get_attribute('value')
slug2 = self.selenium.find_element_by_id('id_relatedprepopulated_set-0-slug2').get_attribute('value')
self.assertEqual(slug1, 'here-stacked-inline-2011-12-17')
self.assertEqual(slug2, 'option-one-here-stacked-inline')
initial_select2_inputs = self.selenium.find_elements_by_class_name('select2-selection')
# Inline formsets have empty/invisible forms.
# Only the 4 visible select2 inputs are initialized.
num_initial_select2_inputs = len(initial_select2_inputs)
self.assertEqual(num_initial_select2_inputs, 4)
# Add an inline
self.selenium.find_elements_by_link_text('Add another Related prepopulated')[0].click()
self.assertEqual(
len(self.selenium.find_elements_by_class_name('select2-selection')),
num_initial_select2_inputs + 2
)
self.selenium.find_element_by_id('id_relatedprepopulated_set-1-pubdate').send_keys('1999-01-25')
self.get_select_option('#id_relatedprepopulated_set-1-status', 'option two').click()
self.selenium.find_element_by_id('id_relatedprepopulated_set-1-name').send_keys(
' now you haVe anöther sŤāÇkeð inline with a very ... '
'loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooog text... '
)
slug1 = self.selenium.find_element_by_id('id_relatedprepopulated_set-1-slug1').get_attribute('value')
slug2 = self.selenium.find_element_by_id('id_relatedprepopulated_set-1-slug2').get_attribute('value')
# 50 characters maximum for slug1 field
self.assertEqual(slug1, 'now-you-have-another-stacked-inline-very-loooooooo')
# 60 characters maximum for slug2 field
self.assertEqual(slug2, 'option-two-now-you-have-another-stacked-inline-very-looooooo')
# Tabular inlines ----------------------------------------------------
# Initial inline
self.selenium.find_element_by_id('id_relatedprepopulated_set-2-0-pubdate').send_keys('1234-12-07')
self.get_select_option('#id_relatedprepopulated_set-2-0-status', 'option two').click()
self.selenium.find_element_by_id('id_relatedprepopulated_set-2-0-name').send_keys(
'And now, with a tÃbűlaŘ inline !!!'
)
slug1 = self.selenium.find_element_by_id('id_relatedprepopulated_set-2-0-slug1').get_attribute('value')
slug2 = self.selenium.find_element_by_id('id_relatedprepopulated_set-2-0-slug2').get_attribute('value')
self.assertEqual(slug1, 'and-now-tabular-inline-1234-12-07')
self.assertEqual(slug2, 'option-two-and-now-tabular-inline')
# Add an inline
self.selenium.find_elements_by_link_text('Add another Related prepopulated')[1].click()
self.assertEqual(
len(self.selenium.find_elements_by_class_name('select2-selection')),
num_initial_select2_inputs + 4
)
self.selenium.find_element_by_id('id_relatedprepopulated_set-2-1-pubdate').send_keys('1981-08-22')
self.get_select_option('#id_relatedprepopulated_set-2-1-status', 'option one').click()
self.selenium.find_element_by_id('id_relatedprepopulated_set-2-1-name').send_keys(
r'a tÃbűlaŘ inline with ignored ;"&*^\%$#@-/`~ characters'
)
slug1 = self.selenium.find_element_by_id('id_relatedprepopulated_set-2-1-slug1').get_attribute('value')
slug2 = self.selenium.find_element_by_id('id_relatedprepopulated_set-2-1-slug2').get_attribute('value')
self.assertEqual(slug1, 'tabular-inline-ignored-characters-1981-08-22')
self.assertEqual(slug2, 'option-one-tabular-inline-ignored-characters')
# Add an inline without an initial inline.
# The button is outside of the browser frame.
self.selenium.execute_script("window.scrollTo(0, document.body.scrollHeight);")
self.selenium.find_elements_by_link_text('Add another Related prepopulated')[2].click()
self.assertEqual(
len(self.selenium.find_elements_by_class_name('select2-selection')),
num_initial_select2_inputs + 6
)
# Save and check that everything is properly stored in the database
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
self.wait_page_loaded()
self.assertEqual(MainPrepopulated.objects.all().count(), 1)
MainPrepopulated.objects.get(
name=' this is the mAin nÀMë and it\'s awεšomeııı',
pubdate='2012-02-18',
status='option two',
slug1='main-name-and-its-awesomeiii-2012-02-18',
slug2='option-two-main-name-and-its-awesomeiii',
)
self.assertEqual(RelatedPrepopulated.objects.all().count(), 4)
RelatedPrepopulated.objects.get(
name=' here is a sŤāÇkeð inline ! ',
pubdate='2011-12-17',
status='option one',
slug1='here-stacked-inline-2011-12-17',
slug2='option-one-here-stacked-inline',
)
RelatedPrepopulated.objects.get(
# 75 characters in name field
name=' now you haVe anöther sŤāÇkeð inline with a very ... loooooooooooooooooo',
pubdate='1999-01-25',
status='option two',
slug1='now-you-have-another-stacked-inline-very-loooooooo',
slug2='option-two-now-you-have-another-stacked-inline-very-looooooo',
)
RelatedPrepopulated.objects.get(
name='And now, with a tÃbűlaŘ inline !!!',
pubdate='1234-12-07',
status='option two',
slug1='and-now-tabular-inline-1234-12-07',
slug2='option-two-and-now-tabular-inline',
)
RelatedPrepopulated.objects.get(
name=r'a tÃbűlaŘ inline with ignored ;"&*^\%$#@-/`~ characters',
pubdate='1981-08-22',
status='option one',
slug1='tabular-inline-ignored-characters-1981-08-22',
slug2='option-one-tabular-inline-ignored-characters',
)
def test_populate_existing_object(self):
"""
The prepopulation works for existing objects too, as long as
the original field is empty (#19082).
"""
# Slugs are empty to start with.
item = MainPrepopulated.objects.create(
name=' this is the mAin nÀMë',
pubdate='2012-02-18',
status='option two',
slug1='',
slug2='',
)
self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
object_url = self.live_server_url + reverse('admin:admin_views_mainprepopulated_change', args=(item.id,))
self.selenium.get(object_url)
self.selenium.find_element_by_id('id_name').send_keys(' the best')
# The slugs got prepopulated since they were originally empty
slug1 = self.selenium.find_element_by_id('id_slug1').get_attribute('value')
slug2 = self.selenium.find_element_by_id('id_slug2').get_attribute('value')
self.assertEqual(slug1, 'main-name-best-2012-02-18')
self.assertEqual(slug2, 'option-two-main-name-best')
# Save the object
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
self.wait_page_loaded()
self.selenium.get(object_url)
self.selenium.find_element_by_id('id_name').send_keys(' hello')
# The slugs didn't change since they were no longer empty
slug1 = self.selenium.find_element_by_id('id_slug1').get_attribute('value')
slug2 = self.selenium.find_element_by_id('id_slug2').get_attribute('value')
self.assertEqual(slug1, 'main-name-best-2012-02-18')
self.assertEqual(slug2, 'option-two-main-name-best')
def test_collapsible_fieldset(self):
"""
The 'collapse' class in a fieldset's definition allows showing/hiding the
appropriate field section.
"""
self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
self.selenium.get(self.live_server_url + reverse('admin:admin_views_article_add'))
self.assertFalse(self.selenium.find_element_by_id('id_title').is_displayed())
self.selenium.find_elements_by_link_text('Show')[0].click()
self.assertTrue(self.selenium.find_element_by_id('id_title').is_displayed())
self.assertEqual(self.selenium.find_element_by_id('fieldsetcollapser0').text, "Hide")
def test_first_field_focus(self):
"""JavaScript-assisted auto-focus on first usable form field."""
# First form field has a single widget
self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
self.selenium.get(self.live_server_url + reverse('admin:admin_views_picture_add'))
self.assertEqual(
self.selenium.switch_to.active_element,
self.selenium.find_element_by_id('id_name')
)
# First form field has a MultiWidget
self.selenium.get(self.live_server_url + reverse('admin:admin_views_reservation_add'))
self.assertEqual(
self.selenium.switch_to.active_element,
self.selenium.find_element_by_id('id_start_date_0')
)
def test_cancel_delete_confirmation(self):
"Cancelling the deletion of an object takes the user back one page."
pizza = Pizza.objects.create(name="Double Cheese")
url = reverse('admin:admin_views_pizza_change', args=(pizza.id,))
full_url = self.live_server_url + url
self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
self.selenium.get(full_url)
self.selenium.find_element_by_class_name('deletelink').click()
# Click 'cancel' on the delete page.
self.selenium.find_element_by_class_name('cancel-link').click()
# Wait until we're back on the change page.
self.wait_for_text('#content h1', 'Change pizza')
self.assertEqual(self.selenium.current_url, full_url)
self.assertEqual(Pizza.objects.count(), 1)
def test_cancel_delete_related_confirmation(self):
"""
Cancelling the deletion of an object with relations takes the user back
one page.
"""
pizza = Pizza.objects.create(name="Double Cheese")
topping1 = Topping.objects.create(name="Cheddar")
topping2 = Topping.objects.create(name="Mozzarella")
pizza.toppings.add(topping1, topping2)
url = reverse('admin:admin_views_pizza_change', args=(pizza.id,))
full_url = self.live_server_url + url
self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
self.selenium.get(full_url)
self.selenium.find_element_by_class_name('deletelink').click()
# Click 'cancel' on the delete page.
self.selenium.find_element_by_class_name('cancel-link').click()
# Wait until we're back on the change page.
self.wait_for_text('#content h1', 'Change pizza')
self.assertEqual(self.selenium.current_url, full_url)
self.assertEqual(Pizza.objects.count(), 1)
self.assertEqual(Topping.objects.count(), 2)
def test_list_editable_popups(self):
"""
list_editable foreign keys have add/change popups.
"""
from selenium.webdriver.support.ui import Select
s1 = Section.objects.create(name='Test section')
Article.objects.create(
title='foo',
content='<p>Middle content</p>',
date=datetime.datetime(2008, 3, 18, 11, 54, 58),
section=s1,
)
self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
self.selenium.get(self.live_server_url + reverse('admin:admin_views_article_changelist'))
# Change popup
self.selenium.find_element_by_id('change_id_form-0-section').click()
self.wait_for_popup()
self.selenium.switch_to.window(self.selenium.window_handles[-1])
self.wait_for_text('#content h1', 'Change section')
name_input = self.selenium.find_element_by_id('id_name')
name_input.clear()
name_input.send_keys('<i>edited section</i>')
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
select = Select(self.selenium.find_element_by_id('id_form-0-section'))
self.assertEqual(select.first_selected_option.text, '<i>edited section</i>')
# Rendered select2 input.
select2_display = self.selenium.find_element_by_class_name('select2-selection__rendered')
# Clear button (×\n) is included in text.
self.assertEqual(select2_display.text, '×\n<i>edited section</i>')
# Add popup
self.selenium.find_element_by_id('add_id_form-0-section').click()
self.wait_for_popup()
self.selenium.switch_to.window(self.selenium.window_handles[-1])
self.wait_for_text('#content h1', 'Add section')
self.selenium.find_element_by_id('id_name').send_keys('new section')
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
select = Select(self.selenium.find_element_by_id('id_form-0-section'))
self.assertEqual(select.first_selected_option.text, 'new section')
select2_display = self.selenium.find_element_by_class_name('select2-selection__rendered')
# Clear button (×\n) is included in text.
self.assertEqual(select2_display.text, '×\nnew section')
def test_inline_uuid_pk_edit_with_popup(self):
from selenium.webdriver.support.ui import Select
parent = ParentWithUUIDPK.objects.create(title='test')
related_with_parent = RelatedWithUUIDPKModel.objects.create(parent=parent)
self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
change_url = reverse('admin:admin_views_relatedwithuuidpkmodel_change', args=(related_with_parent.id,))
self.selenium.get(self.live_server_url + change_url)
self.selenium.find_element_by_id('change_id_parent').click()
self.wait_for_popup()
self.selenium.switch_to.window(self.selenium.window_handles[-1])
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
select = Select(self.selenium.find_element_by_id('id_parent'))
self.assertEqual(select.first_selected_option.text, str(parent.id))
self.assertEqual(select.first_selected_option.get_attribute('value'), str(parent.id))
def test_inline_uuid_pk_add_with_popup(self):
from selenium.webdriver.support.ui import Select
self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
self.selenium.get(self.live_server_url + reverse('admin:admin_views_relatedwithuuidpkmodel_add'))
self.selenium.find_element_by_id('add_id_parent').click()
self.wait_for_popup()
self.selenium.switch_to.window(self.selenium.window_handles[-1])
self.selenium.find_element_by_id('id_title').send_keys('test')
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
select = Select(self.selenium.find_element_by_id('id_parent'))
uuid_id = str(ParentWithUUIDPK.objects.first().id)
self.assertEqual(select.first_selected_option.text, uuid_id)
self.assertEqual(select.first_selected_option.get_attribute('value'), uuid_id)
def test_inline_uuid_pk_delete_with_popup(self):
from selenium.webdriver.support.ui import Select
parent = ParentWithUUIDPK.objects.create(title='test')
related_with_parent = RelatedWithUUIDPKModel.objects.create(parent=parent)
self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
change_url = reverse('admin:admin_views_relatedwithuuidpkmodel_change', args=(related_with_parent.id,))
self.selenium.get(self.live_server_url + change_url)
self.selenium.find_element_by_id('delete_id_parent').click()
self.wait_for_popup()
self.selenium.switch_to.window(self.selenium.window_handles[-1])
self.selenium.find_element_by_xpath('//input[@value="Yes, I\'m sure"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
select = Select(self.selenium.find_element_by_id('id_parent'))
self.assertEqual(ParentWithUUIDPK.objects.count(), 0)
self.assertEqual(select.first_selected_option.text, '---------')
self.assertEqual(select.first_selected_option.get_attribute('value'), '')
def test_inline_with_popup_cancel_delete(self):
"""Clicking ""No, take me back" on a delete popup closes the window."""
parent = ParentWithUUIDPK.objects.create(title='test')
related_with_parent = RelatedWithUUIDPKModel.objects.create(parent=parent)
self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
change_url = reverse('admin:admin_views_relatedwithuuidpkmodel_change', args=(related_with_parent.id,))
self.selenium.get(self.live_server_url + change_url)
self.selenium.find_element_by_id('delete_id_parent').click()
self.wait_for_popup()
self.selenium.switch_to.window(self.selenium.window_handles[-1])
self.selenium.find_element_by_xpath('//a[text()="No, take me back"]').click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
self.assertEqual(len(self.selenium.window_handles), 1)
def test_list_editable_raw_id_fields(self):
parent = ParentWithUUIDPK.objects.create(title='test')
parent2 = ParentWithUUIDPK.objects.create(title='test2')
RelatedWithUUIDPKModel.objects.create(parent=parent)
self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
change_url = reverse('admin:admin_views_relatedwithuuidpkmodel_changelist', current_app=site2.name)
self.selenium.get(self.live_server_url + change_url)
self.selenium.find_element_by_id('lookup_id_form-0-parent').click()
self.wait_for_popup()
self.selenium.switch_to.window(self.selenium.window_handles[-1])
# Select "parent2" in the popup.
self.selenium.find_element_by_link_text(str(parent2.pk)).click()
self.selenium.switch_to.window(self.selenium.window_handles[0])
# The newly selected pk should appear in the raw id input.
value = self.selenium.find_element_by_id('id_form-0-parent').get_attribute('value')
self.assertEqual(value, str(parent2.pk))
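# test_list_editable_raw_id_fields drives the "lookup_id_<field>" link that
# ModelAdmin.raw_id_fields renders next to the raw id input; choosing a row
# in the popup changelist writes the selected primary key back into that
# input. A minimal sketch of an admin combining list_editable with
# raw_id_fields (illustrative names, assuming a registration on the second
# admin site referenced above as site2):
#
#     from django.contrib import admin
#
#     class RelatedWithUUIDPKModelAdmin(admin.ModelAdmin):
#         list_display = ['id', 'parent']
#         list_editable = ['parent']
#         raw_id_fields = ['parent']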
@override_settings(ROOT_URLCONF='admin_views.urls')
class ReadonlyTest(AdminFieldExtractionMixin, TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
def setUp(self):
self.client.force_login(self.superuser)
def test_readonly_get(self):
response = self.client.get(reverse('admin:admin_views_post_add'))
self.assertEqual(response.status_code, 200)
self.assertNotContains(response, 'name="posted"')
# 3 fields + 2 submit buttons + 5 inline management form fields + 2 hidden
# fields for inlines + 1 field for the inline + 2 empty-form fields
self.assertContains(response, "<input", count=15)
self.assertContains(response, formats.localize(datetime.date.today()))
self.assertContains(response, "<label>Awesomeness level:</label>")
self.assertContains(response, "Very awesome.")
self.assertContains(response, "Unknown coolness.")
self.assertContains(response, "foo")
# Multiline text in a readonly field gets <br> tags
self.assertContains(response, 'Multiline<br>test<br>string')
self.assertContains(response, '<div class="readonly">Multiline<br>html<br>content</div>', html=True)
self.assertContains(response, 'InlineMultiline<br>test<br>string')
self.assertContains(response, formats.localize(datetime.date.today() - datetime.timedelta(days=7)))
self.assertContains(response, '<div class="form-row field-coolness">')
self.assertContains(response, '<div class="form-row field-awesomeness_level">')
self.assertContains(response, '<div class="form-row field-posted">')
self.assertContains(response, '<div class="form-row field-value">')
self.assertContains(response, '<div class="form-row">')
self.assertContains(response, '<div class="help">', 3)
self.assertContains(
response,
'<div class="help">Some help text for the title (with unicode ŠĐĆŽćžšđ)</div>',
html=True
)
self.assertContains(
response,
'<div class="help">Some help text for the content (with unicode ŠĐĆŽćžšđ)</div>',
html=True
)
self.assertContains(
response,
'<div class="help">Some help text for the date (with unicode ŠĐĆŽćžšđ)</div>',
html=True
)
p = Post.objects.create(title="I worked on readonly_fields", content="Its good stuff")
response = self.client.get(reverse('admin:admin_views_post_change', args=(p.pk,)))
self.assertContains(response, "%d amount of cool" % p.pk)
def test_readonly_text_field(self):
p = Post.objects.create(
title="Readonly test", content="test",
readonly_content='test\r\n\r\ntest\r\n\r\ntest\r\n\r\ntest',
)
Link.objects.create(
url="http://www.djangoproject.com", post=p,
readonly_link_content="test\r\nlink",
)
response = self.client.get(reverse('admin:admin_views_post_change', args=(p.pk,)))
# Checking readonly field.
self.assertContains(response, 'test<br><br>test<br><br>test<br><br>test')
# Checking readonly field in inline.
self.assertContains(response, 'test<br>link')
def test_readonly_post(self):
data = {
"title": "Django Got Readonly Fields",
"content": "This is an incredible development.",
"link_set-TOTAL_FORMS": "1",
"link_set-INITIAL_FORMS": "0",
"link_set-MAX_NUM_FORMS": "0",
}
response = self.client.post(reverse('admin:admin_views_post_add'), data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Post.objects.count(), 1)
p = Post.objects.get()
self.assertEqual(p.posted, datetime.date.today())
data["posted"] = "10-8-1990" # some date that's not today
response = self.client.post(reverse('admin:admin_views_post_add'), data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Post.objects.count(), 2)
p = Post.objects.order_by('-id')[0]
self.assertEqual(p.posted, datetime.date.today())
def test_readonly_manytomany(self):
"Regression test for #13004"
response = self.client.get(reverse('admin:admin_views_pizza_add'))
self.assertEqual(response.status_code, 200)
def test_user_password_change_limited_queryset(self):
su = User.objects.filter(is_superuser=True)[0]
response = self.client.get(reverse('admin2:auth_user_password_change', args=(su.pk,)))
self.assertEqual(response.status_code, 404)
def test_change_form_renders_correct_null_choice_value(self):
"""
Regression test for #17911.
"""
choice = Choice.objects.create(choice=None)
response = self.client.get(reverse('admin:admin_views_choice_change', args=(choice.pk,)))
self.assertContains(response, '<div class="readonly">No opinion</div>', html=True)
def test_readonly_manytomany_backwards_ref(self):
"""
Regression test for #16433 - backwards references for related objects
broke if the related field is read-only due to the help_text attribute
"""
topping = Topping.objects.create(name='Salami')
pizza = Pizza.objects.create(name='Americano')
pizza.toppings.add(topping)
response = self.client.get(reverse('admin:admin_views_topping_add'))
self.assertEqual(response.status_code, 200)
def test_readonly_manytomany_forwards_ref(self):
topping = Topping.objects.create(name='Salami')
pizza = Pizza.objects.create(name='Americano')
pizza.toppings.add(topping)
response = self.client.get(reverse('admin:admin_views_pizza_change', args=(pizza.pk,)))
self.assertContains(response, '<label>Toppings:</label>', html=True)
self.assertContains(response, '<div class="readonly">Salami</div>', html=True)
def test_readonly_onetoone_backwards_ref(self):
"""
Can reference a reverse OneToOneField in ModelAdmin.readonly_fields.
"""
v1 = Villain.objects.create(name='Adam')
pl = Plot.objects.create(name='Test Plot', team_leader=v1, contact=v1)
pd = PlotDetails.objects.create(details='Brand New Plot', plot=pl)
response = self.client.get(reverse('admin:admin_views_plotproxy_change', args=(pl.pk,)))
field = self.get_admin_readonly_field(response, 'plotdetails')
self.assertEqual(field.contents(), 'Brand New Plot')
# The reverse relation also works if the OneToOneField is null.
pd.plot = None
pd.save()
response = self.client.get(reverse('admin:admin_views_plotproxy_change', args=(pl.pk,)))
field = self.get_admin_readonly_field(response, 'plotdetails')
self.assertEqual(field.contents(), '-') # default empty value
def test_readonly_field_overrides(self):
"""
Regression test for #22087 - ModelForm Meta overrides are ignored by
AdminReadonlyField
"""
p = FieldOverridePost.objects.create(title="Test Post", content="Test Content")
response = self.client.get(reverse('admin:admin_views_fieldoverridepost_change', args=(p.pk,)))
self.assertContains(response, '<div class="help">Overridden help text for the date</div>')
self.assertContains(response, '<label for="id_public">Overridden public label:</label>', html=True)
self.assertNotContains(response, "Some help text for the date (with unicode ŠĐĆŽćžšđ)")
def test_correct_autoescaping(self):
"""
Make sure that non-field readonly elements are properly autoescaped (#24461)
"""
section = Section.objects.create(name='<a>evil</a>')
response = self.client.get(reverse('admin:admin_views_section_change', args=(section.pk,)))
self.assertNotContains(response, "<a>evil</a>", status_code=200)
self.assertContains(response, "<a>evil</a>", status_code=200)
@override_settings(ROOT_URLCONF='admin_views.urls')
class LimitChoicesToInAdminTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
def setUp(self):
self.client.force_login(self.superuser)
def test_limit_choices_to_as_callable(self):
"""Test for ticket 2445 changes to admin."""
threepwood = Character.objects.create(
username='threepwood',
last_action=datetime.datetime.today() + datetime.timedelta(days=1),
)
marley = Character.objects.create(
username='marley',
last_action=datetime.datetime.today() - datetime.timedelta(days=1),
)
response = self.client.get(reverse('admin:admin_views_stumpjoke_add'))
# The allowed option should appear twice; the limited option should not appear.
self.assertContains(response, threepwood.username, count=2)
self.assertNotContains(response, marley.username)
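# limit_choices_to restricts the choices a related field offers in forms,
# and (per the ticket referenced above) it may be a callable returning a
# filter dict or Q object, evaluated each time the form is built. A minimal
# sketch of such a field (illustrative model, not the suite's actual
# models.py):
#
#     import datetime
#     from django.db import models
#
#     def future_last_action():
#         return {'last_action__gte': datetime.datetime.today()}
#
#     class StumpJoke(models.Model):
#         most_recently_fooled = models.ForeignKey(
#             'Character',
#             models.CASCADE,
#             limit_choices_to=future_last_action,
#         )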
@override_settings(ROOT_URLCONF='admin_views.urls')
class RawIdFieldsTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
def setUp(self):
self.client.force_login(self.superuser)
def test_limit_choices_to(self):
"""Regression test for 14880"""
actor = Actor.objects.create(name="Palin", age=27)
Inquisition.objects.create(expected=True,
leader=actor,
country="England")
Inquisition.objects.create(expected=False,
leader=actor,
country="Spain")
response = self.client.get(reverse('admin:admin_views_sketch_add'))
# Find the link
m = re.search(br'<a href="([^"]*)"[^>]* id="lookup_id_inquisition"', response.content)
self.assertTrue(m) # Got a match
popup_url = m.groups()[0].decode().replace("&amp;", "&")
# Handle relative links
popup_url = urljoin(response.request['PATH_INFO'], popup_url)
# Get the popup and verify the correct objects show up in the resulting
# page. This step also tests integers, strings and booleans in the
# lookup query string; the model defines the inquisition field with a
# limit_choices_to option that includes a filter on a string field
# (inquisition__actor__name), a filter on an integer field
# (inquisition__actor__age), and a filter on a boolean field
# (inquisition__expected).
response2 = self.client.get(popup_url)
self.assertContains(response2, "Spain")
self.assertNotContains(response2, "England")
def test_limit_choices_to_isnull_false(self):
"""Regression test for 20182"""
Actor.objects.create(name="Palin", age=27)
Actor.objects.create(name="Kilbraken", age=50, title="Judge")
response = self.client.get(reverse('admin:admin_views_sketch_add'))
# Find the link
m = re.search(br'<a href="([^"]*)"[^>]* id="lookup_id_defendant0"', response.content)
self.assertTrue(m) # Got a match
popup_url = m.groups()[0].decode().replace("&amp;", "&")
# Handle relative links
popup_url = urljoin(response.request['PATH_INFO'], popup_url)
# Get the popup and verify the correct objects show up in the resulting
# page. This step tests that field__isnull=0 is parsed correctly from the
# lookup query string; the model defines the defendant0 field with a
# limit_choices_to option that includes "actor__title__isnull=False".
response2 = self.client.get(popup_url)
self.assertContains(response2, "Kilbraken")
self.assertNotContains(response2, "Palin")
def test_limit_choices_to_isnull_true(self):
"""Regression test for 20182"""
Actor.objects.create(name="Palin", age=27)
Actor.objects.create(name="Kilbraken", age=50, title="Judge")
response = self.client.get(reverse('admin:admin_views_sketch_add'))
# Find the link
m = re.search(br'<a href="([^"]*)"[^>]* id="lookup_id_defendant1"', response.content)
self.assertTrue(m) # Got a match
popup_url = m.groups()[0].decode().replace("&amp;", "&")
# Handle relative links
popup_url = urljoin(response.request['PATH_INFO'], popup_url)
# Get the popup and verify the correct objects show up in the resulting
# page. This step tests that field__isnull=1 is parsed correctly from the
# lookup query string; the model defines the defendant1 field with a
# limit_choices_to option that includes "actor__title__isnull=True".
response2 = self.client.get(popup_url)
self.assertNotContains(response2, "Kilbraken")
self.assertContains(response2, "Palin")
def test_list_display_method_same_name_as_reverse_accessor(self):
"""
Should be able to use a ModelAdmin method in list_display that has the
same name as a reverse model field ("sketch" in this case).
"""
actor = Actor.objects.create(name="Palin", age=27)
Inquisition.objects.create(expected=True, leader=actor, country="England")
response = self.client.get(reverse('admin:admin_views_inquisition_changelist'))
self.assertContains(response, 'list-display-sketch')
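# For test_list_display_method_same_name_as_reverse_accessor, the changelist
# resolves a non-field list_display entry on the ModelAdmin before falling
# back to attributes of the model instance, so a method can safely shadow a
# reverse accessor name. A sketch of what such an admin presumably looks
# like (illustrative; the returned string mirrors the assertion above):
#
#     from django.contrib import admin
#
#     class InquisitionAdmin(admin.ModelAdmin):
#         list_display = ['leader', 'country', 'expected', 'sketch']
#
#         def sketch(self, obj):
#             # Shares its name with the reverse relation from Sketch.
#             return 'list-display-sketch'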
@override_settings(ROOT_URLCONF='admin_views.urls')
class UserAdminTest(TestCase):
"""
Tests user CRUD functionality.
"""
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
cls.adduser = User.objects.create_user(username='adduser', password='secret', is_staff=True)
cls.changeuser = User.objects.create_user(username='changeuser', password='secret', is_staff=True)
cls.s1 = Section.objects.create(name='Test section')
cls.a1 = Article.objects.create(
content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
)
cls.a2 = Article.objects.create(
content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
)
cls.a3 = Article.objects.create(
content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
)
cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True)
cls.per2 = Person.objects.create(name='Grace Hopper', gender=1, alive=False)
cls.per3 = Person.objects.create(name='Guido van Rossum', gender=1, alive=True)
def setUp(self):
self.client.force_login(self.superuser)
def test_save_button(self):
user_count = User.objects.count()
response = self.client.post(reverse('admin:auth_user_add'), {
'username': 'newuser',
'password1': 'newpassword',
'password2': 'newpassword',
})
new_user = User.objects.get(username='newuser')
self.assertRedirects(response, reverse('admin:auth_user_change', args=(new_user.pk,)))
self.assertEqual(User.objects.count(), user_count + 1)
self.assertTrue(new_user.has_usable_password())
def test_save_continue_editing_button(self):
user_count = User.objects.count()
response = self.client.post(reverse('admin:auth_user_add'), {
'username': 'newuser',
'password1': 'newpassword',
'password2': 'newpassword',
'_continue': '1',
})
new_user = User.objects.get(username='newuser')
new_user_url = reverse('admin:auth_user_change', args=(new_user.pk,))
self.assertRedirects(response, new_user_url, fetch_redirect_response=False)
self.assertEqual(User.objects.count(), user_count + 1)
self.assertTrue(new_user.has_usable_password())
response = self.client.get(new_user_url)
self.assertContains(
response,
'<li class="success">The user "<a href="%s">'
'%s</a>" was added successfully. You may edit it again below.</li>'
% (new_user_url, new_user),
html=True,
)
def test_password_mismatch(self):
response = self.client.post(reverse('admin:auth_user_add'), {
'username': 'newuser',
'password1': 'newpassword',
'password2': 'mismatch',
})
self.assertEqual(response.status_code, 200)
self.assertFormError(response, 'adminform', 'password', [])
self.assertFormError(response, 'adminform', 'password2', ["The two password fields didn't match."])
def test_user_fk_add_popup(self):
"""User addition through a FK popup should return the appropriate JavaScript response."""
response = self.client.get(reverse('admin:admin_views_album_add'))
self.assertContains(response, reverse('admin:auth_user_add'))
self.assertContains(response, 'class="related-widget-wrapper-link add-related" id="add_id_owner"')
response = self.client.get(reverse('admin:auth_user_add') + '?_popup=1')
self.assertNotContains(response, 'name="_continue"')
self.assertNotContains(response, 'name="_addanother"')
data = {
'username': 'newuser',
'password1': 'newpassword',
'password2': 'newpassword',
'_popup': '1',
'_save': '1',
}
response = self.client.post(reverse('admin:auth_user_add') + '?_popup=1', data, follow=True)
self.assertContains(response, '"obj": "newuser"')
def test_user_fk_change_popup(self):
"""User change through a FK popup should return the appropriate JavaScript response."""
response = self.client.get(reverse('admin:admin_views_album_add'))
self.assertContains(response, reverse('admin:auth_user_change', args=('__fk__',)))
self.assertContains(response, 'class="related-widget-wrapper-link change-related" id="change_id_owner"')
user = User.objects.get(username='changeuser')
url = reverse('admin:auth_user_change', args=(user.pk,)) + '?_popup=1'
response = self.client.get(url)
self.assertNotContains(response, 'name="_continue"')
self.assertNotContains(response, 'name="_addanother"')
data = {
'username': 'newuser',
'password1': 'newpassword',
'password2': 'newpassword',
'last_login_0': '2007-05-30',
'last_login_1': '13:20:10',
'date_joined_0': '2007-05-30',
'date_joined_1': '13:20:10',
'_popup': '1',
'_save': '1',
}
response = self.client.post(url, data, follow=True)
self.assertContains(response, '"obj": "newuser"')
self.assertContains(response, '"action": "change"')
def test_user_fk_delete_popup(self):
"""User deletion through a FK popup should return the appropriate JavaScript response."""
response = self.client.get(reverse('admin:admin_views_album_add'))
self.assertContains(response, reverse('admin:auth_user_delete', args=('__fk__',)))
self.assertContains(response, 'class="related-widget-wrapper-link change-related" id="change_id_owner"')
user = User.objects.get(username='changeuser')
url = reverse('admin:auth_user_delete', args=(user.pk,)) + '?_popup=1'
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
data = {
'post': 'yes',
'_popup': '1',
}
response = self.client.post(url, data, follow=True)
self.assertContains(response, '"action": "delete"')
def test_save_add_another_button(self):
user_count = User.objects.count()
response = self.client.post(reverse('admin:auth_user_add'), {
'username': 'newuser',
'password1': 'newpassword',
'password2': 'newpassword',
'_addanother': '1',
})
new_user = User.objects.order_by('-id')[0]
self.assertRedirects(response, reverse('admin:auth_user_add'))
self.assertEqual(User.objects.count(), user_count + 1)
self.assertTrue(new_user.has_usable_password())
def test_user_permission_performance(self):
u = User.objects.all()[0]
# Don't depend on a warm cache, see #17377.
ContentType.objects.clear_cache()
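        # Rendering the user change form (including the permission widgets) must stay within a fixed number of queries.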
with self.assertNumQueries(10):
response = self.client.get(reverse('admin:auth_user_change', args=(u.pk,)))
self.assertEqual(response.status_code, 200)
def test_form_url_present_in_context(self):
u = User.objects.all()[0]
response = self.client.get(reverse('admin3:auth_user_password_change', args=(u.pk,)))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['form_url'], 'pony')
@override_settings(ROOT_URLCONF='admin_views.urls')
class GroupAdminTest(TestCase):
"""
Tests group CRUD functionality.
"""
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
def setUp(self):
self.client.force_login(self.superuser)
def test_save_button(self):
group_count = Group.objects.count()
response = self.client.post(reverse('admin:auth_group_add'), {
'name': 'newgroup',
})
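        # Evaluating this queryset verifies that at least one group exists after the post.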
Group.objects.order_by('-id')[0]
self.assertRedirects(response, reverse('admin:auth_group_changelist'))
self.assertEqual(Group.objects.count(), group_count + 1)
def test_group_permission_performance(self):
g = Group.objects.create(name="test_group")
# Ensure no queries are skipped due to cached content type for Group.
ContentType.objects.clear_cache()
with self.assertNumQueries(8):
response = self.client.get(reverse('admin:auth_group_change', args=(g.pk,)))
self.assertEqual(response.status_code, 200)
@override_settings(ROOT_URLCONF='admin_views.urls')
class CSSTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
cls.s1 = Section.objects.create(name='Test section')
cls.a1 = Article.objects.create(
content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
)
cls.a2 = Article.objects.create(
content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
)
cls.a3 = Article.objects.create(
content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
)
cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
def setUp(self):
self.client.force_login(self.superuser)
def test_field_prefix_css_classes(self):
"""
Fields have a CSS class name with a 'field-' prefix.
"""
response = self.client.get(reverse('admin:admin_views_post_add'))
# The main form
self.assertContains(response, 'class="form-row field-title"')
self.assertContains(response, 'class="form-row field-content"')
self.assertContains(response, 'class="form-row field-public"')
self.assertContains(response, 'class="form-row field-awesomeness_level"')
self.assertContains(response, 'class="form-row field-coolness"')
self.assertContains(response, 'class="form-row field-value"')
self.assertContains(response, 'class="form-row"') # The lambda function
# The tabular inline
self.assertContains(response, '<td class="field-url">')
self.assertContains(response, '<td class="field-posted">')
def test_index_css_classes(self):
"""
CSS class names are used for each app and model on the admin index
pages (#17050).
"""
# General index page
response = self.client.get(reverse('admin:index'))
self.assertContains(response, '<div class="app-admin_views module">')
self.assertContains(response, '<tr class="model-actor">')
self.assertContains(response, '<tr class="model-album">')
# App index page
response = self.client.get(reverse('admin:app_list', args=('admin_views',)))
self.assertContains(response, '<div class="app-admin_views module">')
self.assertContains(response, '<tr class="model-actor">')
self.assertContains(response, '<tr class="model-album">')
def test_app_model_in_form_body_class(self):
"""
Ensure app and model tag are correctly read by change_form template
"""
response = self.client.get(reverse('admin:admin_views_section_add'))
self.assertContains(response, '<body class=" app-admin_views model-section ')
def test_app_model_in_list_body_class(self):
"""
Ensure app and model tag are correctly read by change_list template
"""
response = self.client.get(reverse('admin:admin_views_section_changelist'))
self.assertContains(response, '<body class=" app-admin_views model-section ')
def test_app_model_in_delete_confirmation_body_class(self):
"""
Ensure app and model tag are correctly read by delete_confirmation
template
"""
response = self.client.get(reverse('admin:admin_views_section_delete', args=(self.s1.pk,)))
self.assertContains(response, '<body class=" app-admin_views model-section ')
def test_app_model_in_app_index_body_class(self):
"""
Ensure app and model tag are correctly read by app_index template
"""
response = self.client.get(reverse('admin:app_list', args=('admin_views',)))
self.assertContains(response, '<body class=" dashboard app-admin_views')
def test_app_model_in_delete_selected_confirmation_body_class(self):
"""
Ensure app and model tag are correctly read by
delete_selected_confirmation template
"""
action_data = {
ACTION_CHECKBOX_NAME: [self.s1.pk],
'action': 'delete_selected',
'index': 0,
}
response = self.client.post(reverse('admin:admin_views_section_changelist'), action_data)
self.assertContains(response, '<body class=" app-admin_views model-section ')
def test_changelist_field_classes(self):
"""
        Cells of the change list table should contain the field name in their class attribute.
Refs #11195.
"""
Podcast.objects.create(name="Django Dose", release_date=datetime.date.today())
response = self.client.get(reverse('admin:admin_views_podcast_changelist'))
self.assertContains(response, '<th class="field-name">')
self.assertContains(response, '<td class="field-release_date nowrap">')
self.assertContains(response, '<td class="action-checkbox">')
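# docutils is an optional dependency of django.contrib.admindocs; the admindocs
# tests below are skipped when it isn't installed.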
try:
import docutils
except ImportError:
docutils = None
@unittest.skipUnless(docutils, "no docutils installed.")
@override_settings(ROOT_URLCONF='admin_views.urls')
@modify_settings(INSTALLED_APPS={'append': ['django.contrib.admindocs', 'django.contrib.flatpages']})
class AdminDocsTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
def setUp(self):
self.client.force_login(self.superuser)
def test_tags(self):
response = self.client.get(reverse('django-admindocs-tags'))
# The builtin tag group exists
self.assertContains(response, "<h2>Built-in tags</h2>", count=2, html=True)
# A builtin tag exists in both the index and detail
self.assertContains(response, '<h3 id="built_in-autoescape">autoescape</h3>', html=True)
self.assertContains(response, '<li><a href="#built_in-autoescape">autoescape</a></li>', html=True)
# An app tag exists in both the index and detail
self.assertContains(response, '<h3 id="flatpages-get_flatpages">get_flatpages</h3>', html=True)
self.assertContains(response, '<li><a href="#flatpages-get_flatpages">get_flatpages</a></li>', html=True)
# The admin list tag group exists
self.assertContains(response, "<h2>admin_list</h2>", count=2, html=True)
# An admin list tag exists in both the index and detail
self.assertContains(response, '<h3 id="admin_list-admin_actions">admin_actions</h3>', html=True)
self.assertContains(response, '<li><a href="#admin_list-admin_actions">admin_actions</a></li>', html=True)
def test_filters(self):
response = self.client.get(reverse('django-admindocs-filters'))
# The builtin filter group exists
self.assertContains(response, "<h2>Built-in filters</h2>", count=2, html=True)
# A builtin filter exists in both the index and detail
self.assertContains(response, '<h3 id="built_in-add">add</h3>', html=True)
self.assertContains(response, '<li><a href="#built_in-add">add</a></li>', html=True)
@override_settings(
ROOT_URLCONF='admin_views.urls',
TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
}],
USE_I18N=False,
)
class ValidXHTMLTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
def setUp(self):
self.client.force_login(self.superuser)
def test_lang_name_present(self):
response = self.client.get(reverse('admin:app_list', args=('admin_views',)))
self.assertNotContains(response, ' lang=""')
self.assertNotContains(response, ' xml:lang=""')
@override_settings(ROOT_URLCONF='admin_views.urls', USE_THOUSAND_SEPARATOR=True, USE_L10N=True)
class DateHierarchyTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
def setUp(self):
self.client.force_login(self.superuser)
def assert_non_localized_year(self, response, year):
"""
The year is not localized with USE_THOUSAND_SEPARATOR (#15234).
"""
self.assertNotContains(response, formats.number_format(year))
def assert_contains_year_link(self, response, date):
self.assertContains(response, '?release_date__year=%d"' % (date.year,))
def assert_contains_month_link(self, response, date):
self.assertContains(
            response, '?release_date__month=%d&amp;release_date__year=%d"' % (
date.month, date.year))
def assert_contains_day_link(self, response, date):
self.assertContains(
            response, '?release_date__day=%d&amp;'
            'release_date__month=%d&amp;release_date__year=%d"' % (
date.day, date.month, date.year))
def test_empty(self):
"""
No date hierarchy links display with empty changelist.
"""
response = self.client.get(
reverse('admin:admin_views_podcast_changelist'))
self.assertNotContains(response, 'release_date__year=')
self.assertNotContains(response, 'release_date__month=')
self.assertNotContains(response, 'release_date__day=')
def test_single(self):
"""
Single day-level date hierarchy appears for single object.
"""
DATE = datetime.date(2000, 6, 30)
Podcast.objects.create(release_date=DATE)
url = reverse('admin:admin_views_podcast_changelist')
response = self.client.get(url)
self.assert_contains_day_link(response, DATE)
self.assert_non_localized_year(response, 2000)
def test_within_month(self):
"""
day-level links appear for changelist within single month.
"""
DATES = (datetime.date(2000, 6, 30),
datetime.date(2000, 6, 15),
datetime.date(2000, 6, 3))
for date in DATES:
Podcast.objects.create(release_date=date)
url = reverse('admin:admin_views_podcast_changelist')
response = self.client.get(url)
for date in DATES:
self.assert_contains_day_link(response, date)
self.assert_non_localized_year(response, 2000)
def test_within_year(self):
"""
month-level links appear for changelist within single year.
"""
DATES = (datetime.date(2000, 1, 30),
datetime.date(2000, 3, 15),
datetime.date(2000, 5, 3))
for date in DATES:
Podcast.objects.create(release_date=date)
url = reverse('admin:admin_views_podcast_changelist')
response = self.client.get(url)
# no day-level links
self.assertNotContains(response, 'release_date__day=')
for date in DATES:
self.assert_contains_month_link(response, date)
self.assert_non_localized_year(response, 2000)
def test_multiple_years(self):
"""
year-level links appear for year-spanning changelist.
"""
DATES = (datetime.date(2001, 1, 30),
datetime.date(2003, 3, 15),
datetime.date(2005, 5, 3))
for date in DATES:
Podcast.objects.create(release_date=date)
response = self.client.get(
reverse('admin:admin_views_podcast_changelist'))
# no day/month-level links
self.assertNotContains(response, 'release_date__day=')
self.assertNotContains(response, 'release_date__month=')
for date in DATES:
self.assert_contains_year_link(response, date)
# and make sure GET parameters still behave correctly
for date in DATES:
url = '%s?release_date__year=%d' % (
reverse('admin:admin_views_podcast_changelist'),
date.year)
response = self.client.get(url)
self.assert_contains_month_link(response, date)
self.assert_non_localized_year(response, 2000)
self.assert_non_localized_year(response, 2003)
self.assert_non_localized_year(response, 2005)
url = '%s?release_date__year=%d&release_date__month=%d' % (
reverse('admin:admin_views_podcast_changelist'),
date.year, date.month)
response = self.client.get(url)
self.assert_contains_day_link(response, date)
self.assert_non_localized_year(response, 2000)
self.assert_non_localized_year(response, 2003)
self.assert_non_localized_year(response, 2005)
def test_related_field(self):
questions_data = (
# (posted data, number of answers),
(datetime.date(2001, 1, 30), 0),
(datetime.date(2003, 3, 15), 1),
(datetime.date(2005, 5, 3), 2),
)
for date, answer_count in questions_data:
question = Question.objects.create(posted=date)
for i in range(answer_count):
question.answer_set.create()
response = self.client.get(reverse('admin:admin_views_answer_changelist'))
for date, answer_count in questions_data:
link = '?question__posted__year=%d"' % (date.year,)
if answer_count > 0:
self.assertContains(response, link)
else:
self.assertNotContains(response, link)
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminCustomSaveRelatedTests(TestCase):
"""
One can easily customize the way related objects are saved.
Refs #16115.
"""
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
def setUp(self):
self.client.force_login(self.superuser)
def test_should_be_able_to_edit_related_objects_on_add_view(self):
post = {
'child_set-TOTAL_FORMS': '3',
'child_set-INITIAL_FORMS': '0',
'name': 'Josh Stone',
'child_set-0-name': 'Paul',
'child_set-1-name': 'Catherine',
}
self.client.post(reverse('admin:admin_views_parent_add'), post)
self.assertEqual(1, Parent.objects.count())
self.assertEqual(2, Child.objects.count())
children_names = list(Child.objects.order_by('name').values_list('name', flat=True))
self.assertEqual('Josh Stone', Parent.objects.latest('id').name)
self.assertEqual(['Catherine Stone', 'Paul Stone'], children_names)
def test_should_be_able_to_edit_related_objects_on_change_view(self):
parent = Parent.objects.create(name='Josh Stone')
paul = Child.objects.create(parent=parent, name='Paul')
catherine = Child.objects.create(parent=parent, name='Catherine')
post = {
'child_set-TOTAL_FORMS': '5',
'child_set-INITIAL_FORMS': '2',
'name': 'Josh Stone',
'child_set-0-name': 'Paul',
'child_set-0-id': paul.id,
'child_set-1-name': 'Catherine',
'child_set-1-id': catherine.id,
}
self.client.post(reverse('admin:admin_views_parent_change', args=(parent.id,)), post)
children_names = list(Child.objects.order_by('name').values_list('name', flat=True))
self.assertEqual('Josh Stone', Parent.objects.latest('id').name)
self.assertEqual(['Catherine Stone', 'Paul Stone'], children_names)
def test_should_be_able_to_edit_related_objects_on_changelist_view(self):
parent = Parent.objects.create(name='Josh Rock')
Child.objects.create(parent=parent, name='Paul')
Child.objects.create(parent=parent, name='Catherine')
post = {
'form-TOTAL_FORMS': '1',
'form-INITIAL_FORMS': '1',
'form-MAX_NUM_FORMS': '0',
'form-0-id': parent.id,
'form-0-name': 'Josh Stone',
'_save': 'Save'
}
self.client.post(reverse('admin:admin_views_parent_changelist'), post)
children_names = list(Child.objects.order_by('name').values_list('name', flat=True))
self.assertEqual('Josh Stone', Parent.objects.latest('id').name)
self.assertEqual(['Catherine Stone', 'Paul Stone'], children_names)
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminViewLogoutTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
def test_logout(self):
self.client.force_login(self.superuser)
response = self.client.get(reverse('admin:logout'))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'registration/logged_out.html')
self.assertEqual(response.request['PATH_INFO'], reverse('admin:logout'))
self.assertFalse(response.context['has_permission'])
        self.assertNotContains(response, 'user-tools')  # The user-tools div shouldn't be visible.
def test_client_logout_url_can_be_used_to_login(self):
response = self.client.get(reverse('admin:logout'))
self.assertEqual(response.status_code, 302) # we should be redirected to the login page.
# follow the redirect and test results.
response = self.client.get(reverse('admin:logout'), follow=True)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'admin/login.html')
self.assertEqual(response.request['PATH_INFO'], reverse('admin:login'))
self.assertContains(response, '<input type="hidden" name="next" value="%s">' % reverse('admin:index'))
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminUserMessageTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
def setUp(self):
self.client.force_login(self.superuser)
def send_message(self, level):
"""
Helper that sends a post to the dummy test methods and asserts that a
message with the level has appeared in the response.
"""
action_data = {
ACTION_CHECKBOX_NAME: [1],
'action': 'message_%s' % level,
'index': 0,
}
response = self.client.post(reverse('admin:admin_views_usermessenger_changelist'),
action_data, follow=True)
self.assertContains(response,
'<li class="%s">Test %s</li>' % (level, level),
html=True)
@override_settings(MESSAGE_LEVEL=10) # Set to DEBUG for this request
def test_message_debug(self):
self.send_message('debug')
def test_message_info(self):
self.send_message('info')
def test_message_success(self):
self.send_message('success')
def test_message_warning(self):
self.send_message('warning')
def test_message_error(self):
self.send_message('error')
def test_message_extra_tags(self):
action_data = {
ACTION_CHECKBOX_NAME: [1],
'action': 'message_extra_tags',
'index': 0,
}
response = self.client.post(reverse('admin:admin_views_usermessenger_changelist'),
action_data, follow=True)
self.assertContains(response,
'<li class="extra_tag info">Test tags</li>',
html=True)
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminKeepChangeListFiltersTests(TestCase):
admin_site = site
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
cls.joepublicuser = User.objects.create_user(username='joepublic', password='secret')
def setUp(self):
self.client.force_login(self.superuser)
def assertURLEqual(self, url1, url2, msg_prefix=''):
"""
Assert that two URLs are equal despite the ordering
of their querystring. Refs #22360.
"""
parsed_url1 = urlparse(url1)
path1 = parsed_url1.path
parsed_qs1 = dict(parse_qsl(parsed_url1.query))
parsed_url2 = urlparse(url2)
path2 = parsed_url2.path
parsed_qs2 = dict(parse_qsl(parsed_url2.query))
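        # _changelist_filters is itself an urlencoded querystring; parse it as
        # well so its internal parameter order is also ignored.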
for parsed_qs in [parsed_qs1, parsed_qs2]:
if '_changelist_filters' in parsed_qs:
changelist_filters = parsed_qs['_changelist_filters']
parsed_filters = dict(parse_qsl(changelist_filters))
parsed_qs['_changelist_filters'] = parsed_filters
self.assertEqual(path1, path2)
self.assertEqual(parsed_qs1, parsed_qs2)
def test_assert_url_equal(self):
# Test equality.
change_user_url = reverse('admin:auth_user_change', args=(self.joepublicuser.pk,))
self.assertURLEqual(
'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(
change_user_url
),
'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(
change_user_url
)
)
# Test inequality.
with self.assertRaises(AssertionError):
self.assertURLEqual(
'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(
change_user_url
),
'http://testserver{}?_changelist_filters=is_staff__exact%3D1%26is_superuser__exact%3D1'.format(
change_user_url
)
)
# Ignore scheme and host.
self.assertURLEqual(
'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(
change_user_url
),
'{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(change_user_url)
)
# Ignore ordering of querystring.
self.assertURLEqual(
'{}?is_staff__exact=0&is_superuser__exact=0'.format(reverse('admin:auth_user_changelist')),
'{}?is_superuser__exact=0&is_staff__exact=0'.format(reverse('admin:auth_user_changelist'))
)
# Ignore ordering of _changelist_filters.
self.assertURLEqual(
'{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(change_user_url),
'{}?_changelist_filters=is_superuser__exact%3D0%26is_staff__exact%3D0'.format(change_user_url)
)
def get_changelist_filters(self):
return {
'is_superuser__exact': 0,
'is_staff__exact': 0,
}
def get_changelist_filters_querystring(self):
return urlencode(self.get_changelist_filters())
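    # The changelist filters are carried from view to view inside a single
    # _changelist_filters query parameter.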
def get_preserved_filters_querystring(self):
return urlencode({
'_changelist_filters': self.get_changelist_filters_querystring()
})
def get_sample_user_id(self):
return self.joepublicuser.pk
def get_changelist_url(self):
return '%s?%s' % (
reverse('admin:auth_user_changelist',
current_app=self.admin_site.name),
self.get_changelist_filters_querystring(),
)
def get_add_url(self):
return '%s?%s' % (
reverse('admin:auth_user_add',
current_app=self.admin_site.name),
self.get_preserved_filters_querystring(),
)
def get_change_url(self, user_id=None):
if user_id is None:
user_id = self.get_sample_user_id()
return "%s?%s" % (
reverse('admin:auth_user_change', args=(user_id,),
current_app=self.admin_site.name),
self.get_preserved_filters_querystring(),
)
def get_history_url(self, user_id=None):
if user_id is None:
user_id = self.get_sample_user_id()
return "%s?%s" % (
reverse('admin:auth_user_history', args=(user_id,),
current_app=self.admin_site.name),
self.get_preserved_filters_querystring(),
)
def get_delete_url(self, user_id=None):
if user_id is None:
user_id = self.get_sample_user_id()
return "%s?%s" % (
reverse('admin:auth_user_delete', args=(user_id,),
current_app=self.admin_site.name),
self.get_preserved_filters_querystring(),
)
def test_changelist_view(self):
response = self.client.get(self.get_changelist_url())
self.assertEqual(response.status_code, 200)
# Check the `change_view` link has the correct querystring.
detail_link = re.search(
'<a href="(.*?)">{}</a>'.format(self.joepublicuser.username),
response.content.decode()
)
self.assertURLEqual(detail_link.group(1), self.get_change_url())
def test_change_view(self):
# Get the `change_view`.
response = self.client.get(self.get_change_url())
self.assertEqual(response.status_code, 200)
# Check the form action.
form_action = re.search(
'<form action="(.*?)" method="post" id="user_form".*?>',
response.content.decode()
)
self.assertURLEqual(form_action.group(1), '?%s' % self.get_preserved_filters_querystring())
# Check the history link.
history_link = re.search(
'<a href="(.*?)" class="historylink">History</a>',
response.content.decode()
)
self.assertURLEqual(history_link.group(1), self.get_history_url())
# Check the delete link.
delete_link = re.search(
'<a href="(.*?)" class="deletelink">Delete</a>',
response.content.decode()
)
self.assertURLEqual(delete_link.group(1), self.get_delete_url())
# Test redirect on "Save".
post_data = {
'username': 'joepublic',
'last_login_0': '2007-05-30',
'last_login_1': '13:20:10',
'date_joined_0': '2007-05-30',
'date_joined_1': '13:20:10',
}
post_data['_save'] = 1
response = self.client.post(self.get_change_url(), data=post_data)
self.assertRedirects(response, self.get_changelist_url())
post_data.pop('_save')
# Test redirect on "Save and continue".
post_data['_continue'] = 1
response = self.client.post(self.get_change_url(), data=post_data)
self.assertRedirects(response, self.get_change_url())
post_data.pop('_continue')
# Test redirect on "Save and add new".
post_data['_addanother'] = 1
response = self.client.post(self.get_change_url(), data=post_data)
self.assertRedirects(response, self.get_add_url())
post_data.pop('_addanother')
def test_add_view(self):
# Get the `add_view`.
response = self.client.get(self.get_add_url())
self.assertEqual(response.status_code, 200)
# Check the form action.
form_action = re.search(
'<form action="(.*?)" method="post" id="user_form".*?>',
response.content.decode()
)
self.assertURLEqual(form_action.group(1), '?%s' % self.get_preserved_filters_querystring())
post_data = {
'username': 'dummy',
'password1': 'test',
'password2': 'test',
}
# Test redirect on "Save".
post_data['_save'] = 1
response = self.client.post(self.get_add_url(), data=post_data)
self.assertRedirects(response, self.get_change_url(User.objects.get(username='dummy').pk))
post_data.pop('_save')
# Test redirect on "Save and continue".
post_data['username'] = 'dummy2'
post_data['_continue'] = 1
response = self.client.post(self.get_add_url(), data=post_data)
self.assertRedirects(response, self.get_change_url(User.objects.get(username='dummy2').pk))
post_data.pop('_continue')
# Test redirect on "Save and add new".
post_data['username'] = 'dummy3'
post_data['_addanother'] = 1
response = self.client.post(self.get_add_url(), data=post_data)
self.assertRedirects(response, self.get_add_url())
post_data.pop('_addanother')
def test_delete_view(self):
# Test redirect on "Delete".
response = self.client.post(self.get_delete_url(), {'post': 'yes'})
self.assertRedirects(response, self.get_changelist_url())
def test_url_prefix(self):
context = {
'preserved_filters': self.get_preserved_filters_querystring(),
'opts': User._meta,
}
prefixes = ('', '/prefix/', '/後台/')
for prefix in prefixes:
with self.subTest(prefix=prefix), override_script_prefix(prefix):
url = reverse('admin:auth_user_changelist', current_app=self.admin_site.name)
self.assertURLEqual(
self.get_changelist_url(),
add_preserved_filters(context, url),
)
class NamespacedAdminKeepChangeListFiltersTests(AdminKeepChangeListFiltersTests):
admin_site = site2
@override_settings(ROOT_URLCONF='admin_views.urls')
class TestLabelVisibility(TestCase):
    """ #11277 - Labels of hidden fields in admin were not hidden. """
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
def setUp(self):
self.client.force_login(self.superuser)
def test_all_fields_visible(self):
response = self.client.get(reverse('admin:admin_views_emptymodelvisible_add'))
self.assert_fieldline_visible(response)
self.assert_field_visible(response, 'first')
self.assert_field_visible(response, 'second')
def test_all_fields_hidden(self):
response = self.client.get(reverse('admin:admin_views_emptymodelhidden_add'))
self.assert_fieldline_hidden(response)
self.assert_field_hidden(response, 'first')
self.assert_field_hidden(response, 'second')
def test_mixin(self):
response = self.client.get(reverse('admin:admin_views_emptymodelmixin_add'))
self.assert_fieldline_visible(response)
self.assert_field_hidden(response, 'first')
self.assert_field_visible(response, 'second')
def assert_field_visible(self, response, field_name):
self.assertContains(response, '<div class="fieldBox field-%s">' % field_name)
def assert_field_hidden(self, response, field_name):
self.assertContains(response, '<div class="fieldBox field-%s hidden">' % field_name)
def assert_fieldline_visible(self, response):
self.assertContains(response, '<div class="form-row field-first field-second">')
def assert_fieldline_hidden(self, response):
self.assertContains(response, '<div class="form-row hidden')
@override_settings(ROOT_URLCONF='admin_views.urls')
class AdminViewOnSiteTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
cls.s1 = State.objects.create(name='New York')
cls.s2 = State.objects.create(name='Illinois')
cls.s3 = State.objects.create(name='California')
cls.c1 = City.objects.create(state=cls.s1, name='New York')
cls.c2 = City.objects.create(state=cls.s2, name='Chicago')
cls.c3 = City.objects.create(state=cls.s3, name='San Francisco')
cls.r1 = Restaurant.objects.create(city=cls.c1, name='Italian Pizza')
cls.r2 = Restaurant.objects.create(city=cls.c1, name='Boulevard')
cls.r3 = Restaurant.objects.create(city=cls.c2, name='Chinese Dinner')
cls.r4 = Restaurant.objects.create(city=cls.c2, name='Angels')
cls.r5 = Restaurant.objects.create(city=cls.c2, name='Take Away')
cls.r6 = Restaurant.objects.create(city=cls.c3, name='The Unknown Restaurant')
cls.w1 = Worker.objects.create(work_at=cls.r1, name='Mario', surname='Rossi')
cls.w2 = Worker.objects.create(work_at=cls.r1, name='Antonio', surname='Bianchi')
cls.w3 = Worker.objects.create(work_at=cls.r1, name='John', surname='Doe')
def setUp(self):
self.client.force_login(self.superuser)
def test_add_view_form_and_formsets_run_validation(self):
"""
Issue #20522
Verifying that if the parent form fails validation, the inlines also
run validation even if validation is contingent on parent form data.
        Also, assertFormError() and assertFormsetError() are usable for admin
forms and formsets.
"""
# The form validation should fail because 'some_required_info' is
# not included on the parent form, and the family_name of the parent
# does not match that of the child
post_data = {
'family_name': 'Test1',
'dependentchild_set-TOTAL_FORMS': '1',
'dependentchild_set-INITIAL_FORMS': '0',
'dependentchild_set-MAX_NUM_FORMS': '1',
'dependentchild_set-0-id': '',
'dependentchild_set-0-parent': '',
'dependentchild_set-0-family_name': 'Test2',
}
response = self.client.post(reverse('admin:admin_views_parentwithdependentchildren_add'), post_data)
self.assertFormError(response, 'adminform', 'some_required_info', ['This field is required.'])
msg = "The form 'adminform' in context 0 does not contain the non-field error 'Error'"
with self.assertRaisesMessage(AssertionError, msg):
self.assertFormError(response, 'adminform', None, ['Error'])
self.assertFormsetError(
response, 'inline_admin_formset', 0, None,
['Children must share a family name with their parents in this contrived test case']
)
msg = "The formset 'inline_admin_formset' in context 10 does not contain any non-form errors."
with self.assertRaisesMessage(AssertionError, msg):
self.assertFormsetError(response, 'inline_admin_formset', None, None, ['Error'])
def test_change_view_form_and_formsets_run_validation(self):
"""
Issue #20522
Verifying that if the parent form fails validation, the inlines also
run validation even if validation is contingent on parent form data
"""
pwdc = ParentWithDependentChildren.objects.create(some_required_info=6, family_name='Test1')
# The form validation should fail because 'some_required_info' is
# not included on the parent form, and the family_name of the parent
# does not match that of the child
post_data = {
'family_name': 'Test2',
'dependentchild_set-TOTAL_FORMS': '1',
'dependentchild_set-INITIAL_FORMS': '0',
'dependentchild_set-MAX_NUM_FORMS': '1',
'dependentchild_set-0-id': '',
'dependentchild_set-0-parent': str(pwdc.id),
'dependentchild_set-0-family_name': 'Test1',
}
response = self.client.post(
reverse('admin:admin_views_parentwithdependentchildren_change', args=(pwdc.id,)), post_data
)
self.assertFormError(response, 'adminform', 'some_required_info', ['This field is required.'])
self.assertFormsetError(
response, 'inline_admin_formset', 0, None,
['Children must share a family name with their parents in this contrived test case']
)
def test_check(self):
"The view_on_site value is either a boolean or a callable"
try:
admin = CityAdmin(City, AdminSite())
CityAdmin.view_on_site = True
self.assertEqual(admin.check(), [])
CityAdmin.view_on_site = False
self.assertEqual(admin.check(), [])
CityAdmin.view_on_site = lambda obj: obj.get_absolute_url()
self.assertEqual(admin.check(), [])
CityAdmin.view_on_site = []
self.assertEqual(admin.check(), [
Error(
"The value of 'view_on_site' must be a callable or a boolean value.",
obj=CityAdmin,
id='admin.E025',
),
])
finally:
# Restore the original values for the benefit of other tests.
CityAdmin.view_on_site = True
def test_false(self):
"The 'View on site' button is not displayed if view_on_site is False"
response = self.client.get(reverse('admin:admin_views_restaurant_change', args=(self.r1.pk,)))
content_type_pk = ContentType.objects.get_for_model(Restaurant).pk
self.assertNotContains(response, reverse('admin:view_on_site', args=(content_type_pk, 1)))
def test_true(self):
"The default behavior is followed if view_on_site is True"
response = self.client.get(reverse('admin:admin_views_city_change', args=(self.c1.pk,)))
content_type_pk = ContentType.objects.get_for_model(City).pk
self.assertContains(response, reverse('admin:view_on_site', args=(content_type_pk, self.c1.pk)))
def test_callable(self):
"The right link is displayed if view_on_site is a callable"
response = self.client.get(reverse('admin:admin_views_worker_change', args=(self.w1.pk,)))
self.assertContains(response, '"/worker/%s/%s/"' % (self.w1.surname, self.w1.name))
    def test_missing_get_absolute_url(self):
        "None is returned if the model doesn't have get_absolute_url"
model_admin = ModelAdmin(Worker, None)
self.assertIsNone(model_admin.get_view_on_site_url(Worker()))
@override_settings(ROOT_URLCONF='admin_views.urls')
class InlineAdminViewOnSiteTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
cls.s1 = State.objects.create(name='New York')
cls.s2 = State.objects.create(name='Illinois')
cls.s3 = State.objects.create(name='California')
cls.c1 = City.objects.create(state=cls.s1, name='New York')
cls.c2 = City.objects.create(state=cls.s2, name='Chicago')
cls.c3 = City.objects.create(state=cls.s3, name='San Francisco')
cls.r1 = Restaurant.objects.create(city=cls.c1, name='Italian Pizza')
cls.r2 = Restaurant.objects.create(city=cls.c1, name='Boulevard')
cls.r3 = Restaurant.objects.create(city=cls.c2, name='Chinese Dinner')
cls.r4 = Restaurant.objects.create(city=cls.c2, name='Angels')
cls.r5 = Restaurant.objects.create(city=cls.c2, name='Take Away')
cls.r6 = Restaurant.objects.create(city=cls.c3, name='The Unknown Restaurant')
cls.w1 = Worker.objects.create(work_at=cls.r1, name='Mario', surname='Rossi')
cls.w2 = Worker.objects.create(work_at=cls.r1, name='Antonio', surname='Bianchi')
cls.w3 = Worker.objects.create(work_at=cls.r1, name='John', surname='Doe')
def setUp(self):
self.client.force_login(self.superuser)
def test_false(self):
"The 'View on site' button is not displayed if view_on_site is False"
response = self.client.get(reverse('admin:admin_views_state_change', args=(self.s1.pk,)))
content_type_pk = ContentType.objects.get_for_model(City).pk
self.assertNotContains(response, reverse('admin:view_on_site', args=(content_type_pk, self.c1.pk)))
def test_true(self):
"The 'View on site' button is displayed if view_on_site is True"
response = self.client.get(reverse('admin:admin_views_city_change', args=(self.c1.pk,)))
content_type_pk = ContentType.objects.get_for_model(Restaurant).pk
self.assertContains(response, reverse('admin:view_on_site', args=(content_type_pk, self.r1.pk)))
def test_callable(self):
"The right link is displayed if view_on_site is a callable"
response = self.client.get(reverse('admin:admin_views_restaurant_change', args=(self.r1.pk,)))
self.assertContains(response, '"/worker_inline/%s/%s/"' % (self.w1.surname, self.w1.name))
@override_settings(ROOT_URLCONF='admin_views.urls')
class GetFormsetsWithInlinesArgumentTest(TestCase):
"""
#23934 - When adding a new model instance in the admin, the 'obj' argument
of get_formsets_with_inlines() should be None. When changing, it should be
equal to the existing model instance.
The GetFormsetsArgumentCheckingAdmin ModelAdmin throws an exception
if obj is not None during add_view or obj is None during change_view.
"""
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
def setUp(self):
self.client.force_login(self.superuser)
def test_explicitly_provided_pk(self):
post_data = {'name': '1'}
response = self.client.post(reverse('admin:admin_views_explicitlyprovidedpk_add'), post_data)
self.assertEqual(response.status_code, 302)
post_data = {'name': '2'}
response = self.client.post(reverse('admin:admin_views_explicitlyprovidedpk_change', args=(1,)), post_data)
self.assertEqual(response.status_code, 302)
def test_implicitly_generated_pk(self):
post_data = {'name': '1'}
response = self.client.post(reverse('admin:admin_views_implicitlygeneratedpk_add'), post_data)
self.assertEqual(response.status_code, 302)
post_data = {'name': '2'}
response = self.client.post(reverse('admin:admin_views_implicitlygeneratedpk_change', args=(1,)), post_data)
self.assertEqual(response.status_code, 302)
d3ad30be770081f3bdf4e901b300516c79d08b9b655043a409baf7687e4a5a4c | import datetime
import os
import tempfile
import uuid
from django.contrib.auth.models import User
from django.contrib.contenttypes.fields import (
GenericForeignKey, GenericRelation,
)
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ValidationError
from django.core.files.storage import FileSystemStorage
from django.db import models
class Section(models.Model):
"""
A simple section that links to articles, to test linking to related items
in admin views.
"""
name = models.CharField(max_length=100)
def __str__(self):
return self.name
@property
def name_property(self):
"""
A property that simply returns the name. Used to test #24461
"""
return self.name
class Article(models.Model):
"""
A simple article to test admin views. Test backwards compatibility.
"""
title = models.CharField(max_length=100)
content = models.TextField()
date = models.DateTimeField()
section = models.ForeignKey(Section, models.CASCADE, null=True, blank=True)
another_section = models.ForeignKey(Section, models.CASCADE, null=True, blank=True, related_name='+')
sub_section = models.ForeignKey(Section, models.SET_NULL, null=True, blank=True, related_name='+')
def __str__(self):
return self.title
def model_year(self):
return self.date.year
model_year.admin_order_field = 'date'
model_year.short_description = ''
def model_year_reversed(self):
return self.date.year
model_year_reversed.admin_order_field = '-date'
model_year_reversed.short_description = ''
class Book(models.Model):
"""
A simple book that has chapters.
"""
name = models.CharField(max_length=100, verbose_name='¿Name?')
def __str__(self):
return self.name
class Promo(models.Model):
name = models.CharField(max_length=100, verbose_name='¿Name?')
book = models.ForeignKey(Book, models.CASCADE)
author = models.ForeignKey(User, models.SET_NULL, blank=True, null=True)
def __str__(self):
return self.name
class Chapter(models.Model):
title = models.CharField(max_length=100, verbose_name='¿Title?')
content = models.TextField()
book = models.ForeignKey(Book, models.CASCADE)
class Meta:
# Use a utf-8 bytestring to ensure it works (see #11710)
verbose_name = '¿Chapter?'
def __str__(self):
return self.title
class ChapterXtra1(models.Model):
chap = models.OneToOneField(Chapter, models.CASCADE, verbose_name='¿Chap?')
xtra = models.CharField(max_length=100, verbose_name='¿Xtra?')
guest_author = models.ForeignKey(User, models.SET_NULL, blank=True, null=True)
def __str__(self):
return '¿Xtra1: %s' % self.xtra
class ChapterXtra2(models.Model):
chap = models.OneToOneField(Chapter, models.CASCADE, verbose_name='¿Chap?')
xtra = models.CharField(max_length=100, verbose_name='¿Xtra?')
def __str__(self):
return '¿Xtra2: %s' % self.xtra
class RowLevelChangePermissionModel(models.Model):
name = models.CharField(max_length=100, blank=True)
class CustomArticle(models.Model):
content = models.TextField()
date = models.DateTimeField()
class ModelWithStringPrimaryKey(models.Model):
string_pk = models.CharField(max_length=255, primary_key=True)
def __str__(self):
return self.string_pk
def get_absolute_url(self):
return '/dummy/%s/' % self.string_pk
class Color(models.Model):
value = models.CharField(max_length=10)
warm = models.BooleanField(default=False)
def __str__(self):
return self.value
# We replicate Color to register it with another ModelAdmin.
class Color2(Color):
class Meta:
proxy = True
class Thing(models.Model):
title = models.CharField(max_length=20)
color = models.ForeignKey(Color, models.CASCADE, limit_choices_to={'warm': True})
pub_date = models.DateField(blank=True, null=True)
def __str__(self):
return self.title
class Actor(models.Model):
name = models.CharField(max_length=50)
age = models.IntegerField()
title = models.CharField(max_length=50, null=True, blank=True)
def __str__(self):
return self.name
class Inquisition(models.Model):
expected = models.BooleanField(default=False)
leader = models.ForeignKey(Actor, models.CASCADE)
country = models.CharField(max_length=20)
def __str__(self):
return "by %s from %s" % (self.leader, self.country)
class Sketch(models.Model):
title = models.CharField(max_length=100)
inquisition = models.ForeignKey(
Inquisition,
models.CASCADE,
limit_choices_to={
'leader__name': 'Palin',
'leader__age': 27,
'expected': False,
},
)
defendant0 = models.ForeignKey(
Actor,
models.CASCADE,
limit_choices_to={'title__isnull': False},
related_name='as_defendant0',
)
defendant1 = models.ForeignKey(
Actor,
models.CASCADE,
limit_choices_to={'title__isnull': True},
related_name='as_defendant1',
)
def __str__(self):
return self.title
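# Callables used as limit_choices_to values on StumpJoke below; they are
# evaluated lazily, not at model definition time.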
def today_callable_dict():
return {"last_action__gte": datetime.datetime.today()}
def today_callable_q():
return models.Q(last_action__gte=datetime.datetime.today())
class Character(models.Model):
username = models.CharField(max_length=100)
last_action = models.DateTimeField()
def __str__(self):
return self.username
class StumpJoke(models.Model):
variation = models.CharField(max_length=100)
most_recently_fooled = models.ForeignKey(
Character,
models.CASCADE,
limit_choices_to=today_callable_dict,
related_name="+",
)
has_fooled_today = models.ManyToManyField(Character, limit_choices_to=today_callable_q, related_name="+")
def __str__(self):
return self.variation
class Fabric(models.Model):
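    # Grouped choices: a 'Textured' optgroup with two options plus one ungrouped option.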
NG_CHOICES = (
('Textured', (
('x', 'Horizontal'),
('y', 'Vertical'),
)),
('plain', 'Smooth'),
)
surface = models.CharField(max_length=20, choices=NG_CHOICES)
class Person(models.Model):
GENDER_CHOICES = (
(1, "Male"),
(2, "Female"),
)
name = models.CharField(max_length=100)
gender = models.IntegerField(choices=GENDER_CHOICES)
age = models.IntegerField(default=21)
alive = models.BooleanField(default=True)
def __str__(self):
return self.name
class Persona(models.Model):
"""
A simple persona associated with accounts, to test inlining of related
accounts which inherit from a common accounts class.
"""
name = models.CharField(blank=False, max_length=80)
def __str__(self):
return self.name
class Account(models.Model):
"""
A simple, generic account encapsulating the information shared by all
types of accounts.
"""
username = models.CharField(blank=False, max_length=80)
persona = models.ForeignKey(Persona, models.CASCADE, related_name="accounts")
servicename = 'generic service'
def __str__(self):
return "%s: %s" % (self.servicename, self.username)
class FooAccount(Account):
"""A service-specific account of type Foo."""
servicename = 'foo'
class BarAccount(Account):
"""A service-specific account of type Bar."""
servicename = 'bar'
class Subscriber(models.Model):
name = models.CharField(blank=False, max_length=80)
email = models.EmailField(blank=False, max_length=175)
def __str__(self):
return "%s (%s)" % (self.name, self.email)
class ExternalSubscriber(Subscriber):
pass
class OldSubscriber(Subscriber):
pass
class Media(models.Model):
name = models.CharField(max_length=60)
class Podcast(Media):
release_date = models.DateField()
class Meta:
ordering = ('release_date',) # overridden in PodcastAdmin
class Vodcast(Media):
media = models.OneToOneField(Media, models.CASCADE, primary_key=True, parent_link=True)
released = models.BooleanField(default=False)
class Parent(models.Model):
name = models.CharField(max_length=128)
def clean(self):
if self.name == '_invalid':
raise ValidationError('invalid')
class Child(models.Model):
parent = models.ForeignKey(Parent, models.CASCADE, editable=False)
name = models.CharField(max_length=30, blank=True)
def clean(self):
if self.name == '_invalid':
raise ValidationError('invalid')
class EmptyModel(models.Model):
def __str__(self):
return "Primary key = %s" % self.id
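# Uploaded test files are stored in a throwaway temporary directory rather than MEDIA_ROOT.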
temp_storage = FileSystemStorage(tempfile.mkdtemp())
UPLOAD_TO = os.path.join(temp_storage.location, 'test_upload')
class Gallery(models.Model):
name = models.CharField(max_length=100)
class Picture(models.Model):
name = models.CharField(max_length=100)
image = models.FileField(storage=temp_storage, upload_to='test_upload')
gallery = models.ForeignKey(Gallery, models.CASCADE, related_name="pictures")
class Language(models.Model):
iso = models.CharField(max_length=5, primary_key=True)
name = models.CharField(max_length=50)
english_name = models.CharField(max_length=50)
shortlist = models.BooleanField(default=False)
class Meta:
ordering = ('iso',)
# a base class for Recommender and Recommendation
class Title(models.Model):
pass
class TitleTranslation(models.Model):
title = models.ForeignKey(Title, models.CASCADE)
text = models.CharField(max_length=100)
class Recommender(Title):
pass
class Recommendation(Title):
the_recommender = models.ForeignKey(Recommender, models.CASCADE)
class Collector(models.Model):
name = models.CharField(max_length=100)
class Widget(models.Model):
owner = models.ForeignKey(Collector, models.CASCADE)
name = models.CharField(max_length=100)
class DooHickey(models.Model):
code = models.CharField(max_length=10, primary_key=True)
owner = models.ForeignKey(Collector, models.CASCADE)
name = models.CharField(max_length=100)
class Grommet(models.Model):
code = models.AutoField(primary_key=True)
owner = models.ForeignKey(Collector, models.CASCADE)
name = models.CharField(max_length=100)
class Whatsit(models.Model):
index = models.IntegerField(primary_key=True)
owner = models.ForeignKey(Collector, models.CASCADE)
name = models.CharField(max_length=100)
class Doodad(models.Model):
name = models.CharField(max_length=100)
class FancyDoodad(Doodad):
owner = models.ForeignKey(Collector, models.CASCADE)
expensive = models.BooleanField(default=True)
class Category(models.Model):
collector = models.ForeignKey(Collector, models.CASCADE)
order = models.PositiveIntegerField()
class Meta:
ordering = ('order',)
def __str__(self):
return '%s:o%s' % (self.id, self.order)
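# Default callable for Link.posted: one week before today, evaluated whenever a
# Link is created without an explicit value.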
def link_posted_default():
return datetime.date.today() - datetime.timedelta(days=7)
class Link(models.Model):
posted = models.DateField(default=link_posted_default)
url = models.URLField()
post = models.ForeignKey("Post", models.CASCADE)
readonly_link_content = models.TextField()
class PrePopulatedPost(models.Model):
title = models.CharField(max_length=100)
published = models.BooleanField(default=False)
slug = models.SlugField()
class PrePopulatedSubPost(models.Model):
post = models.ForeignKey(PrePopulatedPost, models.CASCADE)
subtitle = models.CharField(max_length=100)
subslug = models.SlugField()
class Post(models.Model):
title = models.CharField(max_length=100, help_text="Some help text for the title (with unicode ŠĐĆŽćžšđ)")
content = models.TextField(help_text="Some help text for the content (with unicode ŠĐĆŽćžšđ)")
readonly_content = models.TextField()
posted = models.DateField(
default=datetime.date.today,
help_text="Some help text for the date (with unicode ŠĐĆŽćžšđ)"
)
public = models.BooleanField(null=True, blank=True)
def awesomeness_level(self):
return "Very awesome."
# Proxy model to test overridden fields attrs on Post model so as not to
# interfere with other tests.
class FieldOverridePost(Post):
class Meta:
proxy = True
class Gadget(models.Model):
name = models.CharField(max_length=100)
def __str__(self):
return self.name
class Villain(models.Model):
name = models.CharField(max_length=100)
def __str__(self):
return self.name
class SuperVillain(Villain):
pass
class FunkyTag(models.Model):
"Because we all know there's only one real use case for GFKs."
name = models.CharField(max_length=25)
content_type = models.ForeignKey(ContentType, models.CASCADE)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey('content_type', 'object_id')
def __str__(self):
return self.name
class Plot(models.Model):
name = models.CharField(max_length=100)
team_leader = models.ForeignKey(Villain, models.CASCADE, related_name='lead_plots')
contact = models.ForeignKey(Villain, models.CASCADE, related_name='contact_plots')
tags = GenericRelation(FunkyTag)
def __str__(self):
return self.name
class PlotDetails(models.Model):
details = models.CharField(max_length=100)
plot = models.OneToOneField(Plot, models.CASCADE, null=True, blank=True)
def __str__(self):
return self.details
class PlotProxy(Plot):
class Meta:
proxy = True
class SecretHideout(models.Model):
""" Secret! Not registered with the admin! """
location = models.CharField(max_length=100)
villain = models.ForeignKey(Villain, models.CASCADE)
def __str__(self):
return self.location
class SuperSecretHideout(models.Model):
""" Secret! Not registered with the admin! """
location = models.CharField(max_length=100)
supervillain = models.ForeignKey(SuperVillain, models.CASCADE)
def __str__(self):
return self.location
class Bookmark(models.Model):
name = models.CharField(max_length=60)
tag = GenericRelation(FunkyTag, related_query_name='bookmark')
def __str__(self):
return self.name
class CyclicOne(models.Model):
name = models.CharField(max_length=25)
two = models.ForeignKey('CyclicTwo', models.CASCADE)
def __str__(self):
return self.name
class CyclicTwo(models.Model):
name = models.CharField(max_length=25)
one = models.ForeignKey(CyclicOne, models.CASCADE)
def __str__(self):
return self.name
class Topping(models.Model):
name = models.CharField(max_length=20)
def __str__(self):
return self.name
class Pizza(models.Model):
name = models.CharField(max_length=20)
toppings = models.ManyToManyField('Topping', related_name='pizzas')
# Pizza's ModelAdmin has readonly_fields = ['toppings'].
# toppings is editable for this model's admin.
class ReadablePizza(Pizza):
class Meta:
proxy = True
# No default permissions are created for this model and both name and toppings
# are readonly for this model's admin.
class ReadOnlyPizza(Pizza):
class Meta:
proxy = True
default_permissions = ()
class Album(models.Model):
owner = models.ForeignKey(User, models.SET_NULL, null=True, blank=True)
title = models.CharField(max_length=30)
class Employee(Person):
code = models.CharField(max_length=20)
class WorkHour(models.Model):
datum = models.DateField()
employee = models.ForeignKey(Employee, models.CASCADE)
class Question(models.Model):
question = models.CharField(max_length=20)
posted = models.DateField(default=datetime.date.today)
expires = models.DateTimeField(null=True, blank=True)
related_questions = models.ManyToManyField('self')
def __str__(self):
return self.question
class Answer(models.Model):
question = models.ForeignKey(Question, models.PROTECT)
answer = models.CharField(max_length=20)
def __str__(self):
return self.answer
class Answer2(Answer):
class Meta:
proxy = True
class Reservation(models.Model):
start_date = models.DateTimeField()
price = models.IntegerField()
class FoodDelivery(models.Model):
DRIVER_CHOICES = (
('bill', 'Bill G'),
('steve', 'Steve J'),
)
RESTAURANT_CHOICES = (
('indian', 'A Taste of India'),
('thai', 'Thai Pography'),
('pizza', 'Pizza Mama'),
)
reference = models.CharField(max_length=100)
driver = models.CharField(max_length=100, choices=DRIVER_CHOICES, blank=True)
restaurant = models.CharField(max_length=100, choices=RESTAURANT_CHOICES, blank=True)
class Meta:
unique_together = (("driver", "restaurant"),)
class CoverLetter(models.Model):
author = models.CharField(max_length=30)
date_written = models.DateField(null=True, blank=True)
def __str__(self):
return self.author
class Paper(models.Model):
title = models.CharField(max_length=30)
author = models.CharField(max_length=30, blank=True, null=True)
class ShortMessage(models.Model):
content = models.CharField(max_length=140)
timestamp = models.DateTimeField(null=True, blank=True)
class Telegram(models.Model):
title = models.CharField(max_length=30)
date_sent = models.DateField(null=True, blank=True)
def __str__(self):
return self.title
class Story(models.Model):
title = models.CharField(max_length=100)
content = models.TextField()
class OtherStory(models.Model):
title = models.CharField(max_length=100)
content = models.TextField()
class ComplexSortedPerson(models.Model):
name = models.CharField(max_length=100)
age = models.PositiveIntegerField()
is_employee = models.BooleanField(null=True)
class PluggableSearchPerson(models.Model):
name = models.CharField(max_length=100)
age = models.PositiveIntegerField()
class PrePopulatedPostLargeSlug(models.Model):
"""
    Regression test for #15938: a large max_length for the SlugField must not
    be localized in prepopulated_fields_js.html or it might end up breaking
    the JavaScript (i.e., using THOUSAND_SEPARATOR ends up with maxLength=1,000).
"""
title = models.CharField(max_length=100)
published = models.BooleanField(default=False)
# `db_index=False` because MySQL cannot index large CharField (#21196).
slug = models.SlugField(max_length=1000, db_index=False)
class AdminOrderedField(models.Model):
order = models.IntegerField()
stuff = models.CharField(max_length=200)
class AdminOrderedModelMethod(models.Model):
order = models.IntegerField()
stuff = models.CharField(max_length=200)
def some_order(self):
return self.order
some_order.admin_order_field = 'order'
class AdminOrderedAdminMethod(models.Model):
order = models.IntegerField()
stuff = models.CharField(max_length=200)
class AdminOrderedCallable(models.Model):
order = models.IntegerField()
stuff = models.CharField(max_length=200)
class Report(models.Model):
title = models.CharField(max_length=100)
def __str__(self):
return self.title
class MainPrepopulated(models.Model):
name = models.CharField(max_length=100)
pubdate = models.DateField()
status = models.CharField(
max_length=20,
choices=(('option one', 'Option One'),
('option two', 'Option Two')))
slug1 = models.SlugField(blank=True)
slug2 = models.SlugField(blank=True)
slug3 = models.SlugField(blank=True, allow_unicode=True)
class RelatedPrepopulated(models.Model):
parent = models.ForeignKey(MainPrepopulated, models.CASCADE)
name = models.CharField(max_length=75)
fk = models.ForeignKey('self', models.CASCADE, blank=True, null=True)
m2m = models.ManyToManyField('self', blank=True)
pubdate = models.DateField()
status = models.CharField(
max_length=20,
choices=(('option one', 'Option One'),
('option two', 'Option Two')))
slug1 = models.SlugField(max_length=50)
slug2 = models.SlugField(max_length=60)
class UnorderedObject(models.Model):
"""
Model without any defined `Meta.ordering`.
Refs #16819.
"""
name = models.CharField(max_length=255)
bool = models.BooleanField(default=True)
class UndeletableObject(models.Model):
"""
    Model whose show_delete in the admin change_view has been disabled.
Refs #10057.
"""
name = models.CharField(max_length=255)
class UnchangeableObject(models.Model):
"""
    Model whose change_view is disabled in the admin.
Refs #20640.
"""
class UserMessenger(models.Model):
"""
Dummy class for testing message_user functions on ModelAdmin
"""
class Simple(models.Model):
"""
Simple model with nothing on it for use in testing
"""
class Choice(models.Model):
choice = models.IntegerField(
blank=True, null=True,
choices=((1, 'Yes'), (0, 'No'), (None, 'No opinion')),
)
class ParentWithDependentChildren(models.Model):
"""
Issue #20522
Model where the validation of child foreign-key relationships depends
on validation of the parent
"""
some_required_info = models.PositiveIntegerField()
family_name = models.CharField(max_length=255, blank=False)
class DependentChild(models.Model):
"""
Issue #20522
Model that depends on validation of the parent class for one of its
fields to validate during clean
"""
parent = models.ForeignKey(ParentWithDependentChildren, models.CASCADE)
family_name = models.CharField(max_length=255)
class _Manager(models.Manager):
def get_queryset(self):
return super().get_queryset().filter(pk__gt=1)
class FilteredManager(models.Model):
def __str__(self):
return "PK=%d" % self.pk
pk_gt_1 = _Manager()
objects = models.Manager()
class EmptyModelVisible(models.Model):
""" See ticket #11277. """
class EmptyModelHidden(models.Model):
""" See ticket #11277. """
class EmptyModelMixin(models.Model):
""" See ticket #11277. """
class State(models.Model):
name = models.CharField(max_length=100)
class City(models.Model):
state = models.ForeignKey(State, models.CASCADE)
name = models.CharField(max_length=100)
def get_absolute_url(self):
return '/dummy/%s/' % self.pk
class Restaurant(models.Model):
city = models.ForeignKey(City, models.CASCADE)
name = models.CharField(max_length=100)
def get_absolute_url(self):
return '/dummy/%s/' % self.pk
class Worker(models.Model):
work_at = models.ForeignKey(Restaurant, models.CASCADE)
name = models.CharField(max_length=50)
surname = models.CharField(max_length=50)
# Models for #23329
class ReferencedByParent(models.Model):
name = models.CharField(max_length=20, unique=True)
class ParentWithFK(models.Model):
fk = models.ForeignKey(
ReferencedByParent,
models.CASCADE,
to_field='name',
related_name='hidden+',
)
class ChildOfReferer(ParentWithFK):
pass
# Models for #23431
class InlineReferer(models.Model):
pass
class ReferencedByInline(models.Model):
name = models.CharField(max_length=20, unique=True)
class InlineReference(models.Model):
referer = models.ForeignKey(InlineReferer, models.CASCADE)
fk = models.ForeignKey(
ReferencedByInline,
models.CASCADE,
to_field='name',
related_name='hidden+',
)
class Recipe(models.Model):
rname = models.CharField(max_length=20, unique=True)
class Ingredient(models.Model):
iname = models.CharField(max_length=20, unique=True)
recipes = models.ManyToManyField(Recipe, through='RecipeIngredient')
class RecipeIngredient(models.Model):
ingredient = models.ForeignKey(Ingredient, models.CASCADE, to_field='iname')
recipe = models.ForeignKey(Recipe, models.CASCADE, to_field='rname')
# Model for #23839
class NotReferenced(models.Model):
# Don't point any FK at this model.
pass
# Models for #23934
class ExplicitlyProvidedPK(models.Model):
name = models.IntegerField(primary_key=True)
class ImplicitlyGeneratedPK(models.Model):
name = models.IntegerField(unique=True)
# Models for #25622
class ReferencedByGenRel(models.Model):
content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey('content_type', 'object_id')
class GenRelReference(models.Model):
references = GenericRelation(ReferencedByGenRel)
class ParentWithUUIDPK(models.Model):
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
title = models.CharField(max_length=100)
def __str__(self):
return str(self.id)
class RelatedWithUUIDPKModel(models.Model):
parent = models.ForeignKey(ParentWithUUIDPK, on_delete=models.SET_NULL, null=True, blank=True)
class Author(models.Model):
pass
class Authorship(models.Model):
book = models.ForeignKey(Book, models.CASCADE)
author = models.ForeignKey(Author, models.CASCADE)
class UserProxy(User):
"""Proxy a model with a different app_label."""
class Meta:
proxy = True
|
440dff4ce679e61ba065df61bbbe978477760709a5c354bb6cd4d35ab375bbc5 | import datetime
import os
import tempfile
from io import StringIO
from wsgiref.util import FileWrapper
from django import forms
from django.contrib import admin
from django.contrib.admin import BooleanFieldListFilter
from django.contrib.admin.views.main import ChangeList
from django.contrib.auth.admin import GroupAdmin, UserAdmin
from django.contrib.auth.models import Group, User
from django.core.exceptions import ValidationError
from django.core.files.storage import FileSystemStorage
from django.core.mail import EmailMessage
from django.db import models
from django.forms.models import BaseModelFormSet
from django.http import HttpResponse, StreamingHttpResponse
from django.urls import path
from django.utils.html import format_html
from django.utils.safestring import mark_safe
from .forms import MediaActionForm
from .models import (
Actor, AdminOrderedAdminMethod, AdminOrderedCallable, AdminOrderedField,
AdminOrderedModelMethod, Album, Answer, Answer2, Article, BarAccount, Book,
Bookmark, Category, Chapter, ChapterXtra1, Child, ChildOfReferer, Choice,
City, Collector, Color, Color2, ComplexSortedPerson, CoverLetter,
CustomArticle, CyclicOne, CyclicTwo, DependentChild, DooHickey, EmptyModel,
EmptyModelHidden, EmptyModelMixin, EmptyModelVisible, ExplicitlyProvidedPK,
ExternalSubscriber, Fabric, FancyDoodad, FieldOverridePost,
FilteredManager, FooAccount, FoodDelivery, FunkyTag, Gadget, Gallery,
GenRelReference, Grommet, ImplicitlyGeneratedPK, Ingredient,
InlineReference, InlineReferer, Inquisition, Language, Link,
MainPrepopulated, ModelWithStringPrimaryKey, NotReferenced, OldSubscriber,
OtherStory, Paper, Parent, ParentWithDependentChildren, ParentWithUUIDPK,
Person, Persona, Picture, Pizza, Plot, PlotDetails, PlotProxy,
PluggableSearchPerson, Podcast, Post, PrePopulatedPost,
PrePopulatedPostLargeSlug, PrePopulatedSubPost, Promo, Question,
ReadablePizza, ReadOnlyPizza, Recipe, Recommendation, Recommender,
ReferencedByGenRel, ReferencedByInline, ReferencedByParent,
RelatedPrepopulated, RelatedWithUUIDPKModel, Report, Reservation,
Restaurant, RowLevelChangePermissionModel, Section, ShortMessage, Simple,
Sketch, State, Story, StumpJoke, Subscriber, SuperVillain, Telegram, Thing,
Topping, UnchangeableObject, UndeletableObject, UnorderedObject,
UserMessenger, UserProxy, Villain, Vodcast, Whatsit, Widget, Worker,
WorkHour,
)
def callable_year(dt_value):
try:
return dt_value.year
except AttributeError:
return None
callable_year.admin_order_field = 'date'
class ArticleInline(admin.TabularInline):
model = Article
fk_name = 'section'
prepopulated_fields = {
'title': ('content',)
}
fieldsets = (
('Some fields', {
'classes': ('collapse',),
'fields': ('title', 'content')
}),
('Some other fields', {
'classes': ('wide',),
'fields': ('date', 'section')
})
)
class ChapterInline(admin.TabularInline):
model = Chapter
class ChapterXtra1Admin(admin.ModelAdmin):
list_filter = (
'chap',
'chap__title',
'chap__book',
'chap__book__name',
'chap__book__promo',
'chap__book__promo__name',
'guest_author__promo__book',
)
class ArticleForm(forms.ModelForm):
extra_form_field = forms.BooleanField(required=False)
class Meta:
fields = '__all__'
model = Article
class ArticleAdmin(admin.ModelAdmin):
list_display = (
'content', 'date', callable_year, 'model_year', 'modeladmin_year',
'model_year_reversed', 'section', lambda obj: obj.title,
'order_by_expression',
)
list_editable = ('section',)
list_filter = ('date', 'section')
autocomplete_fields = ('section',)
view_on_site = False
form = ArticleForm
fieldsets = (
('Some fields', {
'classes': ('collapse',),
'fields': ('title', 'content', 'extra_form_field'),
}),
('Some other fields', {
'classes': ('wide',),
'fields': ('date', 'section', 'sub_section')
})
)
def order_by_expression(self, obj):
return obj.model_year
# This ordering isn't particularly useful but shows that expressions can
# be used for admin_order_field.
order_by_expression.admin_order_field = models.F('date') + datetime.timedelta(days=3)
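    # Hedged aside (not exercised by this admin): admin_order_field also accepts
    # ordered expressions such as models.F('date').desc(nulls_last=True).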
def changelist_view(self, request):
return super().changelist_view(request, extra_context={'extra_var': 'Hello!'})
def modeladmin_year(self, obj):
return obj.date.year
modeladmin_year.admin_order_field = 'date'
modeladmin_year.short_description = None
def delete_model(self, request, obj):
EmailMessage(
'Greetings from a deleted object',
'I hereby inform you that some user deleted me',
'[email protected]',
['[email protected]']
).send()
return super().delete_model(request, obj)
def save_model(self, request, obj, form, change=True):
EmailMessage(
'Greetings from a created object',
'I hereby inform you that some user created me',
'[email protected]',
['[email protected]']
).send()
return super().save_model(request, obj, form, change)
class ArticleAdmin2(admin.ModelAdmin):
def has_module_permission(self, request):
return False
class RowLevelChangePermissionModelAdmin(admin.ModelAdmin):
def has_change_permission(self, request, obj=None):
""" Only allow changing objects with even id number """
return request.user.is_staff and (obj is not None) and (obj.id % 2 == 0)
def has_view_permission(self, request, obj=None):
"""Only allow viewing objects if id is a multiple of 3."""
return request.user.is_staff and obj is not None and obj.id % 3 == 0
class CustomArticleAdmin(admin.ModelAdmin):
"""
Tests various hooks for using custom templates and contexts.
"""
change_list_template = 'custom_admin/change_list.html'
change_form_template = 'custom_admin/change_form.html'
add_form_template = 'custom_admin/add_form.html'
object_history_template = 'custom_admin/object_history.html'
delete_confirmation_template = 'custom_admin/delete_confirmation.html'
delete_selected_confirmation_template = 'custom_admin/delete_selected_confirmation.html'
popup_response_template = 'custom_admin/popup_response.html'
def changelist_view(self, request):
return super().changelist_view(request, extra_context={'extra_var': 'Hello!'})
class ThingAdmin(admin.ModelAdmin):
list_filter = ('color', 'color__warm', 'color__value', 'pub_date')
class InquisitionAdmin(admin.ModelAdmin):
list_display = ('leader', 'country', 'expected', 'sketch')
def sketch(self, obj):
# A method with the same name as a reverse accessor.
return 'list-display-sketch'
class SketchAdmin(admin.ModelAdmin):
raw_id_fields = ('inquisition', 'defendant0', 'defendant1')
class FabricAdmin(admin.ModelAdmin):
list_display = ('surface',)
list_filter = ('surface',)
class BasePersonModelFormSet(BaseModelFormSet):
def clean(self):
for person_dict in self.cleaned_data:
person = person_dict.get('id')
alive = person_dict.get('alive')
if person and alive and person.name == "Grace Hopper":
raise forms.ValidationError("Grace is not a Zombie")
class PersonAdmin(admin.ModelAdmin):
list_display = ('name', 'gender', 'alive')
list_editable = ('gender', 'alive')
list_filter = ('gender',)
search_fields = ('^name',)
save_as = True
def get_changelist_formset(self, request, **kwargs):
return super().get_changelist_formset(request, formset=BasePersonModelFormSet, **kwargs)
def get_queryset(self, request):
# Order by a field that isn't in list display, to be able to test
# whether ordering is preserved.
return super().get_queryset(request).order_by('age')
class FooAccountAdmin(admin.StackedInline):
model = FooAccount
extra = 1
class BarAccountAdmin(admin.StackedInline):
model = BarAccount
extra = 1
class PersonaAdmin(admin.ModelAdmin):
inlines = (
FooAccountAdmin,
BarAccountAdmin
)
class SubscriberAdmin(admin.ModelAdmin):
actions = ['mail_admin']
action_form = MediaActionForm
def delete_queryset(self, request, queryset):
SubscriberAdmin.overridden = True
super().delete_queryset(request, queryset)
def mail_admin(self, request, selected):
EmailMessage(
'Greetings from a ModelAdmin action',
'This is the test email from an admin action',
'[email protected]',
['[email protected]']
).send()
def external_mail(modeladmin, request, selected):
EmailMessage(
'Greetings from a function action',
'This is the test email from a function action',
'[email protected]',
['[email protected]']
).send()
external_mail.short_description = 'External mail (Another awesome action)'
def redirect_to(modeladmin, request, selected):
from django.http import HttpResponseRedirect
return HttpResponseRedirect('/some-where-else/')
redirect_to.short_description = 'Redirect to (Awesome action)'
def download(modeladmin, request, selected):
buf = StringIO('This is the content of the file')
return StreamingHttpResponse(FileWrapper(buf))
download.short_description = 'Download subscription'
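# Editorial note on the action above: wsgiref's FileWrapper iterates over the wrapped
# file-like object in fixed-size blocks (8 KB by default), so StreamingHttpResponse
# emits the payload in chunks rather than as a single response body.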
def no_perm(modeladmin, request, selected):
return HttpResponse(content='No permission to perform this action', status=403)
no_perm.short_description = 'No permission to run'
class ExternalSubscriberAdmin(admin.ModelAdmin):
actions = [redirect_to, external_mail, download, no_perm]
class PodcastAdmin(admin.ModelAdmin):
list_display = ('name', 'release_date')
list_editable = ('release_date',)
date_hierarchy = 'release_date'
ordering = ('name',)
class VodcastAdmin(admin.ModelAdmin):
list_display = ('name', 'released')
list_editable = ('released',)
ordering = ('name',)
class ChildInline(admin.StackedInline):
model = Child
class ParentAdmin(admin.ModelAdmin):
model = Parent
inlines = [ChildInline]
save_as = True
list_display = ('id', 'name',)
list_display_links = ('id',)
list_editable = ('name',)
def save_related(self, request, form, formsets, change):
super().save_related(request, form, formsets, change)
first_name, last_name = form.instance.name.split()
for child in form.instance.child_set.all():
if len(child.name.split()) < 2:
child.name = child.name + ' ' + last_name
child.save()
class EmptyModelAdmin(admin.ModelAdmin):
def get_queryset(self, request):
return super().get_queryset(request).filter(pk__gt=1)
class OldSubscriberAdmin(admin.ModelAdmin):
actions = None
temp_storage = FileSystemStorage(tempfile.mkdtemp())
UPLOAD_TO = os.path.join(temp_storage.location, 'test_upload')
class PictureInline(admin.TabularInline):
model = Picture
extra = 1
class GalleryAdmin(admin.ModelAdmin):
inlines = [PictureInline]
class PictureAdmin(admin.ModelAdmin):
pass
class LanguageAdmin(admin.ModelAdmin):
list_display = ['iso', 'shortlist', 'english_name', 'name']
list_editable = ['shortlist']
class RecommendationAdmin(admin.ModelAdmin):
show_full_result_count = False
search_fields = ('=titletranslation__text', '=the_recommender__titletranslation__text',)
class WidgetInline(admin.StackedInline):
model = Widget
class DooHickeyInline(admin.StackedInline):
model = DooHickey
class GrommetInline(admin.StackedInline):
model = Grommet
class WhatsitInline(admin.StackedInline):
model = Whatsit
class FancyDoodadInline(admin.StackedInline):
model = FancyDoodad
class CategoryAdmin(admin.ModelAdmin):
list_display = ('id', 'collector', 'order')
list_editable = ('order',)
class CategoryInline(admin.StackedInline):
model = Category
class CollectorAdmin(admin.ModelAdmin):
inlines = [
WidgetInline, DooHickeyInline, GrommetInline, WhatsitInline,
FancyDoodadInline, CategoryInline
]
class LinkInline(admin.TabularInline):
model = Link
extra = 1
readonly_fields = ("posted", "multiline", "readonly_link_content")
def multiline(self, instance):
return "InlineMultiline\ntest\nstring"
class SubPostInline(admin.TabularInline):
model = PrePopulatedSubPost
prepopulated_fields = {
'subslug': ('subtitle',)
}
def get_readonly_fields(self, request, obj=None):
if obj and obj.published:
return ('subslug',)
return self.readonly_fields
def get_prepopulated_fields(self, request, obj=None):
if obj and obj.published:
return {}
return self.prepopulated_fields
class PrePopulatedPostAdmin(admin.ModelAdmin):
list_display = ['title', 'slug']
prepopulated_fields = {
'slug': ('title',)
}
inlines = [SubPostInline]
def get_readonly_fields(self, request, obj=None):
if obj and obj.published:
return ('slug',)
return self.readonly_fields
def get_prepopulated_fields(self, request, obj=None):
if obj and obj.published:
return {}
return self.prepopulated_fields
class PrePopulatedPostReadOnlyAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',)}
def has_change_permission(self, *args, **kwargs):
return False
class PostAdmin(admin.ModelAdmin):
list_display = ['title', 'public']
readonly_fields = (
'posted', 'awesomeness_level', 'coolness', 'value',
'multiline', 'multiline_html', lambda obj: "foo",
'readonly_content',
)
inlines = [
LinkInline
]
def coolness(self, instance):
if instance.pk:
return "%d amount of cool." % instance.pk
else:
return "Unknown coolness."
def value(self, instance):
return 1000
value.short_description = 'Value in $US'
def multiline(self, instance):
return "Multiline\ntest\nstring"
def multiline_html(self, instance):
return mark_safe("Multiline<br>\nhtml<br>\ncontent")
class FieldOverridePostForm(forms.ModelForm):
model = FieldOverridePost
class Meta:
help_texts = {
'posted': 'Overridden help text for the date',
}
labels = {
'public': 'Overridden public label',
}
class FieldOverridePostAdmin(PostAdmin):
form = FieldOverridePostForm
class CustomChangeList(ChangeList):
def get_queryset(self, request):
return self.root_queryset.order_by('pk').filter(pk=9999) # Doesn't exist
class GadgetAdmin(admin.ModelAdmin):
def get_changelist(self, request, **kwargs):
return CustomChangeList
class ToppingAdmin(admin.ModelAdmin):
readonly_fields = ('pizzas',)
class PizzaAdmin(admin.ModelAdmin):
readonly_fields = ('toppings',)
class StudentAdmin(admin.ModelAdmin):
search_fields = ('name',)
class ReadOnlyPizzaAdmin(admin.ModelAdmin):
readonly_fields = ('name', 'toppings')
def has_add_permission(self, request):
return False
def has_change_permission(self, request, obj=None):
return True
def has_delete_permission(self, request, obj=None):
return True
class WorkHourAdmin(admin.ModelAdmin):
list_display = ('datum', 'employee')
list_filter = ('employee',)
class FoodDeliveryAdmin(admin.ModelAdmin):
list_display = ('reference', 'driver', 'restaurant')
list_editable = ('driver', 'restaurant')
class CoverLetterAdmin(admin.ModelAdmin):
"""
A ModelAdmin with a custom get_queryset() method that uses defer(), to test
verbose_name display in messages shown after adding/editing CoverLetter
instances. Note that the CoverLetter model defines a __str__ method.
    Tests the fix for ticket #14529.
"""
def get_queryset(self, request):
return super().get_queryset(request).defer('date_written')
class PaperAdmin(admin.ModelAdmin):
"""
A ModelAdmin with a custom get_queryset() method that uses only(), to test
verbose_name display in messages shown after adding/editing Paper
instances.
    Tests the fix for ticket #14529.
"""
def get_queryset(self, request):
return super().get_queryset(request).only('title')
class ShortMessageAdmin(admin.ModelAdmin):
"""
A ModelAdmin with a custom get_queryset() method that uses defer(), to test
verbose_name display in messages shown after adding/editing ShortMessage
instances.
    Tests the fix for ticket #14529.
"""
def get_queryset(self, request):
return super().get_queryset(request).defer('timestamp')
class TelegramAdmin(admin.ModelAdmin):
"""
A ModelAdmin with a custom get_queryset() method that uses only(), to test
verbose_name display in messages shown after adding/editing Telegram
instances. Note that the Telegram model defines a __str__ method.
    Tests the fix for ticket #14529.
"""
def get_queryset(self, request):
return super().get_queryset(request).only('title')
class StoryForm(forms.ModelForm):
class Meta:
widgets = {'title': forms.HiddenInput}
class StoryAdmin(admin.ModelAdmin):
list_display = ('id', 'title', 'content')
list_display_links = ('title',) # 'id' not in list_display_links
list_editable = ('content',)
form = StoryForm
ordering = ['-id']
class OtherStoryAdmin(admin.ModelAdmin):
list_display = ('id', 'title', 'content')
list_display_links = ('title', 'id') # 'id' in list_display_links
list_editable = ('content',)
ordering = ['-id']
class ComplexSortedPersonAdmin(admin.ModelAdmin):
list_display = ('name', 'age', 'is_employee', 'colored_name')
ordering = ('name',)
def colored_name(self, obj):
return format_html('<span style="color: #ff00ff;">{}</span>', obj.name)
colored_name.admin_order_field = 'name'
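    # Editorial note: format_html() escapes its arguments, so obj.name above is
    # interpolated safely without any additional escaping.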
class PluggableSearchPersonAdmin(admin.ModelAdmin):
list_display = ('name', 'age')
search_fields = ('name',)
def get_search_results(self, request, queryset, search_term):
queryset, use_distinct = super().get_search_results(request, queryset, search_term)
try:
search_term_as_int = int(search_term)
except ValueError:
pass
else:
queryset |= self.model.objects.filter(age=search_term_as_int)
return queryset, use_distinct
class AlbumAdmin(admin.ModelAdmin):
list_filter = ['title']
class QuestionAdmin(admin.ModelAdmin):
ordering = ['-posted']
search_fields = ['question']
autocomplete_fields = ['related_questions']
class AnswerAdmin(admin.ModelAdmin):
autocomplete_fields = ['question']
class PrePopulatedPostLargeSlugAdmin(admin.ModelAdmin):
prepopulated_fields = {
'slug': ('title',)
}
class AdminOrderedFieldAdmin(admin.ModelAdmin):
ordering = ('order',)
list_display = ('stuff', 'order')
class AdminOrderedModelMethodAdmin(admin.ModelAdmin):
ordering = ('order',)
list_display = ('stuff', 'some_order')
class AdminOrderedAdminMethodAdmin(admin.ModelAdmin):
def some_admin_order(self, obj):
return obj.order
some_admin_order.admin_order_field = 'order'
ordering = ('order',)
list_display = ('stuff', 'some_admin_order')
def admin_ordered_callable(obj):
return obj.order
admin_ordered_callable.admin_order_field = 'order'
class AdminOrderedCallableAdmin(admin.ModelAdmin):
ordering = ('order',)
list_display = ('stuff', admin_ordered_callable)
class ReportAdmin(admin.ModelAdmin):
def extra(self, request):
return HttpResponse()
def get_urls(self):
        # Corner case: don't call the parent implementation, so only the custom
        # 'extra/' URL below is exposed for this admin.
return [path('extra/', self.extra, name='cable_extra')]
class CustomTemplateBooleanFieldListFilter(BooleanFieldListFilter):
template = 'custom_filter_template.html'
class CustomTemplateFilterColorAdmin(admin.ModelAdmin):
list_filter = (('warm', CustomTemplateBooleanFieldListFilter),)
# For Selenium Prepopulated tests -------------------------------------
class RelatedPrepopulatedInline1(admin.StackedInline):
fieldsets = (
(None, {
'fields': (
('fk', 'm2m'),
('pubdate', 'status'),
('name', 'slug1', 'slug2',),
),
}),
)
formfield_overrides = {models.CharField: {'strip': False}}
model = RelatedPrepopulated
extra = 1
autocomplete_fields = ['fk', 'm2m']
prepopulated_fields = {
'slug1': ['name', 'pubdate'],
'slug2': ['status', 'name'],
}
class RelatedPrepopulatedInline2(admin.TabularInline):
model = RelatedPrepopulated
extra = 1
autocomplete_fields = ['fk', 'm2m']
prepopulated_fields = {
'slug1': ['name', 'pubdate'],
'slug2': ['status', 'name'],
}
class RelatedPrepopulatedInline3(admin.TabularInline):
model = RelatedPrepopulated
extra = 0
autocomplete_fields = ['fk', 'm2m']
class MainPrepopulatedAdmin(admin.ModelAdmin):
inlines = [RelatedPrepopulatedInline1, RelatedPrepopulatedInline2, RelatedPrepopulatedInline3]
fieldsets = (
(None, {
'fields': (('pubdate', 'status'), ('name', 'slug1', 'slug2', 'slug3'))
}),
)
formfield_overrides = {models.CharField: {'strip': False}}
prepopulated_fields = {
'slug1': ['name', 'pubdate'],
'slug2': ['status', 'name'],
'slug3': ['name'],
}
class UnorderedObjectAdmin(admin.ModelAdmin):
list_display = ['id', 'name']
list_display_links = ['id']
list_editable = ['name']
list_per_page = 2
class UndeletableObjectAdmin(admin.ModelAdmin):
def change_view(self, *args, **kwargs):
kwargs['extra_context'] = {'show_delete': False}
return super().change_view(*args, **kwargs)
class UnchangeableObjectAdmin(admin.ModelAdmin):
def get_urls(self):
# Disable change_view, but leave other urls untouched
urlpatterns = super().get_urls()
return [p for p in urlpatterns if p.name and not p.name.endswith("_change")]
def callable_on_unknown(obj):
return obj.unknown
class AttributeErrorRaisingAdmin(admin.ModelAdmin):
list_display = [callable_on_unknown]
class CustomManagerAdmin(admin.ModelAdmin):
def get_queryset(self, request):
return FilteredManager.objects
class MessageTestingAdmin(admin.ModelAdmin):
actions = ["message_debug", "message_info", "message_success",
"message_warning", "message_error", "message_extra_tags"]
def message_debug(self, request, selected):
self.message_user(request, "Test debug", level="debug")
def message_info(self, request, selected):
self.message_user(request, "Test info", level="info")
def message_success(self, request, selected):
self.message_user(request, "Test success", level="success")
def message_warning(self, request, selected):
self.message_user(request, "Test warning", level="warning")
def message_error(self, request, selected):
self.message_user(request, "Test error", level="error")
def message_extra_tags(self, request, selected):
self.message_user(request, "Test tags", extra_tags="extra_tag")
class ChoiceList(admin.ModelAdmin):
list_display = ['choice']
readonly_fields = ['choice']
fields = ['choice']
class DependentChildAdminForm(forms.ModelForm):
"""
Issue #20522
Form to test child dependency on parent object's validation
"""
def clean(self):
parent = self.cleaned_data.get('parent')
if parent.family_name and parent.family_name != self.cleaned_data.get('family_name'):
raise ValidationError("Children must share a family name with their parents " +
"in this contrived test case")
return super().clean()
class DependentChildInline(admin.TabularInline):
model = DependentChild
form = DependentChildAdminForm
class ParentWithDependentChildrenAdmin(admin.ModelAdmin):
inlines = [DependentChildInline]
# Tests for ticket 11277 ----------------------------------
class FormWithoutHiddenField(forms.ModelForm):
first = forms.CharField()
second = forms.CharField()
class FormWithoutVisibleField(forms.ModelForm):
first = forms.CharField(widget=forms.HiddenInput)
second = forms.CharField(widget=forms.HiddenInput)
class FormWithVisibleAndHiddenField(forms.ModelForm):
first = forms.CharField(widget=forms.HiddenInput)
second = forms.CharField()
class EmptyModelVisibleAdmin(admin.ModelAdmin):
form = FormWithoutHiddenField
fieldsets = (
(None, {
'fields': (('first', 'second'),),
}),
)
class EmptyModelHiddenAdmin(admin.ModelAdmin):
form = FormWithoutVisibleField
fieldsets = EmptyModelVisibleAdmin.fieldsets
class EmptyModelMixinAdmin(admin.ModelAdmin):
form = FormWithVisibleAndHiddenField
fieldsets = EmptyModelVisibleAdmin.fieldsets
class CityInlineAdmin(admin.TabularInline):
model = City
view_on_site = False
class StateAdmin(admin.ModelAdmin):
inlines = [CityInlineAdmin]
class RestaurantInlineAdmin(admin.TabularInline):
model = Restaurant
view_on_site = True
class CityAdmin(admin.ModelAdmin):
inlines = [RestaurantInlineAdmin]
view_on_site = True
class WorkerAdmin(admin.ModelAdmin):
def view_on_site(self, obj):
return '/worker/%s/%s/' % (obj.surname, obj.name)
class WorkerInlineAdmin(admin.TabularInline):
model = Worker
def view_on_site(self, obj):
return '/worker_inline/%s/%s/' % (obj.surname, obj.name)
class RestaurantAdmin(admin.ModelAdmin):
inlines = [WorkerInlineAdmin]
view_on_site = False
def get_changeform_initial_data(self, request):
return {'name': 'overridden_value'}
class FunkyTagAdmin(admin.ModelAdmin):
list_display = ('name', 'content_object')
class InlineReferenceInline(admin.TabularInline):
model = InlineReference
class InlineRefererAdmin(admin.ModelAdmin):
inlines = [InlineReferenceInline]
class PlotReadonlyAdmin(admin.ModelAdmin):
readonly_fields = ('plotdetails',)
class GetFormsetsArgumentCheckingAdmin(admin.ModelAdmin):
fields = ['name']
def add_view(self, request, *args, **kwargs):
request.is_add_view = True
return super().add_view(request, *args, **kwargs)
def change_view(self, request, *args, **kwargs):
request.is_add_view = False
return super().change_view(request, *args, **kwargs)
def get_formsets_with_inlines(self, request, obj=None):
if request.is_add_view and obj is not None:
raise Exception("'obj' passed to get_formsets_with_inlines wasn't None during add_view")
if not request.is_add_view and obj is None:
raise Exception("'obj' passed to get_formsets_with_inlines was None during change_view")
return super().get_formsets_with_inlines(request, obj)
site = admin.AdminSite(name="admin")
site.site_url = '/my-site-url/'
site.register(Article, ArticleAdmin)
site.register(CustomArticle, CustomArticleAdmin)
site.register(
Section, save_as=True, inlines=[ArticleInline],
readonly_fields=['name_property'], search_fields=['name'],
)
site.register(ModelWithStringPrimaryKey)
site.register(Color)
site.register(Thing, ThingAdmin)
site.register(Actor)
site.register(Inquisition, InquisitionAdmin)
site.register(Sketch, SketchAdmin)
site.register(Person, PersonAdmin)
site.register(Persona, PersonaAdmin)
site.register(Subscriber, SubscriberAdmin)
site.register(ExternalSubscriber, ExternalSubscriberAdmin)
site.register(OldSubscriber, OldSubscriberAdmin)
site.register(Podcast, PodcastAdmin)
site.register(Vodcast, VodcastAdmin)
site.register(Parent, ParentAdmin)
site.register(EmptyModel, EmptyModelAdmin)
site.register(Fabric, FabricAdmin)
site.register(Gallery, GalleryAdmin)
site.register(Picture, PictureAdmin)
site.register(Language, LanguageAdmin)
site.register(Recommendation, RecommendationAdmin)
site.register(Recommender)
site.register(Collector, CollectorAdmin)
site.register(Category, CategoryAdmin)
site.register(Post, PostAdmin)
site.register(FieldOverridePost, FieldOverridePostAdmin)
site.register(Gadget, GadgetAdmin)
site.register(Villain)
site.register(SuperVillain)
site.register(Plot)
site.register(PlotDetails)
site.register(PlotProxy, PlotReadonlyAdmin)
site.register(Bookmark)
site.register(CyclicOne)
site.register(CyclicTwo)
site.register(WorkHour, WorkHourAdmin)
site.register(Reservation)
site.register(FoodDelivery, FoodDeliveryAdmin)
site.register(RowLevelChangePermissionModel, RowLevelChangePermissionModelAdmin)
site.register(Paper, PaperAdmin)
site.register(CoverLetter, CoverLetterAdmin)
site.register(ShortMessage, ShortMessageAdmin)
site.register(Telegram, TelegramAdmin)
site.register(Story, StoryAdmin)
site.register(OtherStory, OtherStoryAdmin)
site.register(Report, ReportAdmin)
site.register(MainPrepopulated, MainPrepopulatedAdmin)
site.register(UnorderedObject, UnorderedObjectAdmin)
site.register(UndeletableObject, UndeletableObjectAdmin)
site.register(UnchangeableObject, UnchangeableObjectAdmin)
site.register(State, StateAdmin)
site.register(City, CityAdmin)
site.register(Restaurant, RestaurantAdmin)
site.register(Worker, WorkerAdmin)
site.register(FunkyTag, FunkyTagAdmin)
site.register(ReferencedByParent)
site.register(ChildOfReferer)
site.register(ReferencedByInline)
site.register(InlineReferer, InlineRefererAdmin)
site.register(ReferencedByGenRel)
site.register(GenRelReference)
site.register(ParentWithUUIDPK)
site.register(RelatedPrepopulated, search_fields=['name'])
site.register(RelatedWithUUIDPKModel)
# We intentionally register Promo and ChapterXtra1 but not Chapter or ChapterXtra2.
# That way we cover all four cases:
# related ForeignKey object registered in admin
# related ForeignKey object not registered in admin
# related OneToOne object registered in admin
# related OneToOne object not registered in admin
# when deleting Book, so as to exercise all four paths through
# contrib.admin.utils's get_deleted_objects function.
site.register(Book, inlines=[ChapterInline])
site.register(Promo)
site.register(ChapterXtra1, ChapterXtra1Admin)
site.register(Pizza, PizzaAdmin)
site.register(ReadOnlyPizza, ReadOnlyPizzaAdmin)
site.register(ReadablePizza)
site.register(Topping, ToppingAdmin)
site.register(Album, AlbumAdmin)
site.register(Question, QuestionAdmin)
site.register(Answer, AnswerAdmin, date_hierarchy='question__posted')
site.register(Answer2, date_hierarchy='question__expires')
site.register(PrePopulatedPost, PrePopulatedPostAdmin)
site.register(ComplexSortedPerson, ComplexSortedPersonAdmin)
site.register(FilteredManager, CustomManagerAdmin)
site.register(PluggableSearchPerson, PluggableSearchPersonAdmin)
site.register(PrePopulatedPostLargeSlug, PrePopulatedPostLargeSlugAdmin)
site.register(AdminOrderedField, AdminOrderedFieldAdmin)
site.register(AdminOrderedModelMethod, AdminOrderedModelMethodAdmin)
site.register(AdminOrderedAdminMethod, AdminOrderedAdminMethodAdmin)
site.register(AdminOrderedCallable, AdminOrderedCallableAdmin)
site.register(Color2, CustomTemplateFilterColorAdmin)
site.register(Simple, AttributeErrorRaisingAdmin)
site.register(UserMessenger, MessageTestingAdmin)
site.register(Choice, ChoiceList)
site.register(ParentWithDependentChildren, ParentWithDependentChildrenAdmin)
site.register(EmptyModelHidden, EmptyModelHiddenAdmin)
site.register(EmptyModelVisible, EmptyModelVisibleAdmin)
site.register(EmptyModelMixin, EmptyModelMixinAdmin)
site.register(StumpJoke)
site.register(Recipe)
site.register(Ingredient)
site.register(NotReferenced)
site.register(ExplicitlyProvidedPK, GetFormsetsArgumentCheckingAdmin)
site.register(ImplicitlyGeneratedPK, GetFormsetsArgumentCheckingAdmin)
site.register(UserProxy)
# Register core models we need in our tests
site.register(User, UserAdmin)
site.register(Group, GroupAdmin)
# Used to test URL namespaces
site2 = admin.AdminSite(name="namespaced_admin")
site2.register(User, UserAdmin)
site2.register(Group, GroupAdmin)
site2.register(ParentWithUUIDPK)
site2.register(
RelatedWithUUIDPKModel,
list_display=['pk', 'parent'],
list_editable=['parent'],
raw_id_fields=['parent'],
)
site2.register(Person, save_as_continue=False)
site7 = admin.AdminSite(name="admin7")
site7.register(Article, ArticleAdmin2)
site7.register(Section)
site7.register(PrePopulatedPost, PrePopulatedPostReadOnlyAdmin)
# Used to test ModelAdmin.sortable_by and get_sortable_by().
class ArticleAdmin6(admin.ModelAdmin):
list_display = (
'content', 'date', callable_year, 'model_year', 'modeladmin_year',
'model_year_reversed', 'section',
)
sortable_by = ('date', callable_year)
def modeladmin_year(self, obj):
return obj.date.year
modeladmin_year.admin_order_field = 'date'
class ActorAdmin6(admin.ModelAdmin):
list_display = ('name', 'age')
sortable_by = ('name',)
def get_sortable_by(self, request):
return ('age',)
class ChapterAdmin6(admin.ModelAdmin):
list_display = ('title', 'book')
sortable_by = ()
class ColorAdmin6(admin.ModelAdmin):
list_display = ('value',)
def get_sortable_by(self, request):
return ()
site6 = admin.AdminSite(name='admin6')
site6.register(Article, ArticleAdmin6)
site6.register(Actor, ActorAdmin6)
site6.register(Chapter, ChapterAdmin6)
site6.register(Color, ColorAdmin6)
class ArticleAdmin9(admin.ModelAdmin):
def has_change_permission(self, request, obj=None):
# Simulate that the user can't change a specific object.
return obj is None
site9 = admin.AdminSite(name='admin9')
site9.register(Article, ArticleAdmin9)
class ArticleAdmin10(admin.ModelAdmin):
def has_change_permission(self, request, obj=None):
return False
site10 = admin.AdminSite(name='admin10')
site10.register(Article, ArticleAdmin10)
|
e0bf3aace19d5b5076942619f4690ecd7988500af3db23e9c082f74b78fb73d3 | import logging
from contextlib import contextmanager
from io import StringIO
from admin_scripts.tests import AdminScriptTestCase
from django.conf import settings
from django.core import mail
from django.core.exceptions import PermissionDenied
from django.core.files.temp import NamedTemporaryFile
from django.core.management import color
from django.http.multipartparser import MultiPartParserError
from django.test import RequestFactory, SimpleTestCase, override_settings
from django.test.utils import LoggingCaptureMixin
from django.utils.log import (
DEFAULT_LOGGING, AdminEmailHandler, CallbackFilter, RequireDebugFalse,
RequireDebugTrue, ServerFormatter,
)
from . import views
from .logconfig import MyEmailBackend
# logging config prior to using filter with mail_admins
OLD_LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'mail_admins': {
'level': 'ERROR',
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
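# Editorial note: OLD_LOGGING attaches no require_debug_false filter to mail_admins,
# so under this config error emails would also be sent while DEBUG=True; the filter
# tests below exercise the mechanism that the default config uses to prevent that.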
class LoggingFiltersTest(SimpleTestCase):
def test_require_debug_false_filter(self):
"""
Test the RequireDebugFalse filter class.
"""
filter_ = RequireDebugFalse()
with self.settings(DEBUG=True):
self.assertIs(filter_.filter("record is not used"), False)
with self.settings(DEBUG=False):
self.assertIs(filter_.filter("record is not used"), True)
def test_require_debug_true_filter(self):
"""
Test the RequireDebugTrue filter class.
"""
filter_ = RequireDebugTrue()
with self.settings(DEBUG=True):
self.assertIs(filter_.filter("record is not used"), True)
with self.settings(DEBUG=False):
self.assertIs(filter_.filter("record is not used"), False)
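# Editorial sketch (assumption: mirrors the documented default wiring, not used by
# these tests) of how RequireDebugFalse is typically attached to a handler:
#
#     LOGGING = {
#         'version': 1,
#         'filters': {
#             'require_debug_false': {'()': 'django.utils.log.RequireDebugFalse'},
#         },
#         'handlers': {
#             'mail_admins': {
#                 'level': 'ERROR',
#                 'filters': ['require_debug_false'],
#                 'class': 'django.utils.log.AdminEmailHandler',
#             },
#         },
#     }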
class SetupDefaultLoggingMixin:
@classmethod
def setUpClass(cls):
super().setUpClass()
cls._logging = settings.LOGGING
logging.config.dictConfig(DEFAULT_LOGGING)
@classmethod
def tearDownClass(cls):
super().tearDownClass()
logging.config.dictConfig(cls._logging)
class DefaultLoggingTests(SetupDefaultLoggingMixin, LoggingCaptureMixin, SimpleTestCase):
def test_django_logger(self):
"""
        The 'django' base logger only outputs anything when DEBUG=True.
"""
self.logger.error("Hey, this is an error.")
self.assertEqual(self.logger_output.getvalue(), '')
with self.settings(DEBUG=True):
self.logger.error("Hey, this is an error.")
self.assertEqual(self.logger_output.getvalue(), 'Hey, this is an error.\n')
@override_settings(DEBUG=True)
def test_django_logger_warning(self):
self.logger.warning('warning')
self.assertEqual(self.logger_output.getvalue(), 'warning\n')
@override_settings(DEBUG=True)
def test_django_logger_info(self):
self.logger.info('info')
self.assertEqual(self.logger_output.getvalue(), 'info\n')
@override_settings(DEBUG=True)
def test_django_logger_debug(self):
self.logger.debug('debug')
self.assertEqual(self.logger_output.getvalue(), '')
class LoggingAssertionMixin:
def assertLogsRequest(self, url, level, msg, status_code, logger='django.request', exc_class=None):
with self.assertLogs(logger, level) as cm:
try:
self.client.get(url)
except views.UncaughtException:
pass
self.assertEqual(
len(cm.records), 1,
"Wrong number of calls for logger %r in %r level." % (logger, level)
)
record = cm.records[0]
self.assertEqual(record.getMessage(), msg)
self.assertEqual(record.status_code, status_code)
if exc_class:
self.assertIsNotNone(record.exc_info)
self.assertEqual(record.exc_info[0], exc_class)
@override_settings(DEBUG=True, ROOT_URLCONF='logging_tests.urls')
class HandlerLoggingTests(SetupDefaultLoggingMixin, LoggingAssertionMixin, LoggingCaptureMixin, SimpleTestCase):
def test_page_found_no_warning(self):
self.client.get('/innocent/')
self.assertEqual(self.logger_output.getvalue(), '')
def test_redirect_no_warning(self):
self.client.get('/redirect/')
self.assertEqual(self.logger_output.getvalue(), '')
def test_page_not_found_warning(self):
self.assertLogsRequest(
url='/does_not_exist/',
level='WARNING',
status_code=404,
msg='Not Found: /does_not_exist/',
)
def test_page_not_found_raised(self):
self.assertLogsRequest(
url='/does_not_exist_raised/',
level='WARNING',
status_code=404,
msg='Not Found: /does_not_exist_raised/',
)
def test_uncaught_exception(self):
self.assertLogsRequest(
url='/uncaught_exception/',
level='ERROR',
status_code=500,
msg='Internal Server Error: /uncaught_exception/',
exc_class=views.UncaughtException,
)
def test_internal_server_error(self):
self.assertLogsRequest(
url='/internal_server_error/',
level='ERROR',
status_code=500,
msg='Internal Server Error: /internal_server_error/',
)
def test_internal_server_error_599(self):
self.assertLogsRequest(
url='/internal_server_error/?status=599',
level='ERROR',
status_code=599,
msg='Unknown Status Code: /internal_server_error/',
)
def test_permission_denied(self):
self.assertLogsRequest(
url='/permission_denied/',
level='WARNING',
status_code=403,
msg='Forbidden (Permission denied): /permission_denied/',
exc_class=PermissionDenied,
)
def test_multi_part_parser_error(self):
self.assertLogsRequest(
url='/multi_part_parser_error/',
level='WARNING',
status_code=400,
msg='Bad request (Unable to parse request body): /multi_part_parser_error/',
exc_class=MultiPartParserError,
)
@override_settings(
DEBUG=True,
USE_I18N=True,
LANGUAGES=[('en', 'English')],
MIDDLEWARE=[
'django.middleware.locale.LocaleMiddleware',
'django.middleware.common.CommonMiddleware',
],
ROOT_URLCONF='logging_tests.urls_i18n',
)
class I18nLoggingTests(SetupDefaultLoggingMixin, LoggingCaptureMixin, SimpleTestCase):
def test_i18n_page_found_no_warning(self):
self.client.get('/exists/')
self.client.get('/en/exists/')
self.assertEqual(self.logger_output.getvalue(), '')
def test_i18n_page_not_found_warning(self):
self.client.get('/this_does_not/')
self.client.get('/en/nor_this/')
self.assertEqual(self.logger_output.getvalue(), 'Not Found: /this_does_not/\nNot Found: /en/nor_this/\n')
class CallbackFilterTest(SimpleTestCase):
def test_sense(self):
f_false = CallbackFilter(lambda r: False)
f_true = CallbackFilter(lambda r: True)
self.assertEqual(f_false.filter("record"), False)
self.assertEqual(f_true.filter("record"), True)
def test_passes_on_record(self):
collector = []
def _callback(record):
collector.append(record)
return True
f = CallbackFilter(_callback)
f.filter("a record")
self.assertEqual(collector, ["a record"])
class AdminEmailHandlerTest(SimpleTestCase):
logger = logging.getLogger('django')
request_factory = RequestFactory()
def get_admin_email_handler(self, logger):
# AdminEmailHandler does not get filtered out
# even with DEBUG=True.
admin_email_handler = [
h for h in logger.handlers
if h.__class__.__name__ == "AdminEmailHandler"
][0]
return admin_email_handler
def test_fail_silently(self):
admin_email_handler = self.get_admin_email_handler(self.logger)
self.assertTrue(admin_email_handler.connection().fail_silently)
@override_settings(
ADMINS=[('whatever admin', '[email protected]')],
EMAIL_SUBJECT_PREFIX='-SuperAwesomeSubject-'
)
def test_accepts_args(self):
"""
User-supplied arguments and the EMAIL_SUBJECT_PREFIX setting are used
to compose the email subject (#16736).
"""
message = "Custom message that says '%s' and '%s'"
token1 = 'ping'
token2 = 'pong'
admin_email_handler = self.get_admin_email_handler(self.logger)
# Backup then override original filters
orig_filters = admin_email_handler.filters
try:
admin_email_handler.filters = []
self.logger.error(message, token1, token2)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].to, ['[email protected]'])
self.assertEqual(mail.outbox[0].subject,
"-SuperAwesomeSubject-ERROR: Custom message that says 'ping' and 'pong'")
finally:
# Restore original filters
admin_email_handler.filters = orig_filters
@override_settings(
ADMINS=[('whatever admin', '[email protected]')],
EMAIL_SUBJECT_PREFIX='-SuperAwesomeSubject-',
INTERNAL_IPS=['127.0.0.1'],
)
def test_accepts_args_and_request(self):
"""
        The subject is also composed correctly when a request object is passed.
"""
message = "Custom message that says '%s' and '%s'"
token1 = 'ping'
token2 = 'pong'
admin_email_handler = self.get_admin_email_handler(self.logger)
# Backup then override original filters
orig_filters = admin_email_handler.filters
try:
admin_email_handler.filters = []
request = self.request_factory.get('/')
self.logger.error(
message, token1, token2,
extra={
'status_code': 403,
'request': request,
}
)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].to, ['[email protected]'])
self.assertEqual(mail.outbox[0].subject,
"-SuperAwesomeSubject-ERROR (internal IP): Custom message that says 'ping' and 'pong'")
finally:
# Restore original filters
admin_email_handler.filters = orig_filters
@override_settings(
ADMINS=[('admin', '[email protected]')],
EMAIL_SUBJECT_PREFIX='',
DEBUG=False,
)
def test_subject_accepts_newlines(self):
"""
        Newlines in email reports' subjects are escaped to prevent
        AdminEmailHandler from failing (#17281).
"""
message = 'Message \r\n with newlines'
expected_subject = 'ERROR: Message \\r\\n with newlines'
self.assertEqual(len(mail.outbox), 0)
self.logger.error(message)
self.assertEqual(len(mail.outbox), 1)
self.assertNotIn('\n', mail.outbox[0].subject)
self.assertNotIn('\r', mail.outbox[0].subject)
self.assertEqual(mail.outbox[0].subject, expected_subject)
@override_settings(
ADMINS=[('admin', '[email protected]')],
DEBUG=False,
)
def test_uses_custom_email_backend(self):
"""
Refs #19325
"""
message = 'All work and no play makes Jack a dull boy'
admin_email_handler = self.get_admin_email_handler(self.logger)
mail_admins_called = {'called': False}
def my_mail_admins(*args, **kwargs):
connection = kwargs['connection']
self.assertIsInstance(connection, MyEmailBackend)
mail_admins_called['called'] = True
# Monkeypatches
orig_mail_admins = mail.mail_admins
orig_email_backend = admin_email_handler.email_backend
mail.mail_admins = my_mail_admins
admin_email_handler.email_backend = (
'logging_tests.logconfig.MyEmailBackend')
try:
self.logger.error(message)
self.assertTrue(mail_admins_called['called'])
finally:
# Revert Monkeypatches
mail.mail_admins = orig_mail_admins
admin_email_handler.email_backend = orig_email_backend
@override_settings(
ADMINS=[('whatever admin', '[email protected]')],
)
def test_emit_non_ascii(self):
"""
#23593 - AdminEmailHandler should allow Unicode characters in the
request.
"""
handler = self.get_admin_email_handler(self.logger)
record = self.logger.makeRecord('name', logging.ERROR, 'function', 'lno', 'message', None, None)
url_path = '/º'
record.request = self.request_factory.get(url_path)
handler.emit(record)
self.assertEqual(len(mail.outbox), 1)
msg = mail.outbox[0]
self.assertEqual(msg.to, ['[email protected]'])
self.assertEqual(msg.subject, "[Django] ERROR (EXTERNAL IP): message")
self.assertIn("Report at %s" % url_path, msg.body)
@override_settings(
MANAGERS=[('manager', '[email protected]')],
DEBUG=False,
)
def test_customize_send_mail_method(self):
class ManagerEmailHandler(AdminEmailHandler):
def send_mail(self, subject, message, *args, **kwargs):
mail.mail_managers(subject, message, *args, connection=self.connection(), **kwargs)
handler = ManagerEmailHandler()
record = self.logger.makeRecord('name', logging.ERROR, 'function', 'lno', 'message', None, None)
self.assertEqual(len(mail.outbox), 0)
handler.emit(record)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].to, ['[email protected]'])
@override_settings(ALLOWED_HOSTS='example.com')
def test_disallowed_host_doesnt_crash(self):
admin_email_handler = self.get_admin_email_handler(self.logger)
old_include_html = admin_email_handler.include_html
# Text email
admin_email_handler.include_html = False
try:
self.client.get('/', HTTP_HOST='evil.com')
finally:
admin_email_handler.include_html = old_include_html
# HTML email
admin_email_handler.include_html = True
try:
self.client.get('/', HTTP_HOST='evil.com')
finally:
admin_email_handler.include_html = old_include_html
class SettingsConfigTest(AdminScriptTestCase):
"""
Accessing settings in a custom logging handler does not trigger
a circular import error.
"""
def setUp(self):
log_config = """{
'version': 1,
'handlers': {
'custom_handler': {
'level': 'INFO',
'class': 'logging_tests.logconfig.MyHandler',
}
}
}"""
self.write_settings('settings.py', sdict={'LOGGING': log_config})
def tearDown(self):
self.remove_settings('settings.py')
def test_circular_dependency(self):
        # 'check' is just an example command used to trigger settings configuration.
out, err = self.run_manage(['check'])
self.assertNoOutput(err)
self.assertOutput(out, "System check identified no issues (0 silenced).")
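# Module-level stub used as the LOGGING_CONFIG callable in the test below; it only
# records that Django invoked it instead of actually configuring logging.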
def dictConfig(config):
dictConfig.called = True
dictConfig.called = False
class SetupConfigureLogging(SimpleTestCase):
"""
Calling django.setup() initializes the logging configuration.
"""
@override_settings(
LOGGING_CONFIG='logging_tests.tests.dictConfig',
LOGGING=OLD_LOGGING,
)
def test_configure_initializes_logging(self):
from django import setup
setup()
self.assertTrue(dictConfig.called)
@override_settings(DEBUG=True, ROOT_URLCONF='logging_tests.urls')
class SecurityLoggerTest(LoggingAssertionMixin, SimpleTestCase):
def test_suspicious_operation_creates_log_message(self):
self.assertLogsRequest(
url='/suspicious/',
level='ERROR',
msg='dubious',
status_code=400,
logger='django.security.SuspiciousOperation',
)
def test_suspicious_operation_uses_sublogger(self):
self.assertLogsRequest(
url='/suspicious_spec/',
level='ERROR',
msg='dubious',
status_code=400,
logger='django.security.DisallowedHost',
)
@override_settings(
ADMINS=[('admin', '[email protected]')],
DEBUG=False,
)
def test_suspicious_email_admins(self):
self.client.get('/suspicious/')
self.assertEqual(len(mail.outbox), 1)
self.assertIn('Report at /suspicious/', mail.outbox[0].body)
class SettingsCustomLoggingTest(AdminScriptTestCase):
"""
    Logging defaults are still applied when using a custom callable in
    LOGGING_CONFIG (e.g., logging.config.fileConfig).
"""
def setUp(self):
logging_conf = """
[loggers]
keys=root
[handlers]
keys=stream
[formatters]
keys=simple
[logger_root]
handlers=stream
[handler_stream]
class=StreamHandler
formatter=simple
args=(sys.stdout,)
[formatter_simple]
format=%(message)s
"""
self.temp_file = NamedTemporaryFile()
self.temp_file.write(logging_conf.encode())
self.temp_file.flush()
self.write_settings('settings.py', sdict={
'LOGGING_CONFIG': '"logging.config.fileConfig"',
'LOGGING': 'r"%s"' % self.temp_file.name,
})
def tearDown(self):
self.temp_file.close()
self.remove_settings('settings.py')
def test_custom_logging(self):
out, err = self.run_manage(['check'])
self.assertNoOutput(err)
self.assertOutput(out, "System check identified no issues (0 silenced).")
class LogFormattersTests(SimpleTestCase):
def test_server_formatter_styles(self):
color_style = color.make_style('')
formatter = ServerFormatter()
formatter.style = color_style
log_msg = 'log message'
status_code_styles = [
(200, 'HTTP_SUCCESS'),
(100, 'HTTP_INFO'),
(304, 'HTTP_NOT_MODIFIED'),
(300, 'HTTP_REDIRECT'),
(404, 'HTTP_NOT_FOUND'),
(400, 'HTTP_BAD_REQUEST'),
(500, 'HTTP_SERVER_ERROR'),
]
for status_code, style in status_code_styles:
record = logging.makeLogRecord({'msg': log_msg, 'status_code': status_code})
self.assertEqual(formatter.format(record), getattr(color_style, style)(log_msg))
record = logging.makeLogRecord({'msg': log_msg})
self.assertEqual(formatter.format(record), log_msg)
def test_server_formatter_default_format(self):
server_time = '2016-09-25 10:20:30'
log_msg = 'log message'
logger = logging.getLogger('django.server')
@contextmanager
def patch_django_server_logger():
old_stream = logger.handlers[0].stream
new_stream = StringIO()
logger.handlers[0].stream = new_stream
yield new_stream
logger.handlers[0].stream = old_stream
with patch_django_server_logger() as logger_output:
logger.info(log_msg, extra={'server_time': server_time})
self.assertEqual('[%s] %s\n' % (server_time, log_msg), logger_output.getvalue())
with patch_django_server_logger() as logger_output:
logger.info(log_msg)
self.assertRegex(logger_output.getvalue(), r'^\[[-:,.\s\d]+\] %s' % log_msg)
|
52021f8a172e91be6196781ea563a55a446e5eb870ed1272c04b106fb782aeaa | from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import FieldError
from django.db import IntegrityError
from django.db.models import Q
from django.test import SimpleTestCase, TestCase
from .models import (
AllowsNullGFK, Animal, Carrot, Comparison, ConcreteRelatedModel,
ForConcreteModelModel, ForProxyModelModel, Gecko, ManualPK, Mineral,
ProxyRelatedModel, Rock, TaggedItem, ValuableRock, ValuableTaggedItem,
Vegetable,
)
class GenericRelationsTests(TestCase):
def setUp(self):
self.lion = Animal.objects.create(
common_name="Lion", latin_name="Panthera leo")
self.platypus = Animal.objects.create(
common_name="Platypus", latin_name="Ornithorhynchus anatinus")
Vegetable.objects.create(name="Eggplant", is_yucky=True)
self.bacon = Vegetable.objects.create(name="Bacon", is_yucky=False)
self.quartz = Mineral.objects.create(name="Quartz", hardness=7)
# Tagging stuff.
self.bacon.tags.create(tag="fatty")
self.bacon.tags.create(tag="salty")
self.lion.tags.create(tag="yellow")
self.lion.tags.create(tag="hairy")
        # Helper used to compare TaggedItems against the original list of tags:
self.comp_func = lambda obj: (
obj.tag, obj.content_type.model_class(), obj.object_id
)
def test_generic_update_or_create_when_created(self):
"""
Should be able to use update_or_create from the generic related manager
to create a tag. Refs #23611.
"""
count = self.bacon.tags.count()
tag, created = self.bacon.tags.update_or_create(tag='stinky')
self.assertTrue(created)
self.assertEqual(count + 1, self.bacon.tags.count())
def test_generic_update_or_create_when_updated(self):
"""
Should be able to use update_or_create from the generic related manager
to update a tag. Refs #23611.
"""
count = self.bacon.tags.count()
tag = self.bacon.tags.create(tag='stinky')
self.assertEqual(count + 1, self.bacon.tags.count())
tag, created = self.bacon.tags.update_or_create(defaults={'tag': 'juicy'}, id=tag.id)
self.assertFalse(created)
self.assertEqual(count + 1, self.bacon.tags.count())
self.assertEqual(tag.tag, 'juicy')
def test_generic_get_or_create_when_created(self):
"""
Should be able to use get_or_create from the generic related manager
to create a tag. Refs #23611.
"""
count = self.bacon.tags.count()
tag, created = self.bacon.tags.get_or_create(tag='stinky')
self.assertTrue(created)
self.assertEqual(count + 1, self.bacon.tags.count())
def test_generic_get_or_create_when_exists(self):
"""
Should be able to use get_or_create from the generic related manager
to get a tag. Refs #23611.
"""
count = self.bacon.tags.count()
tag = self.bacon.tags.create(tag="stinky")
self.assertEqual(count + 1, self.bacon.tags.count())
tag, created = self.bacon.tags.get_or_create(id=tag.id, defaults={'tag': 'juicy'})
self.assertFalse(created)
self.assertEqual(count + 1, self.bacon.tags.count())
        # The existing tag shouldn't have been changed.
self.assertEqual(tag.tag, 'stinky')
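        # Hedged note: get_or_create() applies the defaults dict only when it
        # creates a new object; for an existing match (as here) defaults are
        # ignored, whereas update_or_create() applies them in both cases.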
def test_generic_relations_m2m_mimic(self):
"""
Objects with declared GenericRelations can be tagged directly -- the
API mimics the many-to-many API.
"""
self.assertQuerysetEqual(self.lion.tags.all(), [
"<TaggedItem: hairy>",
"<TaggedItem: yellow>"
])
self.assertQuerysetEqual(self.bacon.tags.all(), [
"<TaggedItem: fatty>",
"<TaggedItem: salty>"
])
def test_access_content_object(self):
"""
Test accessing the content object like a foreign key.
"""
tagged_item = TaggedItem.objects.get(tag="salty")
self.assertEqual(tagged_item.content_object, self.bacon)
def test_query_content_object(self):
qs = TaggedItem.objects.filter(
animal__isnull=False).order_by('animal__common_name', 'tag')
self.assertQuerysetEqual(
qs, ["<TaggedItem: hairy>", "<TaggedItem: yellow>"]
)
mpk = ManualPK.objects.create(id=1)
mpk.tags.create(tag='mpk')
qs = TaggedItem.objects.filter(
Q(animal__isnull=False) | Q(manualpk__id=1)).order_by('tag')
self.assertQuerysetEqual(
qs, ["hairy", "mpk", "yellow"], lambda x: x.tag)
def test_exclude_generic_relations(self):
"""
Test lookups over an object without GenericRelations.
"""
# Recall that the Mineral class doesn't have an explicit GenericRelation
# defined. That's OK, because you can create TaggedItems explicitly.
# However, excluding GenericRelations means your lookups have to be a
# bit more explicit.
TaggedItem.objects.create(content_object=self.quartz, tag="shiny")
TaggedItem.objects.create(content_object=self.quartz, tag="clearish")
ctype = ContentType.objects.get_for_model(self.quartz)
q = TaggedItem.objects.filter(
content_type__pk=ctype.id, object_id=self.quartz.id
)
self.assertQuerysetEqual(q, [
"<TaggedItem: clearish>",
"<TaggedItem: shiny>"
])
def test_access_via_content_type(self):
"""
Test lookups through content type.
"""
self.lion.delete()
self.platypus.tags.create(tag="fatty")
ctype = ContentType.objects.get_for_model(self.platypus)
self.assertQuerysetEqual(
Animal.objects.filter(tags__content_type=ctype),
["<Animal: Platypus>"])
def test_set_foreign_key(self):
"""
You can set a generic foreign key in the way you'd expect.
"""
tag1 = TaggedItem.objects.create(content_object=self.quartz, tag="shiny")
tag1.content_object = self.platypus
tag1.save()
self.assertQuerysetEqual(
self.platypus.tags.all(),
["<TaggedItem: shiny>"])
def test_queries_across_generic_relations(self):
"""
Queries across generic relations respect the content types. Even though
there are two TaggedItems with a tag of "fatty", this query only pulls
out the one with the content type related to Animals.
"""
self.assertQuerysetEqual(Animal.objects.order_by('common_name'), [
"<Animal: Lion>",
"<Animal: Platypus>"
])
def test_queries_content_type_restriction(self):
"""
        Create another fatty tagged instance with a different PK to ensure there
is a content type restriction in the generated queries below.
"""
mpk = ManualPK.objects.create(id=self.lion.pk)
mpk.tags.create(tag="fatty")
self.platypus.tags.create(tag="fatty")
self.assertQuerysetEqual(
Animal.objects.filter(tags__tag='fatty'), ["<Animal: Platypus>"])
self.assertQuerysetEqual(
Animal.objects.exclude(tags__tag='fatty'), ["<Animal: Lion>"])
def test_object_deletion_with_generic_relation(self):
"""
        If you delete an object with an explicit GenericRelation, the related
        objects are deleted too.
"""
self.assertQuerysetEqual(TaggedItem.objects.all(), [
('fatty', Vegetable, self.bacon.pk),
('hairy', Animal, self.lion.pk),
('salty', Vegetable, self.bacon.pk),
('yellow', Animal, self.lion.pk)
],
self.comp_func
)
self.lion.delete()
self.assertQuerysetEqual(TaggedItem.objects.all(), [
('fatty', Vegetable, self.bacon.pk),
('salty', Vegetable, self.bacon.pk),
],
self.comp_func
)
def test_object_deletion_without_generic_relation(self):
"""
        If a GenericRelation isn't explicitly defined, related objects remain
        after the source object is deleted.
"""
TaggedItem.objects.create(content_object=self.quartz, tag="clearish")
quartz_pk = self.quartz.pk
self.quartz.delete()
self.assertQuerysetEqual(TaggedItem.objects.all(), [
('clearish', Mineral, quartz_pk),
('fatty', Vegetable, self.bacon.pk),
('hairy', Animal, self.lion.pk),
('salty', Vegetable, self.bacon.pk),
('yellow', Animal, self.lion.pk),
],
self.comp_func
)
def test_tag_deletion_related_objects_unaffected(self):
"""
If you delete a tag, the objects using the tag are unaffected (other
than losing a tag).
"""
ctype = ContentType.objects.get_for_model(self.lion)
tag = TaggedItem.objects.get(
content_type__pk=ctype.id, object_id=self.lion.id, tag="hairy")
tag.delete()
self.assertQuerysetEqual(self.lion.tags.all(), ["<TaggedItem: yellow>"])
self.assertQuerysetEqual(TaggedItem.objects.all(), [
('fatty', Vegetable, self.bacon.pk),
('salty', Vegetable, self.bacon.pk),
('yellow', Animal, self.lion.pk)
],
self.comp_func
)
def test_add_bulk(self):
bacon = Vegetable.objects.create(name="Bacon", is_yucky=False)
t1 = TaggedItem.objects.create(content_object=self.quartz, tag="shiny")
t2 = TaggedItem.objects.create(content_object=self.quartz, tag="clearish")
# One update() query.
with self.assertNumQueries(1):
bacon.tags.add(t1, t2)
self.assertEqual(t1.content_object, bacon)
self.assertEqual(t2.content_object, bacon)
def test_add_bulk_false(self):
bacon = Vegetable.objects.create(name="Bacon", is_yucky=False)
t1 = TaggedItem.objects.create(content_object=self.quartz, tag="shiny")
t2 = TaggedItem.objects.create(content_object=self.quartz, tag="clearish")
# One save() for each object.
with self.assertNumQueries(2):
bacon.tags.add(t1, t2, bulk=False)
self.assertEqual(t1.content_object, bacon)
self.assertEqual(t2.content_object, bacon)
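    # A hedged sketch (not taken from the tests) of what the bulk flag changes:
    # bulk=True issues a single queryset update(), while bulk=False assigns the
    # target object and saves each instance individually.
    #
    #   # roughly what bulk=True does:
    #   TaggedItem.objects.filter(pk__in=[t1.pk, t2.pk]).update(
    #       content_type=ContentType.objects.get_for_model(bacon),
    #       object_id=bacon.pk,
    #   )
    #
    #   # roughly what bulk=False does:
    #   for tag in (t1, t2):
    #       tag.content_object = bacon
    #       tag.save()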
def test_add_rejects_unsaved_objects(self):
t1 = TaggedItem(content_object=self.quartz, tag="shiny")
msg = "<TaggedItem: shiny> instance isn't saved. Use bulk=False or save the object first."
with self.assertRaisesMessage(ValueError, msg):
self.bacon.tags.add(t1)
def test_add_rejects_wrong_instances(self):
msg = "'TaggedItem' instance expected, got <Animal: Lion>"
with self.assertRaisesMessage(TypeError, msg):
self.bacon.tags.add(self.lion)
def test_set(self):
bacon = Vegetable.objects.create(name="Bacon", is_yucky=False)
fatty = bacon.tags.create(tag="fatty")
salty = bacon.tags.create(tag="salty")
bacon.tags.set([fatty, salty])
self.assertQuerysetEqual(bacon.tags.all(), [
"<TaggedItem: fatty>",
"<TaggedItem: salty>",
])
bacon.tags.set([fatty])
self.assertQuerysetEqual(bacon.tags.all(), [
"<TaggedItem: fatty>",
])
bacon.tags.set([])
self.assertQuerysetEqual(bacon.tags.all(), [])
bacon.tags.set([fatty, salty], bulk=False, clear=True)
self.assertQuerysetEqual(bacon.tags.all(), [
"<TaggedItem: fatty>",
"<TaggedItem: salty>",
])
bacon.tags.set([fatty], bulk=False, clear=True)
self.assertQuerysetEqual(bacon.tags.all(), [
"<TaggedItem: fatty>",
])
bacon.tags.set([], clear=True)
self.assertQuerysetEqual(bacon.tags.all(), [])
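    # Note on set(): without clear=True it presumably computes the difference
    # between the current related objects and the new iterable, adding and
    # removing only what changed; with clear=True it clears everything first
    # and re-adds. A rough equivalent of bacon.tags.set([fatty]) would be:
    #
    #   bacon.tags.remove(*(t for t in bacon.tags.all() if t != fatty))
    #   bacon.tags.add(fatty)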
def test_assign(self):
bacon = Vegetable.objects.create(name="Bacon", is_yucky=False)
fatty = bacon.tags.create(tag="fatty")
salty = bacon.tags.create(tag="salty")
bacon.tags.set([fatty, salty])
self.assertQuerysetEqual(bacon.tags.all(), [
"<TaggedItem: fatty>",
"<TaggedItem: salty>",
])
bacon.tags.set([fatty])
self.assertQuerysetEqual(bacon.tags.all(), [
"<TaggedItem: fatty>",
])
bacon.tags.set([])
self.assertQuerysetEqual(bacon.tags.all(), [])
def test_assign_with_queryset(self):
# Querysets used in reverse GFK assignments are pre-evaluated so their
# value isn't affected by the clearing operation
# in ManyRelatedManager.set() (#19816).
bacon = Vegetable.objects.create(name="Bacon", is_yucky=False)
bacon.tags.create(tag="fatty")
bacon.tags.create(tag="salty")
self.assertEqual(2, bacon.tags.count())
qs = bacon.tags.filter(tag="fatty")
bacon.tags.set(qs)
self.assertEqual(1, bacon.tags.count())
self.assertEqual(1, qs.count())
def test_generic_relation_related_name_default(self):
# GenericRelation isn't usable from the reverse side by default.
msg = (
"Cannot resolve keyword 'vegetable' into field. Choices are: "
"animal, content_object, content_type, content_type_id, id, "
"manualpk, object_id, tag, valuabletaggeditem"
)
with self.assertRaisesMessage(FieldError, msg):
TaggedItem.objects.filter(vegetable__isnull=True)
def test_multiple_gfk(self):
        # Simple tests for multiple GenericForeignKeys. Only one model is used,
        # since the tests above should be sufficient.
tiger = Animal.objects.create(common_name="tiger")
cheetah = Animal.objects.create(common_name="cheetah")
bear = Animal.objects.create(common_name="bear")
# Create directly
Comparison.objects.create(
first_obj=cheetah, other_obj=tiger, comparative="faster"
)
Comparison.objects.create(
first_obj=tiger, other_obj=cheetah, comparative="cooler"
)
# Create using GenericRelation
tiger.comparisons.create(other_obj=bear, comparative="cooler")
tiger.comparisons.create(other_obj=cheetah, comparative="stronger")
self.assertQuerysetEqual(cheetah.comparisons.all(), [
"<Comparison: cheetah is faster than tiger>"
])
# Filtering works
self.assertQuerysetEqual(tiger.comparisons.filter(comparative="cooler"), [
"<Comparison: tiger is cooler than cheetah>",
"<Comparison: tiger is cooler than bear>",
], ordered=False)
# Filtering and deleting works
subjective = ["cooler"]
tiger.comparisons.filter(comparative__in=subjective).delete()
self.assertQuerysetEqual(Comparison.objects.all(), [
"<Comparison: cheetah is faster than tiger>",
"<Comparison: tiger is stronger than cheetah>"
], ordered=False)
# If we delete cheetah, Comparisons with cheetah as 'first_obj' will be
# deleted since Animal has an explicit GenericRelation to Comparison
# through first_obj. Comparisons with cheetah as 'other_obj' will not
# be deleted.
cheetah.delete()
self.assertQuerysetEqual(Comparison.objects.all(), [
"<Comparison: tiger is stronger than None>"
])
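    # Comparison is assumed to carry two independent generic foreign keys, each
    # backed by its own content type / object id pair. This is a sketch only,
    # not the literal model definition:
    #
    #   class Comparison(models.Model):
    #       comparative = models.CharField(max_length=50)
    #       content_type1 = models.ForeignKey(ContentType, models.CASCADE, related_name='+')
    #       object_id1 = models.PositiveIntegerField()
    #       first_obj = GenericForeignKey('content_type1', 'object_id1')
    #       content_type2 = models.ForeignKey(ContentType, models.CASCADE, related_name='+')
    #       object_id2 = models.PositiveIntegerField()
    #       other_obj = GenericForeignKey('content_type2', 'object_id2')
    #
    # Animal would declare its GenericRelation through the first_obj pair,
    # which is why deleting cheetah above cascades only to comparisons where
    # cheetah is the first_obj.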
def test_gfk_subclasses(self):
# GenericForeignKey should work with subclasses (see #8309)
quartz = Mineral.objects.create(name="Quartz", hardness=7)
valuedtag = ValuableTaggedItem.objects.create(
content_object=quartz, tag="shiny", value=10
)
self.assertEqual(valuedtag.content_object, quartz)
def test_generic_relation_to_inherited_child(self):
# GenericRelations to models that use multi-table inheritance work.
granite = ValuableRock.objects.create(name='granite', hardness=5)
ValuableTaggedItem.objects.create(content_object=granite, tag="countertop", value=1)
self.assertEqual(ValuableRock.objects.filter(tags__value=1).count(), 1)
# We're generating a slightly inefficient query for tags__tag - we
# first join ValuableRock -> TaggedItem -> ValuableTaggedItem, and then
# we fetch tag by joining TaggedItem from ValuableTaggedItem. The last
# join isn't necessary, as TaggedItem <-> ValuableTaggedItem is a
# one-to-one join.
self.assertEqual(ValuableRock.objects.filter(tags__tag="countertop").count(), 1)
granite.delete() # deleting the rock should delete the related tag.
self.assertEqual(ValuableTaggedItem.objects.count(), 0)
def test_gfk_manager(self):
        # GenericForeignKey should not use the default manager (which may
        # filter objects); see #16048.
tailless = Gecko.objects.create(has_tail=False)
tag = TaggedItem.objects.create(content_object=tailless, tag="lizard")
self.assertEqual(tag.content_object, tailless)
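    # Sketch of the assumption behind test_gfk_manager: Gecko's default manager
    # filters out tailless geckos, yet the GFK still resolves the object,
    # because GenericForeignKey avoids the (possibly filtering) default
    # manager. Hypothetical manager/model definitions:
    #
    #   class TailedManager(models.Manager):
    #       def get_queryset(self):
    #           return super().get_queryset().filter(has_tail=True)
    #
    #   class Gecko(models.Model):
    #       has_tail = models.BooleanField(default=False)
    #       objects = TailedManager()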
def test_subclasses_with_gen_rel(self):
"""
Concrete model subclasses with generic relations work
correctly (ticket 11263).
"""
granite = Rock.objects.create(name='granite', hardness=5)
TaggedItem.objects.create(content_object=granite, tag="countertop")
self.assertEqual(Rock.objects.get(tags__tag="countertop"), granite)
def test_subclasses_with_parent_gen_rel(self):
"""
Generic relations on a base class (Vegetable) work correctly in
subclasses (Carrot).
"""
        carrot = Carrot.objects.create(name='carrot')
        TaggedItem.objects.create(content_object=carrot, tag='orange')
        self.assertEqual(Carrot.objects.get(tags__tag='orange'), carrot)
def test_get_or_create(self):
# get_or_create should work with virtual fields (content_object)
quartz = Mineral.objects.create(name="Quartz", hardness=7)
tag, created = TaggedItem.objects.get_or_create(tag="shiny", defaults={'content_object': quartz})
self.assertTrue(created)
self.assertEqual(tag.tag, "shiny")
self.assertEqual(tag.content_object.id, quartz.id)
def test_update_or_create_defaults(self):
# update_or_create should work with virtual fields (content_object)
quartz = Mineral.objects.create(name="Quartz", hardness=7)
diamond = Mineral.objects.create(name="Diamond", hardness=7)
tag, created = TaggedItem.objects.update_or_create(tag="shiny", defaults={'content_object': quartz})
self.assertTrue(created)
self.assertEqual(tag.content_object.id, quartz.id)
tag, created = TaggedItem.objects.update_or_create(tag="shiny", defaults={'content_object': diamond})
self.assertFalse(created)
self.assertEqual(tag.content_object.id, diamond.id)
def test_query_content_type(self):
msg = "Field 'content_object' does not generate an automatic reverse relation"
with self.assertRaisesMessage(FieldError, msg):
TaggedItem.objects.get(content_object='')
def test_unsaved_instance_on_generic_foreign_key(self):
"""
Assigning an unsaved object to GenericForeignKey should raise an
exception on model.save().
"""
quartz = Mineral(name="Quartz", hardness=7)
with self.assertRaises(IntegrityError):
TaggedItem.objects.create(tag="shiny", content_object=quartz)
def test_cache_invalidation_for_content_type_id(self):
# Create a Vegetable and Mineral with the same id.
new_id = max(Vegetable.objects.order_by('-id')[0].id,
Mineral.objects.order_by('-id')[0].id) + 1
broccoli = Vegetable.objects.create(id=new_id, name="Broccoli")
diamond = Mineral.objects.create(id=new_id, name="Diamond", hardness=7)
tag = TaggedItem.objects.create(content_object=broccoli, tag="yummy")
tag.content_type = ContentType.objects.get_for_model(diamond)
self.assertEqual(tag.content_object, diamond)
def test_cache_invalidation_for_object_id(self):
broccoli = Vegetable.objects.create(name="Broccoli")
cauliflower = Vegetable.objects.create(name="Cauliflower")
tag = TaggedItem.objects.create(content_object=broccoli, tag="yummy")
tag.object_id = cauliflower.id
self.assertEqual(tag.content_object, cauliflower)
def test_assign_content_object_in_init(self):
spinach = Vegetable(name="spinach")
tag = TaggedItem(content_object=spinach)
self.assertEqual(tag.content_object, spinach)
def test_create_after_prefetch(self):
platypus = Animal.objects.prefetch_related('tags').get(pk=self.platypus.pk)
self.assertSequenceEqual(platypus.tags.all(), [])
weird_tag = platypus.tags.create(tag='weird')
self.assertSequenceEqual(platypus.tags.all(), [weird_tag])
def test_add_after_prefetch(self):
platypus = Animal.objects.prefetch_related('tags').get(pk=self.platypus.pk)
self.assertSequenceEqual(platypus.tags.all(), [])
weird_tag = TaggedItem.objects.create(tag='weird', content_object=platypus)
platypus.tags.add(weird_tag)
self.assertSequenceEqual(platypus.tags.all(), [weird_tag])
def test_remove_after_prefetch(self):
weird_tag = self.platypus.tags.create(tag='weird')
platypus = Animal.objects.prefetch_related('tags').get(pk=self.platypus.pk)
self.assertSequenceEqual(platypus.tags.all(), [weird_tag])
platypus.tags.remove(weird_tag)
self.assertSequenceEqual(platypus.tags.all(), [])
def test_clear_after_prefetch(self):
weird_tag = self.platypus.tags.create(tag='weird')
platypus = Animal.objects.prefetch_related('tags').get(pk=self.platypus.pk)
self.assertSequenceEqual(platypus.tags.all(), [weird_tag])
platypus.tags.clear()
self.assertSequenceEqual(platypus.tags.all(), [])
def test_set_after_prefetch(self):
platypus = Animal.objects.prefetch_related('tags').get(pk=self.platypus.pk)
self.assertSequenceEqual(platypus.tags.all(), [])
furry_tag = TaggedItem.objects.create(tag='furry', content_object=platypus)
platypus.tags.set([furry_tag])
self.assertSequenceEqual(platypus.tags.all(), [furry_tag])
weird_tag = TaggedItem.objects.create(tag='weird', content_object=platypus)
platypus.tags.set([weird_tag])
self.assertSequenceEqual(platypus.tags.all(), [weird_tag])
def test_add_then_remove_after_prefetch(self):
furry_tag = self.platypus.tags.create(tag='furry')
platypus = Animal.objects.prefetch_related('tags').get(pk=self.platypus.pk)
self.assertSequenceEqual(platypus.tags.all(), [furry_tag])
weird_tag = self.platypus.tags.create(tag='weird')
platypus.tags.add(weird_tag)
self.assertSequenceEqual(platypus.tags.all(), [furry_tag, weird_tag])
platypus.tags.remove(weird_tag)
self.assertSequenceEqual(platypus.tags.all(), [furry_tag])
class ProxyRelatedModelTest(TestCase):
def test_default_behavior(self):
"""
        The default value of for_concrete_model should be True.
"""
base = ForConcreteModelModel()
base.obj = rel = ProxyRelatedModel.objects.create()
base.save()
base = ForConcreteModelModel.objects.get(pk=base.pk)
rel = ConcreteRelatedModel.objects.get(pk=rel.pk)
self.assertEqual(base.obj, rel)
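    # A sketch of the assumed model pair (class names from the tests, field
    # details hypothetical); the only meaningful difference is the
    # for_concrete_model flag on the GenericForeignKey, which controls whether
    # proxy instances are resolved to their concrete class:
    #
    #   class ForConcreteModelModel(models.Model):
    #       content_type = models.ForeignKey(ContentType, models.CASCADE)
    #       object_id = models.PositiveIntegerField()
    #       obj = GenericForeignKey()  # for_concrete_model=True by default
    #
    #   class ForProxyModelModel(models.Model):
    #       content_type = models.ForeignKey(ContentType, models.CASCADE)
    #       object_id = models.PositiveIntegerField()
    #       obj = GenericForeignKey(for_concrete_model=False)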
def test_works_normally(self):
"""
When for_concrete_model is False, we should still be able to get
an instance of the concrete class.
"""
base = ForProxyModelModel()
base.obj = rel = ConcreteRelatedModel.objects.create()
base.save()
base = ForProxyModelModel.objects.get(pk=base.pk)
self.assertEqual(base.obj, rel)
def test_proxy_is_returned(self):
"""
Instances of the proxy should be returned when
for_concrete_model is False.
"""
base = ForProxyModelModel()
base.obj = ProxyRelatedModel.objects.create()
base.save()
base = ForProxyModelModel.objects.get(pk=base.pk)
self.assertIsInstance(base.obj, ProxyRelatedModel)
def test_query(self):
base = ForProxyModelModel()
base.obj = rel = ConcreteRelatedModel.objects.create()
base.save()
self.assertEqual(rel, ConcreteRelatedModel.objects.get(bases__id=base.id))
def test_query_proxy(self):
base = ForProxyModelModel()
base.obj = rel = ProxyRelatedModel.objects.create()
base.save()
self.assertEqual(rel, ProxyRelatedModel.objects.get(bases__id=base.id))
def test_generic_relation(self):
base = ForProxyModelModel()
base.obj = ProxyRelatedModel.objects.create()
base.save()
base = ForProxyModelModel.objects.get(pk=base.pk)
rel = ProxyRelatedModel.objects.get(pk=base.obj.pk)
self.assertEqual(base, rel.bases.get())
def test_generic_relation_set(self):
base = ForProxyModelModel()
base.obj = ConcreteRelatedModel.objects.create()
base.save()
newrel = ConcreteRelatedModel.objects.create()
newrel.bases.set([base])
newrel = ConcreteRelatedModel.objects.get(pk=newrel.pk)
self.assertEqual(base, newrel.bases.get())
class TestInitWithNoneArgument(SimpleTestCase):
def test_none_allowed(self):
        # AllowsNullGFK doesn't require a content_type, so a None argument
        # should be allowed.
AllowsNullGFK(content_object=None)
# TaggedItem requires a content_type but initializing with None should
# be allowed.
TaggedItem(content_object=None)
|
778960649dbaa48358a3d2f376a501415c3418c7d815c9b9f27fd0add4457197 | import os
from io import StringIO
from unittest import mock
from admin_scripts.tests import AdminScriptTestCase
from django.apps import apps
from django.core import management
from django.core.management import BaseCommand, CommandError, find_commands
from django.core.management.utils import (
find_command, get_random_secret_key, popen_wrapper,
)
from django.db import connection
from django.test import SimpleTestCase, override_settings
from django.test.utils import captured_stderr, extend_sys_path
from django.utils import translation
from .management.commands import dance
# A minimal set of apps to avoid system checks running on all apps.
@override_settings(
INSTALLED_APPS=[
'django.contrib.auth',
'django.contrib.contenttypes',
'user_commands',
],
)
class CommandTests(SimpleTestCase):
def test_command(self):
out = StringIO()
management.call_command('dance', stdout=out)
self.assertIn("I don't feel like dancing Rock'n'Roll.\n", out.getvalue())
def test_command_style(self):
out = StringIO()
management.call_command('dance', style='Jive', stdout=out)
self.assertIn("I don't feel like dancing Jive.\n", out.getvalue())
# Passing options as arguments also works (thanks argparse)
management.call_command('dance', '--style', 'Jive', stdout=out)
self.assertIn("I don't feel like dancing Jive.\n", out.getvalue())
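    # The dance command is assumed to look roughly like the sketch below (a
    # hypothetical reconstruction; the real command lives in
    # user_commands/management/commands/dance.py). The add_arguments()
    # declaration is what lets both the keyword form and the '--style Jive'
    # form above work:
    #
    #   class Command(BaseCommand):
    #       def add_arguments(self, parser):
    #           parser.add_argument('--style', default="Rock'n'Roll")
    #
    #       def handle(self, *args, **options):
    #           self.stdout.write("I don't feel like dancing %s." % options['style'])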
def test_language_preserved(self):
out = StringIO()
with translation.override('fr'):
management.call_command('dance', stdout=out)
self.assertEqual(translation.get_language(), 'fr')
def test_explode(self):
""" An unknown command raises CommandError """
with self.assertRaisesMessage(CommandError, "Unknown command: 'explode'"):
management.call_command(('explode',))
def test_system_exit(self):
""" Exception raised in a command should raise CommandError with
call_command, but SystemExit when run from command line
"""
with self.assertRaises(CommandError):
management.call_command('dance', example="raise")
dance.Command.requires_system_checks = False
try:
with captured_stderr() as stderr, self.assertRaises(SystemExit):
management.ManagementUtility(['manage.py', 'dance', '--example=raise']).execute()
finally:
dance.Command.requires_system_checks = True
self.assertIn("CommandError", stderr.getvalue())
def test_no_translations_deactivate_translations(self):
"""
When the Command handle method is decorated with @no_translations,
translations are deactivated inside the command.
"""
current_locale = translation.get_language()
with translation.override('pl'):
result = management.call_command('no_translations', stdout=StringIO())
self.assertIsNone(result)
self.assertEqual(translation.get_language(), current_locale)
def test_find_command_without_PATH(self):
"""
find_command should still work when the PATH environment variable
doesn't exist (#22256).
"""
current_path = os.environ.pop('PATH', None)
try:
self.assertIsNone(find_command('_missing_'))
finally:
if current_path is not None:
os.environ['PATH'] = current_path
def test_discover_commands_in_eggs(self):
"""
Management commands can also be loaded from Python eggs.
"""
egg_dir = '%s/eggs' % os.path.dirname(__file__)
egg_name = '%s/basic.egg' % egg_dir
with extend_sys_path(egg_name):
with self.settings(INSTALLED_APPS=['commandegg']):
cmds = find_commands(os.path.join(apps.get_app_config('commandegg').path, 'management'))
self.assertEqual(cmds, ['eggcommand'])
def test_call_command_option_parsing(self):
"""
When passing the long option name to call_command, the available option
key is the option dest name (#22985).
"""
out = StringIO()
management.call_command('dance', stdout=out, opt_3=True)
self.assertIn("option3", out.getvalue())
self.assertNotIn("opt_3", out.getvalue())
self.assertNotIn("opt-3", out.getvalue())
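    # This follows from how argparse derives destination names: an option
    # declared as, say, parser.add_argument('--opt-3', action='store_true',
    # dest='opt_3') is exposed to call_command() under its dest ('opt_3'), not
    # under the flag spelling. The exact declaration in the dance command is an
    # assumption here; only the dest behaviour is being illustrated.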
def test_call_command_option_parsing_non_string_arg(self):
"""
It should be possible to pass non-string arguments to call_command.
"""
out = StringIO()
management.call_command('dance', 1, verbosity=0, stdout=out)
self.assertIn("You passed 1 as a positional argument.", out.getvalue())
    def test_calling_a_command_with_only_empty_parameter_should_end_gracefully(self):
out = StringIO()
management.call_command('hal', "--empty", stdout=out)
self.assertIn("Dave, I can't do that.\n", out.getvalue())
def test_calling_command_with_app_labels_and_parameters_should_be_ok(self):
out = StringIO()
management.call_command('hal', 'myapp', "--verbosity", "3", stdout=out)
self.assertIn("Dave, my mind is going. I can feel it. I can feel it.\n", out.getvalue())
def test_calling_command_with_parameters_and_app_labels_at_the_end_should_be_ok(self):
out = StringIO()
management.call_command('hal', "--verbosity", "3", "myapp", stdout=out)
self.assertIn("Dave, my mind is going. I can feel it. I can feel it.\n", out.getvalue())
def test_calling_a_command_with_no_app_labels_and_parameters_should_raise_a_command_error(self):
with self.assertRaises(CommandError):
management.call_command('hal', stdout=StringIO())
def test_output_transaction(self):
output = management.call_command('transaction', stdout=StringIO(), no_color=True)
self.assertTrue(output.strip().startswith(connection.ops.start_transaction_sql()))
self.assertTrue(output.strip().endswith(connection.ops.end_transaction_sql()))
def test_call_command_no_checks(self):
"""
        call_command() shouldn't trigger the check framework unless explicitly
        asked to, e.g. by passing skip_checks=False.
"""
self.counter = 0
def patched_check(self_, **kwargs):
self.counter += 1
saved_check = BaseCommand.check
BaseCommand.check = patched_check
try:
management.call_command("dance", verbosity=0)
self.assertEqual(self.counter, 0)
management.call_command("dance", verbosity=0, skip_checks=False)
self.assertEqual(self.counter, 1)
finally:
BaseCommand.check = saved_check
def test_check_migrations(self):
requires_migrations_checks = dance.Command.requires_migrations_checks
self.assertIs(requires_migrations_checks, False)
try:
with mock.patch.object(BaseCommand, 'check_migrations') as check_migrations:
management.call_command('dance', verbosity=0)
self.assertFalse(check_migrations.called)
dance.Command.requires_migrations_checks = True
management.call_command('dance', verbosity=0)
self.assertTrue(check_migrations.called)
finally:
dance.Command.requires_migrations_checks = requires_migrations_checks
def test_call_command_unrecognized_option(self):
msg = (
'Unknown option(s) for dance command: unrecognized. Valid options '
'are: example, force_color, help, integer, no_color, opt_3, '
'option3, pythonpath, settings, skip_checks, stderr, stdout, '
'style, traceback, verbosity, version.'
)
with self.assertRaisesMessage(TypeError, msg):
management.call_command('dance', unrecognized=1)
msg = (
'Unknown option(s) for dance command: unrecognized, unrecognized2. '
'Valid options are: example, force_color, help, integer, no_color, '
'opt_3, option3, pythonpath, settings, skip_checks, stderr, '
'stdout, style, traceback, verbosity, version.'
)
with self.assertRaisesMessage(TypeError, msg):
management.call_command('dance', unrecognized=1, unrecognized2=1)
def test_call_command_with_required_parameters_in_options(self):
out = StringIO()
management.call_command('required_option', need_me='foo', needme2='bar', stdout=out)
self.assertIn('need_me', out.getvalue())
self.assertIn('needme2', out.getvalue())
def test_call_command_with_required_parameters_in_mixed_options(self):
out = StringIO()
management.call_command('required_option', '--need-me=foo', needme2='bar', stdout=out)
self.assertIn('need_me', out.getvalue())
self.assertIn('needme2', out.getvalue())
def test_command_add_arguments_after_common_arguments(self):
out = StringIO()
management.call_command('common_args', stdout=out)
self.assertIn('Detected that --version already exists', out.getvalue())
def test_subparser(self):
out = StringIO()
management.call_command('subparser', 'foo', 12, stdout=out)
self.assertIn('bar', out.getvalue())
def test_subparser_invalid_option(self):
msg = "Error: invalid choice: 'test' (choose from 'foo')"
with self.assertRaisesMessage(CommandError, msg):
management.call_command('subparser', 'test', 12)
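    # Hypothetical sketch of the kind of command the two subparser tests
    # assume, with a 'foo' subcommand taking one integer positional:
    #
    #   class Command(BaseCommand):
    #       def add_arguments(self, parser):
    #           subparsers = parser.add_subparsers()
    #           parser_foo = subparsers.add_parser('foo')
    #           parser_foo.add_argument('bar', type=int)
    #
    #       def handle(self, *args, **options):
    #           self.stdout.write('bar: %s' % options['bar'])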
def test_create_parser_kwargs(self):
"""BaseCommand.create_parser() passes kwargs to CommandParser."""
epilog = 'some epilog text'
parser = BaseCommand().create_parser('prog_name', 'subcommand', epilog=epilog)
self.assertEqual(parser.epilog, epilog)
class CommandRunTests(AdminScriptTestCase):
"""
Tests that need to run by simulating the command line, not by call_command.
"""
def tearDown(self):
self.remove_settings('settings.py')
def test_script_prefix_set_in_commands(self):
self.write_settings('settings.py', apps=['user_commands'], sdict={
'ROOT_URLCONF': '"user_commands.urls"',
'FORCE_SCRIPT_NAME': '"/PREFIX/"',
})
out, err = self.run_manage(['reverse_url'])
self.assertNoOutput(err)
self.assertEqual(out.strip(), '/PREFIX/some/url/')
def test_disallowed_abbreviated_options(self):
"""
To avoid conflicts with custom options, commands don't allow
        abbreviated forms of the --settings and --pythonpath options.
"""
self.write_settings('settings.py', apps=['user_commands'])
out, err = self.run_manage(['set_option', '--set', 'foo'])
self.assertNoOutput(err)
self.assertEqual(out.strip(), 'Set foo')
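    # Sketch of the assumption here: set_option defines its own '--set' flag,
    # so '--set' has to reach the command itself rather than being treated as
    # an abbreviation of Django's global '--settings' option.
    #
    #   class Command(BaseCommand):
    #       def add_arguments(self, parser):
    #           parser.add_argument('--set')
    #
    #       def handle(self, *args, **options):
    #           self.stdout.write('Set %s' % options['set'])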
class UtilsTests(SimpleTestCase):
def test_no_existent_external_program(self):
msg = 'Error executing a_42_command_that_doesnt_exist_42'
with self.assertRaisesMessage(CommandError, msg):
popen_wrapper(['a_42_command_that_doesnt_exist_42'])
def test_get_random_secret_key(self):
key = get_random_secret_key()
self.assertEqual(len(key), 50)
for char in key:
self.assertIn(char, 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)')
|
cc13fc76d8c361a30f829f435d9831e98b5f51dafaa360ea5103b55a6ef18773 | import sys
import unittest
from django.conf import settings
from django.contrib.admindocs import utils, views
from django.contrib.admindocs.views import get_return_data_type, simplify_regex
from django.contrib.sites.models import Site
from django.db import models
from django.db.models import fields
from django.test import SimpleTestCase, modify_settings, override_settings
from django.test.utils import captured_stderr
from django.urls import reverse
from .models import Company, Person
from .tests import AdminDocsTestCase, TestDataMixin
@unittest.skipUnless(utils.docutils_is_available, "no docutils installed.")
class AdminDocViewTests(TestDataMixin, AdminDocsTestCase):
def setUp(self):
self.client.force_login(self.superuser)
def test_index(self):
response = self.client.get(reverse('django-admindocs-docroot'))
self.assertContains(response, '<h1>Documentation</h1>', html=True)
self.assertContains(response, '<h1 id="site-name"><a href="/admin/">Django administration</a></h1>')
self.client.logout()
response = self.client.get(reverse('django-admindocs-docroot'), follow=True)
# Should display the login screen
self.assertContains(response, '<input type="hidden" name="next" value="/admindocs/">', html=True)
def test_bookmarklets(self):
response = self.client.get(reverse('django-admindocs-bookmarklets'))
self.assertContains(response, '/admindocs/views/')
def test_templatetag_index(self):
response = self.client.get(reverse('django-admindocs-tags'))
self.assertContains(response, '<h3 id="built_in-extends">extends</h3>', html=True)
def test_templatefilter_index(self):
response = self.client.get(reverse('django-admindocs-filters'))
self.assertContains(response, '<h3 id="built_in-first">first</h3>', html=True)
def test_view_index(self):
response = self.client.get(reverse('django-admindocs-views-index'))
self.assertContains(
response,
'<h3><a href="/admindocs/views/django.contrib.admindocs.views.BaseAdminDocsView/">/admindocs/</a></h3>',
html=True
)
self.assertContains(response, 'Views by namespace test')
self.assertContains(response, 'Name: <code>test:func</code>.')
self.assertContains(
response,
'<h3><a href="/admindocs/views/admin_docs.views.XViewCallableObject/">'
'/xview/callable_object_without_xview/</a></h3>',
html=True,
)
def test_view_index_with_method(self):
"""
Views that are methods are listed correctly.
"""
response = self.client.get(reverse('django-admindocs-views-index'))
self.assertContains(
response,
'<h3><a href="/admindocs/views/django.contrib.admin.sites.AdminSite.index/">/admin/</a></h3>',
html=True
)
def test_view_detail(self):
url = reverse('django-admindocs-views-detail', args=['django.contrib.admindocs.views.BaseAdminDocsView'])
response = self.client.get(url)
# View docstring
self.assertContains(response, 'Base view for admindocs views.')
@override_settings(ROOT_URLCONF='admin_docs.namespace_urls')
def test_namespaced_view_detail(self):
url = reverse('django-admindocs-views-detail', args=['admin_docs.views.XViewClass'])
response = self.client.get(url)
self.assertContains(response, '<h1>admin_docs.views.XViewClass</h1>')
def test_view_detail_illegal_import(self):
url = reverse('django-admindocs-views-detail', args=['urlpatterns_reverse.nonimported_module.view'])
response = self.client.get(url)
self.assertEqual(response.status_code, 404)
self.assertNotIn("urlpatterns_reverse.nonimported_module", sys.modules)
def test_view_detail_as_method(self):
"""
Views that are methods can be displayed.
"""
url = reverse('django-admindocs-views-detail', args=['django.contrib.admin.sites.AdminSite.index'])
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_model_index(self):
response = self.client.get(reverse('django-admindocs-models-index'))
self.assertContains(
response,
'<h2 id="app-auth">Authentication and Authorization (django.contrib.auth)</h2>',
html=True
)
def test_template_detail(self):
response = self.client.get(reverse('django-admindocs-templates', args=['admin_doc/template_detail.html']))
self.assertContains(response, '<h1>Template: "admin_doc/template_detail.html"</h1>', html=True)
def test_missing_docutils(self):
utils.docutils_is_available = False
try:
response = self.client.get(reverse('django-admindocs-docroot'))
self.assertContains(
response,
'<h3>The admin documentation system requires Python\'s '
'<a href="http://docutils.sf.net/">docutils</a> library.</h3>',
html=True
)
self.assertContains(response, '<h1 id="site-name"><a href="/admin/">Django administration</a></h1>')
finally:
utils.docutils_is_available = True
@modify_settings(INSTALLED_APPS={'remove': 'django.contrib.sites'})
@override_settings(SITE_ID=None) # will restore SITE_ID after the test
def test_no_sites_framework(self):
"""
        Without the sites framework, the view should not access SITE_ID or Site
objects. Deleting settings is fine here as UserSettingsHolder is used.
"""
Site.objects.all().delete()
del settings.SITE_ID
response = self.client.get(reverse('django-admindocs-views-index'))
self.assertContains(response, 'View documentation')
@override_settings(TEMPLATES=[{
'NAME': 'ONE',
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': True,
}, {
'NAME': 'TWO',
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': True,
}])
@unittest.skipUnless(utils.docutils_is_available, "no docutils installed.")
class AdminDocViewWithMultipleEngines(AdminDocViewTests):
def test_templatefilter_index(self):
# Overridden because non-trivial TEMPLATES settings aren't supported
# but the page shouldn't crash (#24125).
response = self.client.get(reverse('django-admindocs-filters'))
self.assertContains(response, '<title>Template filters</title>', html=True)
def test_templatetag_index(self):
# Overridden because non-trivial TEMPLATES settings aren't supported
# but the page shouldn't crash (#24125).
response = self.client.get(reverse('django-admindocs-tags'))
self.assertContains(response, '<title>Template tags</title>', html=True)
@unittest.skipUnless(utils.docutils_is_available, "no docutils installed.")
class TestModelDetailView(TestDataMixin, AdminDocsTestCase):
def setUp(self):
self.client.force_login(self.superuser)
with captured_stderr() as self.docutils_stderr:
self.response = self.client.get(reverse('django-admindocs-models-detail', args=['admin_docs', 'Person']))
def test_method_excludes(self):
"""
Methods that begin with strings defined in
``django.contrib.admindocs.views.MODEL_METHODS_EXCLUDE``
shouldn't be displayed in the admin docs.
"""
self.assertContains(self.response, "<td>get_full_name</td>")
self.assertNotContains(self.response, "<td>_get_full_name</td>")
self.assertNotContains(self.response, "<td>add_image</td>")
self.assertNotContains(self.response, "<td>delete_image</td>")
self.assertNotContains(self.response, "<td>set_status</td>")
self.assertNotContains(self.response, "<td>save_changes</td>")
def test_methods_with_arguments(self):
"""
        Methods that take arguments should also be displayed.
"""
self.assertContains(self.response, "<h3>Methods with arguments</h3>")
self.assertContains(self.response, "<td>rename_company</td>")
self.assertContains(self.response, "<td>dummy_function</td>")
self.assertContains(self.response, "<td>suffix_company_name</td>")
def test_methods_with_arguments_display_arguments(self):
"""
Methods with arguments should have their arguments displayed.
"""
self.assertContains(self.response, "<td>new_name</td>")
def test_methods_with_arguments_display_arguments_default_value(self):
"""
Methods with keyword arguments should have their arguments displayed.
"""
self.assertContains(self.response, "<td>suffix='ltd'</td>")
def test_methods_with_multiple_arguments_display_arguments(self):
"""
Methods with multiple arguments should have all their arguments
        displayed, with 'self' omitted.
"""
self.assertContains(self.response, "<td>baz, rox, *some_args, **some_kwargs</td>")
def test_instance_of_property_methods_are_displayed(self):
"""Model properties are displayed as fields."""
self.assertContains(self.response, '<td>a_property</td>')
def test_method_data_types(self):
company = Company.objects.create(name="Django")
person = Person.objects.create(first_name="Human", last_name="User", company=company)
self.assertEqual(get_return_data_type(person.get_status_count.__name__), 'Integer')
self.assertEqual(get_return_data_type(person.get_groups_list.__name__), 'List')
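    # get_return_data_type() presumably infers the type from naming conventions
    # alone; something along these lines (a sketch, not the verified
    # implementation):
    #
    #   def get_return_data_type(func_name):
    #       if func_name.startswith('get_'):
    #           if func_name.endswith('_list'):
    #               return 'List'
    #           elif func_name.endswith('_count'):
    #               return 'Integer'
    #       return ''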
def test_descriptions_render_correctly(self):
"""
The ``description`` field should render correctly for each field type.
"""
# help text in fields
self.assertContains(self.response, "<td>first name - The person's first name</td>")
self.assertContains(self.response, "<td>last name - The person's last name</td>")
# method docstrings
self.assertContains(self.response, "<p>Get the full name of the person</p>")
link = '<a class="reference external" href="/admindocs/models/%s/">%s</a>'
markup = '<p>the related %s object</p>'
company_markup = markup % (link % ("admin_docs.company", "admin_docs.Company"))
# foreign keys
self.assertContains(self.response, company_markup)
# foreign keys with help text
self.assertContains(self.response, "%s\n - place of work" % company_markup)
# many to many fields
self.assertContains(
self.response,
"number of related %s objects" % (link % ("admin_docs.group", "admin_docs.Group"))
)
self.assertContains(
self.response,
"all related %s objects" % (link % ("admin_docs.group", "admin_docs.Group"))
)
# "raw" and "include" directives are disabled
self.assertContains(self.response, '<p>"raw" directive disabled.</p>',)
self.assertContains(self.response, '.. raw:: html\n :file: admin_docs/evilfile.txt')
self.assertContains(self.response, '<p>"include" directive disabled.</p>',)
self.assertContains(self.response, '.. include:: admin_docs/evilfile.txt')
out = self.docutils_stderr.getvalue()
self.assertIn('"raw" directive disabled', out)
self.assertIn('"include" directive disabled', out)
def test_model_with_many_to_one(self):
link = '<a class="reference external" href="/admindocs/models/%s/">%s</a>'
response = self.client.get(
reverse('django-admindocs-models-detail', args=['admin_docs', 'company'])
)
self.assertContains(
response,
"number of related %s objects" % (link % ("admin_docs.person", "admin_docs.Person"))
)
self.assertContains(
response,
"all related %s objects" % (link % ("admin_docs.person", "admin_docs.Person"))
)
def test_model_with_no_backward_relations_render_only_relevant_fields(self):
"""
A model with ``related_name`` of `+` shouldn't show backward
relationship links.
"""
response = self.client.get(reverse('django-admindocs-models-detail', args=['admin_docs', 'family']))
fields = response.context_data.get('fields')
self.assertEqual(len(fields), 2)
def test_model_docstring_renders_correctly(self):
summary = (
'<h2 class="subhead"><p>Stores information about a person, related to <a class="reference external" '
'href="/admindocs/models/myapp.company/">myapp.Company</a>.</p></h2>'
)
subheading = '<p><strong>Notes</strong></p>'
body = '<p>Use <tt class="docutils literal">save_changes()</tt> when saving this object.</p>'
model_body = (
'<dl class="docutils"><dt><tt class="'
'docutils literal">company</tt></dt><dd>Field storing <a class="'
'reference external" href="/admindocs/models/myapp.company/">'
'myapp.Company</a> where the person works.</dd></dl>'
)
self.assertContains(self.response, 'DESCRIPTION')
self.assertContains(self.response, summary, html=True)
self.assertContains(self.response, subheading, html=True)
self.assertContains(self.response, body, html=True)
self.assertContains(self.response, model_body, html=True)
def test_model_detail_title(self):
self.assertContains(self.response, '<h1>admin_docs.Person</h1>', html=True)
def test_app_not_found(self):
response = self.client.get(reverse('django-admindocs-models-detail', args=['doesnotexist', 'Person']))
self.assertEqual(response.context['exception'], "App 'doesnotexist' not found")
self.assertEqual(response.status_code, 404)
def test_model_not_found(self):
response = self.client.get(reverse('django-admindocs-models-detail', args=['admin_docs', 'doesnotexist']))
self.assertEqual(response.context['exception'], "Model 'doesnotexist' not found in app 'admin_docs'")
self.assertEqual(response.status_code, 404)
class CustomField(models.Field):
description = "A custom field type"
class DescriptionLackingField(models.Field):
pass
class TestFieldType(unittest.TestCase):
def test_field_name(self):
with self.assertRaises(AttributeError):
views.get_readable_field_data_type("NotAField")
def test_builtin_fields(self):
self.assertEqual(
views.get_readable_field_data_type(fields.BooleanField()),
'Boolean (Either True or False)'
)
def test_custom_fields(self):
self.assertEqual(views.get_readable_field_data_type(CustomField()), 'A custom field type')
self.assertEqual(
views.get_readable_field_data_type(DescriptionLackingField()),
'Field of type: DescriptionLackingField'
)
class AdminDocViewFunctionsTests(SimpleTestCase):
def test_simplify_regex(self):
tests = (
(r'^a', '/a'),
(r'^(?P<a>\w+)/b/(?P<c>\w+)/$', '/<a>/b/<c>/'),
(r'^(?P<a>\w+)/b/(?P<c>\w+)$', '/<a>/b/<c>'),
(r'^(?P<a>\w+)/b/(\w+)$', '/<a>/b/<var>'),
(r'^(?P<a>\w+)/b/((x|y)\w+)$', '/<a>/b/<var>'),
(r'^(?P<a>(x|y))/b/(?P<c>\w+)$', '/<a>/b/<c>'),
(r'^(?P<a>(x|y))/b/(?P<c>\w+)ab', '/<a>/b/<c>ab'),
(r'^(?P<a>(x|y)(\(|\)))/b/(?P<c>\w+)ab', '/<a>/b/<c>ab'),
(r'^a/?$', '/a/'),
)
for pattern, output in tests:
with self.subTest(pattern=pattern):
self.assertEqual(simplify_regex(pattern), output)
|
bfbf353fa71f9359a353f0ab2da28db78af3a7504b080451db241e36ddae388e | import errno
import gzip
import os
import struct
import tempfile
import unittest
from io import BytesIO, StringIO, TextIOWrapper
from unittest import mock
from django.core.files import File
from django.core.files.base import ContentFile
from django.core.files.move import file_move_safe
from django.core.files.temp import NamedTemporaryFile
from django.core.files.uploadedfile import (
InMemoryUploadedFile, SimpleUploadedFile, TemporaryUploadedFile,
UploadedFile,
)
try:
from PIL import Image
except ImportError:
Image = None
else:
from django.core.files import images
class FileTests(unittest.TestCase):
def test_unicode_uploadedfile_name(self):
uf = UploadedFile(name='¿Cómo?', content_type='text')
self.assertIs(type(repr(uf)), str)
def test_unicode_file_name(self):
f = File(None, 'djángö')
self.assertIs(type(repr(f)), str)
def test_context_manager(self):
orig_file = tempfile.TemporaryFile()
base_file = File(orig_file)
with base_file as f:
self.assertIs(base_file, f)
self.assertFalse(f.closed)
self.assertTrue(f.closed)
self.assertTrue(orig_file.closed)
def test_open_resets_opened_file_to_start_and_returns_context_manager(self):
file = File(BytesIO(b'content'))
file.read()
with file.open() as f:
self.assertEqual(f.read(), b'content')
def test_open_reopens_closed_file_and_returns_context_manager(self):
temporary_file = tempfile.NamedTemporaryFile(delete=False)
file = File(temporary_file)
try:
file.close()
with file.open() as f:
self.assertFalse(f.closed)
finally:
# remove temporary file
os.unlink(file.name)
def test_namedtemporaryfile_closes(self):
"""
        The symbol django.core.files.NamedTemporaryFile refers to a different
        class on different operating systems. In any case, the result should at
        least mimic the relevant parts of the API of
tempfile.NamedTemporaryFile from the Python standard library.
"""
tempfile = NamedTemporaryFile()
self.assertTrue(hasattr(tempfile, "closed"))
self.assertFalse(tempfile.closed)
tempfile.close()
self.assertTrue(tempfile.closed)
def test_file_mode(self):
# Should not set mode to None if it is not present.
# See #14681, stdlib gzip module crashes if mode is set to None
file = SimpleUploadedFile("mode_test.txt", b"content")
self.assertFalse(hasattr(file, 'mode'))
gzip.GzipFile(fileobj=file)
def test_file_iteration(self):
"""
File objects should yield lines when iterated over.
Refs #22107.
"""
file = File(BytesIO(b'one\ntwo\nthree'))
self.assertEqual(list(file), [b'one\n', b'two\n', b'three'])
def test_file_iteration_windows_newlines(self):
"""
#8149 - File objects with \r\n line endings should yield lines
when iterated over.
"""
f = File(BytesIO(b'one\r\ntwo\r\nthree'))
self.assertEqual(list(f), [b'one\r\n', b'two\r\n', b'three'])
def test_file_iteration_mac_newlines(self):
"""
#8149 - File objects with \r line endings should yield lines
when iterated over.
"""
f = File(BytesIO(b'one\rtwo\rthree'))
self.assertEqual(list(f), [b'one\r', b'two\r', b'three'])
def test_file_iteration_mixed_newlines(self):
f = File(BytesIO(b'one\rtwo\nthree\r\nfour'))
self.assertEqual(list(f), [b'one\r', b'two\n', b'three\r\n', b'four'])
def test_file_iteration_with_unix_newline_at_chunk_boundary(self):
f = File(BytesIO(b'one\ntwo\nthree'))
# Set chunk size to create a boundary after \n:
# b'one\n...
# ^
f.DEFAULT_CHUNK_SIZE = 4
self.assertEqual(list(f), [b'one\n', b'two\n', b'three'])
def test_file_iteration_with_windows_newline_at_chunk_boundary(self):
f = File(BytesIO(b'one\r\ntwo\r\nthree'))
# Set chunk size to create a boundary between \r and \n:
# b'one\r\n...
# ^
f.DEFAULT_CHUNK_SIZE = 4
self.assertEqual(list(f), [b'one\r\n', b'two\r\n', b'three'])
def test_file_iteration_with_mac_newline_at_chunk_boundary(self):
f = File(BytesIO(b'one\rtwo\rthree'))
# Set chunk size to create a boundary after \r:
# b'one\r...
# ^
f.DEFAULT_CHUNK_SIZE = 4
self.assertEqual(list(f), [b'one\r', b'two\r', b'three'])
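    # The three chunk-boundary tests shrink DEFAULT_CHUNK_SIZE to 4 so that a
    # line ending lands exactly on a chunk edge; iteration then has to hold a
    # trailing '\r' back until the next chunk shows whether a '\n' follows.
    # For b'one\r\ntwo\r\nthree' the chunks are b'one\r', b'\ntwo', b'\r\nth',
    # b'ree'.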
def test_file_iteration_with_text(self):
f = File(StringIO('one\ntwo\nthree'))
self.assertEqual(list(f), ['one\n', 'two\n', 'three'])
def test_readable(self):
with tempfile.TemporaryFile() as temp, File(temp, name='something.txt') as test_file:
self.assertTrue(test_file.readable())
self.assertFalse(test_file.readable())
def test_writable(self):
with tempfile.TemporaryFile() as temp, File(temp, name='something.txt') as test_file:
self.assertTrue(test_file.writable())
self.assertFalse(test_file.writable())
with tempfile.TemporaryFile('rb') as temp, File(temp, name='something.txt') as test_file:
self.assertFalse(test_file.writable())
def test_seekable(self):
with tempfile.TemporaryFile() as temp, File(temp, name='something.txt') as test_file:
self.assertTrue(test_file.seekable())
self.assertFalse(test_file.seekable())
def test_io_wrapper(self):
content = "vive l'été\n"
with tempfile.TemporaryFile() as temp, File(temp, name='something.txt') as test_file:
test_file.write(content.encode())
test_file.seek(0)
wrapper = TextIOWrapper(test_file, 'utf-8', newline='\n')
self.assertEqual(wrapper.read(), content)
wrapper.write(content)
wrapper.seek(0)
self.assertEqual(wrapper.read(), content * 2)
test_file = wrapper.detach()
test_file.seek(0)
self.assertEqual(test_file.read(), (content * 2).encode())
class NoNameFileTestCase(unittest.TestCase):
"""
    Other examples of unnamed files are tempfile.SpooledTemporaryFile and the
    file-like object returned by urllib.request.urlopen().
"""
def test_noname_file_default_name(self):
self.assertIsNone(File(BytesIO(b'A file with no name')).name)
def test_noname_file_get_size(self):
self.assertEqual(File(BytesIO(b'A file with no name')).size, 19)
class ContentFileTestCase(unittest.TestCase):
def test_content_file_default_name(self):
self.assertIsNone(ContentFile(b"content").name)
def test_content_file_custom_name(self):
"""
The constructor of ContentFile accepts 'name' (#16590).
"""
name = "I can have a name too!"
self.assertEqual(ContentFile(b"content", name=name).name, name)
def test_content_file_input_type(self):
"""
ContentFile can accept both bytes and strings and the retrieved content
is of the same type.
"""
self.assertIsInstance(ContentFile(b"content").read(), bytes)
self.assertIsInstance(ContentFile("español").read(), str)
def test_open_resets_file_to_start_and_returns_context_manager(self):
file = ContentFile(b'content')
with file.open() as f:
self.assertEqual(f.read(), b'content')
with file.open() as f:
self.assertEqual(f.read(), b'content')
def test_size_changing_after_writing(self):
"""ContentFile.size changes after a write()."""
f = ContentFile('')
self.assertEqual(f.size, 0)
f.write('Test ')
f.write('string')
self.assertEqual(f.size, 11)
with f.open() as fh:
self.assertEqual(fh.read(), 'Test string')
class InMemoryUploadedFileTests(unittest.TestCase):
def test_open_resets_file_to_start_and_returns_context_manager(self):
uf = InMemoryUploadedFile(StringIO('1'), '', 'test', 'text/plain', 1, 'utf8')
uf.read()
with uf.open() as f:
self.assertEqual(f.read(), '1')
class TemporaryUploadedFileTests(unittest.TestCase):
def test_extension_kept(self):
"""The temporary file name has the same suffix as the original file."""
with TemporaryUploadedFile('test.txt', 'text/plain', 1, 'utf8') as temp_file:
self.assertTrue(temp_file.file.name.endswith('.upload.txt'))
class DimensionClosingBug(unittest.TestCase):
"""
get_image_dimensions() properly closes files (#8817)
"""
@unittest.skipUnless(Image, "Pillow not installed")
def test_not_closing_of_files(self):
"""
        Open files passed into get_image_dimensions() should stay open.
"""
empty_io = BytesIO()
try:
images.get_image_dimensions(empty_io)
finally:
self.assertTrue(not empty_io.closed)
@unittest.skipUnless(Image, "Pillow not installed")
def test_closing_of_filenames(self):
"""
        get_image_dimensions() called with a filename should close the file.
"""
# We need to inject a modified open() builtin into the images module
# that checks if the file was closed properly if the function is
# called with a filename instead of a file object.
# get_image_dimensions will call our catching_open instead of the
# regular builtin one.
class FileWrapper:
_closed = []
def __init__(self, f):
self.f = f
def __getattr__(self, name):
return getattr(self.f, name)
def close(self):
self._closed.append(True)
self.f.close()
def catching_open(*args):
return FileWrapper(open(*args))
images.open = catching_open
try:
images.get_image_dimensions(os.path.join(os.path.dirname(__file__), "test1.png"))
finally:
del images.open
self.assertTrue(FileWrapper._closed)
class InconsistentGetImageDimensionsBug(unittest.TestCase):
"""
get_image_dimensions() works properly after various calls
using a file handler (#11158)
"""
@unittest.skipUnless(Image, "Pillow not installed")
def test_multiple_calls(self):
"""
Multiple calls of get_image_dimensions() should return the same size.
"""
img_path = os.path.join(os.path.dirname(__file__), "test.png")
with open(img_path, 'rb') as fh:
image = images.ImageFile(fh)
image_pil = Image.open(fh)
size_1 = images.get_image_dimensions(image)
size_2 = images.get_image_dimensions(image)
self.assertEqual(image_pil.size, size_1)
self.assertEqual(size_1, size_2)
@unittest.skipUnless(Image, "Pillow not installed")
def test_bug_19457(self):
"""
        Regression test for #19457: get_image_dimensions() fails on some PNGs
        for which Image.size works fine.
"""
img_path = os.path.join(os.path.dirname(__file__), "magic.png")
size = images.get_image_dimensions(img_path)
with open(img_path, 'rb') as fh:
self.assertEqual(size, Image.open(fh).size)
@unittest.skipUnless(Image, "Pillow not installed")
class GetImageDimensionsTests(unittest.TestCase):
def test_invalid_image(self):
"""
get_image_dimensions() should return (None, None) for the dimensions of
invalid images (#24441).
brokenimg.png is not a valid image and it has been generated by:
$ echo "123" > brokenimg.png
"""
img_path = os.path.join(os.path.dirname(__file__), "brokenimg.png")
with open(img_path, 'rb') as fh:
size = images.get_image_dimensions(fh)
self.assertEqual(size, (None, None))
def test_valid_image(self):
"""
get_image_dimensions() should catch struct.error while feeding the PIL
Image parser (#24544).
Emulates the Parser feed error. Since the error is raised on every feed
attempt, the resulting image size should be invalid: (None, None).
"""
img_path = os.path.join(os.path.dirname(__file__), "test.png")
with mock.patch('PIL.ImageFile.Parser.feed', side_effect=struct.error):
with open(img_path, 'rb') as fh:
size = images.get_image_dimensions(fh)
self.assertEqual(size, (None, None))
def test_webp(self):
img_path = os.path.join(os.path.dirname(__file__), 'test.webp')
with open(img_path, 'rb') as fh:
size = images.get_image_dimensions(fh)
self.assertEqual(size, (540, 405))
class FileMoveSafeTests(unittest.TestCase):
def test_file_move_overwrite(self):
handle_a, self.file_a = tempfile.mkstemp()
handle_b, self.file_b = tempfile.mkstemp()
        # file_move_safe() should raise IOError if the destination file exists
        # and allow_overwrite is False.
with self.assertRaises(IOError):
file_move_safe(self.file_a, self.file_b, allow_overwrite=False)
# should allow it and continue on if allow_overwrite is True
self.assertIsNone(file_move_safe(self.file_a, self.file_b, allow_overwrite=True))
os.close(handle_a)
os.close(handle_b)
def test_file_move_copystat_cifs(self):
"""
file_move_safe() ignores a copystat() EPERM PermissionError. This
happens when the destination filesystem is CIFS, for example.
"""
copystat_EACCES_error = PermissionError(errno.EACCES, 'msg')
copystat_EPERM_error = PermissionError(errno.EPERM, 'msg')
handle_a, self.file_a = tempfile.mkstemp()
handle_b, self.file_b = tempfile.mkstemp()
try:
# This exception is required to reach the copystat() call in
            # file_move_safe().
with mock.patch('django.core.files.move.os.rename', side_effect=OSError()):
# An error besides EPERM isn't ignored.
with mock.patch('django.core.files.move.copystat', side_effect=copystat_EACCES_error):
with self.assertRaises(PermissionError):
file_move_safe(self.file_a, self.file_b, allow_overwrite=True)
# EPERM is ignored.
with mock.patch('django.core.files.move.copystat', side_effect=copystat_EPERM_error):
self.assertIsNone(file_move_safe(self.file_a, self.file_b, allow_overwrite=True))
finally:
os.close(handle_a)
os.close(handle_b)
class SpooledTempTests(unittest.TestCase):
def test_in_memory_spooled_temp(self):
with tempfile.SpooledTemporaryFile() as temp:
temp.write(b"foo bar baz quux\n")
django_file = File(temp, name="something.txt")
self.assertEqual(django_file.size, 17)
def test_written_spooled_temp(self):
with tempfile.SpooledTemporaryFile(max_size=4) as temp:
temp.write(b"foo bar baz quux\n")
django_file = File(temp, name="something.txt")
self.assertEqual(django_file.size, 17)
|
6ca13e50fd5af9e01464f0001ba2ab888cb85c2200b110eefe93ab2824eb8667 | import decimal
import json
import unittest
import uuid
from django import forms
from django.core import checks, exceptions, serializers, validators
from django.core.exceptions import FieldError
from django.core.management import call_command
from django.db import IntegrityError, connection, models
from django.test import TransactionTestCase, modify_settings, override_settings
from django.test.utils import isolate_apps
from django.utils import timezone
from . import (
PostgreSQLSimpleTestCase, PostgreSQLTestCase, PostgreSQLWidgetTestCase,
)
from .models import (
ArrayFieldSubclass, CharArrayModel, DateTimeArrayModel, IntegerArrayModel,
NestedIntegerArrayModel, NullableIntegerArrayModel, OtherTypesArrayModel,
PostgreSQLModel, Tag,
)
try:
from django.contrib.postgres.fields import ArrayField
from django.contrib.postgres.forms import (
SimpleArrayField, SplitArrayField, SplitArrayWidget,
)
from psycopg2.extras import NumericRange
except ImportError:
pass
class TestSaveLoad(PostgreSQLTestCase):
def test_integer(self):
instance = IntegerArrayModel(field=[1, 2, 3])
instance.save()
loaded = IntegerArrayModel.objects.get()
self.assertEqual(instance.field, loaded.field)
def test_char(self):
instance = CharArrayModel(field=['hello', 'goodbye'])
instance.save()
loaded = CharArrayModel.objects.get()
self.assertEqual(instance.field, loaded.field)
def test_dates(self):
instance = DateTimeArrayModel(
datetimes=[timezone.now()],
dates=[timezone.now().date()],
times=[timezone.now().time()],
)
instance.save()
loaded = DateTimeArrayModel.objects.get()
self.assertEqual(instance.datetimes, loaded.datetimes)
self.assertEqual(instance.dates, loaded.dates)
self.assertEqual(instance.times, loaded.times)
def test_tuples(self):
instance = IntegerArrayModel(field=(1,))
instance.save()
loaded = IntegerArrayModel.objects.get()
self.assertSequenceEqual(instance.field, loaded.field)
def test_integers_passed_as_strings(self):
# This checks that get_prep_value is deferred properly
instance = IntegerArrayModel(field=['1'])
instance.save()
loaded = IntegerArrayModel.objects.get()
self.assertEqual(loaded.field, [1])
def test_default_null(self):
instance = NullableIntegerArrayModel()
instance.save()
loaded = NullableIntegerArrayModel.objects.get(pk=instance.pk)
self.assertIsNone(loaded.field)
self.assertEqual(instance.field, loaded.field)
def test_null_handling(self):
instance = NullableIntegerArrayModel(field=None)
instance.save()
loaded = NullableIntegerArrayModel.objects.get()
self.assertEqual(instance.field, loaded.field)
instance = IntegerArrayModel(field=None)
with self.assertRaises(IntegrityError):
instance.save()
def test_nested(self):
instance = NestedIntegerArrayModel(field=[[1, 2], [3, 4]])
instance.save()
loaded = NestedIntegerArrayModel.objects.get()
self.assertEqual(instance.field, loaded.field)
def test_other_array_types(self):
instance = OtherTypesArrayModel(
ips=['192.168.0.1', '::1'],
uuids=[uuid.uuid4()],
decimals=[decimal.Decimal(1.25), 1.75],
tags=[Tag(1), Tag(2), Tag(3)],
json=[{'a': 1}, {'b': 2}],
int_ranges=[NumericRange(10, 20), NumericRange(30, 40)],
bigint_ranges=[
NumericRange(7000000000, 10000000000),
NumericRange(50000000000, 70000000000),
]
)
instance.save()
loaded = OtherTypesArrayModel.objects.get()
self.assertEqual(instance.ips, loaded.ips)
self.assertEqual(instance.uuids, loaded.uuids)
self.assertEqual(instance.decimals, loaded.decimals)
self.assertEqual(instance.tags, loaded.tags)
self.assertEqual(instance.json, loaded.json)
self.assertEqual(instance.int_ranges, loaded.int_ranges)
self.assertEqual(instance.bigint_ranges, loaded.bigint_ranges)
def test_null_from_db_value_handling(self):
instance = OtherTypesArrayModel.objects.create(
ips=['192.168.0.1', '::1'],
uuids=[uuid.uuid4()],
decimals=[decimal.Decimal(1.25), 1.75],
tags=None,
)
instance.refresh_from_db()
self.assertIsNone(instance.tags)
self.assertEqual(instance.json, [])
self.assertIsNone(instance.int_ranges)
self.assertIsNone(instance.bigint_ranges)
def test_model_set_on_base_field(self):
instance = IntegerArrayModel()
field = instance._meta.get_field('field')
self.assertEqual(field.model, IntegerArrayModel)
self.assertEqual(field.base_field.model, IntegerArrayModel)
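# The array models exercised above are assumed to be declared roughly as
# follows (a sketch for orientation; the real definitions are in .models):
#
#   class IntegerArrayModel(models.Model):
#       field = ArrayField(models.IntegerField())
#
#   class NullableIntegerArrayModel(models.Model):
#       field = ArrayField(models.IntegerField(), blank=True, null=True)
#
#   class NestedIntegerArrayModel(models.Model):
#       field = ArrayField(ArrayField(models.IntegerField()))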
class TestQuerying(PostgreSQLTestCase):
@classmethod
def setUpTestData(cls):
cls.objs = NullableIntegerArrayModel.objects.bulk_create([
NullableIntegerArrayModel(field=[1]),
NullableIntegerArrayModel(field=[2]),
NullableIntegerArrayModel(field=[2, 3]),
NullableIntegerArrayModel(field=[20, 30, 40]),
NullableIntegerArrayModel(field=None),
])
def test_exact(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__exact=[1]),
self.objs[:1]
)
def test_exact_charfield(self):
instance = CharArrayModel.objects.create(field=['text'])
self.assertSequenceEqual(
CharArrayModel.objects.filter(field=['text']),
[instance]
)
def test_exact_nested(self):
instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]])
self.assertSequenceEqual(
NestedIntegerArrayModel.objects.filter(field=[[1, 2], [3, 4]]),
[instance]
)
def test_isnull(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__isnull=True),
self.objs[-1:]
)
def test_gt(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__gt=[0]),
self.objs[:4]
)
def test_lt(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__lt=[2]),
self.objs[:1]
)
def test_in(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__in=[[1], [2]]),
self.objs[:2]
)
def test_in_subquery(self):
IntegerArrayModel.objects.create(field=[2, 3])
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(
field__in=IntegerArrayModel.objects.all().values_list('field', flat=True)
),
self.objs[2:3]
)
@unittest.expectedFailure
def test_in_including_F_object(self):
        # This test asserts that Array objects passed to filters can be
        # constructed to contain F objects. This currently doesn't work
        # because the psycopg2 mogrify() method that generates the ARRAY()
        # syntax expects literals, not column references (#27095).
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__in=[[models.F('id')]]),
self.objs[:2]
)
def test_in_as_F_object(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__in=[models.F('field')]),
self.objs[:4]
)
def test_contained_by(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__contained_by=[1, 2]),
self.objs[:2]
)
@unittest.expectedFailure
def test_contained_by_including_F_object(self):
        # This test asserts that Array objects passed to filters can be
        # constructed to contain F objects. This currently doesn't work
        # because the psycopg2 mogrify() method that generates the ARRAY()
        # syntax expects literals, not column references (#27095).
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__contained_by=[models.F('id'), 2]),
self.objs[:2]
)
def test_contains(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__contains=[2]),
self.objs[1:3]
)
def test_icontains(self):
# Using the __icontains lookup with ArrayField is inefficient.
instance = CharArrayModel.objects.create(field=['FoO'])
self.assertSequenceEqual(
CharArrayModel.objects.filter(field__icontains='foo'),
[instance]
)
def test_contains_charfield(self):
# Regression for #22907
self.assertSequenceEqual(
CharArrayModel.objects.filter(field__contains=['text']),
[]
)
def test_contained_by_charfield(self):
self.assertSequenceEqual(
CharArrayModel.objects.filter(field__contained_by=['text']),
[]
)
def test_overlap_charfield(self):
self.assertSequenceEqual(
CharArrayModel.objects.filter(field__overlap=['text']),
[]
)
def test_index(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__0=2),
self.objs[1:3]
)
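    # The integer lookup above is the ArrayField index transform: it is
    # 0-based on the Django side but compiles to PostgreSQL's 1-based array
    # subscripting, so field__0=2 presumably translates to SQL roughly like
    # WHERE "field"[1] = 2.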
def test_index_chained(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__0__lt=3),
self.objs[0:3]
)
def test_index_nested(self):
instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]])
self.assertSequenceEqual(
NestedIntegerArrayModel.objects.filter(field__0__0=1),
[instance]
)
@unittest.expectedFailure
def test_index_used_on_nested_data(self):
instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]])
self.assertSequenceEqual(
NestedIntegerArrayModel.objects.filter(field__0=[1, 2]),
[instance]
)
def test_overlap(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__overlap=[1, 2]),
self.objs[0:3]
)
def test_len(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__len__lte=2),
self.objs[0:3]
)
def test_len_empty_array(self):
obj = NullableIntegerArrayModel.objects.create(field=[])
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__len=0),
[obj]
)
def test_slice(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__0_1=[2]),
self.objs[1:3]
)
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(field__0_2=[2, 3]),
self.objs[2:3]
)
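    # Slice transforms follow the same 0-based convention: field__0_1 takes a
    # one-element slice and field__0_2 a two-element slice. Assuming the
    # standard SliceTransform, the first filter above compares roughly as
    # WHERE "field"[1:1] = ARRAY[2] in SQL.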
def test_order_by_slice(self):
more_objs = (
NullableIntegerArrayModel.objects.create(field=[1, 637]),
NullableIntegerArrayModel.objects.create(field=[2, 1]),
NullableIntegerArrayModel.objects.create(field=[3, -98123]),
NullableIntegerArrayModel.objects.create(field=[4, 2]),
)
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.order_by('field__1'),
[
more_objs[2], more_objs[1], more_objs[3], self.objs[2],
self.objs[3], more_objs[0], self.objs[4], self.objs[1],
self.objs[0],
]
)
@unittest.expectedFailure
def test_slice_nested(self):
instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]])
self.assertSequenceEqual(
NestedIntegerArrayModel.objects.filter(field__0__0_1=[1]),
[instance]
)
def test_usage_in_subquery(self):
self.assertSequenceEqual(
NullableIntegerArrayModel.objects.filter(
id__in=NullableIntegerArrayModel.objects.filter(field__len=3)
),
[self.objs[3]]
)
def test_unsupported_lookup(self):
msg = "Unsupported lookup '0_bar' for ArrayField or join on the field not permitted."
with self.assertRaisesMessage(FieldError, msg):
list(NullableIntegerArrayModel.objects.filter(field__0_bar=[2]))
msg = "Unsupported lookup '0bar' for ArrayField or join on the field not permitted."
with self.assertRaisesMessage(FieldError, msg):
list(NullableIntegerArrayModel.objects.filter(field__0bar=[2]))
class TestDateTimeExactQuerying(PostgreSQLTestCase):
@classmethod
def setUpTestData(cls):
now = timezone.now()
cls.datetimes = [now]
cls.dates = [now.date()]
cls.times = [now.time()]
cls.objs = [
DateTimeArrayModel.objects.create(datetimes=cls.datetimes, dates=cls.dates, times=cls.times),
]
def test_exact_datetimes(self):
self.assertSequenceEqual(
DateTimeArrayModel.objects.filter(datetimes=self.datetimes),
self.objs
)
def test_exact_dates(self):
self.assertSequenceEqual(
DateTimeArrayModel.objects.filter(dates=self.dates),
self.objs
)
def test_exact_times(self):
self.assertSequenceEqual(
DateTimeArrayModel.objects.filter(times=self.times),
self.objs
)
class TestOtherTypesExactQuerying(PostgreSQLTestCase):
@classmethod
def setUpTestData(cls):
cls.ips = ['192.168.0.1', '::1']
cls.uuids = [uuid.uuid4()]
cls.decimals = [decimal.Decimal(1.25), 1.75]
cls.tags = [Tag(1), Tag(2), Tag(3)]
cls.objs = [
OtherTypesArrayModel.objects.create(
ips=cls.ips,
uuids=cls.uuids,
decimals=cls.decimals,
tags=cls.tags,
)
]
def test_exact_ip_addresses(self):
self.assertSequenceEqual(
OtherTypesArrayModel.objects.filter(ips=self.ips),
self.objs
)
def test_exact_uuids(self):
self.assertSequenceEqual(
OtherTypesArrayModel.objects.filter(uuids=self.uuids),
self.objs
)
def test_exact_decimals(self):
self.assertSequenceEqual(
OtherTypesArrayModel.objects.filter(decimals=self.decimals),
self.objs
)
def test_exact_tags(self):
self.assertSequenceEqual(
OtherTypesArrayModel.objects.filter(tags=self.tags),
self.objs
)
@isolate_apps('postgres_tests')
class TestChecks(PostgreSQLSimpleTestCase):
def test_field_checks(self):
class MyModel(PostgreSQLModel):
field = ArrayField(models.CharField())
model = MyModel()
errors = model.check()
self.assertEqual(len(errors), 1)
# The inner CharField is missing a max_length.
self.assertEqual(errors[0].id, 'postgres.E001')
self.assertIn('max_length', errors[0].msg)
def test_invalid_base_fields(self):
class MyModel(PostgreSQLModel):
field = ArrayField(models.ManyToManyField('postgres_tests.IntegerArrayModel'))
model = MyModel()
errors = model.check()
self.assertEqual(len(errors), 1)
self.assertEqual(errors[0].id, 'postgres.E002')
def test_invalid_default(self):
class MyModel(PostgreSQLModel):
field = ArrayField(models.IntegerField(), default=[])
model = MyModel()
self.assertEqual(model.check(), [
checks.Warning(
msg=(
"ArrayField default should be a callable instead of an "
"instance so that it's not shared between all field "
"instances."
),
hint='Use a callable instead, e.g., use `list` instead of `[]`.',
obj=MyModel._meta.get_field('field'),
id='postgres.E003',
)
])
def test_valid_default(self):
class MyModel(PostgreSQLModel):
field = ArrayField(models.IntegerField(), default=list)
model = MyModel()
self.assertEqual(model.check(), [])
def test_valid_default_none(self):
class MyModel(PostgreSQLModel):
field = ArrayField(models.IntegerField(), default=None)
model = MyModel()
self.assertEqual(model.check(), [])
def test_nested_field_checks(self):
"""
Nested ArrayFields are permitted.
"""
class MyModel(PostgreSQLModel):
field = ArrayField(ArrayField(models.CharField()))
model = MyModel()
errors = model.check()
self.assertEqual(len(errors), 1)
# The inner CharField is missing a max_length.
self.assertEqual(errors[0].id, 'postgres.E001')
self.assertIn('max_length', errors[0].msg)
@unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific tests")
class TestMigrations(TransactionTestCase):
available_apps = ['postgres_tests']
def test_deconstruct(self):
field = ArrayField(models.IntegerField())
name, path, args, kwargs = field.deconstruct()
new = ArrayField(*args, **kwargs)
self.assertEqual(type(new.base_field), type(field.base_field))
self.assertIsNot(new.base_field, field.base_field)
def test_deconstruct_with_size(self):
field = ArrayField(models.IntegerField(), size=3)
name, path, args, kwargs = field.deconstruct()
new = ArrayField(*args, **kwargs)
self.assertEqual(new.size, field.size)
def test_deconstruct_args(self):
field = ArrayField(models.CharField(max_length=20))
name, path, args, kwargs = field.deconstruct()
new = ArrayField(*args, **kwargs)
self.assertEqual(new.base_field.max_length, field.base_field.max_length)
def test_subclass_deconstruct(self):
field = ArrayField(models.IntegerField())
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, 'django.contrib.postgres.fields.ArrayField')
field = ArrayFieldSubclass()
name, path, args, kwargs = field.deconstruct()
self.assertEqual(path, 'postgres_tests.models.ArrayFieldSubclass')
@override_settings(MIGRATION_MODULES={
"postgres_tests": "postgres_tests.array_default_migrations",
})
def test_adding_field_with_default(self):
# See #22962
table_name = 'postgres_tests_integerarraydefaultmodel'
with connection.cursor() as cursor:
self.assertNotIn(table_name, connection.introspection.table_names(cursor))
call_command('migrate', 'postgres_tests', verbosity=0)
with connection.cursor() as cursor:
self.assertIn(table_name, connection.introspection.table_names(cursor))
call_command('migrate', 'postgres_tests', 'zero', verbosity=0)
with connection.cursor() as cursor:
self.assertNotIn(table_name, connection.introspection.table_names(cursor))
@override_settings(MIGRATION_MODULES={
"postgres_tests": "postgres_tests.array_index_migrations",
})
def test_adding_arrayfield_with_index(self):
"""
ArrayField shouldn't have varchar_patterns_ops or text_patterns_ops indexes.
"""
table_name = 'postgres_tests_chartextarrayindexmodel'
call_command('migrate', 'postgres_tests', verbosity=0)
with connection.cursor() as cursor:
like_constraint_columns_list = [
v['columns']
for k, v in list(connection.introspection.get_constraints(cursor, table_name).items())
if k.endswith('_like')
]
# Only the CharField should have a LIKE index.
self.assertEqual(like_constraint_columns_list, [['char2']])
# All fields should have regular indexes.
with connection.cursor() as cursor:
indexes = [
c['columns'][0]
for c in connection.introspection.get_constraints(cursor, table_name).values()
if c['index'] and len(c['columns']) == 1
]
self.assertIn('char', indexes)
self.assertIn('char2', indexes)
self.assertIn('text', indexes)
call_command('migrate', 'postgres_tests', 'zero', verbosity=0)
with connection.cursor() as cursor:
self.assertNotIn(table_name, connection.introspection.table_names(cursor))
class TestSerialization(PostgreSQLSimpleTestCase):
test_data = (
'[{"fields": {"field": "[\\"1\\", \\"2\\", null]"}, "model": "postgres_tests.integerarraymodel", "pk": null}]'
)
def test_dumping(self):
instance = IntegerArrayModel(field=[1, 2, None])
data = serializers.serialize('json', [instance])
self.assertEqual(json.loads(data), json.loads(self.test_data))
def test_loading(self):
instance = list(serializers.deserialize('json', self.test_data))[0].object
self.assertEqual(instance.field, [1, 2, None])
class TestValidation(PostgreSQLSimpleTestCase):
def test_unbounded(self):
field = ArrayField(models.IntegerField())
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean([1, None], None)
self.assertEqual(cm.exception.code, 'item_invalid')
self.assertEqual(
cm.exception.message % cm.exception.params,
'Item 2 in the array did not validate: This field cannot be null.'
)
def test_blank_true(self):
field = ArrayField(models.IntegerField(blank=True, null=True))
# This should not raise a validation error
field.clean([1, None], None)
def test_with_size(self):
field = ArrayField(models.IntegerField(), size=3)
field.clean([1, 2, 3], None)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean([1, 2, 3, 4], None)
self.assertEqual(cm.exception.messages[0], 'List contains 4 items, it should contain no more than 3.')
def test_nested_array_mismatch(self):
field = ArrayField(ArrayField(models.IntegerField()))
field.clean([[1, 2], [3, 4]], None)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean([[1, 2], [3, 4, 5]], None)
self.assertEqual(cm.exception.code, 'nested_array_mismatch')
self.assertEqual(cm.exception.messages[0], 'Nested arrays must have the same length.')
def test_with_base_field_error_params(self):
field = ArrayField(models.CharField(max_length=2))
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(['abc'], None)
self.assertEqual(len(cm.exception.error_list), 1)
exception = cm.exception.error_list[0]
self.assertEqual(
exception.message,
'Item 1 in the array did not validate: Ensure this value has at most 2 characters (it has 3).'
)
self.assertEqual(exception.code, 'item_invalid')
self.assertEqual(exception.params, {'nth': 1, 'value': 'abc', 'limit_value': 2, 'show_value': 3})
def test_with_validators(self):
field = ArrayField(models.IntegerField(validators=[validators.MinValueValidator(1)]))
field.clean([1, 2], None)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean([0], None)
self.assertEqual(len(cm.exception.error_list), 1)
exception = cm.exception.error_list[0]
self.assertEqual(
exception.message,
'Item 1 in the array did not validate: Ensure this value is greater than or equal to 1.'
)
self.assertEqual(exception.code, 'item_invalid')
self.assertEqual(exception.params, {'nth': 1, 'value': 0, 'limit_value': 1, 'show_value': 0})
class TestSimpleFormField(PostgreSQLSimpleTestCase):
def test_valid(self):
field = SimpleArrayField(forms.CharField())
value = field.clean('a,b,c')
self.assertEqual(value, ['a', 'b', 'c'])
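        # SimpleArrayField splits the submitted string on its delimiter
        # (a comma by default) and runs the base field's clean() on every
        # item, which is what produces the Python list asserted here.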
def test_to_python_fail(self):
field = SimpleArrayField(forms.IntegerField())
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean('a,b,9')
self.assertEqual(cm.exception.messages[0], 'Item 1 in the array did not validate: Enter a whole number.')
def test_validate_fail(self):
field = SimpleArrayField(forms.CharField(required=True))
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean('a,b,')
self.assertEqual(cm.exception.messages[0], 'Item 3 in the array did not validate: This field is required.')
def test_validate_fail_base_field_error_params(self):
field = SimpleArrayField(forms.CharField(max_length=2))
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean('abc,c,defg')
errors = cm.exception.error_list
self.assertEqual(len(errors), 2)
first_error = errors[0]
self.assertEqual(
first_error.message,
'Item 1 in the array did not validate: Ensure this value has at most 2 characters (it has 3).'
)
self.assertEqual(first_error.code, 'item_invalid')
self.assertEqual(first_error.params, {'nth': 1, 'value': 'abc', 'limit_value': 2, 'show_value': 3})
second_error = errors[1]
self.assertEqual(
second_error.message,
'Item 3 in the array did not validate: Ensure this value has at most 2 characters (it has 4).'
)
self.assertEqual(second_error.code, 'item_invalid')
self.assertEqual(second_error.params, {'nth': 3, 'value': 'defg', 'limit_value': 2, 'show_value': 4})
def test_validators_fail(self):
field = SimpleArrayField(forms.RegexField('[a-e]{2}'))
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean('a,bc,de')
self.assertEqual(cm.exception.messages[0], 'Item 1 in the array did not validate: Enter a valid value.')
def test_delimiter(self):
field = SimpleArrayField(forms.CharField(), delimiter='|')
value = field.clean('a|b|c')
self.assertEqual(value, ['a', 'b', 'c'])
def test_delimiter_with_nesting(self):
field = SimpleArrayField(SimpleArrayField(forms.CharField()), delimiter='|')
value = field.clean('a,b|c,d')
self.assertEqual(value, [['a', 'b'], ['c', 'd']])
def test_prepare_value(self):
field = SimpleArrayField(forms.CharField())
value = field.prepare_value(['a', 'b', 'c'])
self.assertEqual(value, 'a,b,c')
def test_max_length(self):
field = SimpleArrayField(forms.CharField(), max_length=2)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean('a,b,c')
self.assertEqual(cm.exception.messages[0], 'List contains 3 items, it should contain no more than 2.')
def test_min_length(self):
field = SimpleArrayField(forms.CharField(), min_length=4)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean('a,b,c')
self.assertEqual(cm.exception.messages[0], 'List contains 3 items, it should contain no fewer than 4.')
def test_required(self):
field = SimpleArrayField(forms.CharField(), required=True)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean('')
self.assertEqual(cm.exception.messages[0], 'This field is required.')
def test_model_field_formfield(self):
model_field = ArrayField(models.CharField(max_length=27))
form_field = model_field.formfield()
self.assertIsInstance(form_field, SimpleArrayField)
self.assertIsInstance(form_field.base_field, forms.CharField)
self.assertEqual(form_field.base_field.max_length, 27)
def test_model_field_formfield_size(self):
model_field = ArrayField(models.CharField(max_length=27), size=4)
form_field = model_field.formfield()
self.assertIsInstance(form_field, SimpleArrayField)
self.assertEqual(form_field.max_length, 4)
def test_model_field_choices(self):
model_field = ArrayField(models.IntegerField(choices=((1, 'A'), (2, 'B'))))
form_field = model_field.formfield()
self.assertEqual(form_field.clean('1,2'), [1, 2])
def test_already_converted_value(self):
field = SimpleArrayField(forms.CharField())
vals = ['a', 'b', 'c']
self.assertEqual(field.clean(vals), vals)
def test_has_changed(self):
field = SimpleArrayField(forms.IntegerField())
self.assertIs(field.has_changed([1, 2], [1, 2]), False)
self.assertIs(field.has_changed([1, 2], '1,2'), False)
self.assertIs(field.has_changed([1, 2], '1,2,3'), True)
self.assertIs(field.has_changed([1, 2], 'a,b'), True)
def test_has_changed_empty(self):
field = SimpleArrayField(forms.CharField())
self.assertIs(field.has_changed(None, None), False)
self.assertIs(field.has_changed(None, ''), False)
self.assertIs(field.has_changed(None, []), False)
self.assertIs(field.has_changed([], None), False)
self.assertIs(field.has_changed([], ''), False)
class TestSplitFormField(PostgreSQLSimpleTestCase):
def test_valid(self):
class SplitForm(forms.Form):
array = SplitArrayField(forms.CharField(), size=3)
data = {'array_0': 'a', 'array_1': 'b', 'array_2': 'c'}
form = SplitForm(data)
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data, {'array': ['a', 'b', 'c']})
def test_required(self):
class SplitForm(forms.Form):
array = SplitArrayField(forms.CharField(), required=True, size=3)
data = {'array_0': '', 'array_1': '', 'array_2': ''}
form = SplitForm(data)
self.assertFalse(form.is_valid())
self.assertEqual(form.errors, {'array': ['This field is required.']})
def test_remove_trailing_nulls(self):
class SplitForm(forms.Form):
array = SplitArrayField(forms.CharField(required=False), size=5, remove_trailing_nulls=True)
data = {'array_0': 'a', 'array_1': '', 'array_2': 'b', 'array_3': '', 'array_4': ''}
form = SplitForm(data)
self.assertTrue(form.is_valid(), form.errors)
self.assertEqual(form.cleaned_data, {'array': ['a', '', 'b']})
def test_remove_trailing_nulls_not_required(self):
class SplitForm(forms.Form):
array = SplitArrayField(
forms.CharField(required=False),
size=2,
remove_trailing_nulls=True,
required=False,
)
data = {'array_0': '', 'array_1': ''}
form = SplitForm(data)
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data, {'array': []})
def test_required_field(self):
class SplitForm(forms.Form):
array = SplitArrayField(forms.CharField(), size=3)
data = {'array_0': 'a', 'array_1': 'b', 'array_2': ''}
form = SplitForm(data)
self.assertFalse(form.is_valid())
self.assertEqual(form.errors, {'array': ['Item 3 in the array did not validate: This field is required.']})
def test_invalid_integer(self):
msg = 'Item 2 in the array did not validate: Ensure this value is less than or equal to 100.'
with self.assertRaisesMessage(exceptions.ValidationError, msg):
SplitArrayField(forms.IntegerField(max_value=100), size=2).clean([0, 101])
# To locate the widget's template.
@modify_settings(INSTALLED_APPS={'append': 'django.contrib.postgres'})
def test_rendering(self):
class SplitForm(forms.Form):
array = SplitArrayField(forms.CharField(), size=3)
self.assertHTMLEqual(str(SplitForm()), '''
<tr>
<th><label for="id_array_0">Array:</label></th>
<td>
<input id="id_array_0" name="array_0" type="text" required>
<input id="id_array_1" name="array_1" type="text" required>
<input id="id_array_2" name="array_2" type="text" required>
</td>
</tr>
''')
def test_invalid_char_length(self):
field = SplitArrayField(forms.CharField(max_length=2), size=3)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(['abc', 'c', 'defg'])
self.assertEqual(cm.exception.messages, [
'Item 1 in the array did not validate: Ensure this value has at most 2 characters (it has 3).',
'Item 3 in the array did not validate: Ensure this value has at most 2 characters (it has 4).',
])
def test_splitarraywidget_value_omitted_from_data(self):
class Form(forms.ModelForm):
field = SplitArrayField(forms.IntegerField(), required=False, size=2)
class Meta:
model = IntegerArrayModel
fields = ('field',)
form = Form({'field_0': '1', 'field_1': '2'})
self.assertEqual(form.errors, {})
obj = form.save(commit=False)
self.assertEqual(obj.field, [1, 2])
class TestSplitFormWidget(PostgreSQLWidgetTestCase):
def test_get_context(self):
self.assertEqual(
SplitArrayWidget(forms.TextInput(), size=2).get_context('name', ['val1', 'val2']),
{
'widget': {
'name': 'name',
'is_hidden': False,
'required': False,
'value': "['val1', 'val2']",
'attrs': {},
'template_name': 'postgres/widgets/split_array.html',
'subwidgets': [
{
'name': 'name_0',
'is_hidden': False,
'required': False,
'value': 'val1',
'attrs': {},
'template_name': 'django/forms/widgets/text.html',
'type': 'text',
},
{
'name': 'name_1',
'is_hidden': False,
'required': False,
'value': 'val2',
'attrs': {},
'template_name': 'django/forms/widgets/text.html',
'type': 'text',
},
]
}
}
)
def test_render(self):
self.check_html(
SplitArrayWidget(forms.TextInput(), size=2), 'array', None,
"""
<input name="array_0" type="text">
<input name="array_1" type="text">
"""
)
def test_render_attrs(self):
self.check_html(
SplitArrayWidget(forms.TextInput(), size=2),
'array', ['val1', 'val2'], attrs={'id': 'foo'},
html=(
"""
<input id="foo_0" name="array_0" type="text" value="val1">
<input id="foo_1" name="array_1" type="text" value="val2">
"""
)
)
def test_value_omitted_from_data(self):
widget = SplitArrayWidget(forms.TextInput(), size=2)
self.assertIs(widget.value_omitted_from_data({}, {}, 'field'), True)
self.assertIs(widget.value_omitted_from_data({'field_0': 'value'}, {}, 'field'), False)
self.assertIs(widget.value_omitted_from_data({'field_1': 'value'}, {}, 'field'), False)
self.assertIs(widget.value_omitted_from_data({'field_0': 'value', 'field_1': 'value'}, {}, 'field'), False)
9278164f2ff98bda813e9285a6fb5c1e7ff890aa571e1f4f6ce79ded94a31960
import json
from django.db.models.expressions import F, Value
from django.test.testcases import skipUnlessDBFeature
from django.test.utils import Approximate
from . import PostgreSQLTestCase
from .models import AggregateTestModel, StatTestModel
try:
from django.contrib.postgres.aggregates import (
ArrayAgg, BitAnd, BitOr, BoolAnd, BoolOr, Corr, CovarPop, JSONBAgg,
RegrAvgX, RegrAvgY, RegrCount, RegrIntercept, RegrR2, RegrSlope,
RegrSXX, RegrSXY, RegrSYY, StatAggregate, StringAgg,
)
except ImportError:
pass # psycopg2 is not installed
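# The aggregate tests below use two models from postgres_tests.models. A
# minimal, hypothetical sketch (reconstructed from the fields the tests
# touch, not taken from the real module) would be roughly:
#
#     class AggregateTestModel(models.Model):
#         boolean_field = models.BooleanField(null=True)
#         char_field = models.CharField(max_length=30, blank=True)
#         integer_field = models.IntegerField(null=True)
#
#     class StatTestModel(models.Model):
#         int1 = models.IntegerField()
#         int2 = models.IntegerField()
#         related_field = models.ForeignKey(AggregateTestModel, models.SET_NULL, null=True)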
class TestGeneralAggregate(PostgreSQLTestCase):
@classmethod
def setUpTestData(cls):
AggregateTestModel.objects.create(boolean_field=True, char_field='Foo1', integer_field=0)
AggregateTestModel.objects.create(boolean_field=False, char_field='Foo2', integer_field=1)
AggregateTestModel.objects.create(boolean_field=False, char_field='Foo4', integer_field=2)
AggregateTestModel.objects.create(boolean_field=True, char_field='Foo3', integer_field=0)
def test_array_agg_charfield(self):
values = AggregateTestModel.objects.aggregate(arrayagg=ArrayAgg('char_field'))
self.assertEqual(values, {'arrayagg': ['Foo1', 'Foo2', 'Foo4', 'Foo3']})
def test_array_agg_charfield_ordering(self):
ordering_test_cases = (
(F('char_field').desc(), ['Foo4', 'Foo3', 'Foo2', 'Foo1']),
(F('char_field').asc(), ['Foo1', 'Foo2', 'Foo3', 'Foo4']),
(F('char_field'), ['Foo1', 'Foo2', 'Foo3', 'Foo4']),
([F('boolean_field'), F('char_field').desc()], ['Foo4', 'Foo2', 'Foo3', 'Foo1']),
((F('boolean_field'), F('char_field').desc()), ['Foo4', 'Foo2', 'Foo3', 'Foo1']),
('char_field', ['Foo1', 'Foo2', 'Foo3', 'Foo4']),
('-char_field', ['Foo4', 'Foo3', 'Foo2', 'Foo1']),
)
for ordering, expected_output in ordering_test_cases:
with self.subTest(ordering=ordering, expected_output=expected_output):
values = AggregateTestModel.objects.aggregate(
arrayagg=ArrayAgg('char_field', ordering=ordering)
)
self.assertEqual(values, {'arrayagg': expected_output})
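    # A note on ordering: ArrayAgg applies the ordering inside the aggregate
    # itself, so the call above presumably compiles to SQL of the form
    # ARRAY_AGG("char_field" ORDER BY "char_field" DESC) rather than ordering
    # the outer queryset.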
def test_array_agg_integerfield(self):
values = AggregateTestModel.objects.aggregate(arrayagg=ArrayAgg('integer_field'))
self.assertEqual(values, {'arrayagg': [0, 1, 2, 0]})
def test_array_agg_integerfield_ordering(self):
values = AggregateTestModel.objects.aggregate(
arrayagg=ArrayAgg('integer_field', ordering=F('integer_field').desc())
)
self.assertEqual(values, {'arrayagg': [2, 1, 0, 0]})
def test_array_agg_booleanfield(self):
values = AggregateTestModel.objects.aggregate(arrayagg=ArrayAgg('boolean_field'))
self.assertEqual(values, {'arrayagg': [True, False, False, True]})
def test_array_agg_booleanfield_ordering(self):
ordering_test_cases = (
(F('boolean_field').asc(), [False, False, True, True]),
(F('boolean_field').desc(), [True, True, False, False]),
(F('boolean_field'), [False, False, True, True]),
)
for ordering, expected_output in ordering_test_cases:
with self.subTest(ordering=ordering, expected_output=expected_output):
values = AggregateTestModel.objects.aggregate(
arrayagg=ArrayAgg('boolean_field', ordering=ordering)
)
self.assertEqual(values, {'arrayagg': expected_output})
def test_array_agg_empty_result(self):
AggregateTestModel.objects.all().delete()
values = AggregateTestModel.objects.aggregate(arrayagg=ArrayAgg('char_field'))
self.assertEqual(values, {'arrayagg': []})
values = AggregateTestModel.objects.aggregate(arrayagg=ArrayAgg('integer_field'))
self.assertEqual(values, {'arrayagg': []})
values = AggregateTestModel.objects.aggregate(arrayagg=ArrayAgg('boolean_field'))
self.assertEqual(values, {'arrayagg': []})
def test_array_agg_lookups(self):
aggr1 = AggregateTestModel.objects.create()
aggr2 = AggregateTestModel.objects.create()
StatTestModel.objects.create(related_field=aggr1, int1=1, int2=0)
StatTestModel.objects.create(related_field=aggr1, int1=2, int2=0)
StatTestModel.objects.create(related_field=aggr2, int1=3, int2=0)
StatTestModel.objects.create(related_field=aggr2, int1=4, int2=0)
qs = StatTestModel.objects.values('related_field').annotate(
array=ArrayAgg('int1')
).filter(array__overlap=[2]).values_list('array', flat=True)
self.assertCountEqual(qs.get(), [1, 2])
def test_bit_and_general(self):
values = AggregateTestModel.objects.filter(
integer_field__in=[0, 1]).aggregate(bitand=BitAnd('integer_field'))
self.assertEqual(values, {'bitand': 0})
def test_bit_and_on_only_true_values(self):
values = AggregateTestModel.objects.filter(
integer_field=1).aggregate(bitand=BitAnd('integer_field'))
self.assertEqual(values, {'bitand': 1})
def test_bit_and_on_only_false_values(self):
values = AggregateTestModel.objects.filter(
integer_field=0).aggregate(bitand=BitAnd('integer_field'))
self.assertEqual(values, {'bitand': 0})
def test_bit_and_empty_result(self):
AggregateTestModel.objects.all().delete()
values = AggregateTestModel.objects.aggregate(bitand=BitAnd('integer_field'))
self.assertEqual(values, {'bitand': None})
def test_bit_or_general(self):
values = AggregateTestModel.objects.filter(
integer_field__in=[0, 1]).aggregate(bitor=BitOr('integer_field'))
self.assertEqual(values, {'bitor': 1})
def test_bit_or_on_only_true_values(self):
values = AggregateTestModel.objects.filter(
integer_field=1).aggregate(bitor=BitOr('integer_field'))
self.assertEqual(values, {'bitor': 1})
def test_bit_or_on_only_false_values(self):
values = AggregateTestModel.objects.filter(
integer_field=0).aggregate(bitor=BitOr('integer_field'))
self.assertEqual(values, {'bitor': 0})
def test_bit_or_empty_result(self):
AggregateTestModel.objects.all().delete()
values = AggregateTestModel.objects.aggregate(bitor=BitOr('integer_field'))
self.assertEqual(values, {'bitor': None})
def test_bool_and_general(self):
values = AggregateTestModel.objects.aggregate(booland=BoolAnd('boolean_field'))
self.assertEqual(values, {'booland': False})
def test_bool_and_empty_result(self):
AggregateTestModel.objects.all().delete()
values = AggregateTestModel.objects.aggregate(booland=BoolAnd('boolean_field'))
self.assertEqual(values, {'booland': None})
def test_bool_or_general(self):
values = AggregateTestModel.objects.aggregate(boolor=BoolOr('boolean_field'))
self.assertEqual(values, {'boolor': True})
def test_bool_or_empty_result(self):
AggregateTestModel.objects.all().delete()
values = AggregateTestModel.objects.aggregate(boolor=BoolOr('boolean_field'))
self.assertEqual(values, {'boolor': None})
def test_string_agg_requires_delimiter(self):
with self.assertRaises(TypeError):
AggregateTestModel.objects.aggregate(stringagg=StringAgg('char_field'))
def test_string_agg_charfield(self):
values = AggregateTestModel.objects.aggregate(stringagg=StringAgg('char_field', delimiter=';'))
self.assertEqual(values, {'stringagg': 'Foo1;Foo2;Foo4;Foo3'})
def test_string_agg_charfield_ordering(self):
ordering_test_cases = (
(F('char_field').desc(), 'Foo4;Foo3;Foo2;Foo1'),
(F('char_field').asc(), 'Foo1;Foo2;Foo3;Foo4'),
(F('char_field'), 'Foo1;Foo2;Foo3;Foo4'),
)
for ordering, expected_output in ordering_test_cases:
with self.subTest(ordering=ordering, expected_output=expected_output):
values = AggregateTestModel.objects.aggregate(
stringagg=StringAgg('char_field', delimiter=';', ordering=ordering)
)
self.assertEqual(values, {'stringagg': expected_output})
def test_string_agg_empty_result(self):
AggregateTestModel.objects.all().delete()
values = AggregateTestModel.objects.aggregate(stringagg=StringAgg('char_field', delimiter=';'))
self.assertEqual(values, {'stringagg': ''})
def test_orderable_agg_alternative_fields(self):
values = AggregateTestModel.objects.aggregate(
arrayagg=ArrayAgg('integer_field', ordering=F('char_field').asc())
)
self.assertEqual(values, {'arrayagg': [0, 1, 0, 2]})
@skipUnlessDBFeature('has_jsonb_agg')
def test_json_agg(self):
values = AggregateTestModel.objects.aggregate(jsonagg=JSONBAgg('char_field'))
self.assertEqual(values, {'jsonagg': ['Foo1', 'Foo2', 'Foo4', 'Foo3']})
@skipUnlessDBFeature('has_jsonb_agg')
def test_json_agg_empty(self):
values = AggregateTestModel.objects.none().aggregate(jsonagg=JSONBAgg('integer_field'))
self.assertEqual(values, json.loads('{"jsonagg": []}'))
class TestAggregateDistinct(PostgreSQLTestCase):
@classmethod
def setUpTestData(cls):
AggregateTestModel.objects.create(char_field='Foo')
AggregateTestModel.objects.create(char_field='Foo')
AggregateTestModel.objects.create(char_field='Bar')
def test_string_agg_distinct_false(self):
values = AggregateTestModel.objects.aggregate(stringagg=StringAgg('char_field', delimiter=' ', distinct=False))
self.assertEqual(values['stringagg'].count('Foo'), 2)
self.assertEqual(values['stringagg'].count('Bar'), 1)
def test_string_agg_distinct_true(self):
values = AggregateTestModel.objects.aggregate(stringagg=StringAgg('char_field', delimiter=' ', distinct=True))
self.assertEqual(values['stringagg'].count('Foo'), 1)
self.assertEqual(values['stringagg'].count('Bar'), 1)
def test_array_agg_distinct_false(self):
values = AggregateTestModel.objects.aggregate(arrayagg=ArrayAgg('char_field', distinct=False))
self.assertEqual(sorted(values['arrayagg']), ['Bar', 'Foo', 'Foo'])
def test_array_agg_distinct_true(self):
values = AggregateTestModel.objects.aggregate(arrayagg=ArrayAgg('char_field', distinct=True))
self.assertEqual(sorted(values['arrayagg']), ['Bar', 'Foo'])
class TestStatisticsAggregate(PostgreSQLTestCase):
@classmethod
def setUpTestData(cls):
StatTestModel.objects.create(
int1=1,
int2=3,
related_field=AggregateTestModel.objects.create(integer_field=0),
)
StatTestModel.objects.create(
int1=2,
int2=2,
related_field=AggregateTestModel.objects.create(integer_field=1),
)
StatTestModel.objects.create(
int1=3,
int2=1,
related_field=AggregateTestModel.objects.create(integer_field=2),
)
# Tests for base class (StatAggregate)
def test_missing_arguments_raises_exception(self):
with self.assertRaisesMessage(ValueError, 'Both y and x must be provided.'):
StatAggregate(x=None, y=None)
def test_correct_source_expressions(self):
func = StatAggregate(x='test', y=13)
self.assertIsInstance(func.source_expressions[0], Value)
self.assertIsInstance(func.source_expressions[1], F)
def test_alias_is_required(self):
class SomeFunc(StatAggregate):
function = 'TEST'
with self.assertRaisesMessage(TypeError, 'Complex aggregates require an alias'):
StatTestModel.objects.aggregate(SomeFunc(y='int2', x='int1'))
# Test aggregates
def test_corr_general(self):
values = StatTestModel.objects.aggregate(corr=Corr(y='int2', x='int1'))
self.assertEqual(values, {'corr': -1.0})
def test_corr_empty_result(self):
StatTestModel.objects.all().delete()
values = StatTestModel.objects.aggregate(corr=Corr(y='int2', x='int1'))
self.assertEqual(values, {'corr': None})
def test_covar_pop_general(self):
values = StatTestModel.objects.aggregate(covarpop=CovarPop(y='int2', x='int1'))
self.assertEqual(values, {'covarpop': Approximate(-0.66, places=1)})
def test_covar_pop_empty_result(self):
StatTestModel.objects.all().delete()
values = StatTestModel.objects.aggregate(covarpop=CovarPop(y='int2', x='int1'))
self.assertEqual(values, {'covarpop': None})
def test_covar_pop_sample(self):
values = StatTestModel.objects.aggregate(covarpop=CovarPop(y='int2', x='int1', sample=True))
self.assertEqual(values, {'covarpop': -1.0})
def test_covar_pop_sample_empty_result(self):
StatTestModel.objects.all().delete()
values = StatTestModel.objects.aggregate(covarpop=CovarPop(y='int2', x='int1', sample=True))
self.assertEqual(values, {'covarpop': None})
def test_regr_avgx_general(self):
values = StatTestModel.objects.aggregate(regravgx=RegrAvgX(y='int2', x='int1'))
self.assertEqual(values, {'regravgx': 2.0})
def test_regr_avgx_empty_result(self):
StatTestModel.objects.all().delete()
values = StatTestModel.objects.aggregate(regravgx=RegrAvgX(y='int2', x='int1'))
self.assertEqual(values, {'regravgx': None})
def test_regr_avgy_general(self):
values = StatTestModel.objects.aggregate(regravgy=RegrAvgY(y='int2', x='int1'))
self.assertEqual(values, {'regravgy': 2.0})
def test_regr_avgy_empty_result(self):
StatTestModel.objects.all().delete()
values = StatTestModel.objects.aggregate(regravgy=RegrAvgY(y='int2', x='int1'))
self.assertEqual(values, {'regravgy': None})
def test_regr_count_general(self):
values = StatTestModel.objects.aggregate(regrcount=RegrCount(y='int2', x='int1'))
self.assertEqual(values, {'regrcount': 3})
def test_regr_count_empty_result(self):
StatTestModel.objects.all().delete()
values = StatTestModel.objects.aggregate(regrcount=RegrCount(y='int2', x='int1'))
self.assertEqual(values, {'regrcount': 0})
def test_regr_intercept_general(self):
values = StatTestModel.objects.aggregate(regrintercept=RegrIntercept(y='int2', x='int1'))
self.assertEqual(values, {'regrintercept': 4})
def test_regr_intercept_empty_result(self):
StatTestModel.objects.all().delete()
values = StatTestModel.objects.aggregate(regrintercept=RegrIntercept(y='int2', x='int1'))
self.assertEqual(values, {'regrintercept': None})
def test_regr_r2_general(self):
values = StatTestModel.objects.aggregate(regrr2=RegrR2(y='int2', x='int1'))
self.assertEqual(values, {'regrr2': 1})
def test_regr_r2_empty_result(self):
StatTestModel.objects.all().delete()
values = StatTestModel.objects.aggregate(regrr2=RegrR2(y='int2', x='int1'))
self.assertEqual(values, {'regrr2': None})
def test_regr_slope_general(self):
values = StatTestModel.objects.aggregate(regrslope=RegrSlope(y='int2', x='int1'))
self.assertEqual(values, {'regrslope': -1})
def test_regr_slope_empty_result(self):
StatTestModel.objects.all().delete()
values = StatTestModel.objects.aggregate(regrslope=RegrSlope(y='int2', x='int1'))
self.assertEqual(values, {'regrslope': None})
def test_regr_sxx_general(self):
values = StatTestModel.objects.aggregate(regrsxx=RegrSXX(y='int2', x='int1'))
self.assertEqual(values, {'regrsxx': 2.0})
def test_regr_sxx_empty_result(self):
StatTestModel.objects.all().delete()
values = StatTestModel.objects.aggregate(regrsxx=RegrSXX(y='int2', x='int1'))
self.assertEqual(values, {'regrsxx': None})
def test_regr_sxy_general(self):
values = StatTestModel.objects.aggregate(regrsxy=RegrSXY(y='int2', x='int1'))
self.assertEqual(values, {'regrsxy': -2.0})
def test_regr_sxy_empty_result(self):
StatTestModel.objects.all().delete()
values = StatTestModel.objects.aggregate(regrsxy=RegrSXY(y='int2', x='int1'))
self.assertEqual(values, {'regrsxy': None})
def test_regr_syy_general(self):
values = StatTestModel.objects.aggregate(regrsyy=RegrSYY(y='int2', x='int1'))
self.assertEqual(values, {'regrsyy': 2.0})
def test_regr_syy_empty_result(self):
StatTestModel.objects.all().delete()
values = StatTestModel.objects.aggregate(regrsyy=RegrSYY(y='int2', x='int1'))
self.assertEqual(values, {'regrsyy': None})
def test_regr_avgx_with_related_obj_and_number_as_argument(self):
"""
This is more complex test to check if JOIN on field and
number as argument works as expected.
"""
values = StatTestModel.objects.aggregate(complex_regravgx=RegrAvgX(y=5, x='related_field__integer_field'))
self.assertEqual(values, {'complex_regravgx': 1.0})
d3e414651de24120caf7f4508adf90a5eb80fabd36eac2f5fc33b66bf79b4ea2
"""
Test PostgreSQL full text search.
These tests use dialogue from the 1975 film Monty Python and the Holy Grail.
All text copyright Python (Monty) Pictures. Thanks to sacred-texts.com for the
transcript.
"""
from django.contrib.postgres.search import (
SearchQuery, SearchRank, SearchVector,
)
from django.db.models import F
from django.test import SimpleTestCase, modify_settings, skipUnlessDBFeature
from . import PostgreSQLTestCase
from .models import Character, Line, Scene
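# A rough, hypothetical sketch of the models used below (inferred from the
# fields the tests access, not the actual postgres_tests.models definitions):
#
#     class Scene(models.Model):
#         scene = models.CharField(max_length=255)
#         setting = models.CharField(max_length=255)
#
#     class Character(models.Model):
#         name = models.CharField(max_length=255)
#
#     class Line(models.Model):
#         scene = models.ForeignKey(Scene, models.CASCADE)
#         character = models.ForeignKey(Character, models.CASCADE)
#         dialogue = models.TextField(blank=True, null=True)
#         dialogue_search_vector = SearchVectorField(blank=True, null=True)
#         dialogue_config = models.CharField(max_length=100, blank=True, null=True)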
class GrailTestData:
@classmethod
def setUpTestData(cls):
cls.robin = Scene.objects.create(scene='Scene 10', setting='The dark forest of Ewing')
cls.minstrel = Character.objects.create(name='Minstrel')
verses = [
(
'Bravely bold Sir Robin, rode forth from Camelot. '
'He was not afraid to die, o Brave Sir Robin. '
'He was not at all afraid to be killed in nasty ways. '
'Brave, brave, brave, brave Sir Robin!'
),
(
'He was not in the least bit scared to be mashed into a pulp, '
'Or to have his eyes gouged out, and his elbows broken. '
'To have his kneecaps split, and his body burned away, '
'And his limbs all hacked and mangled, brave Sir Robin!'
),
(
'His head smashed in and his heart cut out, '
'And his liver removed and his bowels unplugged, '
'And his nostrils ripped and his bottom burned off,'
'And his --'
),
]
cls.verses = [Line.objects.create(
scene=cls.robin,
character=cls.minstrel,
dialogue=verse,
) for verse in verses]
cls.verse0, cls.verse1, cls.verse2 = cls.verses
cls.witch_scene = Scene.objects.create(scene='Scene 5', setting="Sir Bedemir's Castle")
bedemir = Character.objects.create(name='Bedemir')
crowd = Character.objects.create(name='Crowd')
witch = Character.objects.create(name='Witch')
duck = Character.objects.create(name='Duck')
cls.bedemir0 = Line.objects.create(
scene=cls.witch_scene,
character=bedemir,
dialogue='We shall use my larger scales!',
dialogue_config='english',
)
cls.bedemir1 = Line.objects.create(
scene=cls.witch_scene,
character=bedemir,
dialogue='Right, remove the supports!',
dialogue_config='english',
)
cls.duck = Line.objects.create(scene=cls.witch_scene, character=duck, dialogue=None)
cls.crowd = Line.objects.create(scene=cls.witch_scene, character=crowd, dialogue='A witch! A witch!')
cls.witch = Line.objects.create(scene=cls.witch_scene, character=witch, dialogue="It's a fair cop.")
trojan_rabbit = Scene.objects.create(scene='Scene 8', setting="The castle of Our Master Ruiz' de lu la Ramper")
guards = Character.objects.create(name='French Guards')
cls.french = Line.objects.create(
scene=trojan_rabbit,
character=guards,
dialogue='Oh. Un beau cadeau. Oui oui.',
dialogue_config='french',
)
@modify_settings(INSTALLED_APPS={'append': 'django.contrib.postgres'})
class SimpleSearchTest(GrailTestData, PostgreSQLTestCase):
def test_simple(self):
searched = Line.objects.filter(dialogue__search='elbows')
self.assertSequenceEqual(searched, [self.verse1])
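        # The plain-string __search lookup above presumably compiles to SQL
        # roughly like to_tsvector("dialogue") @@ plainto_tsquery('elbows');
        # the stemming performed by those functions is what lets the
        # non-exact queries in the following tests match as well.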
def test_non_exact_match(self):
searched = Line.objects.filter(dialogue__search='hearts')
self.assertSequenceEqual(searched, [self.verse2])
def test_search_two_terms(self):
searched = Line.objects.filter(dialogue__search='heart bowel')
self.assertSequenceEqual(searched, [self.verse2])
def test_search_two_terms_with_partial_match(self):
searched = Line.objects.filter(dialogue__search='Robin killed')
self.assertSequenceEqual(searched, [self.verse0])
@modify_settings(INSTALLED_APPS={'append': 'django.contrib.postgres'})
class SearchVectorFieldTest(GrailTestData, PostgreSQLTestCase):
def test_existing_vector(self):
Line.objects.update(dialogue_search_vector=SearchVector('dialogue'))
searched = Line.objects.filter(dialogue_search_vector=SearchQuery('Robin killed'))
self.assertSequenceEqual(searched, [self.verse0])
def test_existing_vector_config_explicit(self):
Line.objects.update(dialogue_search_vector=SearchVector('dialogue'))
searched = Line.objects.filter(dialogue_search_vector=SearchQuery('cadeaux', config='french'))
self.assertSequenceEqual(searched, [self.french])
class MultipleFieldsTest(GrailTestData, PostgreSQLTestCase):
def test_simple_on_dialogue(self):
searched = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue'),
).filter(search='elbows')
self.assertSequenceEqual(searched, [self.verse1])
def test_simple_on_scene(self):
searched = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue'),
).filter(search='Forest')
self.assertCountEqual(searched, self.verses)
def test_non_exact_match(self):
searched = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue'),
).filter(search='heart')
self.assertSequenceEqual(searched, [self.verse2])
def test_search_two_terms(self):
searched = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue'),
).filter(search='heart forest')
self.assertSequenceEqual(searched, [self.verse2])
def test_terms_adjacent(self):
searched = Line.objects.annotate(
search=SearchVector('character__name', 'dialogue'),
).filter(search='minstrel')
self.assertCountEqual(searched, self.verses)
searched = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue'),
).filter(search='minstrelbravely')
self.assertSequenceEqual(searched, [])
def test_search_with_null(self):
searched = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue'),
).filter(search='bedemir')
self.assertCountEqual(searched, [self.bedemir0, self.bedemir1, self.crowd, self.witch, self.duck])
def test_search_with_non_text(self):
searched = Line.objects.annotate(
search=SearchVector('id'),
).filter(search=str(self.crowd.id))
self.assertSequenceEqual(searched, [self.crowd])
@skipUnlessDBFeature('has_phraseto_tsquery')
def test_phrase_search(self):
line_qs = Line.objects.annotate(search=SearchVector('dialogue'))
searched = line_qs.filter(search=SearchQuery('burned body his away', search_type='phrase'))
self.assertSequenceEqual(searched, [])
searched = line_qs.filter(search=SearchQuery('his body burned away', search_type='phrase'))
self.assertSequenceEqual(searched, [self.verse1])
@skipUnlessDBFeature('has_phraseto_tsquery')
def test_phrase_search_with_config(self):
line_qs = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue', config='french'),
)
searched = line_qs.filter(
search=SearchQuery('cadeau beau un', search_type='phrase', config='french'),
)
self.assertSequenceEqual(searched, [])
searched = line_qs.filter(
search=SearchQuery('un beau cadeau', search_type='phrase', config='french'),
)
self.assertSequenceEqual(searched, [self.french])
def test_raw_search(self):
line_qs = Line.objects.annotate(search=SearchVector('dialogue'))
searched = line_qs.filter(search=SearchQuery('Robin', search_type='raw'))
self.assertCountEqual(searched, [self.verse0, self.verse1])
searched = line_qs.filter(search=SearchQuery("Robin & !'Camelot'", search_type='raw'))
self.assertSequenceEqual(searched, [self.verse1])
def test_raw_search_with_config(self):
line_qs = Line.objects.annotate(search=SearchVector('dialogue', config='french'))
searched = line_qs.filter(
search=SearchQuery("'cadeaux' & 'beaux'", search_type='raw', config='french'),
)
self.assertSequenceEqual(searched, [self.french])
def test_bad_search_type(self):
with self.assertRaisesMessage(ValueError, "Unknown search_type argument 'foo'."):
SearchQuery('kneecaps', search_type='foo')
def test_config_query_explicit(self):
searched = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue', config='french'),
).filter(search=SearchQuery('cadeaux', config='french'))
self.assertSequenceEqual(searched, [self.french])
def test_config_query_implicit(self):
searched = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue', config='french'),
).filter(search='cadeaux')
self.assertSequenceEqual(searched, [self.french])
def test_config_from_field_explicit(self):
searched = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue', config=F('dialogue_config')),
).filter(search=SearchQuery('cadeaux', config=F('dialogue_config')))
self.assertSequenceEqual(searched, [self.french])
def test_config_from_field_implicit(self):
searched = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue', config=F('dialogue_config')),
).filter(search='cadeaux')
self.assertSequenceEqual(searched, [self.french])
@modify_settings(INSTALLED_APPS={'append': 'django.contrib.postgres'})
class TestCombinations(GrailTestData, PostgreSQLTestCase):
def test_vector_add(self):
searched = Line.objects.annotate(
search=SearchVector('scene__setting') + SearchVector('character__name'),
).filter(search='bedemir')
self.assertCountEqual(searched, [self.bedemir0, self.bedemir1, self.crowd, self.witch, self.duck])
def test_vector_add_multi(self):
searched = Line.objects.annotate(
search=(
SearchVector('scene__setting') +
SearchVector('character__name') +
SearchVector('dialogue')
),
).filter(search='bedemir')
self.assertCountEqual(searched, [self.bedemir0, self.bedemir1, self.crowd, self.witch, self.duck])
def test_query_and(self):
searched = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue'),
).filter(search=SearchQuery('bedemir') & SearchQuery('scales'))
self.assertSequenceEqual(searched, [self.bedemir0])
def test_query_multiple_and(self):
searched = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue'),
).filter(search=SearchQuery('bedemir') & SearchQuery('scales') & SearchQuery('nostrils'))
self.assertSequenceEqual(searched, [])
searched = Line.objects.annotate(
search=SearchVector('scene__setting', 'dialogue'),
).filter(search=SearchQuery('shall') & SearchQuery('use') & SearchQuery('larger'))
self.assertSequenceEqual(searched, [self.bedemir0])
def test_query_or(self):
searched = Line.objects.filter(dialogue__search=SearchQuery('kneecaps') | SearchQuery('nostrils'))
self.assertCountEqual(searched, [self.verse1, self.verse2])
def test_query_multiple_or(self):
searched = Line.objects.filter(
dialogue__search=SearchQuery('kneecaps') | SearchQuery('nostrils') | SearchQuery('Sir Robin')
)
self.assertCountEqual(searched, [self.verse1, self.verse2, self.verse0])
def test_query_invert(self):
searched = Line.objects.filter(character=self.minstrel, dialogue__search=~SearchQuery('kneecaps'))
self.assertCountEqual(searched, [self.verse0, self.verse2])
def test_combine_different_configs(self):
searched = Line.objects.filter(
dialogue__search=(
SearchQuery('cadeau', config='french') |
SearchQuery('nostrils', config='english')
)
)
self.assertCountEqual(searched, [self.french, self.verse2])
@skipUnlessDBFeature('has_phraseto_tsquery')
def test_combine_raw_phrase(self):
searched = Line.objects.filter(
dialogue__search=(
SearchQuery('burn:*', search_type='raw', config='simple') |
SearchQuery('rode forth from Camelot', search_type='phrase')
)
)
self.assertCountEqual(searched, [self.verse0, self.verse1, self.verse2])
def test_query_combined_mismatch(self):
msg = "SearchQuery can only be combined with other SearchQuerys, got"
with self.assertRaisesMessage(TypeError, msg):
Line.objects.filter(dialogue__search=None | SearchQuery('kneecaps'))
with self.assertRaisesMessage(TypeError, msg):
Line.objects.filter(dialogue__search=None & SearchQuery('kneecaps'))
@modify_settings(INSTALLED_APPS={'append': 'django.contrib.postgres'})
class TestRankingAndWeights(GrailTestData, PostgreSQLTestCase):
def test_ranking(self):
searched = Line.objects.filter(character=self.minstrel).annotate(
rank=SearchRank(SearchVector('dialogue'), SearchQuery('brave sir robin')),
).order_by('rank')
self.assertSequenceEqual(searched, [self.verse2, self.verse1, self.verse0])
def test_rank_passing_untyped_args(self):
searched = Line.objects.filter(character=self.minstrel).annotate(
rank=SearchRank('dialogue', 'brave sir robin'),
).order_by('rank')
self.assertSequenceEqual(searched, [self.verse2, self.verse1, self.verse0])
def test_weights_in_vector(self):
vector = SearchVector('dialogue', weight='A') + SearchVector('character__name', weight='D')
searched = Line.objects.filter(scene=self.witch_scene).annotate(
rank=SearchRank(vector, SearchQuery('witch')),
).order_by('-rank')[:2]
self.assertSequenceEqual(searched, [self.crowd, self.witch])
vector = SearchVector('dialogue', weight='D') + SearchVector('character__name', weight='A')
searched = Line.objects.filter(scene=self.witch_scene).annotate(
rank=SearchRank(vector, SearchQuery('witch')),
).order_by('-rank')[:2]
self.assertSequenceEqual(searched, [self.witch, self.crowd])
def test_ranked_custom_weights(self):
vector = SearchVector('dialogue', weight='D') + SearchVector('character__name', weight='A')
searched = Line.objects.filter(scene=self.witch_scene).annotate(
rank=SearchRank(vector, SearchQuery('witch'), weights=[1, 0, 0, 0.5]),
).order_by('-rank')[:2]
self.assertSequenceEqual(searched, [self.crowd, self.witch])
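        # Why the crowd line outranks the witch here: PostgreSQL's ts_rank()
        # takes its optional weights array in the order {D, C, B, A}, and
        # SearchRank presumably passes the list through unchanged, so
        # weights=[1, 0, 0, 0.5] boosts the D-weighted dialogue vector over
        # the A-weighted character name.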
def test_ranking_chaining(self):
searched = Line.objects.filter(character=self.minstrel).annotate(
rank=SearchRank(SearchVector('dialogue'), SearchQuery('brave sir robin')),
).filter(rank__gt=0.3)
self.assertSequenceEqual(searched, [self.verse0])
class SearchQueryTests(SimpleTestCase):
def test_str(self):
tests = (
(~SearchQuery('a'), '~SearchQuery(a)'),
(
(SearchQuery('a') | SearchQuery('b')) & (SearchQuery('c') | SearchQuery('d')),
'((SearchQuery(a) || SearchQuery(b)) && (SearchQuery(c) || SearchQuery(d)))',
),
(
SearchQuery('a') & (SearchQuery('b') | SearchQuery('c')),
'(SearchQuery(a) && (SearchQuery(b) || SearchQuery(c)))',
),
(
(SearchQuery('a') | SearchQuery('b')) & SearchQuery('c'),
'((SearchQuery(a) || SearchQuery(b)) && SearchQuery(c))'
),
(
SearchQuery('a') & (SearchQuery('b') & (SearchQuery('c') | SearchQuery('d'))),
'(SearchQuery(a) && (SearchQuery(b) && (SearchQuery(c) || SearchQuery(d))))',
),
)
for query, expected_str in tests:
with self.subTest(query=query):
self.assertEqual(str(query), expected_str)
f653ebf1ccd41079cacf809881d49fbfad089b19e091801100cbc5e043e33356
import datetime
import json
from decimal import Decimal
from django import forms
from django.core import exceptions, serializers
from django.db.models import DateField, DateTimeField, F, Func, Value
from django.test import ignore_warnings, override_settings
from django.utils import timezone
from django.utils.deprecation import RemovedInDjango31Warning
from . import PostgreSQLSimpleTestCase, PostgreSQLTestCase
from .models import RangeLookupsModel, RangesModel
try:
from psycopg2.extras import DateRange, DateTimeTZRange, NumericRange
from django.contrib.postgres import fields as pg_fields, forms as pg_forms
from django.contrib.postgres.validators import (
RangeMaxValueValidator, RangeMinValueValidator,
)
except ImportError:
pass
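# RangesModel and RangeLookupsModel live in postgres_tests.models. A rough,
# hypothetical sketch of the fields exercised below (not the real
# definitions) could be:
#
#     class RangesModel(models.Model):
#         ints = pg_fields.IntegerRangeField(blank=True, null=True)
#         bigints = pg_fields.BigIntegerRangeField(blank=True, null=True)
#         decimals = pg_fields.DecimalRangeField(blank=True, null=True)
#         timestamps = pg_fields.DateTimeRangeField(blank=True, null=True)
#         dates = pg_fields.DateRangeField(blank=True, null=True)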
class TestSaveLoad(PostgreSQLTestCase):
def test_all_fields(self):
now = timezone.now()
instance = RangesModel(
ints=NumericRange(0, 10),
bigints=NumericRange(10, 20),
decimals=NumericRange(20, 30),
timestamps=DateTimeTZRange(now - datetime.timedelta(hours=1), now),
dates=DateRange(now.date() - datetime.timedelta(days=1), now.date()),
)
instance.save()
loaded = RangesModel.objects.get()
self.assertEqual(instance.ints, loaded.ints)
self.assertEqual(instance.bigints, loaded.bigints)
self.assertEqual(instance.decimals, loaded.decimals)
self.assertEqual(instance.timestamps, loaded.timestamps)
self.assertEqual(instance.dates, loaded.dates)
def test_range_object(self):
r = NumericRange(0, 10)
instance = RangesModel(ints=r)
instance.save()
loaded = RangesModel.objects.get()
self.assertEqual(r, loaded.ints)
def test_tuple(self):
instance = RangesModel(ints=(0, 10))
instance.save()
loaded = RangesModel.objects.get()
self.assertEqual(NumericRange(0, 10), loaded.ints)
def test_range_object_boundaries(self):
r = NumericRange(0, 10, '[]')
instance = RangesModel(decimals=r)
instance.save()
loaded = RangesModel.objects.get()
self.assertEqual(r, loaded.decimals)
self.assertIn(10, loaded.decimals)
def test_unbounded(self):
r = NumericRange(None, None, '()')
instance = RangesModel(decimals=r)
instance.save()
loaded = RangesModel.objects.get()
self.assertEqual(r, loaded.decimals)
def test_empty(self):
r = NumericRange(empty=True)
instance = RangesModel(ints=r)
instance.save()
loaded = RangesModel.objects.get()
self.assertEqual(r, loaded.ints)
def test_null(self):
instance = RangesModel(ints=None)
instance.save()
loaded = RangesModel.objects.get()
self.assertIsNone(loaded.ints)
def test_model_set_on_base_field(self):
instance = RangesModel()
field = instance._meta.get_field('ints')
self.assertEqual(field.model, RangesModel)
self.assertEqual(field.base_field.model, RangesModel)
class TestRangeContainsLookup(PostgreSQLTestCase):
@classmethod
def setUpTestData(cls):
cls.timestamps = [
datetime.datetime(year=2016, month=1, day=1),
datetime.datetime(year=2016, month=1, day=2, hour=1),
datetime.datetime(year=2016, month=1, day=2, hour=12),
datetime.datetime(year=2016, month=1, day=3),
datetime.datetime(year=2016, month=1, day=3, hour=1),
datetime.datetime(year=2016, month=2, day=2),
]
cls.aware_timestamps = [
timezone.make_aware(timestamp)
for timestamp in cls.timestamps
]
cls.dates = [
datetime.date(year=2016, month=1, day=1),
datetime.date(year=2016, month=1, day=2),
datetime.date(year=2016, month=1, day=3),
datetime.date(year=2016, month=1, day=4),
datetime.date(year=2016, month=2, day=2),
datetime.date(year=2016, month=2, day=3),
]
cls.obj = RangesModel.objects.create(
dates=(cls.dates[0], cls.dates[3]),
timestamps=(cls.timestamps[0], cls.timestamps[3]),
)
cls.aware_obj = RangesModel.objects.create(
dates=(cls.dates[0], cls.dates[3]),
timestamps=(cls.aware_timestamps[0], cls.aware_timestamps[3]),
)
        # Objects that don't match any of the queries below: their ranges start at
        # dates[3]/timestamps[3], after all of the values used in the lookups.
for i in range(3, 4):
RangesModel.objects.create(
dates=(cls.dates[i], cls.dates[i + 1]),
timestamps=(cls.timestamps[i], cls.timestamps[i + 1]),
)
RangesModel.objects.create(
dates=(cls.dates[i], cls.dates[i + 1]),
timestamps=(cls.aware_timestamps[i], cls.aware_timestamps[i + 1]),
)
def test_datetime_range_contains(self):
filter_args = (
self.timestamps[1],
self.aware_timestamps[1],
(self.timestamps[1], self.timestamps[2]),
(self.aware_timestamps[1], self.aware_timestamps[2]),
Value(self.dates[0], output_field=DateTimeField()),
Func(F('dates'), function='lower', output_field=DateTimeField()),
)
for filter_arg in filter_args:
with self.subTest(filter_arg=filter_arg):
self.assertCountEqual(
RangesModel.objects.filter(**{'timestamps__contains': filter_arg}),
[self.obj, self.aware_obj],
)
def test_date_range_contains(self):
filter_args = (
self.timestamps[1],
(self.dates[1], self.dates[2]),
Value(self.dates[0], output_field=DateField()),
Func(F('timestamps'), function='lower', output_field=DateField()),
)
for filter_arg in filter_args:
with self.subTest(filter_arg=filter_arg):
self.assertCountEqual(
RangesModel.objects.filter(**{'dates__contains': filter_arg}),
[self.obj, self.aware_obj],
)
class TestQuerying(PostgreSQLTestCase):
@classmethod
def setUpTestData(cls):
cls.objs = [
RangesModel.objects.create(ints=NumericRange(0, 10)),
RangesModel.objects.create(ints=NumericRange(5, 15)),
RangesModel.objects.create(ints=NumericRange(None, 0)),
RangesModel.objects.create(ints=NumericRange(empty=True)),
RangesModel.objects.create(ints=None),
]
def test_exact(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__exact=NumericRange(0, 10)),
[self.objs[0]],
)
def test_isnull(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__isnull=True),
[self.objs[4]],
)
def test_isempty(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__isempty=True),
[self.objs[3]],
)
def test_contains(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__contains=8),
[self.objs[0], self.objs[1]],
)
def test_contains_range(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__contains=NumericRange(3, 8)),
[self.objs[0]],
)
def test_contained_by(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__contained_by=NumericRange(0, 20)),
[self.objs[0], self.objs[1], self.objs[3]],
)
def test_overlap(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__overlap=NumericRange(3, 8)),
[self.objs[0], self.objs[1]],
)
def test_fully_lt(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__fully_lt=NumericRange(5, 10)),
[self.objs[2]],
)
def test_fully_gt(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__fully_gt=NumericRange(5, 10)),
[],
)
def test_not_lt(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__not_lt=NumericRange(5, 10)),
[self.objs[1]],
)
def test_not_gt(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__not_gt=NumericRange(5, 10)),
[self.objs[0], self.objs[2]],
)
def test_adjacent_to(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__adjacent_to=NumericRange(0, 5)),
[self.objs[1], self.objs[2]],
)
def test_startswith(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__startswith=0),
[self.objs[0]],
)
def test_endswith(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__endswith=0),
[self.objs[2]],
)
def test_startswith_chaining(self):
self.assertSequenceEqual(
RangesModel.objects.filter(ints__startswith__gte=0),
[self.objs[0], self.objs[1]],
)
class TestQueryingWithRanges(PostgreSQLTestCase):
def test_date_range(self):
objs = [
RangeLookupsModel.objects.create(date='2015-01-01'),
RangeLookupsModel.objects.create(date='2015-05-05'),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(date__contained_by=DateRange('2015-01-01', '2015-05-04')),
[objs[0]],
)
def test_date_range_datetime_field(self):
objs = [
RangeLookupsModel.objects.create(timestamp='2015-01-01'),
RangeLookupsModel.objects.create(timestamp='2015-05-05'),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(timestamp__date__contained_by=DateRange('2015-01-01', '2015-05-04')),
[objs[0]],
)
def test_datetime_range(self):
objs = [
RangeLookupsModel.objects.create(timestamp='2015-01-01T09:00:00'),
RangeLookupsModel.objects.create(timestamp='2015-05-05T17:00:00'),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(
timestamp__contained_by=DateTimeTZRange('2015-01-01T09:00', '2015-05-04T23:55')
),
[objs[0]],
)
def test_integer_range(self):
objs = [
RangeLookupsModel.objects.create(integer=5),
RangeLookupsModel.objects.create(integer=99),
RangeLookupsModel.objects.create(integer=-1),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(integer__contained_by=NumericRange(1, 98)),
[objs[0]]
)
def test_biginteger_range(self):
objs = [
RangeLookupsModel.objects.create(big_integer=5),
RangeLookupsModel.objects.create(big_integer=99),
RangeLookupsModel.objects.create(big_integer=-1),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(big_integer__contained_by=NumericRange(1, 98)),
[objs[0]]
)
def test_float_range(self):
objs = [
RangeLookupsModel.objects.create(float=5),
RangeLookupsModel.objects.create(float=99),
RangeLookupsModel.objects.create(float=-1),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(float__contained_by=NumericRange(1, 98)),
[objs[0]]
)
def test_f_ranges(self):
parent = RangesModel.objects.create(decimals=NumericRange(0, 10))
objs = [
RangeLookupsModel.objects.create(float=5, parent=parent),
RangeLookupsModel.objects.create(float=99, parent=parent),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.filter(float__contained_by=F('parent__decimals')),
[objs[0]]
)
def test_exclude(self):
objs = [
RangeLookupsModel.objects.create(float=5),
RangeLookupsModel.objects.create(float=99),
RangeLookupsModel.objects.create(float=-1),
]
self.assertSequenceEqual(
RangeLookupsModel.objects.exclude(float__contained_by=NumericRange(0, 100)),
[objs[2]]
)
class TestSerialization(PostgreSQLSimpleTestCase):
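    # Range fields serialize to JSON strings with "lower"/"upper"/"bounds" keys;
    # empty ranges serialize as {"empty": true} and null fields stay null.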
test_data = (
'[{"fields": {"ints": "{\\"upper\\": \\"10\\", \\"lower\\": \\"0\\", '
'\\"bounds\\": \\"[)\\"}", "decimals": "{\\"empty\\": true}", '
'"bigints": null, "timestamps": "{\\"upper\\": \\"2014-02-02T12:12:12+00:00\\", '
'\\"lower\\": \\"2014-01-01T00:00:00+00:00\\", \\"bounds\\": \\"[)\\"}", '
'"dates": "{\\"upper\\": \\"2014-02-02\\", \\"lower\\": \\"2014-01-01\\", \\"bounds\\": \\"[)\\"}" }, '
'"model": "postgres_tests.rangesmodel", "pk": null}]'
)
lower_date = datetime.date(2014, 1, 1)
upper_date = datetime.date(2014, 2, 2)
lower_dt = datetime.datetime(2014, 1, 1, 0, 0, 0, tzinfo=timezone.utc)
upper_dt = datetime.datetime(2014, 2, 2, 12, 12, 12, tzinfo=timezone.utc)
def test_dumping(self):
instance = RangesModel(
ints=NumericRange(0, 10), decimals=NumericRange(empty=True),
timestamps=DateTimeTZRange(self.lower_dt, self.upper_dt),
dates=DateRange(self.lower_date, self.upper_date),
)
data = serializers.serialize('json', [instance])
dumped = json.loads(data)
for field in ('ints', 'dates', 'timestamps'):
dumped[0]['fields'][field] = json.loads(dumped[0]['fields'][field])
check = json.loads(self.test_data)
for field in ('ints', 'dates', 'timestamps'):
check[0]['fields'][field] = json.loads(check[0]['fields'][field])
self.assertEqual(dumped, check)
def test_loading(self):
instance = list(serializers.deserialize('json', self.test_data))[0].object
self.assertEqual(instance.ints, NumericRange(0, 10))
self.assertEqual(instance.decimals, NumericRange(empty=True))
self.assertIsNone(instance.bigints)
self.assertEqual(instance.dates, DateRange(self.lower_date, self.upper_date))
self.assertEqual(instance.timestamps, DateTimeTZRange(self.lower_dt, self.upper_dt))
def test_serialize_range_with_null(self):
instance = RangesModel(ints=NumericRange(None, 10))
data = serializers.serialize('json', [instance])
new_instance = list(serializers.deserialize('json', data))[0].object
self.assertEqual(new_instance.ints, NumericRange(None, 10))
instance = RangesModel(ints=NumericRange(10, None))
data = serializers.serialize('json', [instance])
new_instance = list(serializers.deserialize('json', data))[0].object
self.assertEqual(new_instance.ints, NumericRange(10, None))
class TestValidators(PostgreSQLSimpleTestCase):
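    # The range validators compare the relevant bound of the range against the limit;
    # a range left unbounded on that side also fails validation.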
def test_max(self):
validator = RangeMaxValueValidator(5)
validator(NumericRange(0, 5))
msg = 'Ensure that this range is completely less than or equal to 5.'
with self.assertRaises(exceptions.ValidationError) as cm:
validator(NumericRange(0, 10))
self.assertEqual(cm.exception.messages[0], msg)
self.assertEqual(cm.exception.code, 'max_value')
with self.assertRaisesMessage(exceptions.ValidationError, msg):
validator(NumericRange(0, None)) # an unbound range
def test_min(self):
validator = RangeMinValueValidator(5)
validator(NumericRange(10, 15))
msg = 'Ensure that this range is completely greater than or equal to 5.'
with self.assertRaises(exceptions.ValidationError) as cm:
validator(NumericRange(0, 10))
self.assertEqual(cm.exception.messages[0], msg)
self.assertEqual(cm.exception.code, 'min_value')
with self.assertRaisesMessage(exceptions.ValidationError, msg):
validator(NumericRange(None, 10)) # an unbound range
class TestFormField(PostgreSQLSimpleTestCase):
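    # Form range fields clean a [lower, upper] pair into the matching psycopg2 range
    # type; a blank item yields an unbounded side, and a fully blank pair on a
    # non-required field cleans to None.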
def test_valid_integer(self):
field = pg_forms.IntegerRangeField()
value = field.clean(['1', '2'])
self.assertEqual(value, NumericRange(1, 2))
@ignore_warnings(category=RemovedInDjango31Warning)
def test_valid_floats(self):
field = pg_forms.FloatRangeField()
value = field.clean(['1.12345', '2.001'])
self.assertEqual(value, NumericRange(1.12345, 2.001))
def test_valid_decimal(self):
field = pg_forms.DecimalRangeField()
value = field.clean(['1.12345', '2.001'])
self.assertEqual(value, NumericRange(Decimal('1.12345'), Decimal('2.001')))
def test_float_range_field_deprecation(self):
msg = 'FloatRangeField is deprecated in favor of DecimalRangeField.'
with self.assertRaisesMessage(RemovedInDjango31Warning, msg):
pg_forms.FloatRangeField()
def test_valid_timestamps(self):
field = pg_forms.DateTimeRangeField()
value = field.clean(['01/01/2014 00:00:00', '02/02/2014 12:12:12'])
lower = datetime.datetime(2014, 1, 1, 0, 0, 0)
upper = datetime.datetime(2014, 2, 2, 12, 12, 12)
self.assertEqual(value, DateTimeTZRange(lower, upper))
def test_valid_dates(self):
field = pg_forms.DateRangeField()
value = field.clean(['01/01/2014', '02/02/2014'])
lower = datetime.date(2014, 1, 1)
upper = datetime.date(2014, 2, 2)
self.assertEqual(value, DateRange(lower, upper))
def test_using_split_datetime_widget(self):
class SplitDateTimeRangeField(pg_forms.DateTimeRangeField):
base_field = forms.SplitDateTimeField
class SplitForm(forms.Form):
field = SplitDateTimeRangeField()
form = SplitForm()
self.assertHTMLEqual(str(form), '''
<tr>
<th>
<label for="id_field_0">Field:</label>
</th>
<td>
<input id="id_field_0_0" name="field_0_0" type="text">
<input id="id_field_0_1" name="field_0_1" type="text">
<input id="id_field_1_0" name="field_1_0" type="text">
<input id="id_field_1_1" name="field_1_1" type="text">
</td>
</tr>
''')
form = SplitForm({
'field_0_0': '01/01/2014',
'field_0_1': '00:00:00',
'field_1_0': '02/02/2014',
'field_1_1': '12:12:12',
})
self.assertTrue(form.is_valid())
lower = datetime.datetime(2014, 1, 1, 0, 0, 0)
upper = datetime.datetime(2014, 2, 2, 12, 12, 12)
self.assertEqual(form.cleaned_data['field'], DateTimeTZRange(lower, upper))
def test_none(self):
field = pg_forms.IntegerRangeField(required=False)
value = field.clean(['', ''])
self.assertIsNone(value)
def test_rendering(self):
class RangeForm(forms.Form):
ints = pg_forms.IntegerRangeField()
self.assertHTMLEqual(str(RangeForm()), '''
<tr>
<th><label for="id_ints_0">Ints:</label></th>
<td>
<input id="id_ints_0" name="ints_0" type="number">
<input id="id_ints_1" name="ints_1" type="number">
</td>
</tr>
''')
def test_integer_lower_bound_higher(self):
field = pg_forms.IntegerRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(['10', '2'])
self.assertEqual(cm.exception.messages[0], 'The start of the range must not exceed the end of the range.')
self.assertEqual(cm.exception.code, 'bound_ordering')
def test_integer_open(self):
field = pg_forms.IntegerRangeField()
value = field.clean(['', '0'])
self.assertEqual(value, NumericRange(None, 0))
def test_integer_incorrect_data_type(self):
field = pg_forms.IntegerRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean('1')
self.assertEqual(cm.exception.messages[0], 'Enter two whole numbers.')
self.assertEqual(cm.exception.code, 'invalid')
def test_integer_invalid_lower(self):
field = pg_forms.IntegerRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(['a', '2'])
self.assertEqual(cm.exception.messages[0], 'Enter a whole number.')
def test_integer_invalid_upper(self):
field = pg_forms.IntegerRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(['1', 'b'])
self.assertEqual(cm.exception.messages[0], 'Enter a whole number.')
def test_integer_required(self):
field = pg_forms.IntegerRangeField(required=True)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(['', ''])
self.assertEqual(cm.exception.messages[0], 'This field is required.')
value = field.clean([1, ''])
self.assertEqual(value, NumericRange(1, None))
def test_decimal_lower_bound_higher(self):
field = pg_forms.DecimalRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(['1.8', '1.6'])
self.assertEqual(cm.exception.messages[0], 'The start of the range must not exceed the end of the range.')
self.assertEqual(cm.exception.code, 'bound_ordering')
def test_decimal_open(self):
field = pg_forms.DecimalRangeField()
value = field.clean(['', '3.1415926'])
self.assertEqual(value, NumericRange(None, Decimal('3.1415926')))
def test_decimal_incorrect_data_type(self):
field = pg_forms.DecimalRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean('1.6')
self.assertEqual(cm.exception.messages[0], 'Enter two numbers.')
self.assertEqual(cm.exception.code, 'invalid')
def test_decimal_invalid_lower(self):
field = pg_forms.DecimalRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(['a', '3.1415926'])
self.assertEqual(cm.exception.messages[0], 'Enter a number.')
def test_decimal_invalid_upper(self):
field = pg_forms.DecimalRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(['1.61803399', 'b'])
self.assertEqual(cm.exception.messages[0], 'Enter a number.')
def test_decimal_required(self):
field = pg_forms.DecimalRangeField(required=True)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(['', ''])
self.assertEqual(cm.exception.messages[0], 'This field is required.')
value = field.clean(['1.61803399', ''])
self.assertEqual(value, NumericRange(Decimal('1.61803399'), None))
def test_date_lower_bound_higher(self):
field = pg_forms.DateRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(['2013-04-09', '1976-04-16'])
self.assertEqual(cm.exception.messages[0], 'The start of the range must not exceed the end of the range.')
self.assertEqual(cm.exception.code, 'bound_ordering')
def test_date_open(self):
field = pg_forms.DateRangeField()
value = field.clean(['', '2013-04-09'])
self.assertEqual(value, DateRange(None, datetime.date(2013, 4, 9)))
def test_date_incorrect_data_type(self):
field = pg_forms.DateRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean('1')
self.assertEqual(cm.exception.messages[0], 'Enter two valid dates.')
self.assertEqual(cm.exception.code, 'invalid')
def test_date_invalid_lower(self):
field = pg_forms.DateRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(['a', '2013-04-09'])
self.assertEqual(cm.exception.messages[0], 'Enter a valid date.')
def test_date_invalid_upper(self):
field = pg_forms.DateRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(['2013-04-09', 'b'])
self.assertEqual(cm.exception.messages[0], 'Enter a valid date.')
def test_date_required(self):
field = pg_forms.DateRangeField(required=True)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(['', ''])
self.assertEqual(cm.exception.messages[0], 'This field is required.')
value = field.clean(['1976-04-16', ''])
self.assertEqual(value, DateRange(datetime.date(1976, 4, 16), None))
def test_datetime_lower_bound_higher(self):
field = pg_forms.DateTimeRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(['2006-10-25 14:59', '2006-10-25 14:58'])
self.assertEqual(cm.exception.messages[0], 'The start of the range must not exceed the end of the range.')
self.assertEqual(cm.exception.code, 'bound_ordering')
def test_datetime_open(self):
field = pg_forms.DateTimeRangeField()
value = field.clean(['', '2013-04-09 11:45'])
self.assertEqual(value, DateTimeTZRange(None, datetime.datetime(2013, 4, 9, 11, 45)))
def test_datetime_incorrect_data_type(self):
field = pg_forms.DateTimeRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean('2013-04-09 11:45')
self.assertEqual(cm.exception.messages[0], 'Enter two valid date/times.')
self.assertEqual(cm.exception.code, 'invalid')
def test_datetime_invalid_lower(self):
field = pg_forms.DateTimeRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(['45', '2013-04-09 11:45'])
self.assertEqual(cm.exception.messages[0], 'Enter a valid date/time.')
def test_datetime_invalid_upper(self):
field = pg_forms.DateTimeRangeField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(['2013-04-09 11:45', 'sweet pickles'])
self.assertEqual(cm.exception.messages[0], 'Enter a valid date/time.')
def test_datetime_required(self):
field = pg_forms.DateTimeRangeField(required=True)
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(['', ''])
self.assertEqual(cm.exception.messages[0], 'This field is required.')
value = field.clean(['2013-04-09 11:45', ''])
self.assertEqual(value, DateTimeTZRange(datetime.datetime(2013, 4, 9, 11, 45), None))
@override_settings(USE_TZ=True, TIME_ZONE='Africa/Johannesburg')
def test_datetime_prepare_value(self):
field = pg_forms.DateTimeRangeField()
value = field.prepare_value(
DateTimeTZRange(datetime.datetime(2015, 5, 22, 16, 6, 33, tzinfo=timezone.utc), None)
)
self.assertEqual(value, [datetime.datetime(2015, 5, 22, 18, 6, 33), None])
def test_model_field_formfield_integer(self):
model_field = pg_fields.IntegerRangeField()
form_field = model_field.formfield()
self.assertIsInstance(form_field, pg_forms.IntegerRangeField)
def test_model_field_formfield_biginteger(self):
model_field = pg_fields.BigIntegerRangeField()
form_field = model_field.formfield()
self.assertIsInstance(form_field, pg_forms.IntegerRangeField)
def test_model_field_formfield_float(self):
model_field = pg_fields.DecimalRangeField()
form_field = model_field.formfield()
self.assertIsInstance(form_field, pg_forms.DecimalRangeField)
def test_model_field_formfield_date(self):
model_field = pg_fields.DateRangeField()
form_field = model_field.formfield()
self.assertIsInstance(form_field, pg_forms.DateRangeField)
def test_model_field_formfield_datetime(self):
model_field = pg_fields.DateTimeRangeField()
form_field = model_field.formfield()
self.assertIsInstance(form_field, pg_forms.DateTimeRangeField)
class TestWidget(PostgreSQLSimpleTestCase):
def test_range_widget(self):
f = pg_forms.ranges.DateTimeRangeField()
self.assertHTMLEqual(
f.widget.render('datetimerange', ''),
'<input type="text" name="datetimerange_0"><input type="text" name="datetimerange_1">'
)
self.assertHTMLEqual(
f.widget.render('datetimerange', None),
'<input type="text" name="datetimerange_0"><input type="text" name="datetimerange_1">'
)
dt_range = DateTimeTZRange(
datetime.datetime(2006, 1, 10, 7, 30),
datetime.datetime(2006, 2, 12, 9, 50)
)
self.assertHTMLEqual(
f.widget.render('datetimerange', dt_range),
'<input type="text" name="datetimerange_0" value="2006-01-10 07:30:00">'
'<input type="text" name="datetimerange_1" value="2006-02-12 09:50:00">'
)
|
19bba564b360403b3ac636313fd314bea706258c6e14641a56752d400d171564 | from django.core.serializers.json import DjangoJSONEncoder
from django.db import models
from .fields import (
ArrayField, BigIntegerRangeField, CICharField, CIEmailField, CITextField,
DateRangeField, DateTimeRangeField, DecimalRangeField, HStoreField,
IntegerRangeField, JSONField, SearchVectorField,
)
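# Tag/TagField provide a custom field with database <-> Python conversion
# (from_db_value()/to_python()); OtherTypesArrayModel uses it below to exercise
# ArrayField with a custom base field.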
class Tag:
def __init__(self, tag_id):
self.tag_id = tag_id
def __eq__(self, other):
return isinstance(other, Tag) and self.tag_id == other.tag_id
class TagField(models.SmallIntegerField):
def from_db_value(self, value, expression, connection):
if value is None:
return value
return Tag(int(value))
def to_python(self, value):
if isinstance(value, Tag):
return value
if value is None:
return value
return Tag(int(value))
def get_prep_value(self, value):
return value.tag_id
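# Meta.required_db_vendor restricts these models to PostgreSQL, so their tables are
# only created when the suite runs against that backend.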
class PostgreSQLModel(models.Model):
class Meta:
abstract = True
required_db_vendor = 'postgresql'
class IntegerArrayModel(PostgreSQLModel):
field = ArrayField(models.IntegerField(), default=list, blank=True)
class NullableIntegerArrayModel(PostgreSQLModel):
field = ArrayField(models.IntegerField(), blank=True, null=True)
class CharArrayModel(PostgreSQLModel):
field = ArrayField(models.CharField(max_length=10))
class DateTimeArrayModel(PostgreSQLModel):
datetimes = ArrayField(models.DateTimeField())
dates = ArrayField(models.DateField())
times = ArrayField(models.TimeField())
class NestedIntegerArrayModel(PostgreSQLModel):
field = ArrayField(ArrayField(models.IntegerField()))
class OtherTypesArrayModel(PostgreSQLModel):
ips = ArrayField(models.GenericIPAddressField(), default=list)
uuids = ArrayField(models.UUIDField(), default=list)
decimals = ArrayField(models.DecimalField(max_digits=5, decimal_places=2), default=list)
tags = ArrayField(TagField(), blank=True, null=True)
json = ArrayField(JSONField(default=dict), default=list)
int_ranges = ArrayField(IntegerRangeField(), blank=True, null=True)
bigint_ranges = ArrayField(BigIntegerRangeField(), blank=True, null=True)
class HStoreModel(PostgreSQLModel):
field = HStoreField(blank=True, null=True)
array_field = ArrayField(HStoreField(), null=True)
class CharFieldModel(models.Model):
field = models.CharField(max_length=16)
class TextFieldModel(models.Model):
field = models.TextField()
def __str__(self):
return self.field
# Scene/Character/Line models are used to test full text search. They're
# populated with content from Monty Python and the Holy Grail.
class Scene(models.Model):
scene = models.CharField(max_length=255)
setting = models.CharField(max_length=255)
def __str__(self):
return self.scene
class Character(models.Model):
name = models.CharField(max_length=255)
def __str__(self):
return self.name
class CITestModel(PostgreSQLModel):
name = CICharField(primary_key=True, max_length=255)
email = CIEmailField()
description = CITextField()
array_field = ArrayField(CITextField(), null=True)
def __str__(self):
return self.name
class Line(PostgreSQLModel):
scene = models.ForeignKey('Scene', models.CASCADE)
character = models.ForeignKey('Character', models.CASCADE)
dialogue = models.TextField(blank=True, null=True)
dialogue_search_vector = SearchVectorField(blank=True, null=True)
dialogue_config = models.CharField(max_length=100, blank=True, null=True)
def __str__(self):
return self.dialogue or ''
class RangesModel(PostgreSQLModel):
ints = IntegerRangeField(blank=True, null=True)
bigints = BigIntegerRangeField(blank=True, null=True)
decimals = DecimalRangeField(blank=True, null=True)
timestamps = DateTimeRangeField(blank=True, null=True)
dates = DateRangeField(blank=True, null=True)
class RangeLookupsModel(PostgreSQLModel):
parent = models.ForeignKey(RangesModel, models.SET_NULL, blank=True, null=True)
integer = models.IntegerField(blank=True, null=True)
big_integer = models.BigIntegerField(blank=True, null=True)
float = models.FloatField(blank=True, null=True)
timestamp = models.DateTimeField(blank=True, null=True)
date = models.DateField(blank=True, null=True)
class JSONModel(PostgreSQLModel):
field = JSONField(blank=True, null=True)
field_custom = JSONField(blank=True, null=True, encoder=DjangoJSONEncoder)
class ArrayFieldSubclass(ArrayField):
def __init__(self, *args, **kwargs):
super().__init__(models.IntegerField())
class AggregateTestModel(models.Model):
"""
To test postgres-specific general aggregation functions
"""
char_field = models.CharField(max_length=30, blank=True)
integer_field = models.IntegerField(null=True)
boolean_field = models.BooleanField(null=True)
class StatTestModel(models.Model):
"""
To test postgres-specific aggregation functions for statistics
"""
int1 = models.IntegerField()
int2 = models.IntegerField()
related_field = models.ForeignKey(AggregateTestModel, models.SET_NULL, null=True)
class NowTestModel(models.Model):
when = models.DateTimeField(null=True, default=None)
class UUIDTestModel(models.Model):
uuid = models.UUIDField(default=None, null=True)
|
66889acec3d2021c5ce016f49a9d0845bfc2d61ed71a7767a8e54f62fe9f4ca8 | """
Indirection layer for PostgreSQL-specific fields, so the tests don't fail when
run with a backend other than PostgreSQL.
"""
from django.db import models
try:
from django.contrib.postgres.fields import (
ArrayField, BigIntegerRangeField, CICharField, CIEmailField,
CITextField, DateRangeField, DateTimeRangeField, DecimalRangeField,
HStoreField, IntegerRangeField, JSONField,
)
from django.contrib.postgres.search import SearchVectorField
except ImportError:
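    # Minimal stand-ins keep the model definitions importable when
    # django.contrib.postgres (and psycopg2) is unavailable.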
class DummyArrayField(models.Field):
def __init__(self, base_field, size=None, **kwargs):
super().__init__(**kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
kwargs.update({
'base_field': '',
'size': 1,
})
return name, path, args, kwargs
class DummyJSONField(models.Field):
def __init__(self, encoder=None, **kwargs):
super().__init__(**kwargs)
ArrayField = DummyArrayField
BigIntegerRangeField = models.Field
CICharField = models.Field
CIEmailField = models.Field
CITextField = models.Field
DateRangeField = models.Field
DateTimeRangeField = models.Field
DecimalRangeField = models.Field
HStoreField = models.Field
IntegerRangeField = models.Field
JSONField = DummyJSONField
SearchVectorField = models.Field
|
c4f83dff73cf26772a13256a4df6094bf303a1b3b2a846332d91a945c103043e | from unittest import mock
from django.contrib.postgres.indexes import (
BrinIndex, BTreeIndex, GinIndex, GistIndex, HashIndex, SpGistIndex,
)
from django.db import connection
from django.db.models import CharField
from django.db.models.functions import Length
from django.db.models.query_utils import Q
from django.db.utils import NotSupportedError
from django.test import skipUnlessDBFeature
from django.test.utils import register_lookup
from . import PostgreSQLSimpleTestCase, PostgreSQLTestCase
from .models import CharFieldModel, IntegerArrayModel
class IndexTestMixin:
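    # Shared checks for all PostgreSQL index classes: auto-generated names end with
    # the class-specific suffix, and deconstruct() round-trips the kwargs needed to
    # recreate the index in migrations.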
def test_name_auto_generation(self):
index = self.index_class(fields=['field'])
index.set_name_with_model(CharFieldModel)
self.assertRegex(index.name, r'postgres_te_field_[0-9a-f]{6}_%s' % self.index_class.suffix)
def test_deconstruction_no_customization(self):
index = self.index_class(fields=['title'], name='test_title_%s' % self.index_class.suffix)
path, args, kwargs = index.deconstruct()
self.assertEqual(path, 'django.contrib.postgres.indexes.%s' % self.index_class.__name__)
self.assertEqual(args, ())
self.assertEqual(kwargs, {'fields': ['title'], 'name': 'test_title_%s' % self.index_class.suffix})
class BrinIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
index_class = BrinIndex
def test_suffix(self):
self.assertEqual(BrinIndex.suffix, 'brin')
def test_deconstruction(self):
index = BrinIndex(fields=['title'], name='test_title_brin', autosummarize=True, pages_per_range=16)
path, args, kwargs = index.deconstruct()
self.assertEqual(path, 'django.contrib.postgres.indexes.BrinIndex')
self.assertEqual(args, ())
self.assertEqual(kwargs, {
'fields': ['title'],
'name': 'test_title_brin',
'autosummarize': True,
'pages_per_range': 16,
})
def test_invalid_pages_per_range(self):
with self.assertRaisesMessage(ValueError, 'pages_per_range must be None or a positive integer'):
BrinIndex(fields=['title'], name='test_title_brin', pages_per_range=0)
class BTreeIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
index_class = BTreeIndex
def test_suffix(self):
self.assertEqual(BTreeIndex.suffix, 'btree')
def test_deconstruction(self):
index = BTreeIndex(fields=['title'], name='test_title_btree', fillfactor=80)
path, args, kwargs = index.deconstruct()
self.assertEqual(path, 'django.contrib.postgres.indexes.BTreeIndex')
self.assertEqual(args, ())
self.assertEqual(kwargs, {'fields': ['title'], 'name': 'test_title_btree', 'fillfactor': 80})
class GinIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
index_class = GinIndex
def test_suffix(self):
self.assertEqual(GinIndex.suffix, 'gin')
def test_deconstruction(self):
index = GinIndex(
fields=['title'],
name='test_title_gin',
fastupdate=True,
gin_pending_list_limit=128,
)
path, args, kwargs = index.deconstruct()
self.assertEqual(path, 'django.contrib.postgres.indexes.GinIndex')
self.assertEqual(args, ())
self.assertEqual(kwargs, {
'fields': ['title'],
'name': 'test_title_gin',
'fastupdate': True,
'gin_pending_list_limit': 128,
})
class GistIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
index_class = GistIndex
def test_suffix(self):
self.assertEqual(GistIndex.suffix, 'gist')
def test_deconstruction(self):
index = GistIndex(fields=['title'], name='test_title_gist', buffering=False, fillfactor=80)
path, args, kwargs = index.deconstruct()
self.assertEqual(path, 'django.contrib.postgres.indexes.GistIndex')
self.assertEqual(args, ())
self.assertEqual(kwargs, {
'fields': ['title'],
'name': 'test_title_gist',
'buffering': False,
'fillfactor': 80,
})
class HashIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
index_class = HashIndex
def test_suffix(self):
self.assertEqual(HashIndex.suffix, 'hash')
def test_deconstruction(self):
index = HashIndex(fields=['title'], name='test_title_hash', fillfactor=80)
path, args, kwargs = index.deconstruct()
self.assertEqual(path, 'django.contrib.postgres.indexes.HashIndex')
self.assertEqual(args, ())
self.assertEqual(kwargs, {'fields': ['title'], 'name': 'test_title_hash', 'fillfactor': 80})
class SpGistIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
index_class = SpGistIndex
def test_suffix(self):
self.assertEqual(SpGistIndex.suffix, 'spgist')
def test_deconstruction(self):
index = SpGistIndex(fields=['title'], name='test_title_spgist', fillfactor=80)
path, args, kwargs = index.deconstruct()
self.assertEqual(path, 'django.contrib.postgres.indexes.SpGistIndex')
self.assertEqual(args, ())
self.assertEqual(kwargs, {'fields': ['title'], 'name': 'test_title_spgist', 'fillfactor': 80})
class SchemaTests(PostgreSQLTestCase):
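    # These tests create real indexes via the schema editor, verify their type and
    # storage options through introspection.get_constraints(), then drop them again.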
def get_constraints(self, table):
"""
Get the indexes on the table using a new cursor.
"""
with connection.cursor() as cursor:
return connection.introspection.get_constraints(cursor, table)
def test_gin_index(self):
# Ensure the table is there and doesn't have an index.
self.assertNotIn('field', self.get_constraints(IntegerArrayModel._meta.db_table))
# Add the index
index_name = 'integer_array_model_field_gin'
index = GinIndex(fields=['field'], name=index_name)
with connection.schema_editor() as editor:
editor.add_index(IntegerArrayModel, index)
constraints = self.get_constraints(IntegerArrayModel._meta.db_table)
# Check gin index was added
self.assertEqual(constraints[index_name]['type'], GinIndex.suffix)
# Drop the index
with connection.schema_editor() as editor:
editor.remove_index(IntegerArrayModel, index)
self.assertNotIn(index_name, self.get_constraints(IntegerArrayModel._meta.db_table))
def test_gin_fastupdate(self):
index_name = 'integer_array_gin_fastupdate'
index = GinIndex(fields=['field'], name=index_name, fastupdate=False)
with connection.schema_editor() as editor:
editor.add_index(IntegerArrayModel, index)
constraints = self.get_constraints(IntegerArrayModel._meta.db_table)
self.assertEqual(constraints[index_name]['type'], 'gin')
self.assertEqual(constraints[index_name]['options'], ['fastupdate=off'])
with connection.schema_editor() as editor:
editor.remove_index(IntegerArrayModel, index)
self.assertNotIn(index_name, self.get_constraints(IntegerArrayModel._meta.db_table))
def test_partial_gin_index(self):
with register_lookup(CharField, Length):
index_name = 'char_field_gin_partial_idx'
index = GinIndex(fields=['field'], name=index_name, condition=Q(field__length=40))
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
self.assertEqual(constraints[index_name]['type'], 'gin')
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))
def test_partial_gin_index_with_tablespace(self):
with register_lookup(CharField, Length):
index_name = 'char_field_gin_partial_idx'
index = GinIndex(
fields=['field'],
name=index_name,
condition=Q(field__length=40),
db_tablespace='pg_default',
)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
self.assertIn('TABLESPACE "pg_default" ', str(index.create_sql(CharFieldModel, editor)))
constraints = self.get_constraints(CharFieldModel._meta.db_table)
self.assertEqual(constraints[index_name]['type'], 'gin')
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))
@skipUnlessDBFeature('has_gin_pending_list_limit')
def test_gin_parameters(self):
index_name = 'integer_array_gin_params'
index = GinIndex(fields=['field'], name=index_name, fastupdate=True, gin_pending_list_limit=64)
with connection.schema_editor() as editor:
editor.add_index(IntegerArrayModel, index)
constraints = self.get_constraints(IntegerArrayModel._meta.db_table)
self.assertEqual(constraints[index_name]['type'], 'gin')
self.assertEqual(constraints[index_name]['options'], ['gin_pending_list_limit=64', 'fastupdate=on'])
with connection.schema_editor() as editor:
editor.remove_index(IntegerArrayModel, index)
self.assertNotIn(index_name, self.get_constraints(IntegerArrayModel._meta.db_table))
@mock.patch('django.db.backends.postgresql.features.DatabaseFeatures.has_gin_pending_list_limit', False)
def test_gin_parameters_exception(self):
index_name = 'gin_options_exception'
index = GinIndex(fields=['field'], name=index_name, gin_pending_list_limit=64)
msg = 'GIN option gin_pending_list_limit requires PostgreSQL 9.5+.'
with self.assertRaisesMessage(NotSupportedError, msg):
with connection.schema_editor() as editor:
editor.add_index(IntegerArrayModel, index)
self.assertNotIn(index_name, self.get_constraints(IntegerArrayModel._meta.db_table))
@skipUnlessDBFeature('has_brin_index_support')
def test_brin_index(self):
index_name = 'char_field_model_field_brin'
index = BrinIndex(fields=['field'], name=index_name, pages_per_range=4)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
self.assertEqual(constraints[index_name]['type'], BrinIndex.suffix)
self.assertEqual(constraints[index_name]['options'], ['pages_per_range=4'])
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))
@skipUnlessDBFeature('has_brin_index_support', 'has_brin_autosummarize')
def test_brin_parameters(self):
index_name = 'char_field_brin_params'
index = BrinIndex(fields=['field'], name=index_name, autosummarize=True)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
self.assertEqual(constraints[index_name]['type'], BrinIndex.suffix)
self.assertEqual(constraints[index_name]['options'], ['autosummarize=on'])
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))
def test_brin_index_not_supported(self):
index_name = 'brin_index_exception'
index = BrinIndex(fields=['field'], name=index_name)
with self.assertRaisesMessage(NotSupportedError, 'BRIN indexes require PostgreSQL 9.5+.'):
with mock.patch('django.db.backends.postgresql.features.DatabaseFeatures.has_brin_index_support', False):
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))
@skipUnlessDBFeature('has_brin_index_support')
def test_brin_autosummarize_not_supported(self):
index_name = 'brin_options_exception'
index = BrinIndex(fields=['field'], name=index_name, autosummarize=True)
with self.assertRaisesMessage(NotSupportedError, 'BRIN option autosummarize requires PostgreSQL 10+.'):
with mock.patch('django.db.backends.postgresql.features.DatabaseFeatures.has_brin_autosummarize', False):
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))
def test_btree_index(self):
# Ensure the table is there and doesn't have an index.
self.assertNotIn('field', self.get_constraints(CharFieldModel._meta.db_table))
# Add the index.
index_name = 'char_field_model_field_btree'
index = BTreeIndex(fields=['field'], name=index_name)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
# The index was added.
self.assertEqual(constraints[index_name]['type'], BTreeIndex.suffix)
# Drop the index.
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))
def test_btree_parameters(self):
index_name = 'integer_array_btree_fillfactor'
index = BTreeIndex(fields=['field'], name=index_name, fillfactor=80)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
self.assertEqual(constraints[index_name]['type'], BTreeIndex.suffix)
self.assertEqual(constraints[index_name]['options'], ['fillfactor=80'])
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))
def test_gist_index(self):
# Ensure the table is there and doesn't have an index.
self.assertNotIn('field', self.get_constraints(CharFieldModel._meta.db_table))
# Add the index.
index_name = 'char_field_model_field_gist'
index = GistIndex(fields=['field'], name=index_name)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
# The index was added.
self.assertEqual(constraints[index_name]['type'], GistIndex.suffix)
# Drop the index.
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))
def test_gist_parameters(self):
index_name = 'integer_array_gist_buffering'
index = GistIndex(fields=['field'], name=index_name, buffering=True, fillfactor=80)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
self.assertEqual(constraints[index_name]['type'], GistIndex.suffix)
self.assertEqual(constraints[index_name]['options'], ['buffering=on', 'fillfactor=80'])
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))
def test_hash_index(self):
# Ensure the table is there and doesn't have an index.
self.assertNotIn('field', self.get_constraints(CharFieldModel._meta.db_table))
# Add the index.
index_name = 'char_field_model_field_hash'
index = HashIndex(fields=['field'], name=index_name)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
# The index was added.
self.assertEqual(constraints[index_name]['type'], HashIndex.suffix)
# Drop the index.
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))
def test_hash_parameters(self):
index_name = 'integer_array_hash_fillfactor'
index = HashIndex(fields=['field'], name=index_name, fillfactor=80)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
self.assertEqual(constraints[index_name]['type'], HashIndex.suffix)
self.assertEqual(constraints[index_name]['options'], ['fillfactor=80'])
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))
def test_spgist_index(self):
# Ensure the table is there and doesn't have an index.
self.assertNotIn('field', self.get_constraints(CharFieldModel._meta.db_table))
# Add the index.
index_name = 'char_field_model_field_spgist'
index = SpGistIndex(fields=['field'], name=index_name)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
# The index was added.
self.assertEqual(constraints[index_name]['type'], SpGistIndex.suffix)
# Drop the index.
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))
def test_spgist_parameters(self):
index_name = 'integer_array_spgist_fillfactor'
index = SpGistIndex(fields=['field'], name=index_name, fillfactor=80)
with connection.schema_editor() as editor:
editor.add_index(CharFieldModel, index)
constraints = self.get_constraints(CharFieldModel._meta.db_table)
self.assertEqual(constraints[index_name]['type'], SpGistIndex.suffix)
self.assertEqual(constraints[index_name]['options'], ['fillfactor=80'])
with connection.schema_editor() as editor:
editor.remove_index(CharFieldModel, index)
self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))
|
40efdafc7e60c9a7b3069b92393622c22e8af9b55f1c70749e04c7c0db573e3c | import datetime
import uuid
from decimal import Decimal
from django.core import checks, exceptions, serializers
from django.core.serializers.json import DjangoJSONEncoder
from django.db.models import Q
from django.forms import CharField, Form, widgets
from django.test.utils import isolate_apps
from django.utils.html import escape
from . import PostgreSQLSimpleTestCase, PostgreSQLTestCase
from .models import JSONModel, PostgreSQLModel
try:
from django.contrib.postgres import forms
from django.contrib.postgres.fields import JSONField
except ImportError:
pass
class TestSaveLoad(PostgreSQLTestCase):
def test_null(self):
instance = JSONModel()
instance.save()
loaded = JSONModel.objects.get()
self.assertIsNone(loaded.field)
def test_empty_object(self):
instance = JSONModel(field={})
instance.save()
loaded = JSONModel.objects.get()
self.assertEqual(loaded.field, {})
def test_empty_list(self):
instance = JSONModel(field=[])
instance.save()
loaded = JSONModel.objects.get()
self.assertEqual(loaded.field, [])
def test_boolean(self):
instance = JSONModel(field=True)
instance.save()
loaded = JSONModel.objects.get()
self.assertIs(loaded.field, True)
def test_string(self):
instance = JSONModel(field='why?')
instance.save()
loaded = JSONModel.objects.get()
self.assertEqual(loaded.field, 'why?')
def test_number(self):
instance = JSONModel(field=1)
instance.save()
loaded = JSONModel.objects.get()
self.assertEqual(loaded.field, 1)
def test_realistic_object(self):
obj = {
'a': 'b',
'c': 1,
'd': ['e', {'f': 'g'}],
'h': True,
'i': False,
'j': None,
}
instance = JSONModel(field=obj)
instance.save()
loaded = JSONModel.objects.get()
self.assertEqual(loaded.field, obj)
def test_custom_encoding(self):
"""
JSONModel.field_custom has a custom DjangoJSONEncoder.
"""
some_uuid = uuid.uuid4()
obj_before = {
'date': datetime.date(2016, 8, 12),
'datetime': datetime.datetime(2016, 8, 12, 13, 44, 47, 575981),
'decimal': Decimal('10.54'),
'uuid': some_uuid,
}
obj_after = {
'date': '2016-08-12',
'datetime': '2016-08-12T13:44:47.575',
'decimal': '10.54',
'uuid': str(some_uuid),
}
JSONModel.objects.create(field_custom=obj_before)
loaded = JSONModel.objects.get()
self.assertEqual(loaded.field_custom, obj_after)
class TestQuerying(PostgreSQLTestCase):
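    # The lookups below cover object keys (field__a), array indexes (field__0),
    # nested paths (field__k__l), and the containment/has_key operators.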
@classmethod
def setUpTestData(cls):
cls.objs = [
JSONModel.objects.create(field=None),
JSONModel.objects.create(field=True),
JSONModel.objects.create(field=False),
JSONModel.objects.create(field='yes'),
JSONModel.objects.create(field=7),
JSONModel.objects.create(field=[]),
JSONModel.objects.create(field={}),
JSONModel.objects.create(field={
'a': 'b',
'c': 1,
}),
JSONModel.objects.create(field={
'a': 'b',
'c': 1,
'd': ['e', {'f': 'g'}],
'h': True,
'i': False,
'j': None,
'k': {'l': 'm'},
}),
JSONModel.objects.create(field=[1, [2]]),
JSONModel.objects.create(field={
'k': True,
'l': False,
}),
JSONModel.objects.create(field={'foo': 'bar'}),
]
def test_exact(self):
self.assertSequenceEqual(
JSONModel.objects.filter(field__exact={}),
[self.objs[6]]
)
def test_exact_complex(self):
self.assertSequenceEqual(
JSONModel.objects.filter(field__exact={'a': 'b', 'c': 1}),
[self.objs[7]]
)
def test_isnull(self):
self.assertSequenceEqual(
JSONModel.objects.filter(field__isnull=True),
[self.objs[0]]
)
def test_ordering_by_transform(self):
objs = [
JSONModel.objects.create(field={'ord': 93, 'name': 'bar'}),
JSONModel.objects.create(field={'ord': 22.1, 'name': 'foo'}),
JSONModel.objects.create(field={'ord': -1, 'name': 'baz'}),
JSONModel.objects.create(field={'ord': 21.931902, 'name': 'spam'}),
JSONModel.objects.create(field={'ord': -100291029, 'name': 'eggs'}),
]
query = JSONModel.objects.filter(field__name__isnull=False).order_by('field__ord')
self.assertSequenceEqual(query, [objs[4], objs[2], objs[3], objs[1], objs[0]])
def test_deep_values(self):
query = JSONModel.objects.values_list('field__k__l')
self.assertSequenceEqual(
query,
[
(None,), (None,), (None,), (None,), (None,), (None,),
(None,), (None,), ('m',), (None,), (None,), (None,),
]
)
def test_deep_distinct(self):
query = JSONModel.objects.distinct('field__k__l').values_list('field__k__l')
self.assertSequenceEqual(query, [('m',), (None,)])
def test_isnull_key(self):
# key__isnull works the same as has_key='key'.
self.assertSequenceEqual(
JSONModel.objects.filter(field__a__isnull=True),
self.objs[:7] + self.objs[9:]
)
self.assertSequenceEqual(
JSONModel.objects.filter(field__a__isnull=False),
[self.objs[7], self.objs[8]]
)
def test_none_key(self):
self.assertSequenceEqual(JSONModel.objects.filter(field__j=None), [self.objs[8]])
def test_none_key_exclude(self):
obj = JSONModel.objects.create(field={'j': 1})
self.assertSequenceEqual(JSONModel.objects.exclude(field__j=None), [obj])
def test_isnull_key_or_none(self):
obj = JSONModel.objects.create(field={'a': None})
self.assertSequenceEqual(
JSONModel.objects.filter(Q(field__a__isnull=True) | Q(field__a=None)),
self.objs[:7] + self.objs[9:] + [obj]
)
def test_contains(self):
self.assertSequenceEqual(
JSONModel.objects.filter(field__contains={'a': 'b'}),
[self.objs[7], self.objs[8]]
)
def test_contained_by(self):
self.assertSequenceEqual(
JSONModel.objects.filter(field__contained_by={'a': 'b', 'c': 1, 'h': True}),
[self.objs[6], self.objs[7]]
)
def test_has_key(self):
self.assertSequenceEqual(
JSONModel.objects.filter(field__has_key='a'),
[self.objs[7], self.objs[8]]
)
def test_has_keys(self):
self.assertSequenceEqual(
JSONModel.objects.filter(field__has_keys=['a', 'c', 'h']),
[self.objs[8]]
)
def test_has_any_keys(self):
self.assertSequenceEqual(
JSONModel.objects.filter(field__has_any_keys=['c', 'l']),
[self.objs[7], self.objs[8], self.objs[10]]
)
def test_shallow_list_lookup(self):
self.assertSequenceEqual(
JSONModel.objects.filter(field__0=1),
[self.objs[9]]
)
def test_shallow_obj_lookup(self):
self.assertSequenceEqual(
JSONModel.objects.filter(field__a='b'),
[self.objs[7], self.objs[8]]
)
def test_deep_lookup_objs(self):
self.assertSequenceEqual(
JSONModel.objects.filter(field__k__l='m'),
[self.objs[8]]
)
def test_shallow_lookup_obj_target(self):
self.assertSequenceEqual(
JSONModel.objects.filter(field__k={'l': 'm'}),
[self.objs[8]]
)
def test_deep_lookup_array(self):
self.assertSequenceEqual(
JSONModel.objects.filter(field__1__0=2),
[self.objs[9]]
)
def test_deep_lookup_mixed(self):
self.assertSequenceEqual(
JSONModel.objects.filter(field__d__1__f='g'),
[self.objs[8]]
)
def test_deep_lookup_transform(self):
self.assertSequenceEqual(
JSONModel.objects.filter(field__c__gt=1),
[]
)
self.assertSequenceEqual(
JSONModel.objects.filter(field__c__lt=5),
[self.objs[7], self.objs[8]]
)
def test_usage_in_subquery(self):
self.assertSequenceEqual(
JSONModel.objects.filter(id__in=JSONModel.objects.filter(field__c=1)),
self.objs[7:9]
)
def test_iexact(self):
self.assertTrue(JSONModel.objects.filter(field__foo__iexact='BaR').exists())
self.assertFalse(JSONModel.objects.filter(field__foo__iexact='"BaR"').exists())
def test_icontains(self):
self.assertFalse(JSONModel.objects.filter(field__foo__icontains='"bar"').exists())
def test_startswith(self):
self.assertTrue(JSONModel.objects.filter(field__foo__startswith='b').exists())
def test_istartswith(self):
self.assertTrue(JSONModel.objects.filter(field__foo__istartswith='B').exists())
def test_endswith(self):
self.assertTrue(JSONModel.objects.filter(field__foo__endswith='r').exists())
def test_iendswith(self):
self.assertTrue(JSONModel.objects.filter(field__foo__iendswith='R').exists())
def test_regex(self):
self.assertTrue(JSONModel.objects.filter(field__foo__regex=r'^bar$').exists())
def test_iregex(self):
self.assertTrue(JSONModel.objects.filter(field__foo__iregex=r'^bAr$').exists())
@isolate_apps('postgres_tests')
class TestChecks(PostgreSQLSimpleTestCase):
def test_invalid_default(self):
class MyModel(PostgreSQLModel):
field = JSONField(default={})
model = MyModel()
self.assertEqual(model.check(), [
checks.Warning(
msg=(
"JSONField default should be a callable instead of an "
"instance so that it's not shared between all field "
"instances."
),
hint='Use a callable instead, e.g., use `dict` instead of `{}`.',
obj=MyModel._meta.get_field('field'),
id='postgres.E003',
)
])
def test_valid_default(self):
class MyModel(PostgreSQLModel):
field = JSONField(default=dict)
model = MyModel()
self.assertEqual(model.check(), [])
def test_valid_default_none(self):
class MyModel(PostgreSQLModel):
field = JSONField(default=None)
model = MyModel()
self.assertEqual(model.check(), [])
class TestSerialization(PostgreSQLSimpleTestCase):
test_data = (
'[{"fields": {"field": %s, "field_custom": null}, '
'"model": "postgres_tests.jsonmodel", "pk": null}]'
)
test_values = (
# (Python value, serialized value),
({'a': 'b', 'c': None}, '{"a": "b", "c": null}'),
('abc', '"abc"'),
('{"a": "a"}', '"{\\"a\\": \\"a\\"}"'),
)
def test_dumping(self):
for value, serialized in self.test_values:
with self.subTest(value=value):
instance = JSONModel(field=value)
data = serializers.serialize('json', [instance])
self.assertJSONEqual(data, self.test_data % serialized)
def test_loading(self):
for value, serialized in self.test_values:
with self.subTest(value=value):
instance = list(serializers.deserialize('json', self.test_data % serialized))[0].object
self.assertEqual(instance.field, value)
class TestValidation(PostgreSQLSimpleTestCase):
def test_not_serializable(self):
field = JSONField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean(datetime.timedelta(days=1), None)
self.assertEqual(cm.exception.code, 'invalid')
self.assertEqual(cm.exception.message % cm.exception.params, "Value must be valid JSON.")
def test_custom_encoder(self):
with self.assertRaisesMessage(ValueError, "The encoder parameter must be a callable object."):
field = JSONField(encoder=DjangoJSONEncoder())
field = JSONField(encoder=DjangoJSONEncoder)
self.assertEqual(field.clean(datetime.timedelta(days=1), None), datetime.timedelta(days=1))
class TestFormField(PostgreSQLSimpleTestCase):
def test_valid(self):
field = forms.JSONField()
value = field.clean('{"a": "b"}')
self.assertEqual(value, {'a': 'b'})
def test_valid_empty(self):
field = forms.JSONField(required=False)
value = field.clean('')
self.assertIsNone(value)
def test_invalid(self):
field = forms.JSONField()
with self.assertRaises(exceptions.ValidationError) as cm:
field.clean('{some badly formed: json}')
self.assertEqual(cm.exception.messages[0], "'{some badly formed: json}' value must be valid JSON.")
def test_formfield(self):
model_field = JSONField()
form_field = model_field.formfield()
self.assertIsInstance(form_field, forms.JSONField)
def test_formfield_disabled(self):
class JsonForm(Form):
name = CharField()
jfield = forms.JSONField(disabled=True)
form = JsonForm({'name': 'xyz', 'jfield': '["bar"]'}, initial={'jfield': ['foo']})
self.assertIn('["foo"]</textarea>', form.as_p())
def test_prepare_value(self):
field = forms.JSONField()
self.assertEqual(field.prepare_value({'a': 'b'}), '{"a": "b"}')
self.assertEqual(field.prepare_value(None), 'null')
self.assertEqual(field.prepare_value('foo'), '"foo"')
def test_redisplay_wrong_input(self):
"""
When displaying a bound form (typically due to invalid input), the form
should not overquote JSONField inputs.
"""
class JsonForm(Form):
name = CharField(max_length=2)
jfield = forms.JSONField()
# JSONField input is fine, name is too long
form = JsonForm({'name': 'xyz', 'jfield': '["foo"]'})
self.assertIn('["foo"]</textarea>', form.as_p())
# This time, the JSONField input is wrong
form = JsonForm({'name': 'xy', 'jfield': '{"foo"}'})
# Appears once in the textarea and once in the error message
self.assertEqual(form.as_p().count(escape('{"foo"}')), 2)
def test_widget(self):
"""The default widget of a JSONField is a Textarea."""
field = forms.JSONField()
self.assertIsInstance(field.widget, widgets.Textarea)
def test_custom_widget_kwarg(self):
"""The widget can be overridden with a kwarg."""
field = forms.JSONField(widget=widgets.Input)
self.assertIsInstance(field.widget, widgets.Input)
def test_custom_widget_attribute(self):
"""The widget can be overridden with an attribute."""
class CustomJSONField(forms.JSONField):
widget = widgets.Input
field = CustomJSONField()
self.assertIsInstance(field.widget, widgets.Input)
def test_already_converted_value(self):
field = forms.JSONField(required=False)
tests = [
'["a", "b", "c"]', '{"a": 1, "b": 2}', '1', '1.5', '"foo"',
'true', 'false', 'null',
]
for json_string in tests:
val = field.clean(json_string)
self.assertEqual(field.clean(val), val)
def test_has_changed(self):
field = forms.JSONField()
self.assertIs(field.has_changed({'a': True}, '{"a": 1}'), True)
self.assertIs(field.has_changed({'a': 1, 'b': 2}, '{"b": 2, "a": 1}'), False)
|
b46f5a3745537d286dd06f6143a0e11a455c32c24dcf05511567618954585367 | from django.urls import path
from . import feeds
urlpatterns = [
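    # Each route maps to a Feed subclass from feeds.py; Feed instances are callable
    # and serve directly as views.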
path('syndication/rss2/', feeds.TestRss2Feed()),
path(
'syndication/rss2/guid_ispermalink_true/',
feeds.TestRss2FeedWithGuidIsPermaLinkTrue()),
path(
'syndication/rss2/guid_ispermalink_false/',
feeds.TestRss2FeedWithGuidIsPermaLinkFalse()),
path('syndication/rss091/', feeds.TestRss091Feed()),
path('syndication/no_pubdate/', feeds.TestNoPubdateFeed()),
path('syndication/atom/', feeds.TestAtomFeed()),
path('syndication/latest/', feeds.TestLatestFeed()),
path('syndication/custom/', feeds.TestCustomFeed()),
path('syndication/naive-dates/', feeds.NaiveDatesFeed()),
path('syndication/aware-dates/', feeds.TZAwareDatesFeed()),
path('syndication/feedurl/', feeds.TestFeedUrlFeed()),
path('syndication/articles/', feeds.ArticlesFeed()),
path('syndication/template/', feeds.TemplateFeed()),
path('syndication/template_context/', feeds.TemplateContextFeed()),
path('syndication/rss2/single-enclosure/', feeds.TestSingleEnclosureRSSFeed()),
path('syndication/rss2/multiple-enclosure/', feeds.TestMultipleEnclosureRSSFeed()),
path('syndication/atom/single-enclosure/', feeds.TestSingleEnclosureAtomFeed()),
path('syndication/atom/multiple-enclosure/', feeds.TestMultipleEnclosureAtomFeed()),
]
|
a3272dc010fba6cdd137444ef24efee03af8669599e4345a22244bf7a3aac329 | import datetime
from decimal import Decimal
from django.core.exceptions import FieldDoesNotExist, FieldError
from django.db.models import (
BooleanField, CharField, Count, DateTimeField, ExpressionWrapper, F, Func,
IntegerField, NullBooleanField, OuterRef, Q, Subquery, Sum, Value,
)
from django.db.models.expressions import RawSQL
from django.db.models.functions import Length, Lower
from django.test import TestCase, skipUnlessDBFeature
from .models import (
Author, Book, Company, DepartmentStore, Employee, Publisher, Store, Ticket,
)
def cxOracle_py3_bug(func):
"""
There's a bug in Django/cx_Oracle with respect to string handling under
Python 3 (essentially, they treat Python 3 strings as Python 2 strings
rather than unicode). This makes some tests here fail under Python 3, so
we mark them as expected failures until someone fixes them in #23843.
"""
from unittest import expectedFailure
from django.db import connection
return expectedFailure(func) if connection.vendor == 'oracle' else func
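# The decorator is applied below (e.g. to test_custom_functions) so that the
# affected string-comparison tests are recorded as expected failures when the
# active connection's vendor is 'oracle' and run normally on other backends.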
class NonAggregateAnnotationTestCase(TestCase):
@classmethod
def setUpTestData(cls):
cls.a1 = Author.objects.create(name='Adrian Holovaty', age=34)
cls.a2 = Author.objects.create(name='Jacob Kaplan-Moss', age=35)
cls.a3 = Author.objects.create(name='Brad Dayley', age=45)
cls.a4 = Author.objects.create(name='James Bennett', age=29)
cls.a5 = Author.objects.create(name='Jeffrey Forcier', age=37)
cls.a6 = Author.objects.create(name='Paul Bissex', age=29)
cls.a7 = Author.objects.create(name='Wesley J. Chun', age=25)
cls.a8 = Author.objects.create(name='Peter Norvig', age=57)
cls.a9 = Author.objects.create(name='Stuart Russell', age=46)
cls.a1.friends.add(cls.a2, cls.a4)
cls.a2.friends.add(cls.a1, cls.a7)
cls.a4.friends.add(cls.a1)
cls.a5.friends.add(cls.a6, cls.a7)
cls.a6.friends.add(cls.a5, cls.a7)
cls.a7.friends.add(cls.a2, cls.a5, cls.a6)
cls.a8.friends.add(cls.a9)
cls.a9.friends.add(cls.a8)
cls.p1 = Publisher.objects.create(name='Apress', num_awards=3)
cls.p2 = Publisher.objects.create(name='Sams', num_awards=1)
cls.p3 = Publisher.objects.create(name='Prentice Hall', num_awards=7)
cls.p4 = Publisher.objects.create(name='Morgan Kaufmann', num_awards=9)
cls.p5 = Publisher.objects.create(name="Jonno's House of Books", num_awards=0)
cls.b1 = Book.objects.create(
isbn='159059725', name='The Definitive Guide to Django: Web Development Done Right',
pages=447, rating=4.5, price=Decimal('30.00'), contact=cls.a1, publisher=cls.p1,
pubdate=datetime.date(2007, 12, 6)
)
cls.b2 = Book.objects.create(
isbn='067232959', name='Sams Teach Yourself Django in 24 Hours',
pages=528, rating=3.0, price=Decimal('23.09'), contact=cls.a3, publisher=cls.p2,
pubdate=datetime.date(2008, 3, 3)
)
cls.b3 = Book.objects.create(
isbn='159059996', name='Practical Django Projects',
pages=300, rating=4.0, price=Decimal('29.69'), contact=cls.a4, publisher=cls.p1,
pubdate=datetime.date(2008, 6, 23)
)
cls.b4 = Book.objects.create(
isbn='013235613', name='Python Web Development with Django',
pages=350, rating=4.0, price=Decimal('29.69'), contact=cls.a5, publisher=cls.p3,
pubdate=datetime.date(2008, 11, 3)
)
cls.b5 = Book.objects.create(
isbn='013790395', name='Artificial Intelligence: A Modern Approach',
pages=1132, rating=4.0, price=Decimal('82.80'), contact=cls.a8, publisher=cls.p3,
pubdate=datetime.date(1995, 1, 15)
)
cls.b6 = Book.objects.create(
isbn='155860191', name='Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp',
pages=946, rating=5.0, price=Decimal('75.00'), contact=cls.a8, publisher=cls.p4,
pubdate=datetime.date(1991, 10, 15)
)
cls.b1.authors.add(cls.a1, cls.a2)
cls.b2.authors.add(cls.a3)
cls.b3.authors.add(cls.a4)
cls.b4.authors.add(cls.a5, cls.a6, cls.a7)
cls.b5.authors.add(cls.a8, cls.a9)
cls.b6.authors.add(cls.a8)
s1 = Store.objects.create(
name='Amazon.com',
original_opening=datetime.datetime(1994, 4, 23, 9, 17, 42),
friday_night_closing=datetime.time(23, 59, 59)
)
s2 = Store.objects.create(
name='Books.com',
original_opening=datetime.datetime(2001, 3, 15, 11, 23, 37),
friday_night_closing=datetime.time(23, 59, 59)
)
s3 = Store.objects.create(
name="Mamma and Pappa's Books",
original_opening=datetime.datetime(1945, 4, 25, 16, 24, 14),
friday_night_closing=datetime.time(21, 30)
)
s1.books.add(cls.b1, cls.b2, cls.b3, cls.b4, cls.b5, cls.b6)
s2.books.add(cls.b1, cls.b3, cls.b5, cls.b6)
s3.books.add(cls.b3, cls.b4, cls.b6)
def test_basic_annotation(self):
books = Book.objects.annotate(
is_book=Value(1, output_field=IntegerField()))
for book in books:
self.assertEqual(book.is_book, 1)
def test_basic_f_annotation(self):
books = Book.objects.annotate(another_rating=F('rating'))
for book in books:
self.assertEqual(book.another_rating, book.rating)
def test_joined_annotation(self):
books = Book.objects.select_related('publisher').annotate(
num_awards=F('publisher__num_awards'))
for book in books:
self.assertEqual(book.num_awards, book.publisher.num_awards)
def test_mixed_type_annotation_date_interval(self):
active = datetime.datetime(2015, 3, 20, 14, 0, 0)
duration = datetime.timedelta(hours=1)
expires = datetime.datetime(2015, 3, 20, 14, 0, 0) + duration
Ticket.objects.create(active_at=active, duration=duration)
t = Ticket.objects.annotate(
expires=ExpressionWrapper(F('active_at') + F('duration'), output_field=DateTimeField())
).first()
self.assertEqual(t.expires, expires)
def test_mixed_type_annotation_numbers(self):
test = self.b1
b = Book.objects.annotate(
combined=ExpressionWrapper(F('pages') + F('rating'), output_field=IntegerField())
).get(isbn=test.isbn)
combined = int(test.pages + test.rating)
self.assertEqual(b.combined, combined)
def test_empty_expression_annotation(self):
books = Book.objects.annotate(
selected=ExpressionWrapper(Q(pk__in=[]), output_field=BooleanField())
)
self.assertEqual(len(books), Book.objects.count())
self.assertTrue(all(not book.selected for book in books))
books = Book.objects.annotate(
selected=ExpressionWrapper(Q(pk__in=Book.objects.none()), output_field=BooleanField())
)
self.assertEqual(len(books), Book.objects.count())
self.assertTrue(all(not book.selected for book in books))
def test_annotate_with_aggregation(self):
books = Book.objects.annotate(
is_book=Value(1, output_field=IntegerField()),
rating_count=Count('rating'))
for book in books:
self.assertEqual(book.is_book, 1)
self.assertEqual(book.rating_count, 1)
def test_aggregate_over_annotation(self):
agg = Author.objects.annotate(other_age=F('age')).aggregate(otherage_sum=Sum('other_age'))
other_agg = Author.objects.aggregate(age_sum=Sum('age'))
self.assertEqual(agg['otherage_sum'], other_agg['age_sum'])
@skipUnlessDBFeature('can_distinct_on_fields')
def test_distinct_on_with_annotation(self):
store = Store.objects.create(
name='test store',
original_opening=datetime.datetime.now(),
friday_night_closing=datetime.time(21, 00, 00),
)
names = [
'Theodore Roosevelt',
'Eleanor Roosevelt',
'Franklin Roosevelt',
'Ned Stark',
'Catelyn Stark',
]
for name in names:
Employee.objects.create(
store=store,
first_name=name.split()[0],
last_name=name.split()[1],
age=30, salary=2000,
)
people = Employee.objects.annotate(
name_lower=Lower('last_name'),
).distinct('name_lower')
self.assertEqual({p.last_name for p in people}, {'Stark', 'Roosevelt'})
self.assertEqual(len(people), 2)
people2 = Employee.objects.annotate(
test_alias=F('store__name'),
).distinct('test_alias')
self.assertEqual(len(people2), 1)
lengths = Employee.objects.annotate(
name_len=Length('first_name'),
).distinct('name_len').values_list('name_len', flat=True)
self.assertSequenceEqual(lengths, [3, 7, 8])
def test_filter_annotation(self):
books = Book.objects.annotate(
is_book=Value(1, output_field=IntegerField())
).filter(is_book=1)
for book in books:
self.assertEqual(book.is_book, 1)
def test_filter_annotation_with_f(self):
books = Book.objects.annotate(
other_rating=F('rating')
).filter(other_rating=3.5)
for book in books:
self.assertEqual(book.other_rating, 3.5)
def test_filter_annotation_with_double_f(self):
books = Book.objects.annotate(
other_rating=F('rating')
).filter(other_rating=F('rating'))
for book in books:
self.assertEqual(book.other_rating, book.rating)
def test_filter_agg_with_double_f(self):
books = Book.objects.annotate(
sum_rating=Sum('rating')
).filter(sum_rating=F('sum_rating'))
for book in books:
self.assertEqual(book.sum_rating, book.rating)
def test_filter_wrong_annotation(self):
with self.assertRaisesMessage(FieldError, "Cannot resolve keyword 'nope' into field."):
list(Book.objects.annotate(
sum_rating=Sum('rating')
).filter(sum_rating=F('nope')))
def test_decimal_annotation(self):
salary = Decimal(10) ** -Employee._meta.get_field('salary').decimal_places
Employee.objects.create(
first_name='Max',
last_name='Paine',
store=Store.objects.first(),
age=23,
salary=salary,
)
self.assertEqual(
Employee.objects.annotate(new_salary=F('salary') / 10).get().new_salary,
salary / 10,
)
def test_filter_decimal_annotation(self):
qs = Book.objects.annotate(new_price=F('price') + 1).filter(new_price=Decimal(31)).values_list('new_price')
self.assertEqual(qs.get(), (Decimal(31),))
def test_combined_annotation_commutative(self):
book1 = Book.objects.annotate(adjusted_rating=F('rating') + 2).get(pk=self.b1.pk)
book2 = Book.objects.annotate(adjusted_rating=2 + F('rating')).get(pk=self.b1.pk)
self.assertEqual(book1.adjusted_rating, book2.adjusted_rating)
book1 = Book.objects.annotate(adjusted_rating=F('rating') + None).get(pk=self.b1.pk)
book2 = Book.objects.annotate(adjusted_rating=None + F('rating')).get(pk=self.b1.pk)
self.assertEqual(book1.adjusted_rating, book2.adjusted_rating)
def test_update_with_annotation(self):
book_preupdate = Book.objects.get(pk=self.b2.pk)
Book.objects.annotate(other_rating=F('rating') - 1).update(rating=F('other_rating'))
book_postupdate = Book.objects.get(pk=self.b2.pk)
self.assertEqual(book_preupdate.rating - 1, book_postupdate.rating)
def test_annotation_with_m2m(self):
books = Book.objects.annotate(author_age=F('authors__age')).filter(pk=self.b1.pk).order_by('author_age')
self.assertEqual(books[0].author_age, 34)
self.assertEqual(books[1].author_age, 35)
def test_annotation_reverse_m2m(self):
books = Book.objects.annotate(
store_name=F('store__name'),
).filter(
name='Practical Django Projects',
).order_by('store_name')
self.assertQuerysetEqual(
books, [
'Amazon.com',
'Books.com',
'Mamma and Pappa\'s Books'
],
lambda b: b.store_name
)
def test_values_annotation(self):
"""
Annotations can reference fields in a values clause,
and contribute to an existing values clause.
"""
# annotate references a field in values()
qs = Book.objects.values('rating').annotate(other_rating=F('rating') - 1)
book = qs.get(pk=self.b1.pk)
self.assertEqual(book['rating'] - 1, book['other_rating'])
# filter refs the annotated value
book = qs.get(other_rating=4)
self.assertEqual(book['other_rating'], 4)
# can annotate an existing values with a new field
book = qs.annotate(other_isbn=F('isbn')).get(other_rating=4)
self.assertEqual(book['other_rating'], 4)
self.assertEqual(book['other_isbn'], '155860191')
def test_values_with_pk_annotation(self):
# annotate references a field in values() with pk
publishers = Publisher.objects.values('id', 'book__rating').annotate(total=Sum('book__rating'))
for publisher in publishers.filter(pk=self.p1.pk):
self.assertEqual(publisher['book__rating'], publisher['total'])
@skipUnlessDBFeature('allows_group_by_pk')
def test_rawsql_group_by_collapse(self):
raw = RawSQL('SELECT MIN(id) FROM annotations_book', [])
qs = Author.objects.values('id').annotate(
min_book_id=raw,
count_friends=Count('friends'),
).order_by()
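        # With allows_group_by_pk, the compiler is expected to collapse the
        # GROUP BY clause down to the primary key, so the RawSQL annotation
        # should not reappear among the group_by expressions checked below.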
_, _, group_by = qs.query.get_compiler(using='default').pre_sql_setup()
self.assertEqual(len(group_by), 1)
self.assertNotEqual(raw, group_by[0])
def test_defer_annotation(self):
"""
Deferred attributes can be referenced by an annotation,
but they are not themselves deferred, and cannot be deferred.
"""
qs = Book.objects.defer('rating').annotate(other_rating=F('rating') - 1)
with self.assertNumQueries(2):
book = qs.get(other_rating=4)
self.assertEqual(book.rating, 5)
self.assertEqual(book.other_rating, 4)
with self.assertRaisesMessage(FieldDoesNotExist, "Book has no field named 'other_rating'"):
book = qs.defer('other_rating').get(other_rating=4)
def test_mti_annotations(self):
"""
Fields on an inherited model can be referenced by an
annotated field.
"""
d = DepartmentStore.objects.create(
name='Angus & Robinson',
original_opening=datetime.date(2014, 3, 8),
friday_night_closing=datetime.time(21, 00, 00),
chain='Westfield'
)
books = Book.objects.filter(rating__gt=4)
for b in books:
d.books.add(b)
qs = DepartmentStore.objects.annotate(
other_name=F('name'),
other_chain=F('chain'),
is_open=Value(True, BooleanField()),
book_isbn=F('books__isbn')
).order_by('book_isbn').filter(chain='Westfield')
self.assertQuerysetEqual(
qs, [
('Angus & Robinson', 'Westfield', True, '155860191'),
('Angus & Robinson', 'Westfield', True, '159059725')
],
lambda d: (d.other_name, d.other_chain, d.is_open, d.book_isbn)
)
def test_null_annotation(self):
"""
Annotating None onto a model round-trips
"""
book = Book.objects.annotate(no_value=Value(None, output_field=IntegerField())).first()
self.assertIsNone(book.no_value)
def test_order_by_annotation(self):
authors = Author.objects.annotate(other_age=F('age')).order_by('other_age')
self.assertQuerysetEqual(
authors, [
25, 29, 29, 34, 35, 37, 45, 46, 57,
],
lambda a: a.other_age
)
def test_order_by_aggregate(self):
authors = Author.objects.values('age').annotate(age_count=Count('age')).order_by('age_count', 'age')
self.assertQuerysetEqual(
authors, [
(25, 1), (34, 1), (35, 1), (37, 1), (45, 1), (46, 1), (57, 1), (29, 2),
],
lambda a: (a['age'], a['age_count'])
)
def test_annotate_exists(self):
authors = Author.objects.annotate(c=Count('id')).filter(c__gt=1)
self.assertFalse(authors.exists())
def test_column_field_ordering(self):
"""
Columns are aligned in the correct order for resolve_columns. This test
will fail on MySQL if column ordering is out. Column fields should be
aligned as:
1. extra_select
2. model_fields
3. annotation_fields
4. model_related_fields
"""
store = Store.objects.first()
Employee.objects.create(id=1, first_name='Max', manager=True, last_name='Paine',
store=store, age=23, salary=Decimal(50000.00))
Employee.objects.create(id=2, first_name='Buffy', manager=False, last_name='Summers',
store=store, age=18, salary=Decimal(40000.00))
qs = Employee.objects.extra(
select={'random_value': '42'}
).select_related('store').annotate(
annotated_value=Value(17, output_field=IntegerField())
)
rows = [
(1, 'Max', True, 42, 'Paine', 23, Decimal(50000.00), store.name, 17),
(2, 'Buffy', False, 42, 'Summers', 18, Decimal(40000.00), store.name, 17)
]
self.assertQuerysetEqual(
qs.order_by('id'), rows,
lambda e: (
e.id, e.first_name, e.manager, e.random_value, e.last_name, e.age,
e.salary, e.store.name, e.annotated_value))
def test_column_field_ordering_with_deferred(self):
store = Store.objects.first()
Employee.objects.create(id=1, first_name='Max', manager=True, last_name='Paine',
store=store, age=23, salary=Decimal(50000.00))
Employee.objects.create(id=2, first_name='Buffy', manager=False, last_name='Summers',
store=store, age=18, salary=Decimal(40000.00))
qs = Employee.objects.extra(
select={'random_value': '42'}
).select_related('store').annotate(
annotated_value=Value(17, output_field=IntegerField())
)
rows = [
(1, 'Max', True, 42, 'Paine', 23, Decimal(50000.00), store.name, 17),
(2, 'Buffy', False, 42, 'Summers', 18, Decimal(40000.00), store.name, 17)
]
# and we respect deferred columns!
self.assertQuerysetEqual(
qs.defer('age').order_by('id'), rows,
lambda e: (
e.id, e.first_name, e.manager, e.random_value, e.last_name, e.age,
e.salary, e.store.name, e.annotated_value))
@cxOracle_py3_bug
def test_custom_functions(self):
Company(name='Apple', motto=None, ticker_name='APPL', description='Beautiful Devices').save()
Company(name='Django Software Foundation', motto=None, ticker_name=None, description=None).save()
Company(name='Google', motto='Do No Evil', ticker_name='GOOG', description='Internet Company').save()
Company(name='Yahoo', motto=None, ticker_name=None, description='Internet Company').save()
qs = Company.objects.annotate(
tagline=Func(
F('motto'),
F('ticker_name'),
F('description'),
Value('No Tag'),
function='COALESCE'
)
).order_by('name')
self.assertQuerysetEqual(
qs, [
('Apple', 'APPL'),
('Django Software Foundation', 'No Tag'),
('Google', 'Do No Evil'),
('Yahoo', 'Internet Company')
],
lambda c: (c.name, c.tagline)
)
@cxOracle_py3_bug
def test_custom_functions_can_ref_other_functions(self):
Company(name='Apple', motto=None, ticker_name='APPL', description='Beautiful Devices').save()
Company(name='Django Software Foundation', motto=None, ticker_name=None, description=None).save()
Company(name='Google', motto='Do No Evil', ticker_name='GOOG', description='Internet Company').save()
Company(name='Yahoo', motto=None, ticker_name=None, description='Internet Company').save()
class Lower(Func):
function = 'LOWER'
qs = Company.objects.annotate(
tagline=Func(
F('motto'),
F('ticker_name'),
F('description'),
Value('No Tag'),
function='COALESCE',
)
).annotate(
tagline_lower=Lower(F('tagline'), output_field=CharField())
).order_by('name')
# LOWER function supported by:
# oracle, postgres, mysql, sqlite, sqlserver
self.assertQuerysetEqual(
qs, [
('Apple', 'APPL'.lower()),
('Django Software Foundation', 'No Tag'.lower()),
('Google', 'Do No Evil'.lower()),
('Yahoo', 'Internet Company'.lower())
],
lambda c: (c.name, c.tagline_lower)
)
def test_boolean_value_annotation(self):
books = Book.objects.annotate(
is_book=Value(True, output_field=BooleanField()),
is_pony=Value(False, output_field=BooleanField()),
is_none=Value(None, output_field=BooleanField(null=True)),
is_none_old=Value(None, output_field=NullBooleanField()),
)
self.assertGreater(len(books), 0)
for book in books:
self.assertIs(book.is_book, True)
self.assertIs(book.is_pony, False)
self.assertIsNone(book.is_none)
self.assertIsNone(book.is_none_old)
def test_annotation_in_f_grouped_by_annotation(self):
qs = (
Publisher.objects.annotate(multiplier=Value(3))
# group by option => sum of value * multiplier
.values('name')
.annotate(multiplied_value_sum=Sum(F('multiplier') * F('num_awards')))
.order_by()
)
self.assertCountEqual(
qs, [
{'multiplied_value_sum': 9, 'name': 'Apress'},
{'multiplied_value_sum': 0, 'name': "Jonno's House of Books"},
{'multiplied_value_sum': 27, 'name': 'Morgan Kaufmann'},
{'multiplied_value_sum': 21, 'name': 'Prentice Hall'},
{'multiplied_value_sum': 3, 'name': 'Sams'},
]
)
def test_arguments_must_be_expressions(self):
msg = 'QuerySet.annotate() received non-expression(s): %s.'
with self.assertRaisesMessage(TypeError, msg % BooleanField()):
Book.objects.annotate(BooleanField())
with self.assertRaisesMessage(TypeError, msg % True):
Book.objects.annotate(is_book=True)
with self.assertRaisesMessage(TypeError, msg % ', '.join([str(BooleanField()), 'True'])):
Book.objects.annotate(BooleanField(), Value(False), is_book=True)
def test_chaining_annotation_filter_with_m2m(self):
qs = Author.objects.filter(
name='Adrian Holovaty',
friends__age=35,
).annotate(
jacob_name=F('friends__name'),
).filter(
friends__age=29,
).annotate(
james_name=F('friends__name'),
).values('jacob_name', 'james_name')
self.assertCountEqual(
qs,
[{'jacob_name': 'Jacob Kaplan-Moss', 'james_name': 'James Bennett'}],
)
@skipUnlessDBFeature('supports_subqueries_in_group_by')
def test_annotation_filter_with_subquery(self):
long_books_qs = Book.objects.filter(
publisher=OuterRef('pk'),
pages__gt=400,
).values('publisher').annotate(count=Count('pk')).values('count')
publisher_books_qs = Publisher.objects.annotate(
total_books=Count('book'),
).filter(
total_books=Subquery(long_books_qs, output_field=IntegerField()),
).values('name')
self.assertCountEqual(publisher_books_qs, [{'name': 'Sams'}, {'name': 'Morgan Kaufmann'}])
|
a49d0eb7fac4360a31a9df12849af89bbc7959c673bed5b3e234a00779f723ca | import re
from io import StringIO
from unittest import mock, skipUnless
from django.core.management import call_command
from django.db import connection
from django.db.backends.base.introspection import TableInfo
from django.test import TestCase, TransactionTestCase, skipUnlessDBFeature
from .models import PeopleMoreData
def inspectdb_tables_only(table_name):
"""
Limit introspection to tables created for models of this app.
Some databases such as Oracle are extremely slow at introspection.
"""
return table_name.startswith('inspectdb_')
def special_table_only(table_name):
return table_name.startswith('inspectdb_special')
class InspectDBTestCase(TestCase):
unique_re = re.compile(r'.*unique_together = \((.+),\).*')
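    # unique_re captures the tuple list from the generated Meta, e.g. from a
    # line such as:  unique_together = (('field1', 'field2'),)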
def test_stealth_table_name_filter_option(self):
out = StringIO()
call_command('inspectdb', table_name_filter=inspectdb_tables_only, stdout=out)
error_message = "inspectdb has examined a table that should have been filtered out."
        # contrib.contenttypes is one of the apps always installed when running
        # the Django test suite; check that one of its tables hasn't been
        # inspected.
self.assertNotIn("class DjangoContentType(models.Model):", out.getvalue(), msg=error_message)
def test_table_option(self):
"""
inspectdb can inspect a subset of tables by passing the table names as
arguments.
"""
out = StringIO()
call_command('inspectdb', 'inspectdb_people', stdout=out)
output = out.getvalue()
self.assertIn('class InspectdbPeople(models.Model):', output)
self.assertNotIn("InspectdbPeopledata", output)
def make_field_type_asserter(self):
"""Call inspectdb and return a function to validate a field type in its output"""
out = StringIO()
call_command('inspectdb', 'inspectdb_columntypes', stdout=out)
output = out.getvalue()
def assertFieldType(name, definition):
out_def = re.search(r'^\s*%s = (models.*)$' % name, output, re.MULTILINE).groups()[0]
self.assertEqual(definition, out_def)
return assertFieldType
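    # assertFieldType() compares `definition` against the right-hand side of a
    # generated field line, e.g. "char_field = models.CharField(max_length=10)"
    # yields "models.CharField(max_length=10)".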
def test_field_types(self):
"""Test introspection of various Django field types"""
assertFieldType = self.make_field_type_asserter()
# Inspecting Oracle DB doesn't produce correct results (#19884):
# - it reports fields as blank=True when they aren't.
if not connection.features.interprets_empty_strings_as_nulls:
assertFieldType('char_field', "models.CharField(max_length=10)")
assertFieldType('null_char_field', "models.CharField(max_length=10, blank=True, null=True)")
assertFieldType('email_field', "models.CharField(max_length=254)")
assertFieldType('file_field', "models.CharField(max_length=100)")
assertFieldType('file_path_field', "models.CharField(max_length=100)")
assertFieldType('slug_field', "models.CharField(max_length=50)")
assertFieldType('text_field', "models.TextField()")
assertFieldType('url_field', "models.CharField(max_length=200)")
assertFieldType('date_field', "models.DateField()")
assertFieldType('date_time_field', "models.DateTimeField()")
if connection.features.can_introspect_ip_address_field:
assertFieldType('gen_ip_address_field', "models.GenericIPAddressField()")
elif not connection.features.interprets_empty_strings_as_nulls:
assertFieldType('gen_ip_address_field', "models.CharField(max_length=39)")
if connection.features.can_introspect_time_field:
assertFieldType('time_field', "models.TimeField()")
if connection.features.has_native_uuid_field:
assertFieldType('uuid_field', "models.UUIDField()")
elif not connection.features.interprets_empty_strings_as_nulls:
assertFieldType('uuid_field', "models.CharField(max_length=32)")
def test_number_field_types(self):
"""Test introspection of various Django field types"""
assertFieldType = self.make_field_type_asserter()
if not connection.features.can_introspect_autofield:
assertFieldType('id', "models.IntegerField(primary_key=True) # AutoField?")
if connection.features.can_introspect_big_integer_field:
assertFieldType('big_int_field', "models.BigIntegerField()")
else:
assertFieldType('big_int_field', "models.IntegerField()")
bool_field_type = connection.features.introspected_boolean_field_type
assertFieldType('bool_field', "models.{}()".format(bool_field_type))
assertFieldType('null_bool_field', 'models.{}(blank=True, null=True)'.format(bool_field_type))
if connection.features.can_introspect_decimal_field:
assertFieldType('decimal_field', "models.DecimalField(max_digits=6, decimal_places=1)")
else: # Guessed arguments on SQLite, see #5014
assertFieldType('decimal_field', "models.DecimalField(max_digits=10, decimal_places=5) "
"# max_digits and decimal_places have been guessed, "
"as this database handles decimal fields as float")
assertFieldType('float_field', "models.FloatField()")
assertFieldType('int_field', "models.IntegerField()")
if connection.features.can_introspect_positive_integer_field:
assertFieldType('pos_int_field', "models.PositiveIntegerField()")
else:
assertFieldType('pos_int_field', "models.IntegerField()")
if connection.features.can_introspect_positive_integer_field:
if connection.features.can_introspect_small_integer_field:
assertFieldType('pos_small_int_field', "models.PositiveSmallIntegerField()")
else:
assertFieldType('pos_small_int_field', "models.PositiveIntegerField()")
else:
if connection.features.can_introspect_small_integer_field:
assertFieldType('pos_small_int_field', "models.SmallIntegerField()")
else:
assertFieldType('pos_small_int_field', "models.IntegerField()")
if connection.features.can_introspect_small_integer_field:
assertFieldType('small_int_field', "models.SmallIntegerField()")
else:
assertFieldType('small_int_field', "models.IntegerField()")
@skipUnlessDBFeature('can_introspect_foreign_keys')
def test_attribute_name_not_python_keyword(self):
out = StringIO()
call_command('inspectdb', table_name_filter=inspectdb_tables_only, stdout=out)
output = out.getvalue()
error_message = "inspectdb generated an attribute name which is a Python keyword"
# Recursive foreign keys should be set to 'self'
self.assertIn("parent = models.ForeignKey('self', models.DO_NOTHING)", output)
self.assertNotIn(
"from = models.ForeignKey(InspectdbPeople, models.DO_NOTHING)",
output,
msg=error_message,
)
# As InspectdbPeople model is defined after InspectdbMessage, it should be quoted
self.assertIn(
"from_field = models.ForeignKey('InspectdbPeople', models.DO_NOTHING, db_column='from_id')",
output,
)
self.assertIn(
"people_pk = models.ForeignKey(InspectdbPeople, models.DO_NOTHING, primary_key=True)",
output,
)
self.assertIn(
"people_unique = models.ForeignKey(InspectdbPeople, models.DO_NOTHING, unique=True)",
output,
)
def test_digits_column_name_introspection(self):
"""Introspection of column names consist/start with digits (#16536/#17676)"""
out = StringIO()
call_command('inspectdb', 'inspectdb_digitsincolumnname', stdout=out)
output = out.getvalue()
error_message = "inspectdb generated a model field name which is a number"
self.assertNotIn(" 123 = models.CharField", output, msg=error_message)
self.assertIn("number_123 = models.CharField", output)
error_message = "inspectdb generated a model field name which starts with a digit"
self.assertNotIn(" 4extra = models.CharField", output, msg=error_message)
self.assertIn("number_4extra = models.CharField", output)
self.assertNotIn(" 45extra = models.CharField", output, msg=error_message)
self.assertIn("number_45extra = models.CharField", output)
def test_special_column_name_introspection(self):
"""
Introspection of column names containing special characters,
unsuitable for Python identifiers
"""
out = StringIO()
call_command('inspectdb', table_name_filter=special_table_only, stdout=out)
output = out.getvalue()
base_name = connection.introspection.identifier_converter('Field')
self.assertIn("field = models.IntegerField()", output)
self.assertIn("field_field = models.IntegerField(db_column='%s_')" % base_name, output)
self.assertIn("field_field_0 = models.IntegerField(db_column='%s__')" % base_name, output)
self.assertIn("field_field_1 = models.IntegerField(db_column='__field')", output)
self.assertIn("prc_x = models.IntegerField(db_column='prc(%) x')", output)
self.assertIn("tamaño = models.IntegerField()", output)
def test_table_name_introspection(self):
"""
Introspection of table names containing special characters,
unsuitable for Python identifiers
"""
out = StringIO()
call_command('inspectdb', table_name_filter=special_table_only, stdout=out)
output = out.getvalue()
self.assertIn("class InspectdbSpecialTableName(models.Model):", output)
def test_managed_models(self):
"""By default the command generates models with `Meta.managed = False` (#14305)"""
out = StringIO()
call_command('inspectdb', 'inspectdb_columntypes', stdout=out)
output = out.getvalue()
self.longMessage = False
self.assertIn(" managed = False", output, msg='inspectdb should generate unmanaged models.')
def test_unique_together_meta(self):
out = StringIO()
call_command('inspectdb', 'inspectdb_uniquetogether', stdout=out)
output = out.getvalue()
self.assertIn(" unique_together = (('", output)
unique_together_match = re.findall(self.unique_re, output)
# There should be one unique_together tuple.
self.assertEqual(len(unique_together_match), 1)
fields = unique_together_match[0]
# Fields with db_column = field name.
self.assertIn("('field1', 'field2')", fields)
# Fields from columns whose names are Python keywords.
self.assertIn("('field1', 'field2')", fields)
# Fields whose names normalize to the same Python field name and hence
# are given an integer suffix.
self.assertIn("('non_unique_column', 'non_unique_column_0')", fields)
@skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific SQL')
def test_unsupported_unique_together(self):
"""Unsupported index types (COALESCE here) are skipped."""
with connection.cursor() as c:
c.execute(
'CREATE UNIQUE INDEX Findex ON %s '
'(id, people_unique_id, COALESCE(message_id, -1))' % PeopleMoreData._meta.db_table
)
try:
out = StringIO()
call_command(
'inspectdb',
table_name_filter=lambda tn: tn.startswith(PeopleMoreData._meta.db_table),
stdout=out,
)
output = out.getvalue()
self.assertIn('# A unique constraint could not be introspected.', output)
self.assertEqual(re.findall(self.unique_re, output), ["('id', 'people_unique')"])
finally:
with connection.cursor() as c:
c.execute('DROP INDEX Findex')
@skipUnless(connection.vendor == 'sqlite',
"Only patched sqlite's DatabaseIntrospection.data_types_reverse for this test")
def test_custom_fields(self):
"""
Introspection of columns with a custom field (#21090)
"""
out = StringIO()
orig_data_types_reverse = connection.introspection.data_types_reverse
try:
connection.introspection.data_types_reverse = {
'text': 'myfields.TextField',
'bigint': 'BigIntegerField',
}
call_command('inspectdb', 'inspectdb_columntypes', stdout=out)
output = out.getvalue()
self.assertIn("text_field = myfields.TextField()", output)
self.assertIn("big_int_field = models.BigIntegerField()", output)
finally:
connection.introspection.data_types_reverse = orig_data_types_reverse
def test_introspection_errors(self):
"""
Introspection errors should not crash the command, and the error should
be visible in the output.
"""
out = StringIO()
with mock.patch('django.db.connection.introspection.get_table_list',
return_value=[TableInfo(name='nonexistent', type='t')]):
call_command('inspectdb', stdout=out)
output = out.getvalue()
self.assertIn("# Unable to inspect table 'nonexistent'", output)
# The error message depends on the backend
self.assertIn("# The error was:", output)
class InspectDBTransactionalTests(TransactionTestCase):
available_apps = ['inspectdb']
def test_include_views(self):
"""inspectdb --include-views creates models for database views."""
with connection.cursor() as cursor:
cursor.execute(
'CREATE VIEW inspectdb_people_view AS '
'SELECT id, name FROM inspectdb_people'
)
out = StringIO()
view_model = 'class InspectdbPeopleView(models.Model):'
view_managed = 'managed = False # Created from a view.'
try:
call_command('inspectdb', table_name_filter=inspectdb_tables_only, stdout=out)
no_views_output = out.getvalue()
self.assertNotIn(view_model, no_views_output)
self.assertNotIn(view_managed, no_views_output)
call_command('inspectdb', table_name_filter=inspectdb_tables_only, include_views=True, stdout=out)
with_views_output = out.getvalue()
self.assertIn(view_model, with_views_output)
self.assertIn(view_managed, with_views_output)
finally:
with connection.cursor() as cursor:
cursor.execute('DROP VIEW inspectdb_people_view')
@skipUnlessDBFeature('can_introspect_materialized_views')
def test_include_materialized_views(self):
"""inspectdb --include-views creates models for materialized views."""
with connection.cursor() as cursor:
cursor.execute(
'CREATE MATERIALIZED VIEW inspectdb_people_materialized AS '
'SELECT id, name FROM inspectdb_people'
)
out = StringIO()
view_model = 'class InspectdbPeopleMaterialized(models.Model):'
view_managed = 'managed = False # Created from a view.'
try:
call_command('inspectdb', table_name_filter=inspectdb_tables_only, stdout=out)
no_views_output = out.getvalue()
self.assertNotIn(view_model, no_views_output)
self.assertNotIn(view_managed, no_views_output)
call_command('inspectdb', table_name_filter=inspectdb_tables_only, include_views=True, stdout=out)
with_views_output = out.getvalue()
self.assertIn(view_model, with_views_output)
self.assertIn(view_managed, with_views_output)
finally:
with connection.cursor() as cursor:
cursor.execute('DROP MATERIALIZED VIEW inspectdb_people_materialized')
@skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific SQL')
@skipUnlessDBFeature('supports_table_partitions')
def test_include_partitions(self):
"""inspectdb --include-partitions creates models for partitions."""
with connection.cursor() as cursor:
cursor.execute('''\
CREATE TABLE inspectdb_partition_parent (name text not null)
PARTITION BY LIST (left(upper(name), 1))
''')
cursor.execute('''\
CREATE TABLE inspectdb_partition_child
PARTITION OF inspectdb_partition_parent
FOR VALUES IN ('A', 'B', 'C')
''')
out = StringIO()
partition_model_parent = 'class InspectdbPartitionParent(models.Model):'
partition_model_child = 'class InspectdbPartitionChild(models.Model):'
partition_managed = 'managed = False # Created from a partition.'
try:
call_command('inspectdb', table_name_filter=inspectdb_tables_only, stdout=out)
no_partitions_output = out.getvalue()
self.assertIn(partition_model_parent, no_partitions_output)
self.assertNotIn(partition_model_child, no_partitions_output)
self.assertNotIn(partition_managed, no_partitions_output)
call_command('inspectdb', table_name_filter=inspectdb_tables_only, include_partitions=True, stdout=out)
with_partitions_output = out.getvalue()
self.assertIn(partition_model_parent, with_partitions_output)
self.assertIn(partition_model_child, with_partitions_output)
self.assertIn(partition_managed, with_partitions_output)
finally:
with connection.cursor() as cursor:
cursor.execute('DROP TABLE IF EXISTS inspectdb_partition_child')
cursor.execute('DROP TABLE IF EXISTS inspectdb_partition_parent')
@skipUnless(connection.vendor == 'postgresql', 'PostgreSQL specific SQL')
def test_foreign_data_wrapper(self):
with connection.cursor() as cursor:
cursor.execute('CREATE EXTENSION IF NOT EXISTS file_fdw')
cursor.execute('CREATE SERVER inspectdb_server FOREIGN DATA WRAPPER file_fdw')
cursor.execute('''\
CREATE FOREIGN TABLE inspectdb_iris_foreign_table (
petal_length real,
petal_width real,
sepal_length real,
sepal_width real
) SERVER inspectdb_server OPTIONS (
filename '/dev/null'
)
''')
out = StringIO()
foreign_table_model = 'class InspectdbIrisForeignTable(models.Model):'
foreign_table_managed = 'managed = False'
try:
call_command('inspectdb', stdout=out)
output = out.getvalue()
self.assertIn(foreign_table_model, output)
self.assertIn(foreign_table_managed, output)
finally:
with connection.cursor() as cursor:
cursor.execute('DROP FOREIGN TABLE IF EXISTS inspectdb_iris_foreign_table')
cursor.execute('DROP SERVER IF EXISTS inspectdb_server')
cursor.execute('DROP EXTENSION IF EXISTS file_fdw')
|
aa35dc58b0dd31fe7cd1c5efdc9735db62e25e25db32c6f98fcecfc9dde43b20 | import time
from django.core.exceptions import ImproperlyConfigured
from django.http import HttpResponse
from django.test import RequestFactory, SimpleTestCase, override_settings
from django.test.utils import require_jinja2
from django.urls import resolve
from django.views.generic import RedirectView, TemplateView, View
from . import views
class SimpleView(View):
"""
A simple view with a docstring.
"""
def get(self, request):
return HttpResponse('This is a simple view')
class SimplePostView(SimpleView):
post = SimpleView.get
class PostOnlyView(View):
def post(self, request):
return HttpResponse('This view only accepts POST')
class CustomizableView(SimpleView):
parameter = {}
def decorator(view):
view.is_decorated = True
return view
class DecoratedDispatchView(SimpleView):
@decorator
def dispatch(self, request, *args, **kwargs):
return super().dispatch(request, *args, **kwargs)
class AboutTemplateView(TemplateView):
def get(self, request):
return self.render_to_response({})
def get_template_names(self):
return ['generic_views/about.html']
class AboutTemplateAttributeView(TemplateView):
template_name = 'generic_views/about.html'
def get(self, request):
return self.render_to_response(context={})
class InstanceView(View):
def get(self, request):
return self
class ViewTest(SimpleTestCase):
rf = RequestFactory()
def _assert_simple(self, response):
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'This is a simple view')
def test_no_init_kwargs(self):
"""
A view can't be accidentally instantiated before deployment
"""
msg = 'This method is available only on the class, not on instances.'
with self.assertRaisesMessage(AttributeError, msg):
SimpleView(key='value').as_view()
def test_no_init_args(self):
"""
A view can't be accidentally instantiated before deployment
"""
msg = 'as_view() takes 1 positional argument but 2 were given'
with self.assertRaisesMessage(TypeError, msg):
SimpleView.as_view('value')
def test_pathological_http_method(self):
"""
        The edge case of an HTTP request that spoofs an existing method name is caught.
"""
self.assertEqual(SimpleView.as_view()(
self.rf.get('/', REQUEST_METHOD='DISPATCH')
).status_code, 405)
def test_get_only(self):
"""
        Test that a view which only allows GET doesn't allow other methods.
"""
self._assert_simple(SimpleView.as_view()(self.rf.get('/')))
self.assertEqual(SimpleView.as_view()(self.rf.post('/')).status_code, 405)
self.assertEqual(SimpleView.as_view()(
self.rf.get('/', REQUEST_METHOD='FAKE')
).status_code, 405)
def test_get_and_head(self):
"""
        Test that a view which supplies a GET method also responds correctly to HEAD.
"""
self._assert_simple(SimpleView.as_view()(self.rf.get('/')))
response = SimpleView.as_view()(self.rf.head('/'))
self.assertEqual(response.status_code, 200)
def test_head_no_get(self):
"""
        Test that a view which supplies no GET method responds to HEAD with HTTP 405.
"""
response = PostOnlyView.as_view()(self.rf.head('/'))
self.assertEqual(response.status_code, 405)
def test_get_and_post(self):
"""
        Test that a view which allows both GET and POST rejects other methods.
"""
self._assert_simple(SimplePostView.as_view()(self.rf.get('/')))
self._assert_simple(SimplePostView.as_view()(self.rf.post('/')))
self.assertEqual(SimplePostView.as_view()(
self.rf.get('/', REQUEST_METHOD='FAKE')
).status_code, 405)
def test_invalid_keyword_argument(self):
"""
View arguments must be predefined on the class and can't
        be named like an HTTP method.
"""
msg = (
"You tried to pass in the %s method name as a keyword argument "
"to SimpleView(). Don't do that."
)
# Check each of the allowed method names
for method in SimpleView.http_method_names:
with self.assertRaisesMessage(TypeError, msg % method):
SimpleView.as_view(**{method: 'value'})
# Check the case view argument is ok if predefined on the class...
CustomizableView.as_view(parameter="value")
# ...but raises errors otherwise.
msg = (
"CustomizableView() received an invalid keyword 'foobar'. "
"as_view only accepts arguments that are already attributes of "
"the class."
)
with self.assertRaisesMessage(TypeError, msg):
CustomizableView.as_view(foobar="value")
def test_calling_more_than_once(self):
"""
Test a view can only be called once.
"""
request = self.rf.get('/')
view = InstanceView.as_view()
self.assertNotEqual(view(request), view(request))
def test_class_attributes(self):
"""
The callable returned from as_view() has proper
docstring, name and module.
"""
self.assertEqual(SimpleView.__doc__, SimpleView.as_view().__doc__)
self.assertEqual(SimpleView.__name__, SimpleView.as_view().__name__)
self.assertEqual(SimpleView.__module__, SimpleView.as_view().__module__)
def test_dispatch_decoration(self):
"""
Attributes set by decorators on the dispatch method
are also present on the closure.
"""
self.assertTrue(DecoratedDispatchView.as_view().is_decorated)
def test_options(self):
"""
Views respond to HTTP OPTIONS requests with an Allow header
appropriate for the methods implemented by the view class.
"""
request = self.rf.options('/')
view = SimpleView.as_view()
response = view(request)
self.assertEqual(200, response.status_code)
self.assertTrue(response['Allow'])
def test_options_for_get_view(self):
"""
A view implementing GET allows GET and HEAD.
"""
request = self.rf.options('/')
view = SimpleView.as_view()
response = view(request)
self._assert_allows(response, 'GET', 'HEAD')
def test_options_for_get_and_post_view(self):
"""
A view implementing GET and POST allows GET, HEAD, and POST.
"""
request = self.rf.options('/')
view = SimplePostView.as_view()
response = view(request)
self._assert_allows(response, 'GET', 'HEAD', 'POST')
def test_options_for_post_view(self):
"""
A view implementing POST allows POST.
"""
request = self.rf.options('/')
view = PostOnlyView.as_view()
response = view(request)
self._assert_allows(response, 'POST')
def _assert_allows(self, response, *expected_methods):
"Assert allowed HTTP methods reported in the Allow response header"
response_allows = set(response['Allow'].split(', '))
self.assertEqual(set(expected_methods + ('OPTIONS',)), response_allows)
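    # For example, a GET-only view is expected to report "Allow: GET, HEAD,
    # OPTIONS"; the comparison is done on sets, so header ordering is irrelevant.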
def test_args_kwargs_request_on_self(self):
"""
Test a view only has args, kwargs & request once `as_view`
has been called.
"""
bare_view = InstanceView()
view = InstanceView.as_view()(self.rf.get('/'))
for attribute in ('args', 'kwargs', 'request'):
self.assertNotIn(attribute, dir(bare_view))
self.assertIn(attribute, dir(view))
def test_overridden_setup(self):
class SetAttributeMixin:
def setup(self, request, *args, **kwargs):
self.attr = True
super().setup(request, *args, **kwargs)
class CheckSetupView(SetAttributeMixin, SimpleView):
def dispatch(self, request, *args, **kwargs):
assert hasattr(self, 'attr')
return super().dispatch(request, *args, **kwargs)
response = CheckSetupView.as_view()(self.rf.get('/'))
self.assertEqual(response.status_code, 200)
def test_not_calling_parent_setup_error(self):
class TestView(View):
def setup(self, request, *args, **kwargs):
                pass  # Not calling super().setup()
msg = (
"TestView instance has no 'request' attribute. Did you override "
"setup() and forget to call super()?"
)
with self.assertRaisesMessage(AttributeError, msg):
TestView.as_view()(self.rf.get('/'))
def test_direct_instantiation(self):
"""
It should be possible to use the view by directly instantiating it
without going through .as_view() (#21564).
"""
view = PostOnlyView()
response = view.dispatch(self.rf.head('/'))
self.assertEqual(response.status_code, 405)
@override_settings(ROOT_URLCONF='generic_views.urls')
class TemplateViewTest(SimpleTestCase):
rf = RequestFactory()
def _assert_about(self, response):
response.render()
self.assertContains(response, '<h1>About</h1>')
def test_get(self):
"""
Test a view that simply renders a template on GET
"""
self._assert_about(AboutTemplateView.as_view()(self.rf.get('/about/')))
def test_head(self):
"""
Test a TemplateView responds correctly to HEAD
"""
response = AboutTemplateView.as_view()(self.rf.head('/about/'))
self.assertEqual(response.status_code, 200)
def test_get_template_attribute(self):
"""
Test a view that renders a template on GET with the template name as
an attribute on the class.
"""
self._assert_about(AboutTemplateAttributeView.as_view()(self.rf.get('/about/')))
def test_get_generic_template(self):
"""
Test a completely generic view that renders a template on GET
with the template name as an argument at instantiation.
"""
self._assert_about(TemplateView.as_view(template_name='generic_views/about.html')(self.rf.get('/about/')))
def test_template_name_required(self):
"""
A template view must provide a template name.
"""
msg = (
"TemplateResponseMixin requires either a definition of "
"'template_name' or an implementation of 'get_template_names()'"
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
self.client.get('/template/no_template/')
@require_jinja2
def test_template_engine(self):
"""
A template view may provide a template engine.
"""
request = self.rf.get('/using/')
view = TemplateView.as_view(template_name='generic_views/using.html')
self.assertEqual(view(request).render().content, b'DTL\n')
view = TemplateView.as_view(template_name='generic_views/using.html', template_engine='django')
self.assertEqual(view(request).render().content, b'DTL\n')
view = TemplateView.as_view(template_name='generic_views/using.html', template_engine='jinja2')
self.assertEqual(view(request).render().content, b'Jinja2\n')
def test_template_params(self):
"""
A generic template view passes kwargs as context.
"""
response = self.client.get('/template/simple/bar/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['foo'], 'bar')
self.assertIsInstance(response.context['view'], View)
def test_extra_template_params(self):
"""
A template view can be customized to return extra context.
"""
response = self.client.get('/template/custom/bar/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['foo'], 'bar')
self.assertEqual(response.context['key'], 'value')
self.assertIsInstance(response.context['view'], View)
def test_cached_views(self):
"""
A template view can be cached
"""
response = self.client.get('/template/cached/bar/')
self.assertEqual(response.status_code, 200)
time.sleep(1.0)
response2 = self.client.get('/template/cached/bar/')
self.assertEqual(response2.status_code, 200)
self.assertEqual(response.content, response2.content)
time.sleep(2.0)
# Let the cache expire and test again
response2 = self.client.get('/template/cached/bar/')
self.assertEqual(response2.status_code, 200)
self.assertNotEqual(response.content, response2.content)
def test_content_type(self):
response = self.client.get('/template/content_type/')
self.assertEqual(response['Content-Type'], 'text/plain')
def test_resolve_view(self):
match = resolve('/template/content_type/')
self.assertIs(match.func.view_class, TemplateView)
self.assertEqual(match.func.view_initkwargs['content_type'], 'text/plain')
def test_resolve_login_required_view(self):
match = resolve('/template/login_required/')
self.assertIs(match.func.view_class, TemplateView)
def test_extra_context(self):
response = self.client.get('/template/extra_context/')
self.assertEqual(response.context['title'], 'Title')
@override_settings(ROOT_URLCONF='generic_views.urls')
class RedirectViewTest(SimpleTestCase):
rf = RequestFactory()
def test_no_url(self):
"Without any configuration, returns HTTP 410 GONE"
response = RedirectView.as_view()(self.rf.get('/foo/'))
self.assertEqual(response.status_code, 410)
def test_default_redirect(self):
"Default is a temporary redirect"
response = RedirectView.as_view(url='/bar/')(self.rf.get('/foo/'))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, '/bar/')
def test_permanent_redirect(self):
"Permanent redirects are an option"
response = RedirectView.as_view(url='/bar/', permanent=True)(self.rf.get('/foo/'))
self.assertEqual(response.status_code, 301)
self.assertEqual(response.url, '/bar/')
def test_temporary_redirect(self):
"Temporary redirects are an option"
response = RedirectView.as_view(url='/bar/', permanent=False)(self.rf.get('/foo/'))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, '/bar/')
def test_include_args(self):
"GET arguments can be included in the redirected URL"
response = RedirectView.as_view(url='/bar/')(self.rf.get('/foo/'))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, '/bar/')
response = RedirectView.as_view(url='/bar/', query_string=True)(self.rf.get('/foo/?pork=spam'))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, '/bar/?pork=spam')
def test_include_urlencoded_args(self):
"GET arguments can be URL-encoded when included in the redirected URL"
response = RedirectView.as_view(url='/bar/', query_string=True)(
self.rf.get('/foo/?unicode=%E2%9C%93'))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, '/bar/?unicode=%E2%9C%93')
def test_parameter_substitution(self):
"Redirection URLs can be parameterized"
response = RedirectView.as_view(url='/bar/%(object_id)d/')(self.rf.get('/foo/42/'), object_id=42)
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, '/bar/42/')
def test_named_url_pattern(self):
"Named pattern parameter should reverse to the matching pattern"
response = RedirectView.as_view(pattern_name='artist_detail')(self.rf.get('/foo/'), pk=1)
self.assertEqual(response.status_code, 302)
self.assertEqual(response['Location'], '/detail/artist/1/')
def test_named_url_pattern_using_args(self):
response = RedirectView.as_view(pattern_name='artist_detail')(self.rf.get('/foo/'), 1)
self.assertEqual(response.status_code, 302)
self.assertEqual(response['Location'], '/detail/artist/1/')
def test_redirect_POST(self):
"Default is a temporary redirect"
response = RedirectView.as_view(url='/bar/')(self.rf.post('/foo/'))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, '/bar/')
def test_redirect_HEAD(self):
"Default is a temporary redirect"
response = RedirectView.as_view(url='/bar/')(self.rf.head('/foo/'))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, '/bar/')
def test_redirect_OPTIONS(self):
"Default is a temporary redirect"
response = RedirectView.as_view(url='/bar/')(self.rf.options('/foo/'))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, '/bar/')
def test_redirect_PUT(self):
"Default is a temporary redirect"
response = RedirectView.as_view(url='/bar/')(self.rf.put('/foo/'))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, '/bar/')
def test_redirect_PATCH(self):
"Default is a temporary redirect"
response = RedirectView.as_view(url='/bar/')(self.rf.patch('/foo/'))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, '/bar/')
def test_redirect_DELETE(self):
"Default is a temporary redirect"
response = RedirectView.as_view(url='/bar/')(self.rf.delete('/foo/'))
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, '/bar/')
def test_redirect_when_meta_contains_no_query_string(self):
"regression for #16705"
# we can't use self.rf.get because it always sets QUERY_STRING
response = RedirectView.as_view(url='/bar/')(self.rf.request(PATH_INFO='/foo/'))
self.assertEqual(response.status_code, 302)
def test_direct_instantiation(self):
"""
It should be possible to use the view without going through .as_view()
(#21564).
"""
view = RedirectView()
response = view.dispatch(self.rf.head('/foo/'))
self.assertEqual(response.status_code, 410)
class GetContextDataTest(SimpleTestCase):
def test_get_context_data_super(self):
test_view = views.CustomContextView()
context = test_view.get_context_data(kwarg_test='kwarg_value')
        # the test_name key is inserted by the test class's parent
self.assertIn('test_name', context)
self.assertEqual(context['kwarg_test'], 'kwarg_value')
self.assertEqual(context['custom_key'], 'custom_value')
# test that kwarg overrides values assigned higher up
context = test_view.get_context_data(test_name='test_value')
self.assertEqual(context['test_name'], 'test_value')
def test_object_at_custom_name_in_context_data(self):
        # Checks 'pony' key presence in dict returned by get_context_data()
test_view = views.CustomSingleObjectView()
test_view.context_object_name = 'pony'
context = test_view.get_context_data()
self.assertEqual(context['pony'], test_view.object)
def test_object_in_get_context_data(self):
        # Checks 'object' key presence in dict returned by get_context_data() (#20234)
test_view = views.CustomSingleObjectView()
context = test_view.get_context_data()
self.assertEqual(context['object'], test_view.object)
class UseMultipleObjectMixinTest(SimpleTestCase):
rf = RequestFactory()
def test_use_queryset_from_view(self):
test_view = views.CustomMultipleObjectMixinView()
test_view.get(self.rf.get('/'))
# Don't pass queryset as argument
context = test_view.get_context_data()
self.assertEqual(context['object_list'], test_view.queryset)
def test_overwrite_queryset(self):
test_view = views.CustomMultipleObjectMixinView()
test_view.get(self.rf.get('/'))
queryset = [{'name': 'Lennon'}, {'name': 'Ono'}]
self.assertNotEqual(test_view.queryset, queryset)
# Overwrite the view's queryset with queryset from kwarg
context = test_view.get_context_data(object_list=queryset)
self.assertEqual(context['object_list'], queryset)
class SingleObjectTemplateResponseMixinTest(SimpleTestCase):
def test_template_mixin_without_template(self):
"""
        We want to make sure that if you use a template mixin, but forget the
template, it still tells you it's ImproperlyConfigured instead of
TemplateDoesNotExist.
"""
view = views.TemplateResponseWithoutTemplate()
msg = (
"TemplateResponseMixin requires either a definition of "
"'template_name' or an implementation of 'get_template_names()'"
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
view.get_template_names()
|
a21d561ba3f618f7a6c7216c20e99d33f16616b1bf4116f54c3bac6914275eb4 | from django.http import HttpResponse
from django.test import RequestFactory, SimpleTestCase
from django.test.utils import override_settings
class SecurityMiddlewareTest(SimpleTestCase):
@property
def middleware(self):
from django.middleware.security import SecurityMiddleware
return SecurityMiddleware()
@property
def secure_request_kwargs(self):
return {"wsgi.url_scheme": "https"}
def response(self, *args, headers=None, **kwargs):
response = HttpResponse(*args, **kwargs)
if headers:
for k, v in headers.items():
response[k] = v
return response
def process_response(self, *args, secure=False, request=None, **kwargs):
request_kwargs = {}
if secure:
request_kwargs.update(self.secure_request_kwargs)
if request is None:
request = self.request.get("/some/url", **request_kwargs)
ret = self.middleware.process_request(request)
if ret:
return ret
return self.middleware.process_response(
request, self.response(*args, **kwargs))
request = RequestFactory()
def process_request(self, method, *args, secure=False, **kwargs):
if secure:
kwargs.update(self.secure_request_kwargs)
req = getattr(self.request, method.lower())(*args, **kwargs)
return self.middleware.process_request(req)
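    # Both helpers simulate HTTPS by merging {"wsgi.url_scheme": "https"} into
    # the RequestFactory kwargs when secure=True, so request.is_secure() is True
    # and the middleware treats the request as already served over TLS.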
@override_settings(SECURE_HSTS_SECONDS=3600)
def test_sts_on(self):
"""
With HSTS_SECONDS=3600, the middleware adds
"Strict-Transport-Security: max-age=3600" to the response.
"""
self.assertEqual(
self.process_response(secure=True)["Strict-Transport-Security"],
'max-age=3600',
)
@override_settings(SECURE_HSTS_SECONDS=3600)
def test_sts_already_present(self):
"""
The middleware will not override a "Strict-Transport-Security" header
already present in the response.
"""
response = self.process_response(
secure=True,
headers={"Strict-Transport-Security": "max-age=7200"})
self.assertEqual(response["Strict-Transport-Security"], "max-age=7200")
    @override_settings(SECURE_HSTS_SECONDS=3600)
def test_sts_only_if_secure(self):
"""
The "Strict-Transport-Security" header is not added to responses going
over an insecure connection.
"""
self.assertNotIn("Strict-Transport-Security", self.process_response(secure=False))
    @override_settings(SECURE_HSTS_SECONDS=0)
def test_sts_off(self):
"""
With HSTS_SECONDS of 0, the middleware does not add a
"Strict-Transport-Security" header to the response.
"""
self.assertNotIn("Strict-Transport-Security", self.process_response(secure=True))
@override_settings(
SECURE_HSTS_SECONDS=600, SECURE_HSTS_INCLUDE_SUBDOMAINS=True)
def test_sts_include_subdomains(self):
"""
With HSTS_SECONDS non-zero and HSTS_INCLUDE_SUBDOMAINS
True, the middleware adds a "Strict-Transport-Security" header with the
"includeSubDomains" directive to the response.
"""
response = self.process_response(secure=True)
self.assertEqual(response["Strict-Transport-Security"], "max-age=600; includeSubDomains")
@override_settings(
SECURE_HSTS_SECONDS=600, SECURE_HSTS_INCLUDE_SUBDOMAINS=False)
def test_sts_no_include_subdomains(self):
"""
With HSTS_SECONDS non-zero and HSTS_INCLUDE_SUBDOMAINS
False, the middleware adds a "Strict-Transport-Security" header without
the "includeSubDomains" directive to the response.
"""
response = self.process_response(secure=True)
self.assertEqual(response["Strict-Transport-Security"], "max-age=600")
@override_settings(SECURE_HSTS_SECONDS=10886400, SECURE_HSTS_PRELOAD=True)
def test_sts_preload(self):
"""
With HSTS_SECONDS non-zero and SECURE_HSTS_PRELOAD True, the middleware
adds a "Strict-Transport-Security" header with the "preload" directive
to the response.
"""
response = self.process_response(secure=True)
self.assertEqual(response["Strict-Transport-Security"], "max-age=10886400; preload")
@override_settings(SECURE_HSTS_SECONDS=10886400, SECURE_HSTS_INCLUDE_SUBDOMAINS=True, SECURE_HSTS_PRELOAD=True)
def test_sts_subdomains_and_preload(self):
"""
With HSTS_SECONDS non-zero, SECURE_HSTS_INCLUDE_SUBDOMAINS and
SECURE_HSTS_PRELOAD True, the middleware adds a "Strict-Transport-Security"
header containing both the "includeSubDomains" and "preload" directives
to the response.
"""
response = self.process_response(secure=True)
self.assertEqual(response["Strict-Transport-Security"], "max-age=10886400; includeSubDomains; preload")
@override_settings(SECURE_HSTS_SECONDS=10886400, SECURE_HSTS_PRELOAD=False)
def test_sts_no_preload(self):
"""
With HSTS_SECONDS non-zero and SECURE_HSTS_PRELOAD
False, the middleware adds a "Strict-Transport-Security" header without
the "preload" directive to the response.
"""
response = self.process_response(secure=True)
self.assertEqual(response["Strict-Transport-Security"], "max-age=10886400")
@override_settings(SECURE_CONTENT_TYPE_NOSNIFF=True)
def test_content_type_on(self):
"""
With CONTENT_TYPE_NOSNIFF set to True, the middleware adds the
"X-Content-Type-Options: nosniff" header to the response.
"""
self.assertEqual(self.process_response()["X-Content-Type-Options"], "nosniff")
@override_settings(SECURE_CONTENT_TYPE_NOSNIFF=True)
def test_content_type_already_present(self):
"""
The middleware will not override an "X-Content-Type-Options" header
already present in the response.
"""
response = self.process_response(secure=True, headers={"X-Content-Type-Options": "foo"})
self.assertEqual(response["X-Content-Type-Options"], "foo")
@override_settings(SECURE_CONTENT_TYPE_NOSNIFF=False)
def test_content_type_off(self):
"""
With CONTENT_TYPE_NOSNIFF False, the middleware does not add an
"X-Content-Type-Options" header to the response.
"""
self.assertNotIn("X-Content-Type-Options", self.process_response())
@override_settings(SECURE_BROWSER_XSS_FILTER=True)
def test_xss_filter_on(self):
"""
With BROWSER_XSS_FILTER set to True, the middleware adds the
"X-XSS-Protection: 1; mode=block" header to the response.
"""
self.assertEqual(
self.process_response()["X-XSS-Protection"],
"1; mode=block")
@override_settings(SECURE_BROWSER_XSS_FILTER=True)
def test_xss_filter_already_present(self):
"""
The middleware will not override an "X-XSS-Protection" header
already present in the response.
"""
response = self.process_response(secure=True, headers={"X-XSS-Protection": "foo"})
self.assertEqual(response["X-XSS-Protection"], "foo")
@override_settings(SECURE_BROWSER_XSS_FILTER=False)
def test_xss_filter_off(self):
"""
With BROWSER_XSS_FILTER set to False, the middleware does not add an
"X-XSS-Protection" header to the response.
"""
self.assertNotIn("X-XSS-Protection", self.process_response())
@override_settings(SECURE_SSL_REDIRECT=True)
def test_ssl_redirect_on(self):
"""
With SSL_REDIRECT True, the middleware redirects any non-secure
requests to the https:// version of the same URL.
"""
ret = self.process_request("get", "/some/url?query=string")
self.assertEqual(ret.status_code, 301)
self.assertEqual(
ret["Location"], "https://testserver/some/url?query=string")
@override_settings(SECURE_SSL_REDIRECT=True)
def test_no_redirect_ssl(self):
"""
The middleware does not redirect secure requests.
"""
ret = self.process_request("get", "/some/url", secure=True)
self.assertIsNone(ret)
@override_settings(
SECURE_SSL_REDIRECT=True, SECURE_REDIRECT_EXEMPT=["^insecure/"])
def test_redirect_exempt(self):
"""
The middleware does not redirect requests with URL path matching an
exempt pattern.
"""
ret = self.process_request("get", "/insecure/page")
self.assertIsNone(ret)
@override_settings(
SECURE_SSL_REDIRECT=True, SECURE_SSL_HOST="secure.example.com")
def test_redirect_ssl_host(self):
"""
The middleware redirects to SSL_HOST if given.
"""
ret = self.process_request("get", "/some/url")
self.assertEqual(ret.status_code, 301)
self.assertEqual(ret["Location"], "https://secure.example.com/some/url")
@override_settings(SECURE_SSL_REDIRECT=False)
def test_ssl_redirect_off(self):
"""
With SSL_REDIRECT False, the middleware does not redirect.
"""
ret = self.process_request("get", "/some/url")
self.assertIsNone(ret)
|
9aa29ff1c764480ef073d6ed8e9a4c5fc758773c617884ae993c47e1b3a092c1 | from django.contrib import admin
from django.contrib.admin.sites import AdminSite
from django.contrib.auth.models import User
from django.contrib.contenttypes.admin import GenericTabularInline
from django.contrib.contenttypes.models import ContentType
from django.forms.formsets import DEFAULT_MAX_NUM
from django.forms.models import ModelForm
from django.test import (
RequestFactory, SimpleTestCase, TestCase, override_settings,
)
from django.urls import reverse
from .admin import MediaInline, MediaPermanentInline, site as admin_site
from .models import Category, Episode, EpisodePermanent, Media, PhoneNumber
class TestDataMixin:
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', password='secret', email='[email protected]')
@override_settings(ROOT_URLCONF='generic_inline_admin.urls')
class GenericAdminViewTest(TestDataMixin, TestCase):
def setUp(self):
self.client.force_login(self.superuser)
e = Episode.objects.create(name='This Week in Django')
self.episode_pk = e.pk
m = Media(content_object=e, url='http://example.com/podcast.mp3')
m.save()
self.mp3_media_pk = m.pk
m = Media(content_object=e, url='http://example.com/logo.png')
m.save()
self.png_media_pk = m.pk
def test_basic_add_GET(self):
"""
A smoke test to ensure GET on the add_view works.
"""
response = self.client.get(reverse('admin:generic_inline_admin_episode_add'))
self.assertEqual(response.status_code, 200)
def test_basic_edit_GET(self):
"""
A smoke test to ensure GET on the change_view works.
"""
response = self.client.get(
reverse('admin:generic_inline_admin_episode_change', args=(self.episode_pk,))
)
self.assertEqual(response.status_code, 200)
def test_basic_add_POST(self):
"""
A smoke test to ensure POST on add_view works.
"""
post_data = {
"name": "This Week in Django",
# inline data
"generic_inline_admin-media-content_type-object_id-TOTAL_FORMS": "1",
"generic_inline_admin-media-content_type-object_id-INITIAL_FORMS": "0",
"generic_inline_admin-media-content_type-object_id-MAX_NUM_FORMS": "0",
}
response = self.client.post(reverse('admin:generic_inline_admin_episode_add'), post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_basic_edit_POST(self):
"""
A smoke test to ensure POST on edit_view works.
"""
post_data = {
"name": "This Week in Django",
# inline data
"generic_inline_admin-media-content_type-object_id-TOTAL_FORMS": "3",
"generic_inline_admin-media-content_type-object_id-INITIAL_FORMS": "2",
"generic_inline_admin-media-content_type-object_id-MAX_NUM_FORMS": "0",
"generic_inline_admin-media-content_type-object_id-0-id": "%d" % self.mp3_media_pk,
"generic_inline_admin-media-content_type-object_id-0-url": "http://example.com/podcast.mp3",
"generic_inline_admin-media-content_type-object_id-1-id": "%d" % self.png_media_pk,
"generic_inline_admin-media-content_type-object_id-1-url": "http://example.com/logo.png",
"generic_inline_admin-media-content_type-object_id-2-id": "",
"generic_inline_admin-media-content_type-object_id-2-url": "",
}
url = reverse('admin:generic_inline_admin_episode_change', args=(self.episode_pk,))
response = self.client.post(url, post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
@override_settings(ROOT_URLCONF='generic_inline_admin.urls')
class GenericInlineAdminParametersTest(TestDataMixin, TestCase):
factory = RequestFactory()
def setUp(self):
self.client.force_login(self.superuser)
def _create_object(self, model):
"""
Create a model with an attached Media object via GFK. We can't
load content via a fixture (since the GenericForeignKey relies on
content type IDs, which will vary depending on what other tests
have been run), thus we do it here.
"""
e = model.objects.create(name='This Week in Django')
Media.objects.create(content_object=e, url='http://example.com/podcast.mp3')
return e
def test_no_param(self):
"""
With one initial form, extra (default) at 3, there should be 4 forms.
"""
e = self._create_object(Episode)
response = self.client.get(reverse('admin:generic_inline_admin_episode_change', args=(e.pk,)))
formset = response.context['inline_admin_formsets'][0].formset
self.assertEqual(formset.total_form_count(), 4)
self.assertEqual(formset.initial_form_count(), 1)
def test_extra_param(self):
"""
With extra=0, there should be one form.
"""
class ExtraInline(GenericTabularInline):
model = Media
extra = 0
modeladmin = admin.ModelAdmin(Episode, admin_site)
modeladmin.inlines = [ExtraInline]
e = self._create_object(Episode)
request = self.factory.get(reverse('admin:generic_inline_admin_episode_change', args=(e.pk,)))
request.user = User(username='super', is_superuser=True)
response = modeladmin.changeform_view(request, object_id=str(e.pk))
formset = response.context_data['inline_admin_formsets'][0].formset
self.assertEqual(formset.total_form_count(), 1)
self.assertEqual(formset.initial_form_count(), 1)
def test_max_num_param(self):
"""
With extra=5 and max_num=2, there should be only 2 forms.
"""
class MaxNumInline(GenericTabularInline):
model = Media
extra = 5
max_num = 2
modeladmin = admin.ModelAdmin(Episode, admin_site)
modeladmin.inlines = [MaxNumInline]
e = self._create_object(Episode)
request = self.factory.get(reverse('admin:generic_inline_admin_episode_change', args=(e.pk,)))
request.user = User(username='super', is_superuser=True)
response = modeladmin.changeform_view(request, object_id=str(e.pk))
formset = response.context_data['inline_admin_formsets'][0].formset
self.assertEqual(formset.total_form_count(), 2)
self.assertEqual(formset.initial_form_count(), 1)
def test_min_num_param(self):
"""
With extra=3 and min_num=2, there should be five forms.
"""
class MinNumInline(GenericTabularInline):
model = Media
extra = 3
min_num = 2
modeladmin = admin.ModelAdmin(Episode, admin_site)
modeladmin.inlines = [MinNumInline]
e = self._create_object(Episode)
request = self.factory.get(reverse('admin:generic_inline_admin_episode_change', args=(e.pk,)))
request.user = User(username='super', is_superuser=True)
response = modeladmin.changeform_view(request, object_id=str(e.pk))
formset = response.context_data['inline_admin_formsets'][0].formset
self.assertEqual(formset.total_form_count(), 5)
self.assertEqual(formset.initial_form_count(), 1)
def test_get_extra(self):
class GetExtraInline(GenericTabularInline):
model = Media
extra = 4
def get_extra(self, request, obj):
return 2
modeladmin = admin.ModelAdmin(Episode, admin_site)
modeladmin.inlines = [GetExtraInline]
e = self._create_object(Episode)
request = self.factory.get(reverse('admin:generic_inline_admin_episode_change', args=(e.pk,)))
request.user = User(username='super', is_superuser=True)
response = modeladmin.changeform_view(request, object_id=str(e.pk))
formset = response.context_data['inline_admin_formsets'][0].formset
self.assertEqual(formset.extra, 2)
def test_get_min_num(self):
class GetMinNumInline(GenericTabularInline):
model = Media
min_num = 5
def get_min_num(self, request, obj):
return 2
modeladmin = admin.ModelAdmin(Episode, admin_site)
modeladmin.inlines = [GetMinNumInline]
e = self._create_object(Episode)
request = self.factory.get(reverse('admin:generic_inline_admin_episode_change', args=(e.pk,)))
request.user = User(username='super', is_superuser=True)
response = modeladmin.changeform_view(request, object_id=str(e.pk))
formset = response.context_data['inline_admin_formsets'][0].formset
self.assertEqual(formset.min_num, 2)
def test_get_max_num(self):
class GetMaxNumInline(GenericTabularInline):
model = Media
extra = 5
def get_max_num(self, request, obj):
return 2
modeladmin = admin.ModelAdmin(Episode, admin_site)
modeladmin.inlines = [GetMaxNumInline]
e = self._create_object(Episode)
request = self.factory.get(reverse('admin:generic_inline_admin_episode_change', args=(e.pk,)))
request.user = User(username='super', is_superuser=True)
response = modeladmin.changeform_view(request, object_id=str(e.pk))
formset = response.context_data['inline_admin_formsets'][0].formset
self.assertEqual(formset.max_num, 2)
@override_settings(ROOT_URLCONF='generic_inline_admin.urls')
class GenericInlineAdminWithUniqueTogetherTest(TestDataMixin, TestCase):
def setUp(self):
self.client.force_login(self.superuser)
def test_add(self):
category_id = Category.objects.create(name='male').pk
post_data = {
"name": "John Doe",
# inline data
"generic_inline_admin-phonenumber-content_type-object_id-TOTAL_FORMS": "1",
"generic_inline_admin-phonenumber-content_type-object_id-INITIAL_FORMS": "0",
"generic_inline_admin-phonenumber-content_type-object_id-MAX_NUM_FORMS": "0",
"generic_inline_admin-phonenumber-content_type-object_id-0-id": "",
"generic_inline_admin-phonenumber-content_type-object_id-0-phone_number": "555-555-5555",
"generic_inline_admin-phonenumber-content_type-object_id-0-category": "%s" % category_id,
}
response = self.client.get(reverse('admin:generic_inline_admin_contact_add'))
self.assertEqual(response.status_code, 200)
response = self.client.post(reverse('admin:generic_inline_admin_contact_add'), post_data)
self.assertEqual(response.status_code, 302) # redirect somewhere
def test_delete(self):
from .models import Contact
c = Contact.objects.create(name='foo')
PhoneNumber.objects.create(
object_id=c.id,
content_type=ContentType.objects.get_for_model(Contact),
phone_number="555-555-5555",
)
response = self.client.post(reverse('admin:generic_inline_admin_contact_delete', args=[c.pk]))
self.assertContains(response, 'Are you sure you want to delete')
@override_settings(ROOT_URLCONF='generic_inline_admin.urls')
class NoInlineDeletionTest(SimpleTestCase):
def test_no_deletion(self):
inline = MediaPermanentInline(EpisodePermanent, admin_site)
fake_request = object()
formset = inline.get_formset(fake_request)
self.assertFalse(formset.can_delete)
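# Minimal request/user stubs with blanket permissions, used by the
# GenericInlineModelAdmin tests below in place of RequestFactory.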
class MockRequest:
pass
class MockSuperUser:
def has_perm(self, perm):
return True
request = MockRequest()
request.user = MockSuperUser()
@override_settings(ROOT_URLCONF='generic_inline_admin.urls')
class GenericInlineModelAdminTest(SimpleTestCase):
def setUp(self):
self.site = AdminSite()
def test_get_formset_kwargs(self):
media_inline = MediaInline(Media, AdminSite())
# Create a formset with default arguments
formset = media_inline.get_formset(request)
self.assertEqual(formset.max_num, DEFAULT_MAX_NUM)
self.assertIs(formset.can_order, False)
# Create a formset with custom keyword arguments
formset = media_inline.get_formset(request, max_num=100, can_order=True)
self.assertEqual(formset.max_num, 100)
self.assertIs(formset.can_order, True)
def test_custom_form_meta_exclude_with_readonly(self):
"""
The custom ModelForm's `Meta.exclude` is respected when
used in conjunction with `GenericInlineModelAdmin.readonly_fields`
and when no `ModelAdmin.exclude` is defined.
"""
class MediaForm(ModelForm):
class Meta:
model = Media
exclude = ['url']
class MediaInline(GenericTabularInline):
readonly_fields = ['description']
form = MediaForm
model = Media
class EpisodeAdmin(admin.ModelAdmin):
inlines = [
MediaInline
]
ma = EpisodeAdmin(Episode, self.site)
self.assertEqual(
list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields),
['keywords', 'id', 'DELETE'])
def test_custom_form_meta_exclude(self):
"""
The custom ModelForm's `Meta.exclude` is respected by
`GenericInlineModelAdmin.get_formset`, and overridden if
`ModelAdmin.exclude` or `GenericInlineModelAdmin.exclude` are defined.
Refs #15907.
"""
# First with `GenericInlineModelAdmin` -----------------
class MediaForm(ModelForm):
class Meta:
model = Media
exclude = ['url']
class MediaInline(GenericTabularInline):
exclude = ['description']
form = MediaForm
model = Media
class EpisodeAdmin(admin.ModelAdmin):
inlines = [
MediaInline
]
ma = EpisodeAdmin(Episode, self.site)
self.assertEqual(
list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields),
['url', 'keywords', 'id', 'DELETE'])
# Then, only with `ModelForm` -----------------
class MediaInline(GenericTabularInline):
form = MediaForm
model = Media
class EpisodeAdmin(admin.ModelAdmin):
inlines = [
MediaInline
]
ma = EpisodeAdmin(Episode, self.site)
self.assertEqual(
list(list(ma.get_formsets_with_inlines(request))[0][0]().forms[0].fields),
['description', 'keywords', 'id', 'DELETE'])
def test_get_fieldsets(self):
# get_fieldsets is called when figuring out form fields.
# Refs #18681.
class MediaForm(ModelForm):
class Meta:
model = Media
fields = '__all__'
class MediaInline(GenericTabularInline):
form = MediaForm
model = Media
can_delete = False
def get_fieldsets(self, request, obj=None):
return [(None, {'fields': ['url', 'description']})]
ma = MediaInline(Media, self.site)
form = ma.get_formset(None).form
self.assertEqual(form._meta.fields, ['url', 'description'])
def test_get_formsets_with_inlines_returns_tuples(self):
"""
get_formsets_with_inlines() returns the correct tuples.
"""
class MediaForm(ModelForm):
class Meta:
model = Media
exclude = ['url']
class MediaInline(GenericTabularInline):
form = MediaForm
model = Media
class AlternateInline(GenericTabularInline):
form = MediaForm
model = Media
class EpisodeAdmin(admin.ModelAdmin):
inlines = [
AlternateInline, MediaInline
]
ma = EpisodeAdmin(Episode, self.site)
inlines = ma.get_inline_instances(request)
for (formset, inline), other_inline in zip(ma.get_formsets_with_inlines(request), inlines):
self.assertIsInstance(formset, other_inline.get_formset(request).__class__)
|
afa57385f9a3dcf848bbed3839adfcd62c490b2065601d72b0b85a902eb8a1cc | """
Unit tests for reverse URL lookups.
"""
import sys
import threading
from admin_scripts.tests import AdminScriptTestCase
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import ImproperlyConfigured, ViewDoesNotExist
from django.http import (
HttpRequest, HttpResponsePermanentRedirect, HttpResponseRedirect,
)
from django.shortcuts import redirect
from django.test import SimpleTestCase, TestCase, override_settings
from django.test.utils import override_script_prefix
from django.urls import (
NoReverseMatch, Resolver404, ResolverMatch, URLPattern, URLResolver,
get_callable, get_resolver, get_urlconf, include, path, re_path, resolve,
reverse, reverse_lazy,
)
from django.urls.resolvers import RegexPattern
from . import middleware, urlconf_outer, views
from .utils import URLObject
from .views import empty_view
resolve_test_data = (
# These entries are in the format: (path, url_name, app_name, namespace, view_name, func, args, kwargs)
# Simple case
('/normal/42/37/', 'normal-view', '', '', 'normal-view', views.empty_view, (), {'arg1': '42', 'arg2': '37'}),
(
'/view_class/42/37/', 'view-class', '', '', 'view-class', views.view_class_instance, (),
{'arg1': '42', 'arg2': '37'}
),
(
'/included/normal/42/37/', 'inc-normal-view', 'included_namespace_urls',
'included_namespace_urls', 'included_namespace_urls:inc-normal-view',
views.empty_view, (), {'arg1': '42', 'arg2': '37'}
),
(
'/included/view_class/42/37/', 'inc-view-class', 'included_namespace_urls',
'included_namespace_urls', 'included_namespace_urls:inc-view-class',
views.view_class_instance, (), {'arg1': '42', 'arg2': '37'}
),
# Unnamed args are dropped if you have *any* kwargs in a pattern
('/mixed_args/42/37/', 'mixed-args', '', '', 'mixed-args', views.empty_view, (), {'arg2': '37'}),
(
'/included/mixed_args/42/37/', 'inc-mixed-args', 'included_namespace_urls',
'included_namespace_urls', 'included_namespace_urls:inc-mixed-args',
views.empty_view, (), {'arg2': '37'}
),
(
'/included/12/mixed_args/42/37/', 'inc-mixed-args', 'included_namespace_urls',
'included_namespace_urls', 'included_namespace_urls:inc-mixed-args',
views.empty_view, (), {'arg2': '37'}
),
# Unnamed views should have None as the url_name. Regression data for #21157.
(
'/unnamed/normal/42/37/', None, '', '', 'urlpatterns_reverse.views.empty_view', views.empty_view, (),
{'arg1': '42', 'arg2': '37'}
),
(
'/unnamed/view_class/42/37/', None, '', '', 'urlpatterns_reverse.views.ViewClass', views.view_class_instance,
(), {'arg1': '42', 'arg2': '37'}
),
# If you have no kwargs, you get an args list.
('/no_kwargs/42/37/', 'no-kwargs', '', '', 'no-kwargs', views.empty_view, ('42', '37'), {}),
(
'/included/no_kwargs/42/37/', 'inc-no-kwargs', 'included_namespace_urls',
'included_namespace_urls', 'included_namespace_urls:inc-no-kwargs',
views.empty_view, ('42', '37'), {}
),
(
'/included/12/no_kwargs/42/37/', 'inc-no-kwargs', 'included_namespace_urls',
'included_namespace_urls', 'included_namespace_urls:inc-no-kwargs',
views.empty_view, ('12', '42', '37'), {}
),
# Namespaces
(
'/test1/inner/42/37/', 'urlobject-view', 'testapp', 'test-ns1', 'test-ns1:urlobject-view',
views.empty_view, (), {'arg1': '42', 'arg2': '37'}
),
(
'/included/test3/inner/42/37/', 'urlobject-view', 'included_namespace_urls:testapp',
'included_namespace_urls:test-ns3', 'included_namespace_urls:test-ns3:urlobject-view',
views.empty_view, (), {'arg1': '42', 'arg2': '37'}
),
(
'/ns-included1/normal/42/37/', 'inc-normal-view', 'included_namespace_urls',
'inc-ns1', 'inc-ns1:inc-normal-view',
views.empty_view, (), {'arg1': '42', 'arg2': '37'}
),
(
'/included/test3/inner/42/37/', 'urlobject-view', 'included_namespace_urls:testapp',
'included_namespace_urls:test-ns3', 'included_namespace_urls:test-ns3:urlobject-view',
views.empty_view, (), {'arg1': '42', 'arg2': '37'}
),
(
'/default/inner/42/37/', 'urlobject-view', 'testapp', 'testapp', 'testapp:urlobject-view',
views.empty_view, (), {'arg1': '42', 'arg2': '37'}
),
(
'/other2/inner/42/37/', 'urlobject-view', 'nodefault', 'other-ns2', 'other-ns2:urlobject-view',
views.empty_view, (), {'arg1': '42', 'arg2': '37'}
),
(
'/other1/inner/42/37/', 'urlobject-view', 'nodefault', 'other-ns1', 'other-ns1:urlobject-view',
views.empty_view, (), {'arg1': '42', 'arg2': '37'}
),
# Nested namespaces
(
'/ns-included1/test3/inner/42/37/', 'urlobject-view', 'included_namespace_urls:testapp',
'inc-ns1:test-ns3', 'inc-ns1:test-ns3:urlobject-view',
views.empty_view, (), {'arg1': '42', 'arg2': '37'}
),
(
'/ns-included1/ns-included4/ns-included2/test3/inner/42/37/', 'urlobject-view',
'included_namespace_urls:namespace_urls:included_namespace_urls:testapp',
'inc-ns1:inc-ns4:inc-ns2:test-ns3',
'inc-ns1:inc-ns4:inc-ns2:test-ns3:urlobject-view',
views.empty_view, (), {'arg1': '42', 'arg2': '37'}
),
(
'/app-included/test3/inner/42/37/', 'urlobject-view', 'included_namespace_urls:testapp', 'inc-app:test-ns3',
'inc-app:test-ns3:urlobject-view', views.empty_view, (), {'arg1': '42', 'arg2': '37'}
),
(
'/app-included/ns-included4/ns-included2/test3/inner/42/37/', 'urlobject-view',
'included_namespace_urls:namespace_urls:included_namespace_urls:testapp',
'inc-app:inc-ns4:inc-ns2:test-ns3',
'inc-app:inc-ns4:inc-ns2:test-ns3:urlobject-view',
views.empty_view, (), {'arg1': '42', 'arg2': '37'}
),
# Namespaces capturing variables
(
'/inc70/', 'inner-nothing', 'included_urls', 'inc-ns5', 'inc-ns5:inner-nothing',
views.empty_view, (), {'outer': '70'}
),
(
'/inc78/extra/foobar/', 'inner-extra', 'included_urls', 'inc-ns5', 'inc-ns5:inner-extra',
views.empty_view, (), {'outer': '78', 'extra': 'foobar'}
),
)
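# Entries are (name, expected URL or NoReverseMatch, args, kwargs), passed to
# reverse() in URLPatternReverse.test_urlpattern_reverse below.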
test_data = (
('places', '/places/3/', [3], {}),
('places', '/places/3/', ['3'], {}),
('places', NoReverseMatch, ['a'], {}),
('places', NoReverseMatch, [], {}),
('places?', '/place/', [], {}),
('places+', '/places/', [], {}),
('places*', '/place/', [], {}),
('places2?', '/', [], {}),
('places2+', '/places/', [], {}),
('places2*', '/', [], {}),
('places3', '/places/4/', [4], {}),
('places3', '/places/harlem/', ['harlem'], {}),
('places3', NoReverseMatch, ['harlem64'], {}),
('places4', '/places/3/', [], {'id': 3}),
('people', NoReverseMatch, [], {}),
('people', '/people/adrian/', ['adrian'], {}),
('people', '/people/adrian/', [], {'name': 'adrian'}),
('people', NoReverseMatch, ['name with spaces'], {}),
('people', NoReverseMatch, [], {'name': 'name with spaces'}),
('people2', '/people/name/', [], {}),
('people2a', '/people/name/fred/', ['fred'], {}),
('people_backref', '/people/nate-nate/', ['nate'], {}),
('people_backref', '/people/nate-nate/', [], {'name': 'nate'}),
('optional', '/optional/fred/', [], {'name': 'fred'}),
('optional', '/optional/fred/', ['fred'], {}),
('named_optional', '/optional/1/', [1], {}),
('named_optional', '/optional/1/', [], {'arg1': 1}),
('named_optional', '/optional/1/2/', [1, 2], {}),
('named_optional', '/optional/1/2/', [], {'arg1': 1, 'arg2': 2}),
('named_optional_terminated', '/optional/1/2/', [1, 2], {}),
('named_optional_terminated', '/optional/1/2/', [], {'arg1': 1, 'arg2': 2}),
('hardcoded', '/hardcoded/', [], {}),
('hardcoded2', '/hardcoded/doc.pdf', [], {}),
('people3', '/people/il/adrian/', [], {'state': 'il', 'name': 'adrian'}),
('people3', NoReverseMatch, [], {'state': 'il'}),
('people3', NoReverseMatch, [], {'name': 'adrian'}),
('people4', NoReverseMatch, [], {'state': 'il', 'name': 'adrian'}),
('people6', '/people/il/test/adrian/', ['il/test', 'adrian'], {}),
('people6', '/people//adrian/', ['adrian'], {}),
('range', '/character_set/a/', [], {}),
('range2', '/character_set/x/', [], {}),
('price', '/price/$10/', ['10'], {}),
('price2', '/price/$10/', ['10'], {}),
('price3', '/price/$10/', ['10'], {}),
('product', '/product/chocolate+($2.00)/', [], {'price': '2.00', 'product': 'chocolate'}),
('headlines', '/headlines/2007.5.21/', [], {'year': 2007, 'month': 5, 'day': 21}),
(
'windows', r'/windows_path/C:%5CDocuments%20and%20Settings%5Cspam/', [],
{'drive_name': 'C', 'path': r'Documents and Settings\spam'}
),
('special', r'/special_chars/~@+%5C$*%7C/', [r'~@+\$*|'], {}),
('special', r'/special_chars/some%20resource/', [r'some resource'], {}),
('special', r'/special_chars/10%25%20complete/', [r'10% complete'], {}),
('special', r'/special_chars/some%20resource/', [], {'chars': r'some resource'}),
('special', r'/special_chars/10%25%20complete/', [], {'chars': r'10% complete'}),
('special', NoReverseMatch, [''], {}),
('mixed', '/john/0/', [], {'name': 'john'}),
('repeats', '/repeats/a/', [], {}),
('repeats2', '/repeats/aa/', [], {}),
('repeats3', '/repeats/aa/', [], {}),
('test', '/test/1', [], {}),
('inner-nothing', '/outer/42/', [], {'outer': '42'}),
('inner-nothing', '/outer/42/', ['42'], {}),
('inner-nothing', NoReverseMatch, ['foo'], {}),
('inner-extra', '/outer/42/extra/inner/', [], {'extra': 'inner', 'outer': '42'}),
('inner-extra', '/outer/42/extra/inner/', ['42', 'inner'], {}),
('inner-extra', NoReverseMatch, ['fred', 'inner'], {}),
('inner-no-kwargs', '/outer-no-kwargs/42/inner-no-kwargs/1/', ['42', '1'], {}),
('disjunction', NoReverseMatch, ['foo'], {}),
('inner-disjunction', NoReverseMatch, ['10', '11'], {}),
('extra-places', '/e-places/10/', ['10'], {}),
('extra-people', '/e-people/fred/', ['fred'], {}),
('extra-people', '/e-people/fred/', [], {'name': 'fred'}),
('part', '/part/one/', [], {'value': 'one'}),
('part', '/prefix/xx/part/one/', [], {'value': 'one', 'prefix': 'xx'}),
('part2', '/part2/one/', [], {'value': 'one'}),
('part2', '/part2/', [], {}),
('part2', '/prefix/xx/part2/one/', [], {'value': 'one', 'prefix': 'xx'}),
('part2', '/prefix/xx/part2/', [], {'prefix': 'xx'}),
# Tests for nested groups. Nested capturing groups will only work if you
# *only* supply the correct outer group.
('nested-noncapture', '/nested/noncapture/opt', [], {'p': 'opt'}),
('nested-capture', '/nested/capture/opt/', ['opt/'], {}),
('nested-capture', NoReverseMatch, [], {'p': 'opt'}),
('nested-mixedcapture', '/nested/capture/mixed/opt', ['opt'], {}),
('nested-mixedcapture', NoReverseMatch, [], {'p': 'opt'}),
('nested-namedcapture', '/nested/capture/named/opt/', [], {'outer': 'opt/'}),
('nested-namedcapture', NoReverseMatch, [], {'outer': 'opt/', 'inner': 'opt'}),
('nested-namedcapture', NoReverseMatch, [], {'inner': 'opt'}),
('non_path_include', '/includes/non_path_include/', [], {}),
# Tests for #13154
('defaults', '/defaults_view1/3/', [], {'arg1': 3, 'arg2': 1}),
('defaults', '/defaults_view2/3/', [], {'arg1': 3, 'arg2': 2}),
('defaults', NoReverseMatch, [], {'arg1': 3, 'arg2': 3}),
('defaults', NoReverseMatch, [], {'arg2': 1}),
# Security tests
('security', '/%2Fexample.com/security/', ['/example.com'], {}),
)
@override_settings(ROOT_URLCONF='urlpatterns_reverse.no_urls')
class NoURLPatternsTests(SimpleTestCase):
def test_no_urls_exception(self):
"""
URLResolver should raise an exception when no urlpatterns exist.
"""
resolver = URLResolver(RegexPattern(r'^$'), settings.ROOT_URLCONF)
with self.assertRaisesMessage(
ImproperlyConfigured,
"The included URLconf 'urlpatterns_reverse.no_urls' does not "
"appear to have any patterns in it. If you see valid patterns in "
"the file then the issue is probably caused by a circular import."
):
getattr(resolver, 'url_patterns')
@override_settings(ROOT_URLCONF='urlpatterns_reverse.urls')
class URLPatternReverse(SimpleTestCase):
def test_urlpattern_reverse(self):
for name, expected, args, kwargs in test_data:
with self.subTest(name=name, args=args, kwargs=kwargs):
try:
got = reverse(name, args=args, kwargs=kwargs)
except NoReverseMatch:
self.assertEqual(NoReverseMatch, expected)
else:
self.assertEqual(got, expected)
def test_reverse_none(self):
# Reversing None should raise an error, not return the last un-named view.
with self.assertRaises(NoReverseMatch):
reverse(None)
def test_mixing_args_and_kwargs(self):
msg = "Don't mix *args and **kwargs in call to reverse()!"
with self.assertRaisesMessage(ValueError, msg):
reverse('name', args=['a'], kwargs={'b': 'c'})
@override_script_prefix('/{{invalid}}/')
def test_prefix_braces(self):
self.assertEqual(
'/%7B%7Binvalid%7D%7D/includes/non_path_include/',
reverse('non_path_include')
)
def test_prefix_parenthesis(self):
# Parentheses are allowed and should not cause errors or be escaped
with override_script_prefix('/bogus)/'):
self.assertEqual(
'/bogus)/includes/non_path_include/',
reverse('non_path_include')
)
with override_script_prefix('/(bogus)/'):
self.assertEqual(
'/(bogus)/includes/non_path_include/',
reverse('non_path_include')
)
@override_script_prefix('/bump%20map/')
def test_prefix_format_char(self):
self.assertEqual(
'/bump%2520map/includes/non_path_include/',
reverse('non_path_include')
)
@override_script_prefix('/%7Eme/')
def test_non_urlsafe_prefix_with_args(self):
# Regression for #20022, adjusted for #24013 because ~ is an unreserved
# character. Tests whether % is escaped.
self.assertEqual('/%257Eme/places/1/', reverse('places', args=[1]))
def test_patterns_reported(self):
# Regression for #17076
with self.assertRaisesMessage(NoReverseMatch, r"1 pattern(s) tried: ['people/(?P<name>\\w+)/$']"):
# this url exists, but requires an argument
reverse("people", args=[])
@override_script_prefix('/script:name/')
def test_script_name_escaping(self):
self.assertEqual(
reverse('optional', args=['foo:bar']),
'/script:name/optional/foo:bar/'
)
def test_view_not_found_message(self):
msg = (
"Reverse for 'nonexistent-view' not found. 'nonexistent-view' "
"is not a valid view function or pattern name."
)
with self.assertRaisesMessage(NoReverseMatch, msg):
reverse('nonexistent-view')
def test_no_args_message(self):
msg = "Reverse for 'places' with no arguments not found. 1 pattern(s) tried:"
with self.assertRaisesMessage(NoReverseMatch, msg):
reverse('places')
def test_illegal_args_message(self):
msg = "Reverse for 'places' with arguments '(1, 2)' not found. 1 pattern(s) tried:"
with self.assertRaisesMessage(NoReverseMatch, msg):
reverse('places', args=(1, 2))
def test_illegal_kwargs_message(self):
msg = "Reverse for 'places' with keyword arguments '{'arg1': 2}' not found. 1 pattern(s) tried:"
with self.assertRaisesMessage(NoReverseMatch, msg):
reverse('places', kwargs={'arg1': 2})
class ResolverTests(SimpleTestCase):
def test_resolver_repr(self):
"""
Test repr of URLResolver, especially when urlconf_name is a list
(#17892).
"""
# Pick a resolver from a namespaced URLconf
resolver = get_resolver('urlpatterns_reverse.namespace_urls')
sub_resolver = resolver.namespace_dict['test-ns1'][1]
self.assertIn('<URLPattern list>', repr(sub_resolver))
def test_reverse_lazy_object_coercion_by_resolve(self):
"""
Verifies lazy object returned by reverse_lazy is coerced to
text by resolve(). Prior to #21043, this would raise a TypeError.
"""
urls = 'urlpatterns_reverse.named_urls'
proxy_url = reverse_lazy('named-url1', urlconf=urls)
resolver = get_resolver(urls)
resolver.resolve(proxy_url)
def test_resolver_reverse(self):
resolver = get_resolver('urlpatterns_reverse.named_urls')
test_urls = [
# (name, args, kwargs, expected)
('named-url1', (), {}, ''),
('named-url2', ('arg',), {}, 'extra/arg/'),
('named-url2', (), {'extra': 'arg'}, 'extra/arg/'),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(resolver.reverse(name, *args, **kwargs), expected)
def test_resolver_reverse_conflict(self):
"""
URL pattern name arguments don't need to be unique. The last registered
pattern takes precedence for conflicting names.
"""
resolver = get_resolver('urlpatterns_reverse.named_urls_conflict')
test_urls = [
# (name, args, kwargs, expected)
# Without arguments, the last URL in urlpatterns has precedence.
('name-conflict', (), {}, 'conflict/'),
# With an arg, the last URL in urlpatterns has precedence.
('name-conflict', ('arg',), {}, 'conflict-last/arg/'),
# With a kwarg, other URL patterns can be reversed.
('name-conflict', (), {'first': 'arg'}, 'conflict-first/arg/'),
('name-conflict', (), {'middle': 'arg'}, 'conflict-middle/arg/'),
('name-conflict', (), {'last': 'arg'}, 'conflict-last/arg/'),
# The number and order of the arguments don't interfere with reversing.
('name-conflict', ('arg', 'arg'), {}, 'conflict/arg/arg/'),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(resolver.reverse(name, *args, **kwargs), expected)
def test_non_regex(self):
"""
A Resolver404 is raised if resolving doesn't meet the basic
requirements of a path to match - i.e., at the very least, it matches
the root pattern '^/'. Never return None from resolve() to prevent a
TypeError from occurring later (#10834).
"""
test_urls = ['', 'a', '\\', '.']
for path_ in test_urls:
with self.subTest(path=path_):
with self.assertRaises(Resolver404):
resolve(path_)
def test_404_tried_urls_have_names(self):
"""
The list of URLs that come back from a Resolver404 exception contains
a list in the right format for printing out in the DEBUG 404 page with
both the patterns and URL names, if available.
"""
urls = 'urlpatterns_reverse.named_urls'
# this list matches the expected URL types and names returned when
# you try to resolve a nonexistent URL in the first level of included
# URLs in named_urls.py (e.g., '/included/nonexistent-url')
url_types_names = [
[{'type': URLPattern, 'name': 'named-url1'}],
[{'type': URLPattern, 'name': 'named-url2'}],
[{'type': URLPattern, 'name': None}],
[{'type': URLResolver}, {'type': URLPattern, 'name': 'named-url3'}],
[{'type': URLResolver}, {'type': URLPattern, 'name': 'named-url4'}],
[{'type': URLResolver}, {'type': URLPattern, 'name': None}],
[{'type': URLResolver}, {'type': URLResolver}],
]
with self.assertRaisesMessage(Resolver404, 'tried') as cm:
resolve('/included/nonexistent-url', urlconf=urls)
e = cm.exception
# make sure we at least matched the root ('/') url resolver:
self.assertIn('tried', e.args[0])
self.assertEqual(
len(e.args[0]['tried']),
len(url_types_names),
'Wrong number of tried URLs returned. Expected %s, got %s.' % (
len(url_types_names), len(e.args[0]['tried'])
)
)
for tried, expected in zip(e.args[0]['tried'], url_types_names):
for t, e in zip(tried, expected):
with self.subTest(t):
self.assertIsInstance(t, e['type'], '%s is not an instance of %s' % (t, e['type']))
if 'name' in e:
if not e['name']:
self.assertIsNone(t.name, 'Expected no URL name but found %s.' % t.name)
else:
self.assertEqual(
t.name,
e['name'],
'Wrong URL name. Expected "%s", got "%s".' % (e['name'], t.name)
)
def test_namespaced_view_detail(self):
resolver = get_resolver('urlpatterns_reverse.nested_urls')
self.assertTrue(resolver._is_callback('urlpatterns_reverse.nested_urls.view1'))
self.assertTrue(resolver._is_callback('urlpatterns_reverse.nested_urls.view2'))
self.assertTrue(resolver._is_callback('urlpatterns_reverse.nested_urls.View3'))
self.assertFalse(resolver._is_callback('urlpatterns_reverse.nested_urls.blub'))
def test_view_detail_as_method(self):
# Views which have a class name as part of their path.
resolver = get_resolver('urlpatterns_reverse.method_view_urls')
self.assertTrue(resolver._is_callback('urlpatterns_reverse.method_view_urls.ViewContainer.method_view'))
self.assertTrue(resolver._is_callback('urlpatterns_reverse.method_view_urls.ViewContainer.classmethod_view'))
def test_populate_concurrency(self):
"""
URLResolver._populate() can be called concurrently, but not more
than once per thread (#26888).
"""
resolver = URLResolver(RegexPattern(r'^/'), 'urlpatterns_reverse.urls')
resolver._local.populating = True
thread = threading.Thread(target=resolver._populate)
thread.start()
thread.join()
self.assertNotEqual(resolver._reverse_dict, {})
@override_settings(ROOT_URLCONF='urlpatterns_reverse.reverse_lazy_urls')
class ReverseLazyTest(TestCase):
def test_redirect_with_lazy_reverse(self):
response = self.client.get('/redirect/')
self.assertRedirects(response, "/redirected_to/", status_code=302)
def test_user_permission_with_lazy_reverse(self):
alfred = User.objects.create_user('alfred', '[email protected]', password='testpw')
response = self.client.get('/login_required_view/')
self.assertRedirects(response, "/login/?next=/login_required_view/", status_code=302)
self.client.force_login(alfred)
response = self.client.get('/login_required_view/')
self.assertEqual(response.status_code, 200)
def test_inserting_reverse_lazy_into_string(self):
self.assertEqual(
'Some URL: %s' % reverse_lazy('some-login-page'),
'Some URL: /login/'
)
class ReverseLazySettingsTest(AdminScriptTestCase):
"""
reverse_lazy can be used in settings without causing a circular
import error.
"""
def setUp(self):
self.write_settings(
'settings.py',
extra="from django.urls import reverse_lazy\nLOGIN_URL = reverse_lazy('login')",
)
def tearDown(self):
self.remove_settings('settings.py')
def test_lazy_in_settings(self):
out, err = self.run_manage(['check'])
self.assertNoOutput(err)
@override_settings(ROOT_URLCONF='urlpatterns_reverse.urls')
class ReverseShortcutTests(SimpleTestCase):
def test_redirect_to_object(self):
# We don't really need a model; just something with a get_absolute_url
class FakeObj:
def get_absolute_url(self):
return "/hi-there/"
res = redirect(FakeObj())
self.assertIsInstance(res, HttpResponseRedirect)
self.assertEqual(res.url, '/hi-there/')
res = redirect(FakeObj(), permanent=True)
self.assertIsInstance(res, HttpResponsePermanentRedirect)
self.assertEqual(res.url, '/hi-there/')
def test_redirect_to_view_name(self):
res = redirect('hardcoded2')
self.assertEqual(res.url, '/hardcoded/doc.pdf')
res = redirect('places', 1)
self.assertEqual(res.url, '/places/1/')
res = redirect('headlines', year='2008', month='02', day='17')
self.assertEqual(res.url, '/headlines/2008.02.17/')
with self.assertRaises(NoReverseMatch):
redirect('not-a-view')
def test_redirect_to_url(self):
res = redirect('/foo/')
self.assertEqual(res.url, '/foo/')
res = redirect('http://example.com/')
self.assertEqual(res.url, 'http://example.com/')
# Assert that we can redirect using UTF-8 strings
res = redirect('/æøå/abc/')
self.assertEqual(res.url, '/%C3%A6%C3%B8%C3%A5/abc/')
# Assert that no imports are attempted when dealing with a relative path
# (previously, the below would result in a UnicodeEncodeError from __import__)
res = redirect('/æøå.abc/')
self.assertEqual(res.url, '/%C3%A6%C3%B8%C3%A5.abc/')
res = redirect('os.path')
self.assertEqual(res.url, 'os.path')
def test_no_illegal_imports(self):
# modules that are not listed in urlpatterns should not be importable
redirect("urlpatterns_reverse.nonimported_module.view")
self.assertNotIn("urlpatterns_reverse.nonimported_module", sys.modules)
def test_reverse_by_path_nested(self):
# Views added to urlpatterns using include() should be reversible.
from .views import nested_view
self.assertEqual(reverse(nested_view), '/includes/nested_path/')
def test_redirect_view_object(self):
from .views import absolute_kwargs_view
res = redirect(absolute_kwargs_view)
self.assertEqual(res.url, '/absolute_arg_view/')
with self.assertRaises(NoReverseMatch):
redirect(absolute_kwargs_view, wrong_argument=None)
@override_settings(ROOT_URLCONF='urlpatterns_reverse.namespace_urls')
class NamespaceTests(SimpleTestCase):
def test_ambiguous_object(self):
"""
Names deployed via dynamic URL objects that require namespaces can't
be resolved.
"""
test_urls = [
('urlobject-view', [], {}),
('urlobject-view', [37, 42], {}),
('urlobject-view', [], {'arg1': 42, 'arg2': 37}),
]
for name, args, kwargs in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
with self.assertRaises(NoReverseMatch):
reverse(name, args=args, kwargs=kwargs)
def test_ambiguous_urlpattern(self):
"""
Names deployed via dynamic URL objects that require namespaces can't
be resolved.
"""
test_urls = [
('inner-nothing', [], {}),
('inner-nothing', [37, 42], {}),
('inner-nothing', [], {'arg1': 42, 'arg2': 37}),
]
for name, args, kwargs in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
with self.assertRaises(NoReverseMatch):
reverse(name, args=args, kwargs=kwargs)
def test_non_existent_namespace(self):
"""Nonexistent namespaces raise errors."""
test_urls = [
'blahblah:urlobject-view',
'test-ns1:blahblah:urlobject-view',
]
for name in test_urls:
with self.subTest(name=name):
with self.assertRaises(NoReverseMatch):
reverse(name)
def test_normal_name(self):
"""Normal lookups work as expected."""
test_urls = [
('normal-view', [], {}, '/normal/'),
('normal-view', [37, 42], {}, '/normal/37/42/'),
('normal-view', [], {'arg1': 42, 'arg2': 37}, '/normal/42/37/'),
('special-view', [], {}, '/+%5C$*/'),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected)
def test_simple_included_name(self):
"""Normal lookups work on names included from other patterns."""
test_urls = [
('included_namespace_urls:inc-normal-view', [], {}, '/included/normal/'),
('included_namespace_urls:inc-normal-view', [37, 42], {}, '/included/normal/37/42/'),
('included_namespace_urls:inc-normal-view', [], {'arg1': 42, 'arg2': 37}, '/included/normal/42/37/'),
('included_namespace_urls:inc-special-view', [], {}, '/included/+%5C$*/'),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected)
def test_namespace_object(self):
"""Dynamic URL objects can be found using a namespace."""
test_urls = [
('test-ns1:urlobject-view', [], {}, '/test1/inner/'),
('test-ns1:urlobject-view', [37, 42], {}, '/test1/inner/37/42/'),
('test-ns1:urlobject-view', [], {'arg1': 42, 'arg2': 37}, '/test1/inner/42/37/'),
('test-ns1:urlobject-special-view', [], {}, '/test1/inner/+%5C$*/'),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected)
def test_app_object(self):
"""
Dynamic URL objects can return a (pattern, app_name) 2-tuple, and
include() can set the namespace.
"""
test_urls = [
('new-ns1:urlobject-view', [], {}, '/newapp1/inner/'),
('new-ns1:urlobject-view', [37, 42], {}, '/newapp1/inner/37/42/'),
('new-ns1:urlobject-view', [], {'arg1': 42, 'arg2': 37}, '/newapp1/inner/42/37/'),
('new-ns1:urlobject-special-view', [], {}, '/newapp1/inner/+%5C$*/'),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected)
def test_app_object_default_namespace(self):
"""
Namespace defaults to app_name when including a (pattern, app_name)
2-tuple.
"""
test_urls = [
('newapp:urlobject-view', [], {}, '/new-default/inner/'),
('newapp:urlobject-view', [37, 42], {}, '/new-default/inner/37/42/'),
('newapp:urlobject-view', [], {'arg1': 42, 'arg2': 37}, '/new-default/inner/42/37/'),
('newapp:urlobject-special-view', [], {}, '/new-default/inner/+%5C$*/'),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected)
def test_embedded_namespace_object(self):
"""Namespaces can be installed anywhere in the URL pattern tree."""
test_urls = [
('included_namespace_urls:test-ns3:urlobject-view', [], {}, '/included/test3/inner/'),
('included_namespace_urls:test-ns3:urlobject-view', [37, 42], {}, '/included/test3/inner/37/42/'),
(
'included_namespace_urls:test-ns3:urlobject-view', [], {'arg1': 42, 'arg2': 37},
'/included/test3/inner/42/37/',
),
('included_namespace_urls:test-ns3:urlobject-special-view', [], {}, '/included/test3/inner/+%5C$*/'),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected)
def test_namespace_pattern(self):
"""Namespaces can be applied to include()'d urlpatterns."""
test_urls = [
('inc-ns1:inc-normal-view', [], {}, '/ns-included1/normal/'),
('inc-ns1:inc-normal-view', [37, 42], {}, '/ns-included1/normal/37/42/'),
('inc-ns1:inc-normal-view', [], {'arg1': 42, 'arg2': 37}, '/ns-included1/normal/42/37/'),
('inc-ns1:inc-special-view', [], {}, '/ns-included1/+%5C$*/'),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected)
def test_app_name_pattern(self):
"""
Namespaces can be applied to include()'d urlpatterns that set an
app_name attribute.
"""
test_urls = [
('app-ns1:inc-normal-view', [], {}, '/app-included1/normal/'),
('app-ns1:inc-normal-view', [37, 42], {}, '/app-included1/normal/37/42/'),
('app-ns1:inc-normal-view', [], {'arg1': 42, 'arg2': 37}, '/app-included1/normal/42/37/'),
('app-ns1:inc-special-view', [], {}, '/app-included1/+%5C$*/'),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected)
def test_namespace_pattern_with_variable_prefix(self):
"""
Using include() with namespaces when there is a regex variable in front
of it.
"""
test_urls = [
('inc-outer:inc-normal-view', [], {'outer': 42}, '/ns-outer/42/normal/'),
('inc-outer:inc-normal-view', [42], {}, '/ns-outer/42/normal/'),
('inc-outer:inc-normal-view', [], {'arg1': 37, 'arg2': 4, 'outer': 42}, '/ns-outer/42/normal/37/4/'),
('inc-outer:inc-normal-view', [42, 37, 4], {}, '/ns-outer/42/normal/37/4/'),
('inc-outer:inc-special-view', [], {'outer': 42}, '/ns-outer/42/+%5C$*/'),
('inc-outer:inc-special-view', [42], {}, '/ns-outer/42/+%5C$*/'),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected)
def test_multiple_namespace_pattern(self):
"""Namespaces can be embedded."""
test_urls = [
('inc-ns1:test-ns3:urlobject-view', [], {}, '/ns-included1/test3/inner/'),
('inc-ns1:test-ns3:urlobject-view', [37, 42], {}, '/ns-included1/test3/inner/37/42/'),
(
'inc-ns1:test-ns3:urlobject-view', [], {'arg1': 42, 'arg2': 37},
'/ns-included1/test3/inner/42/37/',
),
('inc-ns1:test-ns3:urlobject-special-view', [], {}, '/ns-included1/test3/inner/+%5C$*/'),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected)
def test_nested_namespace_pattern(self):
"""Namespaces can be nested."""
test_urls = [
(
'inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-view', [], {},
'/ns-included1/ns-included4/ns-included1/test3/inner/',
),
(
'inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-view', [37, 42], {},
'/ns-included1/ns-included4/ns-included1/test3/inner/37/42/',
),
(
'inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-view', [], {'arg1': 42, 'arg2': 37},
'/ns-included1/ns-included4/ns-included1/test3/inner/42/37/',
),
(
'inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-special-view', [], {},
'/ns-included1/ns-included4/ns-included1/test3/inner/+%5C$*/',
),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected)
def test_app_lookup_object(self):
"""A default application namespace can be used for lookup."""
test_urls = [
('testapp:urlobject-view', [], {}, '/default/inner/'),
('testapp:urlobject-view', [37, 42], {}, '/default/inner/37/42/'),
('testapp:urlobject-view', [], {'arg1': 42, 'arg2': 37}, '/default/inner/42/37/'),
('testapp:urlobject-special-view', [], {}, '/default/inner/+%5C$*/'),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected)
def test_app_lookup_object_with_default(self):
"""A default application namespace is sensitive to the current app."""
test_urls = [
('testapp:urlobject-view', [], {}, 'test-ns3', '/default/inner/'),
('testapp:urlobject-view', [37, 42], {}, 'test-ns3', '/default/inner/37/42/'),
('testapp:urlobject-view', [], {'arg1': 42, 'arg2': 37}, 'test-ns3', '/default/inner/42/37/'),
('testapp:urlobject-special-view', [], {}, 'test-ns3', '/default/inner/+%5C$*/'),
]
for name, args, kwargs, current_app, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs, current_app=current_app):
self.assertEqual(reverse(name, args=args, kwargs=kwargs, current_app=current_app), expected)
def test_app_lookup_object_without_default(self):
"""
An application namespace without a default is sensitive to the current
app.
"""
test_urls = [
('nodefault:urlobject-view', [], {}, None, '/other2/inner/'),
('nodefault:urlobject-view', [37, 42], {}, None, '/other2/inner/37/42/'),
('nodefault:urlobject-view', [], {'arg1': 42, 'arg2': 37}, None, '/other2/inner/42/37/'),
('nodefault:urlobject-special-view', [], {}, None, '/other2/inner/+%5C$*/'),
('nodefault:urlobject-view', [], {}, 'other-ns1', '/other1/inner/'),
('nodefault:urlobject-view', [37, 42], {}, 'other-ns1', '/other1/inner/37/42/'),
('nodefault:urlobject-view', [], {'arg1': 42, 'arg2': 37}, 'other-ns1', '/other1/inner/42/37/'),
('nodefault:urlobject-special-view', [], {}, 'other-ns1', '/other1/inner/+%5C$*/'),
]
for name, args, kwargs, current_app, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs, current_app=current_app):
self.assertEqual(reverse(name, args=args, kwargs=kwargs, current_app=current_app), expected)
def test_special_chars_namespace(self):
test_urls = [
('special:included_namespace_urls:inc-normal-view', [], {}, '/+%5C$*/included/normal/'),
('special:included_namespace_urls:inc-normal-view', [37, 42], {}, '/+%5C$*/included/normal/37/42/'),
(
'special:included_namespace_urls:inc-normal-view', [], {'arg1': 42, 'arg2': 37},
'/+%5C$*/included/normal/42/37/',
),
('special:included_namespace_urls:inc-special-view', [], {}, '/+%5C$*/included/+%5C$*/'),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected)
def test_namespaces_with_variables(self):
"""Namespace prefixes can capture variables."""
test_urls = [
('inc-ns5:inner-nothing', [], {'outer': '70'}, '/inc70/'),
('inc-ns5:inner-extra', [], {'extra': 'foobar', 'outer': '78'}, '/inc78/extra/foobar/'),
('inc-ns5:inner-nothing', ['70'], {}, '/inc70/'),
('inc-ns5:inner-extra', ['78', 'foobar'], {}, '/inc78/extra/foobar/'),
]
for name, args, kwargs, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs):
self.assertEqual(reverse(name, args=args, kwargs=kwargs), expected)
def test_nested_app_lookup(self):
"""
A nested current_app should be split in individual namespaces (#24904).
"""
test_urls = [
('inc-ns1:testapp:urlobject-view', [], {}, None, '/ns-included1/test4/inner/'),
('inc-ns1:testapp:urlobject-view', [37, 42], {}, None, '/ns-included1/test4/inner/37/42/'),
('inc-ns1:testapp:urlobject-view', [], {'arg1': 42, 'arg2': 37}, None, '/ns-included1/test4/inner/42/37/'),
('inc-ns1:testapp:urlobject-special-view', [], {}, None, '/ns-included1/test4/inner/+%5C$*/'),
('inc-ns1:testapp:urlobject-view', [], {}, 'inc-ns1:test-ns3', '/ns-included1/test3/inner/'),
('inc-ns1:testapp:urlobject-view', [37, 42], {}, 'inc-ns1:test-ns3', '/ns-included1/test3/inner/37/42/'),
(
'inc-ns1:testapp:urlobject-view', [], {'arg1': 42, 'arg2': 37}, 'inc-ns1:test-ns3',
'/ns-included1/test3/inner/42/37/',
),
(
'inc-ns1:testapp:urlobject-special-view', [], {}, 'inc-ns1:test-ns3',
'/ns-included1/test3/inner/+%5C$*/',
),
]
for name, args, kwargs, current_app, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs, current_app=current_app):
self.assertEqual(reverse(name, args=args, kwargs=kwargs, current_app=current_app), expected)
def test_current_app_no_partial_match(self):
"""current_app shouldn't be used unless it matches the whole path."""
test_urls = [
('inc-ns1:testapp:urlobject-view', [], {}, 'nonexistent:test-ns3', '/ns-included1/test4/inner/'),
(
'inc-ns1:testapp:urlobject-view', [37, 42], {}, 'nonexistent:test-ns3',
'/ns-included1/test4/inner/37/42/',
),
(
'inc-ns1:testapp:urlobject-view', [], {'arg1': 42, 'arg2': 37}, 'nonexistent:test-ns3',
'/ns-included1/test4/inner/42/37/',
),
(
'inc-ns1:testapp:urlobject-special-view', [], {}, 'nonexistent:test-ns3',
'/ns-included1/test4/inner/+%5C$*/',
),
]
for name, args, kwargs, current_app, expected in test_urls:
with self.subTest(name=name, args=args, kwargs=kwargs, current_app=current_app):
self.assertEqual(reverse(name, args=args, kwargs=kwargs, current_app=current_app), expected)
@override_settings(ROOT_URLCONF=urlconf_outer.__name__)
class RequestURLconfTests(SimpleTestCase):
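# The middleware classes referenced below set (or set to None) request.urlconf,
# which the resolver then uses for that request instead of ROOT_URLCONF.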
def test_urlconf(self):
response = self.client.get('/test/me/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'outer:/test/me/,inner:/inner_urlconf/second_test/')
response = self.client.get('/inner_urlconf/second_test/')
self.assertEqual(response.status_code, 200)
response = self.client.get('/second_test/')
self.assertEqual(response.status_code, 404)
@override_settings(
MIDDLEWARE=[
'%s.ChangeURLconfMiddleware' % middleware.__name__,
]
)
def test_urlconf_overridden(self):
response = self.client.get('/test/me/')
self.assertEqual(response.status_code, 404)
response = self.client.get('/inner_urlconf/second_test/')
self.assertEqual(response.status_code, 404)
response = self.client.get('/second_test/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'outer:,inner:/second_test/')
@override_settings(
MIDDLEWARE=[
'%s.NullChangeURLconfMiddleware' % middleware.__name__,
]
)
def test_urlconf_overridden_with_null(self):
"""
Overriding request.urlconf with None will fall back to the default
URLconf.
"""
response = self.client.get('/test/me/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'outer:/test/me/,inner:/inner_urlconf/second_test/')
response = self.client.get('/inner_urlconf/second_test/')
self.assertEqual(response.status_code, 200)
response = self.client.get('/second_test/')
self.assertEqual(response.status_code, 404)
@override_settings(
MIDDLEWARE=[
'%s.ChangeURLconfMiddleware' % middleware.__name__,
'%s.ReverseInnerInResponseMiddleware' % middleware.__name__,
]
)
def test_reverse_inner_in_response_middleware(self):
"""
Test reversing a URL from the *overridden* URLconf from inside
a response middleware.
"""
response = self.client.get('/second_test/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'/second_test/')
@override_settings(
MIDDLEWARE=[
'%s.ChangeURLconfMiddleware' % middleware.__name__,
'%s.ReverseOuterInResponseMiddleware' % middleware.__name__,
]
)
def test_reverse_outer_in_response_middleware(self):
"""
Test reversing a URL from the *default* URLconf from inside
a response middleware.
"""
message = "Reverse for 'outer' not found."
with self.assertRaisesMessage(NoReverseMatch, message):
self.client.get('/second_test/')
@override_settings(
MIDDLEWARE=[
'%s.ChangeURLconfMiddleware' % middleware.__name__,
'%s.ReverseInnerInStreaming' % middleware.__name__,
]
)
def test_reverse_inner_in_streaming(self):
"""
Test reversing a URL from the *overridden* URLconf from inside
a streaming response.
"""
response = self.client.get('/second_test/')
self.assertEqual(response.status_code, 200)
self.assertEqual(b''.join(response), b'/second_test/')
@override_settings(
MIDDLEWARE=[
'%s.ChangeURLconfMiddleware' % middleware.__name__,
'%s.ReverseOuterInStreaming' % middleware.__name__,
]
)
def test_reverse_outer_in_streaming(self):
"""
Test reversing a URL from the *default* URLconf from inside
a streaming response.
"""
message = "Reverse for 'outer' not found."
with self.assertRaisesMessage(NoReverseMatch, message):
self.client.get('/second_test/')
b''.join(self.client.get('/second_test/'))
def test_urlconf_is_reset_after_request(self):
"""The URLconf is reset after each request."""
self.assertIsNone(get_urlconf())
with override_settings(MIDDLEWARE=['%s.ChangeURLconfMiddleware' % middleware.__name__]):
self.client.get(reverse('inner'))
self.assertIsNone(get_urlconf())
class ErrorHandlerResolutionTests(SimpleTestCase):
"""Tests for handler400, handler404 and handler500"""
def setUp(self):
urlconf = 'urlpatterns_reverse.urls_error_handlers'
urlconf_callables = 'urlpatterns_reverse.urls_error_handlers_callables'
self.resolver = URLResolver(RegexPattern(r'^$'), urlconf)
self.callable_resolver = URLResolver(RegexPattern(r'^$'), urlconf_callables)
def test_named_handlers(self):
handler = (empty_view, {})
for code in [400, 404, 500]:
with self.subTest(code=code):
self.assertEqual(self.resolver.resolve_error_handler(code), handler)
def test_callable_handlers(self):
handler = (empty_view, {})
for code in [400, 404, 500]:
with self.subTest(code=code):
self.assertEqual(self.callable_resolver.resolve_error_handler(code), handler)
@override_settings(ROOT_URLCONF='urlpatterns_reverse.urls_without_handlers')
class DefaultErrorHandlerTests(SimpleTestCase):
def test_default_handler(self):
"If the urls.py doesn't specify handlers, the defaults are used"
response = self.client.get('/test/')
self.assertEqual(response.status_code, 404)
with self.assertRaisesMessage(ValueError, "I don't think I'm getting good"):
self.client.get('/bad_view/')
@override_settings(ROOT_URLCONF=None)
class NoRootUrlConfTests(SimpleTestCase):
"""Tests for handler404 and handler500 if ROOT_URLCONF is None"""
def test_no_handler_exception(self):
msg = (
"The included URLconf 'None' does not appear to have any patterns "
"in it. If you see valid patterns in the file then the issue is "
"probably caused by a circular import."
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
self.client.get('/test/me/')
@override_settings(ROOT_URLCONF='urlpatterns_reverse.namespace_urls')
class ResolverMatchTests(SimpleTestCase):
def test_urlpattern_resolve(self):
for path_, url_name, app_name, namespace, view_name, func, args, kwargs in resolve_test_data:
with self.subTest(path=path_):
# Legacy support for extracting "function, args, kwargs".
match_func, match_args, match_kwargs = resolve(path_)
self.assertEqual(match_func, func)
self.assertEqual(match_args, args)
self.assertEqual(match_kwargs, kwargs)
# ResolverMatch capabilities.
match = resolve(path_)
self.assertEqual(match.__class__, ResolverMatch)
self.assertEqual(match.url_name, url_name)
self.assertEqual(match.app_name, app_name)
self.assertEqual(match.namespace, namespace)
self.assertEqual(match.view_name, view_name)
self.assertEqual(match.func, func)
self.assertEqual(match.args, args)
self.assertEqual(match.kwargs, kwargs)
# and for legacy purposes:
self.assertEqual(match[0], func)
self.assertEqual(match[1], args)
self.assertEqual(match[2], kwargs)
def test_resolver_match_on_request(self):
response = self.client.get('/resolver_match/')
resolver_match = response.resolver_match
self.assertEqual(resolver_match.url_name, 'test-resolver-match')
def test_resolver_match_on_request_before_resolution(self):
request = HttpRequest()
self.assertIsNone(request.resolver_match)
def test_repr(self):
self.assertEqual(
repr(resolve('/no_kwargs/42/37/')),
"ResolverMatch(func=urlpatterns_reverse.views.empty_view, "
"args=('42', '37'), kwargs={}, url_name=no-kwargs, app_names=[], "
"namespaces=[], route=^no_kwargs/([0-9]+)/([0-9]+)/$)",
)
@override_settings(ROOT_URLCONF='urlpatterns_reverse.erroneous_urls')
class ErroneousViewTests(SimpleTestCase):
def test_noncallable_view(self):
# View is not a callable (explicit import; arbitrary Python object)
with self.assertRaisesMessage(TypeError, 'view must be a callable'):
path('uncallable-object/', views.uncallable)
def test_invalid_regex(self):
# Regex contains an error (refs #6170)
msg = '(regex_error/$" is not a valid regular expression'
with self.assertRaisesMessage(ImproperlyConfigured, msg):
reverse(views.empty_view)
class ViewLoadingTests(SimpleTestCase):
def test_view_loading(self):
self.assertEqual(get_callable('urlpatterns_reverse.views.empty_view'), empty_view)
self.assertEqual(get_callable(empty_view), empty_view)
def test_view_does_not_exist(self):
msg = "View does not exist in module urlpatterns_reverse.views."
with self.assertRaisesMessage(ViewDoesNotExist, msg):
get_callable('urlpatterns_reverse.views.i_should_not_exist')
def test_attributeerror_not_hidden(self):
msg = 'I am here to confuse django.urls.get_callable'
with self.assertRaisesMessage(AttributeError, msg):
get_callable('urlpatterns_reverse.views_broken.i_am_broken')
def test_non_string_value(self):
msg = "'1' is not a callable or a dot-notation path"
with self.assertRaisesMessage(ViewDoesNotExist, msg):
get_callable(1)
def test_string_without_dot(self):
msg = "Could not import 'test'. The path must be fully qualified."
with self.assertRaisesMessage(ImportError, msg):
get_callable('test')
def test_module_does_not_exist(self):
with self.assertRaisesMessage(ImportError, "No module named 'foo'"):
get_callable('foo.bar')
def test_parent_module_does_not_exist(self):
msg = 'Parent module urlpatterns_reverse.foo does not exist.'
with self.assertRaisesMessage(ViewDoesNotExist, msg):
get_callable('urlpatterns_reverse.foo.bar')
def test_not_callable(self):
msg = (
"Could not import 'urlpatterns_reverse.tests.resolve_test_data'. "
"View is not callable."
)
with self.assertRaisesMessage(ViewDoesNotExist, msg):
get_callable('urlpatterns_reverse.tests.resolve_test_data')
class IncludeTests(SimpleTestCase):
url_patterns = [
path('inner/', views.empty_view, name='urlobject-view'),
re_path(r'^inner/(?P<arg1>[0-9]+)/(?P<arg2>[0-9]+)/$', views.empty_view, name='urlobject-view'),
re_path(r'^inner/\+\\\$\*/$', views.empty_view, name='urlobject-special-view'),
]
app_urls = URLObject('inc-app')
def test_include_urls(self):
self.assertEqual(include(self.url_patterns), (self.url_patterns, None, None))
def test_include_namespace(self):
msg = (
'Specifying a namespace in include() without providing an '
'app_name is not supported.'
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
include(self.url_patterns, 'namespace')
def test_include_4_tuple(self):
msg = 'Passing a 4-tuple to include() is not supported.'
with self.assertRaisesMessage(ImproperlyConfigured, msg):
include((self.url_patterns, 'app_name', 'namespace', 'blah'))
def test_include_3_tuple(self):
msg = 'Passing a 3-tuple to include() is not supported.'
with self.assertRaisesMessage(ImproperlyConfigured, msg):
include((self.url_patterns, 'app_name', 'namespace'))
def test_include_3_tuple_namespace(self):
msg = 'Cannot override the namespace for a dynamic module that provides a namespace.'
with self.assertRaisesMessage(ImproperlyConfigured, msg):
include((self.url_patterns, 'app_name', 'namespace'), 'namespace')
def test_include_2_tuple(self):
self.assertEqual(
include((self.url_patterns, 'app_name')),
(self.url_patterns, 'app_name', 'app_name')
)
def test_include_2_tuple_namespace(self):
self.assertEqual(
include((self.url_patterns, 'app_name'), namespace='namespace'),
(self.url_patterns, 'app_name', 'namespace')
)
def test_include_app_name(self):
self.assertEqual(
include(self.app_urls),
(self.app_urls, 'inc-app', 'inc-app')
)
def test_include_app_name_namespace(self):
self.assertEqual(
include(self.app_urls, 'namespace'),
(self.app_urls, 'inc-app', 'namespace')
)
@override_settings(ROOT_URLCONF='urlpatterns_reverse.urls')
class LookaheadTests(SimpleTestCase):
def test_valid_resolve(self):
test_urls = [
'/lookahead-/a-city/',
'/lookbehind-/a-city/',
'/lookahead+/a-city/',
'/lookbehind+/a-city/',
]
for test_url in test_urls:
with self.subTest(url=test_url):
self.assertEqual(resolve(test_url).kwargs, {'city': 'a-city'})
def test_invalid_resolve(self):
test_urls = [
'/lookahead-/not-a-city/',
'/lookbehind-/not-a-city/',
'/lookahead+/other-city/',
'/lookbehind+/other-city/',
]
for test_url in test_urls:
with self.subTest(url=test_url):
with self.assertRaises(Resolver404):
resolve(test_url)
def test_valid_reverse(self):
test_urls = [
('lookahead-positive', {'city': 'a-city'}, '/lookahead+/a-city/'),
('lookahead-negative', {'city': 'a-city'}, '/lookahead-/a-city/'),
('lookbehind-positive', {'city': 'a-city'}, '/lookbehind+/a-city/'),
('lookbehind-negative', {'city': 'a-city'}, '/lookbehind-/a-city/'),
]
for name, kwargs, expected in test_urls:
with self.subTest(name=name, kwargs=kwargs):
self.assertEqual(reverse(name, kwargs=kwargs), expected)
def test_invalid_reverse(self):
test_urls = [
('lookahead-positive', {'city': 'other-city'}),
('lookahead-negative', {'city': 'not-a-city'}),
('lookbehind-positive', {'city': 'other-city'}),
('lookbehind-negative', {'city': 'not-a-city'}),
]
for name, kwargs in test_urls:
with self.subTest(name=name, kwargs=kwargs):
with self.assertRaises(NoReverseMatch):
reverse(name, kwargs=kwargs)
|
e3600224a9ca64e7e201895c25934f25c9195588e26914a7aa7af017bea56e73 | import codecs
import datetime
import os
import shutil
import tempfile
import unittest
from io import StringIO
from unittest import mock
from admin_scripts.tests import AdminScriptTestCase
from django.conf import settings
from django.contrib.staticfiles import storage
from django.contrib.staticfiles.management.commands import (
collectstatic, runserver,
)
from django.core.exceptions import ImproperlyConfigured
from django.core.management import CommandError, call_command
from django.test import RequestFactory, override_settings
from django.test.utils import extend_sys_path
from django.utils import timezone
from django.utils._os import symlinks_supported
from django.utils.functional import empty
from .cases import CollectionTestCase, StaticFilesTestCase, TestDefaults
from .settings import TEST_ROOT, TEST_SETTINGS
from .storage import DummyStorage
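# Mixin asserting that collectstatic created no files in STATIC_ROOT; reused
# by the dry-run and non-local-storage test cases below.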
class TestNoFilesCreated:
def test_no_files_created(self):
"""
Make sure no files were created in the destination directory.
"""
self.assertEqual(os.listdir(settings.STATIC_ROOT), [])
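# Tests for the static files handler returned by the runserver management
# command.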
class TestRunserver(StaticFilesTestCase):
@override_settings(MIDDLEWARE=['django.middleware.common.CommonMiddleware'])
def test_middleware_loaded_only_once(self):
command = runserver.Command()
with mock.patch('django.middleware.common.CommonMiddleware') as mocked:
command.get_handler(use_static_handler=True, insecure_serving=True)
self.assertEqual(mocked.call_count, 1)
def test_404_response(self):
command = runserver.Command()
handler = command.get_handler(use_static_handler=True, insecure_serving=True)
missing_static_file = os.path.join(settings.STATIC_URL, 'unknown.css')
req = RequestFactory().get(missing_static_file)
with override_settings(DEBUG=False):
response = handler.get_response(req)
self.assertEqual(response.status_code, 404)
with override_settings(DEBUG=True):
response = handler.get_response(req)
self.assertEqual(response.status_code, 404)
class TestFindStatic(TestDefaults, CollectionTestCase):
"""
Test ``findstatic`` management command.
"""
def _get_file(self, filepath):
path = call_command('findstatic', filepath, all=False, verbosity=0, stdout=StringIO())
with codecs.open(path, "r", "utf-8") as f:
return f.read()
def test_all_files(self):
"""
findstatic returns all candidate files if run without --first and with -v1.
"""
result = call_command('findstatic', 'test/file.txt', verbosity=1, stdout=StringIO())
lines = [l.strip() for l in result.split('\n')]
self.assertEqual(len(lines), 3) # three because there is also the "Found <file> here" line
self.assertIn('project', lines[1])
self.assertIn('apps', lines[2])
def test_all_files_less_verbose(self):
"""
findstatic returns all candidate files if run without --first and with -v0.
"""
result = call_command('findstatic', 'test/file.txt', verbosity=0, stdout=StringIO())
lines = [l.strip() for l in result.split('\n')]
self.assertEqual(len(lines), 2)
self.assertIn('project', lines[0])
self.assertIn('apps', lines[1])
def test_all_files_more_verbose(self):
"""
findstatic returns all candidate files if run without --first and with -v2.
Also, test that findstatic returns the searched locations with -v2.
"""
result = call_command('findstatic', 'test/file.txt', verbosity=2, stdout=StringIO())
lines = [l.strip() for l in result.split('\n')]
self.assertIn('project', lines[1])
self.assertIn('apps', lines[2])
self.assertIn("Looking in the following locations:", lines[3])
searched_locations = ', '.join(lines[4:])
# AppDirectoriesFinder searched locations
self.assertIn(os.path.join('staticfiles_tests', 'apps', 'test', 'static'), searched_locations)
self.assertIn(os.path.join('staticfiles_tests', 'apps', 'no_label', 'static'), searched_locations)
# FileSystemFinder searched locations
self.assertIn(TEST_SETTINGS['STATICFILES_DIRS'][1][1], searched_locations)
self.assertIn(TEST_SETTINGS['STATICFILES_DIRS'][0], searched_locations)
# DefaultStorageFinder searched locations
self.assertIn(
os.path.join('staticfiles_tests', 'project', 'site_media', 'media'),
searched_locations
)
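# Checks for collectstatic configuration handling: STATIC_ROOT validation and
# local vs. remote storage detection.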
class TestConfiguration(StaticFilesTestCase):
def test_location_empty(self):
msg = 'without having set the STATIC_ROOT setting to a filesystem path'
err = StringIO()
for root in ['', None]:
with override_settings(STATIC_ROOT=root):
with self.assertRaisesMessage(ImproperlyConfigured, msg):
call_command('collectstatic', interactive=False, verbosity=0, stderr=err)
def test_local_storage_detection_helper(self):
staticfiles_storage = storage.staticfiles_storage
try:
storage.staticfiles_storage._wrapped = empty
with self.settings(STATICFILES_STORAGE='django.contrib.staticfiles.storage.StaticFilesStorage'):
command = collectstatic.Command()
self.assertTrue(command.is_local_storage())
storage.staticfiles_storage._wrapped = empty
with self.settings(STATICFILES_STORAGE='staticfiles_tests.storage.DummyStorage'):
command = collectstatic.Command()
self.assertFalse(command.is_local_storage())
collectstatic.staticfiles_storage = storage.FileSystemStorage()
command = collectstatic.Command()
self.assertTrue(command.is_local_storage())
collectstatic.staticfiles_storage = DummyStorage()
command = collectstatic.Command()
self.assertFalse(command.is_local_storage())
finally:
staticfiles_storage._wrapped = empty
collectstatic.staticfiles_storage = staticfiles_storage
storage.staticfiles_storage = staticfiles_storage
class TestCollectionHelpSubcommand(AdminScriptTestCase):
@override_settings(STATIC_ROOT=None)
def test_missing_settings_dont_prevent_help(self):
"""
Even if the STATIC_ROOT setting is not set, one can still call the
`manage.py help collectstatic` command.
"""
self.write_settings('settings.py', apps=['django.contrib.staticfiles'])
out, err = self.run_manage(['help', 'collectstatic'])
self.assertNoOutput(err)
class TestCollection(TestDefaults, CollectionTestCase):
"""
Test ``collectstatic`` management command.
"""
def test_ignore(self):
"""
-i patterns are ignored.
"""
self.assertFileNotFound('test/test.ignoreme')
def test_common_ignore_patterns(self):
"""
Common ignore patterns (*~, .*, CVS) are ignored.
"""
self.assertFileNotFound('test/.hidden')
self.assertFileNotFound('test/backup~')
self.assertFileNotFound('test/CVS')
class TestCollectionVerbosity(CollectionTestCase):
copying_msg = 'Copying '
run_collectstatic_in_setUp = False
post_process_msg = 'Post-processed'
staticfiles_copied_msg = 'static files copied to'
def test_verbosity_0(self):
stdout = StringIO()
self.run_collectstatic(verbosity=0, stdout=stdout)
self.assertEqual(stdout.getvalue(), '')
def test_verbosity_1(self):
stdout = StringIO()
self.run_collectstatic(verbosity=1, stdout=stdout)
output = stdout.getvalue()
self.assertIn(self.staticfiles_copied_msg, output)
self.assertNotIn(self.copying_msg, output)
def test_verbosity_2(self):
stdout = StringIO()
self.run_collectstatic(verbosity=2, stdout=stdout)
output = stdout.getvalue()
self.assertIn(self.staticfiles_copied_msg, output)
self.assertIn(self.copying_msg, output)
@override_settings(STATICFILES_STORAGE='django.contrib.staticfiles.storage.ManifestStaticFilesStorage')
def test_verbosity_1_with_post_process(self):
stdout = StringIO()
self.run_collectstatic(verbosity=1, stdout=stdout, post_process=True)
self.assertNotIn(self.post_process_msg, stdout.getvalue())
@override_settings(STATICFILES_STORAGE='django.contrib.staticfiles.storage.ManifestStaticFilesStorage')
def test_verbosity_2_with_post_process(self):
stdout = StringIO()
self.run_collectstatic(verbosity=2, stdout=stdout, post_process=True)
self.assertIn(self.post_process_msg, stdout.getvalue())
class TestCollectionClear(CollectionTestCase):
"""
Test the ``--clear`` option of the ``collectstatic`` management command.
"""
def run_collectstatic(self, **kwargs):
clear_filepath = os.path.join(settings.STATIC_ROOT, 'cleared.txt')
with open(clear_filepath, 'w') as f:
f.write('should be cleared')
super().run_collectstatic(clear=True)
def test_cleared_not_found(self):
self.assertFileNotFound('cleared.txt')
def test_dir_not_exists(self, **kwargs):
shutil.rmtree(settings.STATIC_ROOT)
super().run_collectstatic(clear=True)
@override_settings(STATICFILES_STORAGE='staticfiles_tests.storage.PathNotImplementedStorage')
def test_handle_path_notimplemented(self):
self.run_collectstatic()
self.assertFileNotFound('cleared.txt')
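# Prompts and warnings emitted by collectstatic when run interactively.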
class TestInteractiveMessages(CollectionTestCase):
overwrite_warning_msg = "This will overwrite existing files!"
delete_warning_msg = "This will DELETE ALL FILES in this location!"
files_copied_msg = "static files copied"
@staticmethod
def mock_input(stdout):
def _input(msg):
stdout.write(msg)
return 'yes'
return _input
def test_warning_when_clearing_staticdir(self):
stdout = StringIO()
self.run_collectstatic()
with mock.patch('builtins.input', side_effect=self.mock_input(stdout)):
call_command('collectstatic', interactive=True, clear=True, stdout=stdout)
output = stdout.getvalue()
self.assertNotIn(self.overwrite_warning_msg, output)
self.assertIn(self.delete_warning_msg, output)
def test_warning_when_overwriting_files_in_staticdir(self):
stdout = StringIO()
self.run_collectstatic()
with mock.patch('builtins.input', side_effect=self.mock_input(stdout)):
call_command('collectstatic', interactive=True, stdout=stdout)
output = stdout.getvalue()
self.assertIn(self.overwrite_warning_msg, output)
self.assertNotIn(self.delete_warning_msg, output)
def test_no_warning_when_staticdir_does_not_exist(self):
stdout = StringIO()
shutil.rmtree(settings.STATIC_ROOT)
call_command('collectstatic', interactive=True, stdout=stdout)
output = stdout.getvalue()
self.assertNotIn(self.overwrite_warning_msg, output)
self.assertNotIn(self.delete_warning_msg, output)
self.assertIn(self.files_copied_msg, output)
def test_no_warning_for_empty_staticdir(self):
stdout = StringIO()
with tempfile.TemporaryDirectory(prefix='collectstatic_empty_staticdir_test') as static_dir:
with override_settings(STATIC_ROOT=static_dir):
call_command('collectstatic', interactive=True, stdout=stdout)
output = stdout.getvalue()
self.assertNotIn(self.overwrite_warning_msg, output)
self.assertNotIn(self.delete_warning_msg, output)
self.assertIn(self.files_copied_msg, output)
def test_cancelled(self):
self.run_collectstatic()
with mock.patch('builtins.input', side_effect=lambda _: 'no'):
with self.assertRaisesMessage(CommandError, 'Collecting static files cancelled'):
call_command('collectstatic', interactive=True)
class TestCollectionExcludeNoDefaultIgnore(TestDefaults, CollectionTestCase):
"""
Test ``--exclude-dirs`` and ``--no-default-ignore`` options of the
``collectstatic`` management command.
"""
def run_collectstatic(self):
super().run_collectstatic(use_default_ignore_patterns=False)
def test_no_common_ignore_patterns(self):
"""
With --no-default-ignore, common ignore patterns (*~, .*, CVS)
are not ignored.
"""
self.assertFileContains('test/.hidden', 'should be ignored')
self.assertFileContains('test/backup~', 'should be ignored')
self.assertFileContains('test/CVS', 'should be ignored')
@override_settings(INSTALLED_APPS=[
'staticfiles_tests.apps.staticfiles_config.IgnorePatternsAppConfig',
'staticfiles_tests.apps.test',
])
class TestCollectionCustomIgnorePatterns(CollectionTestCase):
def test_custom_ignore_patterns(self):
"""
A custom ignore_patterns list, ['*.css', '*/vendor/*.js'] in this case,
can be specified in an AppConfig definition.
"""
self.assertFileNotFound('test/nonascii.css')
self.assertFileContains('test/.hidden', 'should be ignored')
self.assertFileNotFound(os.path.join('test', 'vendor', 'module.js'))
class TestCollectionDryRun(TestNoFilesCreated, CollectionTestCase):
"""
Test ``--dry-run`` option for ``collectstatic`` management command.
"""
def run_collectstatic(self):
super().run_collectstatic(dry_run=True)
class TestCollectionFilesOverride(CollectionTestCase):
"""
Test overriding of duplicated files by the ``collectstatic`` management command.
Check for proper handling of app order in INSTALLED_APPS even if file
modification dates are in a different order:
'staticfiles_test_app',
'staticfiles_tests.apps.no_label',
"""
def setUp(self):
self.temp_dir = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, self.temp_dir)
# get modification and access times for no_label/static/file2.txt
self.orig_path = os.path.join(TEST_ROOT, 'apps', 'no_label', 'static', 'file2.txt')
self.orig_mtime = os.path.getmtime(self.orig_path)
self.orig_atime = os.path.getatime(self.orig_path)
# Prepare a duplicate of file2.txt in a temporary app. This file has a
# modification time older than no_label/static/file2.txt, but it should still
# be collected to STATIC_ROOT because the temporary app comes before the
# 'no_label' app in INSTALLED_APPS.
self.temp_app_path = os.path.join(self.temp_dir, 'staticfiles_test_app')
self.testfile_path = os.path.join(self.temp_app_path, 'static', 'file2.txt')
os.makedirs(self.temp_app_path)
with open(os.path.join(self.temp_app_path, '__init__.py'), 'w+'):
pass
os.makedirs(os.path.dirname(self.testfile_path))
with open(self.testfile_path, 'w+') as f:
f.write('duplicate of file2.txt')
os.utime(self.testfile_path, (self.orig_atime - 1, self.orig_mtime - 1))
self.settings_with_test_app = self.modify_settings(
INSTALLED_APPS={'prepend': 'staticfiles_test_app'},
)
with extend_sys_path(self.temp_dir):
self.settings_with_test_app.enable()
super().setUp()
def tearDown(self):
super().tearDown()
self.settings_with_test_app.disable()
def test_ordering_override(self):
"""
Test that collectstatic collects files in the proper order.
"""
self.assertFileContains('file2.txt', 'duplicate of file2.txt')
# run collectstatic again
self.run_collectstatic()
self.assertFileContains('file2.txt', 'duplicate of file2.txt')
# The collectstatic test suite already has conflicting files since both
# project/test/file.txt and apps/test/static/test/file.txt are collected. To
# properly test that the warning only appears when a conflict actually exists,
# remove the project directory here and add a conflicting file back later.
@override_settings(STATICFILES_DIRS=[])
class TestCollectionOverwriteWarning(CollectionTestCase):
"""
Test warning in ``collectstatic`` output when a file is skipped because a
previous file was already written to the same path.
"""
# If this string is in the collectstatic output, it means the warning we're
# looking for was emitted.
warning_string = 'Found another file'
def _collectstatic_output(self, **kwargs):
"""
Run collectstatic and capture and return its output. The command must run
at the highest verbosity, which is why e.g.
BaseCollectionTestCase.run_collectstatic() can't simply be called.
"""
out = StringIO()
call_command('collectstatic', interactive=False, verbosity=3, stdout=out, **kwargs)
return out.getvalue()
def test_no_warning(self):
"""
There isn't a warning if there isn't a duplicate destination.
"""
output = self._collectstatic_output(clear=True)
self.assertNotIn(self.warning_string, output)
def test_warning(self):
"""
There is a warning when there are duplicate destinations.
"""
with tempfile.TemporaryDirectory() as static_dir:
duplicate = os.path.join(static_dir, 'test', 'file.txt')
os.mkdir(os.path.dirname(duplicate))
with open(duplicate, 'w+') as f:
f.write('duplicate of file.txt')
with self.settings(STATICFILES_DIRS=[static_dir]):
output = self._collectstatic_output(clear=True)
self.assertIn(self.warning_string, output)
os.remove(duplicate)
# Make sure the warning went away again.
with self.settings(STATICFILES_DIRS=[static_dir]):
output = self._collectstatic_output(clear=True)
self.assertNotIn(self.warning_string, output)
@override_settings(STATICFILES_STORAGE='staticfiles_tests.storage.DummyStorage')
class TestCollectionNonLocalStorage(TestNoFilesCreated, CollectionTestCase):
"""
Tests for a Storage that implements get_modified_time() but not path()
(#15035).
"""
def test_storage_properties(self):
# Properties of the Storage as described in the ticket.
storage = DummyStorage()
self.assertEqual(storage.get_modified_time('name'), datetime.datetime(1970, 1, 1, tzinfo=timezone.utc))
with self.assertRaisesMessage(NotImplementedError, "This backend doesn't support absolute paths."):
storage.path('name')
class TestCollectionNeverCopyStorage(CollectionTestCase):
@override_settings(STATICFILES_STORAGE='staticfiles_tests.storage.NeverCopyRemoteStorage')
def test_skips_newer_files_in_remote_storage(self):
"""
collectstatic skips newer files in a remote storage.
run_collectstatic() in setUp() copies the static files, then files are
always skipped after NeverCopyRemoteStorage is activated since
NeverCopyRemoteStorage.get_modified_time() returns a datetime in the
future to simulate an unmodified file.
"""
stdout = StringIO()
self.run_collectstatic(stdout=stdout, verbosity=2)
output = stdout.getvalue()
self.assertIn("Skipping 'test.txt' (not modified)", output)
@unittest.skipUnless(symlinks_supported(), "Must be able to symlink to run this test.")
class TestCollectionLinks(TestDefaults, CollectionTestCase):
"""
Test ``--link`` option for ``collectstatic`` management command.
Note that by inheriting ``TestDefaults`` we repeat all
the standard file resolving tests here, to make sure using
``--link`` does not change the file-selection semantics.
"""
def run_collectstatic(self, clear=False, link=True, **kwargs):
super().run_collectstatic(link=link, clear=clear, **kwargs)
def test_links_created(self):
"""
With ``--link``, symbolic links are created.
"""
self.assertTrue(os.path.islink(os.path.join(settings.STATIC_ROOT, 'test.txt')))
def test_broken_symlink(self):
"""
Test broken symlink gets deleted.
"""
path = os.path.join(settings.STATIC_ROOT, 'test.txt')
os.unlink(path)
self.run_collectstatic()
self.assertTrue(os.path.islink(path))
def test_symlinks_and_files_replaced(self):
"""
Running collectstatic in non-symlink mode replaces symlinks with files,
while symlink mode replaces files with symlinks.
"""
path = os.path.join(settings.STATIC_ROOT, 'test.txt')
self.assertTrue(os.path.islink(path))
self.run_collectstatic(link=False)
self.assertFalse(os.path.islink(path))
self.run_collectstatic(link=True)
self.assertTrue(os.path.islink(path))
def test_clear_broken_symlink(self):
"""
With ``--clear``, broken symbolic links are deleted.
"""
nonexistent_file_path = os.path.join(settings.STATIC_ROOT, 'nonexistent.txt')
broken_symlink_path = os.path.join(settings.STATIC_ROOT, 'symlink.txt')
os.symlink(nonexistent_file_path, broken_symlink_path)
self.run_collectstatic(clear=True)
self.assertFalse(os.path.lexists(broken_symlink_path))
@override_settings(STATICFILES_STORAGE='staticfiles_tests.storage.PathNotImplementedStorage')
def test_no_remote_link(self):
with self.assertRaisesMessage(CommandError, "Can't symlink to a remote destination."):
self.run_collectstatic()
|
3e342e01bc4b6997d62dfbe5f949eeedb926730d2c5a4e954c118518fd245b34 | import os
import shutil
import sys
import tempfile
import unittest
from io import StringIO
from django.conf import settings
from django.contrib.staticfiles import finders, storage
from django.contrib.staticfiles.management.commands.collectstatic import (
Command as CollectstaticCommand,
)
from django.core.cache.backends.base import BaseCache
from django.core.management import call_command
from django.test import SimpleTestCase, ignore_warnings, override_settings
from django.utils.deprecation import RemovedInDjango31Warning
from .cases import CollectionTestCase
from .settings import TEST_ROOT
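# Helper shared by the storage test classes below: render the {% static %}
# template snippet for ``path`` and strip STATIC_URL to get the
# storage-relative (hashed) file name.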
def hashed_file_path(test, path):
fullpath = test.render_template(test.static_template_snippet(path))
return fullpath.replace(settings.STATIC_URL, '')
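# Mixin with assertions shared by the hashed-name storage test cases below
# (cached and manifest storages).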
class TestHashedFiles:
hashed_file_path = hashed_file_path
def setUp(self):
self._max_post_process_passes = storage.staticfiles_storage.max_post_process_passes
super().setUp()
def tearDown(self):
# Clear hashed files to avoid side effects among tests.
storage.staticfiles_storage.hashed_files.clear()
storage.staticfiles_storage.max_post_process_passes = self._max_post_process_passes
def assertPostCondition(self):
"""
Assert post conditions for a test are met. Must be manually called at
the end of each test.
"""
pass
def test_template_tag_return(self):
self.assertStaticRaises(ValueError, "does/not/exist.png", "/static/does/not/exist.png")
self.assertStaticRenders("test/file.txt", "/static/test/file.dad0999e4f8f.txt")
self.assertStaticRenders("test/file.txt", "/static/test/file.dad0999e4f8f.txt", asvar=True)
self.assertStaticRenders("cached/styles.css", "/static/cached/styles.5e0040571e1a.css")
self.assertStaticRenders("path/", "/static/path/")
self.assertStaticRenders("path/?query", "/static/path/?query")
self.assertPostCondition()
def test_template_tag_simple_content(self):
relpath = self.hashed_file_path("cached/styles.css")
self.assertEqual(relpath, "cached/styles.5e0040571e1a.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b"cached/other.css", content)
self.assertIn(b"other.d41d8cd98f00.css", content)
self.assertPostCondition()
def test_path_ignored_completely(self):
relpath = self.hashed_file_path("cached/css/ignored.css")
self.assertEqual(relpath, "cached/css/ignored.554da52152af.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertIn(b'#foobar', content)
self.assertIn(b'http:foobar', content)
self.assertIn(b'https:foobar', content)
self.assertIn(b'data:foobar', content)
self.assertIn(b'chrome:foobar', content)
self.assertIn(b'//foobar', content)
self.assertPostCondition()
def test_path_with_querystring(self):
relpath = self.hashed_file_path("cached/styles.css?spam=eggs")
self.assertEqual(relpath, "cached/styles.5e0040571e1a.css?spam=eggs")
with storage.staticfiles_storage.open("cached/styles.5e0040571e1a.css") as relfile:
content = relfile.read()
self.assertNotIn(b"cached/other.css", content)
self.assertIn(b"other.d41d8cd98f00.css", content)
self.assertPostCondition()
def test_path_with_fragment(self):
relpath = self.hashed_file_path("cached/styles.css#eggs")
self.assertEqual(relpath, "cached/styles.5e0040571e1a.css#eggs")
with storage.staticfiles_storage.open("cached/styles.5e0040571e1a.css") as relfile:
content = relfile.read()
self.assertNotIn(b"cached/other.css", content)
self.assertIn(b"other.d41d8cd98f00.css", content)
self.assertPostCondition()
def test_path_with_querystring_and_fragment(self):
relpath = self.hashed_file_path("cached/css/fragments.css")
self.assertEqual(relpath, "cached/css/fragments.a60c0e74834f.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertIn(b'fonts/font.b9b105392eb8.eot?#iefix', content)
self.assertIn(b'fonts/font.b8d603e42714.svg#webfontIyfZbseF', content)
self.assertIn(b'fonts/font.b8d603e42714.svg#path/to/../../fonts/font.svg', content)
self.assertIn(b'data:font/woff;charset=utf-8;base64,d09GRgABAAAAADJoAA0AAAAAR2QAAQAAAAAAAAAAAAA', content)
self.assertIn(b'#default#VML', content)
self.assertPostCondition()
def test_template_tag_absolute(self):
relpath = self.hashed_file_path("cached/absolute.css")
self.assertEqual(relpath, "cached/absolute.eb04def9f9a4.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b"/static/cached/styles.css", content)
self.assertIn(b"/static/cached/styles.5e0040571e1a.css", content)
self.assertNotIn(b"/static/styles_root.css", content)
self.assertIn(b"/static/styles_root.401f2509a628.css", content)
self.assertIn(b'/static/cached/img/relative.acae32e4532b.png', content)
self.assertPostCondition()
def test_template_tag_absolute_root(self):
"""
Like test_template_tag_absolute, but for a file in STATIC_ROOT (#26249).
"""
relpath = self.hashed_file_path("absolute_root.css")
self.assertEqual(relpath, "absolute_root.f821df1b64f7.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b"/static/styles_root.css", content)
self.assertIn(b"/static/styles_root.401f2509a628.css", content)
self.assertPostCondition()
def test_template_tag_relative(self):
relpath = self.hashed_file_path("cached/relative.css")
self.assertEqual(relpath, "cached/relative.c3e9e1ea6f2e.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b"../cached/styles.css", content)
self.assertNotIn(b'@import "styles.css"', content)
self.assertNotIn(b'url(img/relative.png)', content)
self.assertIn(b'url("img/relative.acae32e4532b.png")', content)
self.assertIn(b"../cached/styles.5e0040571e1a.css", content)
self.assertPostCondition()
def test_import_replacement(self):
"See #18050"
relpath = self.hashed_file_path("cached/import.css")
self.assertEqual(relpath, "cached/import.f53576679e5a.css")
with storage.staticfiles_storage.open(relpath) as relfile:
self.assertIn(b"""import url("styles.5e0040571e1a.css")""", relfile.read())
self.assertPostCondition()
def test_template_tag_deep_relative(self):
relpath = self.hashed_file_path("cached/css/window.css")
self.assertEqual(relpath, "cached/css/window.5d5c10836967.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b'url(img/window.png)', content)
self.assertIn(b'url("img/window.acae32e4532b.png")', content)
self.assertPostCondition()
def test_template_tag_url(self):
relpath = self.hashed_file_path("cached/url.css")
self.assertEqual(relpath, "cached/url.902310b73412.css")
with storage.staticfiles_storage.open(relpath) as relfile:
self.assertIn(b"https://", relfile.read())
self.assertPostCondition()
@override_settings(
STATICFILES_DIRS=[os.path.join(TEST_ROOT, 'project', 'loop')],
STATICFILES_FINDERS=['django.contrib.staticfiles.finders.FileSystemFinder'],
)
def test_import_loop(self):
finders.get_finder.cache_clear()
err = StringIO()
with self.assertRaisesMessage(RuntimeError, 'Max post-process passes exceeded'):
call_command('collectstatic', interactive=False, verbosity=0, stderr=err)
self.assertEqual("Post-processing 'All' failed!\n\n", err.getvalue())
self.assertPostCondition()
def test_post_processing(self):
"""
post_processing behaves correctly.
Files that are alterable should always be post-processed; files that
aren't should be skipped.
collectstatic has already been called once in setUp() for this test case,
so the behavior is verified on a second run.
"""
collectstatic_args = {
'interactive': False,
'verbosity': 0,
'link': False,
'clear': False,
'dry_run': False,
'post_process': True,
'use_default_ignore_patterns': True,
'ignore_patterns': ['*.ignoreme'],
}
collectstatic_cmd = CollectstaticCommand()
collectstatic_cmd.set_options(**collectstatic_args)
stats = collectstatic_cmd.collect()
self.assertIn(os.path.join('cached', 'css', 'window.css'), stats['post_processed'])
self.assertIn(os.path.join('cached', 'css', 'img', 'window.png'), stats['unmodified'])
self.assertIn(os.path.join('test', 'nonascii.css'), stats['post_processed'])
self.assertPostCondition()
def test_css_import_case_insensitive(self):
relpath = self.hashed_file_path("cached/styles_insensitive.css")
self.assertEqual(relpath, "cached/styles_insensitive.3fa427592a53.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b"cached/other.css", content)
self.assertIn(b"other.d41d8cd98f00.css", content)
self.assertPostCondition()
@override_settings(
STATICFILES_DIRS=[os.path.join(TEST_ROOT, 'project', 'faulty')],
STATICFILES_FINDERS=['django.contrib.staticfiles.finders.FileSystemFinder'],
)
def test_post_processing_failure(self):
"""
post_processing indicates the origin of the error when it fails.
"""
finders.get_finder.cache_clear()
err = StringIO()
with self.assertRaises(Exception):
call_command('collectstatic', interactive=False, verbosity=0, stderr=err)
self.assertEqual("Post-processing 'faulty.css' failed!\n\n", err.getvalue())
self.assertPostCondition()
@ignore_warnings(category=RemovedInDjango31Warning)
@override_settings(
STATICFILES_STORAGE='django.contrib.staticfiles.storage.CachedStaticFilesStorage',
)
class TestCollectionCachedStorage(TestHashedFiles, CollectionTestCase):
"""
Tests for the Cache busting storage
"""
def test_cache_invalidation(self):
name = "cached/styles.css"
hashed_name = "cached/styles.5e0040571e1a.css"
# Check that the cache is filled as expected.
cache_key = storage.staticfiles_storage.hash_key(name)
cached_name = storage.staticfiles_storage.hashed_files.get(cache_key)
self.assertEqual(self.hashed_file_path(name), cached_name)
# Clear the cache to make sure it is re-set correctly by the url() method.
storage.staticfiles_storage.hashed_files.clear()
cached_name = storage.staticfiles_storage.hashed_files.get(cache_key)
self.assertIsNone(cached_name)
self.assertEqual(self.hashed_file_path(name), hashed_name)
cached_name = storage.staticfiles_storage.hashed_files.get(cache_key)
self.assertEqual(cached_name, hashed_name)
# Check files that had to be hashed multiple times since their content
# includes other files that were hashed.
name = 'cached/relative.css'
hashed_name = 'cached/relative.c3e9e1ea6f2e.css'
cache_key = storage.staticfiles_storage.hash_key(name)
cached_name = storage.staticfiles_storage.hashed_files.get(cache_key)
self.assertIsNone(cached_name)
self.assertEqual(self.hashed_file_path(name), hashed_name)
cached_name = storage.staticfiles_storage.hashed_files.get(cache_key)
self.assertEqual(cached_name, hashed_name)
def test_cache_key_memcache_validation(self):
"""
Handle cache key creation correctly, see #17861.
"""
name = (
"/some crazy/long filename/ with spaces Here and ?#%#$/other/stuff"
"/some crazy/long filename/ with spaces Here and ?#%#$/other/stuff"
"/some crazy/long filename/ with spaces Here and ?#%#$/other/stuff"
"/some crazy/long filename/ with spaces Here and ?#%#$/other/stuff"
"/some crazy/long filename/ with spaces Here and ?#%#$/other/stuff"
"/some crazy/\x16\xb4"
)
cache_key = storage.staticfiles_storage.hash_key(name)
cache_validator = BaseCache({})
cache_validator.validate_key(cache_key)
self.assertEqual(cache_key, 'staticfiles:821ea71ef36f95b3922a77f7364670e7')
def test_corrupt_intermediate_files(self):
configured_storage = storage.staticfiles_storage
# Clear cache to force rehashing of the files
configured_storage.hashed_files.clear()
# Simulate a corrupt chain of intermediate files by ensuring they don't
# resolve before the max post-process count, which would normally be
# high enough.
configured_storage.max_post_process_passes = 1
# File without intermediates that can be rehashed without a problem.
self.hashed_file_path('cached/css/img/window.png')
# File with too many intermediates to rehash with the low max
# post-process passes.
err_msg = "The name 'cached/styles.css' could not be hashed with %r." % (configured_storage._wrapped,)
with self.assertRaisesMessage(ValueError, err_msg):
self.hashed_file_path('cached/styles.css')
class TestCachedStaticFilesStorageDeprecation(SimpleTestCase):
def test_warning(self):
from django.contrib.staticfiles.storage import CachedStaticFilesStorage
from django.utils.deprecation import RemovedInDjango31Warning
msg = (
'CachedStaticFilesStorage is deprecated in favor of '
'ManifestStaticFilesStorage.'
)
with self.assertRaisesMessage(RemovedInDjango31Warning, msg):
CachedStaticFilesStorage()
@override_settings(STATICFILES_STORAGE='staticfiles_tests.storage.ExtraPatternsStorage')
class TestExtraPatternsStorage(CollectionTestCase):
def setUp(self):
storage.staticfiles_storage.hashed_files.clear() # avoid cache interference
super().setUp()
def cached_file_path(self, path):
fullpath = self.render_template(self.static_template_snippet(path))
return fullpath.replace(settings.STATIC_URL, '')
def test_multi_extension_patterns(self):
"""
With storage classes having several file extension patterns, only the
files matching a specific file pattern should be affected by the
substitution (#19670).
"""
# CSS files shouldn't be touched by JS patterns.
relpath = self.cached_file_path("cached/import.css")
self.assertEqual(relpath, "cached/import.f53576679e5a.css")
with storage.staticfiles_storage.open(relpath) as relfile:
self.assertIn(b'import url("styles.5e0040571e1a.css")', relfile.read())
# Confirm JS patterns have been applied to JS files.
relpath = self.cached_file_path("cached/test.js")
self.assertEqual(relpath, "cached/test.388d7a790d46.js")
with storage.staticfiles_storage.open(relpath) as relfile:
self.assertIn(b'JS_URL("import.f53576679e5a.css")', relfile.read())
@override_settings(
STATICFILES_STORAGE='django.contrib.staticfiles.storage.ManifestStaticFilesStorage',
)
class TestCollectionManifestStorage(TestHashedFiles, CollectionTestCase):
"""
Tests for the Cache busting storage
"""
def setUp(self):
super().setUp()
temp_dir = tempfile.mkdtemp()
os.makedirs(os.path.join(temp_dir, 'test'))
self._clear_filename = os.path.join(temp_dir, 'test', 'cleared.txt')
with open(self._clear_filename, 'w') as f:
f.write('to be deleted in one test')
self.patched_settings = self.settings(
STATICFILES_DIRS=settings.STATICFILES_DIRS + [temp_dir],
)
self.patched_settings.enable()
self.addCleanup(shutil.rmtree, temp_dir)
self._manifest_strict = storage.staticfiles_storage.manifest_strict
def tearDown(self):
self.patched_settings.disable()
if os.path.exists(self._clear_filename):
os.unlink(self._clear_filename)
storage.staticfiles_storage.manifest_strict = self._manifest_strict
super().tearDown()
def assertPostCondition(self):
hashed_files = storage.staticfiles_storage.hashed_files
# The in-memory version of the manifest matches the one on disk
# since a properly created manifest should cover all filenames.
if hashed_files:
manifest = storage.staticfiles_storage.load_manifest()
self.assertEqual(hashed_files, manifest)
def test_manifest_exists(self):
filename = storage.staticfiles_storage.manifest_name
path = storage.staticfiles_storage.path(filename)
self.assertTrue(os.path.exists(path))
def test_loaded_cache(self):
self.assertNotEqual(storage.staticfiles_storage.hashed_files, {})
manifest_content = storage.staticfiles_storage.read_manifest()
self.assertIn(
'"version": "%s"' % storage.staticfiles_storage.manifest_version,
manifest_content
)
def test_parse_cache(self):
hashed_files = storage.staticfiles_storage.hashed_files
manifest = storage.staticfiles_storage.load_manifest()
self.assertEqual(hashed_files, manifest)
def test_clear_empties_manifest(self):
cleared_file_name = storage.staticfiles_storage.clean_name(os.path.join('test', 'cleared.txt'))
# collect the additional file
self.run_collectstatic()
hashed_files = storage.staticfiles_storage.hashed_files
self.assertIn(cleared_file_name, hashed_files)
manifest_content = storage.staticfiles_storage.load_manifest()
self.assertIn(cleared_file_name, manifest_content)
original_path = storage.staticfiles_storage.path(cleared_file_name)
self.assertTrue(os.path.exists(original_path))
# Delete the original file from the app, then collect with clear.
os.unlink(self._clear_filename)
self.run_collectstatic(clear=True)
self.assertFileNotFound(original_path)
hashed_files = storage.staticfiles_storage.hashed_files
self.assertNotIn(cleared_file_name, hashed_files)
manifest_content = storage.staticfiles_storage.load_manifest()
self.assertNotIn(cleared_file_name, manifest_content)
def test_missing_entry(self):
missing_file_name = 'cached/missing.css'
configured_storage = storage.staticfiles_storage
self.assertNotIn(missing_file_name, configured_storage.hashed_files)
# File name not found in manifest
with self.assertRaisesMessage(ValueError, "Missing staticfiles manifest entry for '%s'" % missing_file_name):
self.hashed_file_path(missing_file_name)
configured_storage.manifest_strict = False
# File doesn't exist on disk
err_msg = "The file '%s' could not be found with %r." % (missing_file_name, configured_storage._wrapped)
with self.assertRaisesMessage(ValueError, err_msg):
self.hashed_file_path(missing_file_name)
content = StringIO()
content.write('Found')
configured_storage.save(missing_file_name, content)
# File exists on disk
self.hashed_file_path(missing_file_name)
@override_settings(STATICFILES_STORAGE='staticfiles_tests.storage.SimpleStorage')
class TestCollectionSimpleStorage(CollectionTestCase):
hashed_file_path = hashed_file_path
def setUp(self):
storage.staticfiles_storage.hashed_files.clear() # avoid cache interference
super().setUp()
def test_template_tag_return(self):
self.assertStaticRaises(ValueError, "does/not/exist.png", "/static/does/not/exist.png")
self.assertStaticRenders("test/file.txt", "/static/test/file.deploy12345.txt")
self.assertStaticRenders("cached/styles.css", "/static/cached/styles.deploy12345.css")
self.assertStaticRenders("path/", "/static/path/")
self.assertStaticRenders("path/?query", "/static/path/?query")
def test_template_tag_simple_content(self):
relpath = self.hashed_file_path("cached/styles.css")
self.assertEqual(relpath, "cached/styles.deploy12345.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertNotIn(b"cached/other.css", content)
self.assertIn(b"other.deploy12345.css", content)
class CustomStaticFilesStorage(storage.StaticFilesStorage):
"""
Used in TestStaticFilePermissions
"""
def __init__(self, *args, **kwargs):
kwargs['file_permissions_mode'] = 0o640
kwargs['directory_permissions_mode'] = 0o740
super().__init__(*args, **kwargs)
@unittest.skipIf(sys.platform.startswith('win'), "Windows only partially supports chmod.")
class TestStaticFilePermissions(CollectionTestCase):
command_params = {
'interactive': False,
'verbosity': 0,
'ignore_patterns': ['*.ignoreme'],
}
def setUp(self):
self.umask = 0o027
self.old_umask = os.umask(self.umask)
super().setUp()
def tearDown(self):
os.umask(self.old_umask)
super().tearDown()
# Don't run collectstatic command in this test class.
def run_collectstatic(self, **kwargs):
pass
@override_settings(
FILE_UPLOAD_PERMISSIONS=0o655,
FILE_UPLOAD_DIRECTORY_PERMISSIONS=0o765,
)
def test_collect_static_files_permissions(self):
call_command('collectstatic', **self.command_params)
test_file = os.path.join(settings.STATIC_ROOT, "test.txt")
test_dir = os.path.join(settings.STATIC_ROOT, "subdir")
file_mode = os.stat(test_file)[0] & 0o777
dir_mode = os.stat(test_dir)[0] & 0o777
self.assertEqual(file_mode, 0o655)
self.assertEqual(dir_mode, 0o765)
@override_settings(
FILE_UPLOAD_PERMISSIONS=None,
FILE_UPLOAD_DIRECTORY_PERMISSIONS=None,
)
def test_collect_static_files_default_permissions(self):
call_command('collectstatic', **self.command_params)
test_file = os.path.join(settings.STATIC_ROOT, "test.txt")
test_dir = os.path.join(settings.STATIC_ROOT, "subdir")
file_mode = os.stat(test_file)[0] & 0o777
dir_mode = os.stat(test_dir)[0] & 0o777
self.assertEqual(file_mode, 0o666 & ~self.umask)
self.assertEqual(dir_mode, 0o777 & ~self.umask)
@override_settings(
FILE_UPLOAD_PERMISSIONS=0o655,
FILE_UPLOAD_DIRECTORY_PERMISSIONS=0o765,
STATICFILES_STORAGE='staticfiles_tests.test_storage.CustomStaticFilesStorage',
)
def test_collect_static_files_subclass_of_static_storage(self):
call_command('collectstatic', **self.command_params)
test_file = os.path.join(settings.STATIC_ROOT, "test.txt")
test_dir = os.path.join(settings.STATIC_ROOT, "subdir")
file_mode = os.stat(test_file)[0] & 0o777
dir_mode = os.stat(test_dir)[0] & 0o777
self.assertEqual(file_mode, 0o640)
self.assertEqual(dir_mode, 0o740)
@override_settings(
STATICFILES_STORAGE='django.contrib.staticfiles.storage.ManifestStaticFilesStorage',
)
class TestCollectionHashedFilesCache(CollectionTestCase):
"""
Files referenced from CSS use the correct final hashed name regardless of
the order in which the files are post-processed.
"""
hashed_file_path = hashed_file_path
def setUp(self):
super().setUp()
self._temp_dir = temp_dir = tempfile.mkdtemp()
os.makedirs(os.path.join(temp_dir, 'test'))
self.addCleanup(shutil.rmtree, temp_dir)
def _get_filename_path(self, filename):
return os.path.join(self._temp_dir, 'test', filename)
def test_file_change_after_collectstatic(self):
# Create initial static files.
file_contents = (
('foo.png', 'foo'),
('bar.css', 'url("foo.png")\nurl("xyz.png")'),
('xyz.png', 'xyz'),
)
for filename, content in file_contents:
with open(self._get_filename_path(filename), 'w') as f:
f.write(content)
with self.modify_settings(STATICFILES_DIRS={'append': self._temp_dir}):
finders.get_finder.cache_clear()
err = StringIO()
# First collectstatic run.
call_command('collectstatic', interactive=False, verbosity=0, stderr=err)
relpath = self.hashed_file_path('test/bar.css')
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertIn(b'foo.acbd18db4cc2.png', content)
self.assertIn(b'xyz.d16fb36f0911.png', content)
# Change the contents of the png files.
for filename in ('foo.png', 'xyz.png'):
with open(self._get_filename_path(filename), 'w+b') as f:
f.write(b"new content of file to change its hash")
# The hashes of the png files in the CSS file are updated after
# a second collectstatic.
call_command('collectstatic', interactive=False, verbosity=0, stderr=err)
relpath = self.hashed_file_path('test/bar.css')
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertIn(b'foo.57a5cb9ba68d.png', content)
self.assertIn(b'xyz.57a5cb9ba68d.png', content)
|
6e705b38ca27b2897c3e201430602ef4dd43b9a7bc708e0ae358c6cc0dd3dd57 | import io
import os
import sys
import tempfile
from unittest import skipIf
from django.core.files.base import ContentFile
from django.http import FileResponse
from django.test import SimpleTestCase
class FileResponseTests(SimpleTestCase):
def test_file_from_disk_response(self):
response = FileResponse(open(__file__, 'rb'))
self.assertEqual(response['Content-Length'], str(os.path.getsize(__file__)))
self.assertIn(response['Content-Type'], ['text/x-python', 'text/plain'])
response.close()
def test_file_from_buffer_response(self):
response = FileResponse(io.BytesIO(b'binary content'))
self.assertEqual(response['Content-Length'], '14')
self.assertEqual(response['Content-Type'], 'application/octet-stream')
self.assertEqual(list(response), [b'binary content'])
@skipIf(sys.platform == 'win32', "Named pipes are Unix-only.")
def test_file_from_named_pipe_response(self):
with tempfile.TemporaryDirectory() as temp_dir:
pipe_file = os.path.join(temp_dir, 'named_pipe')
os.mkfifo(pipe_file)
pipe_for_read = os.open(pipe_file, os.O_RDONLY | os.O_NONBLOCK)
with open(pipe_file, 'wb') as pipe_for_write:
pipe_for_write.write(b'binary content')
response = FileResponse(os.fdopen(pipe_for_read, mode='rb'))
self.assertEqual(list(response), [b'binary content'])
response.close()
self.assertFalse(response.has_header('Content-Length'))
def test_file_from_disk_as_attachment(self):
response = FileResponse(open(__file__, 'rb'), as_attachment=True)
self.assertEqual(response['Content-Length'], str(os.path.getsize(__file__)))
self.assertIn(response['Content-Type'], ['text/x-python', 'text/plain'])
self.assertEqual(response['Content-Disposition'], 'attachment; filename="test_fileresponse.py"')
response.close()
def test_compressed_response(self):
"""
If compressed responses are served with the uncompressed Content-Type
and a compression Content-Encoding, browsers might automatically
uncompress the file, which is most probably not wanted.
"""
test_tuples = (
('.tar.gz', 'application/gzip'),
('.tar.bz2', 'application/x-bzip'),
('.tar.xz', 'application/x-xz'),
)
for extension, mimetype in test_tuples:
with self.subTest(ext=extension):
with tempfile.NamedTemporaryFile(suffix=extension) as tmp:
response = FileResponse(tmp)
self.assertEqual(response['Content-Type'], mimetype)
self.assertFalse(response.has_header('Content-Encoding'))
def test_unicode_attachment(self):
response = FileResponse(
ContentFile(b'binary content', name="祝您平安.odt"), as_attachment=True,
content_type='application/vnd.oasis.opendocument.text',
)
self.assertEqual(response['Content-Type'], 'application/vnd.oasis.opendocument.text')
self.assertEqual(
response['Content-Disposition'],
"attachment; filename*=utf-8''%E7%A5%9D%E6%82%A8%E5%B9%B3%E5%AE%89.odt"
)
|
cb0d7e8e66e6df728204a2f6594096dd08dba65c722719fdfa972ab40c47a274 | from django.contrib.contenttypes.models import ContentType, ContentTypeManager
from django.db import models
from django.test import TestCase, override_settings
from django.test.utils import isolate_apps
from .models import Author, ConcreteModel, FooWithUrl, ProxyModel
class ContentTypesTests(TestCase):
def setUp(self):
ContentType.objects.clear_cache()
def tearDown(self):
ContentType.objects.clear_cache()
def test_lookup_cache(self):
"""
The content type cache (see ContentTypeManager) works correctly.
Lookups for a particular content type -- by model, ID, or natural key
-- should hit the database only on the first lookup.
"""
# At this point, a lookup for a ContentType should hit the DB
with self.assertNumQueries(1):
ContentType.objects.get_for_model(ContentType)
# A second hit, though, won't hit the DB, nor will a lookup by ID
# or natural key
with self.assertNumQueries(0):
ct = ContentType.objects.get_for_model(ContentType)
with self.assertNumQueries(0):
ContentType.objects.get_for_id(ct.id)
with self.assertNumQueries(0):
ContentType.objects.get_by_natural_key('contenttypes', 'contenttype')
# Once we clear the cache, another lookup will again hit the DB
ContentType.objects.clear_cache()
with self.assertNumQueries(1):
ContentType.objects.get_for_model(ContentType)
# The same should happen with a lookup by natural key
ContentType.objects.clear_cache()
with self.assertNumQueries(1):
ContentType.objects.get_by_natural_key('contenttypes', 'contenttype')
# And a second hit shouldn't hit the DB
with self.assertNumQueries(0):
ContentType.objects.get_by_natural_key('contenttypes', 'contenttype')
def test_get_for_models_creation(self):
ContentType.objects.all().delete()
with self.assertNumQueries(4):
cts = ContentType.objects.get_for_models(ContentType, FooWithUrl, ProxyModel, ConcreteModel)
self.assertEqual(cts, {
ContentType: ContentType.objects.get_for_model(ContentType),
FooWithUrl: ContentType.objects.get_for_model(FooWithUrl),
ProxyModel: ContentType.objects.get_for_model(ProxyModel),
ConcreteModel: ContentType.objects.get_for_model(ConcreteModel),
})
def test_get_for_models_empty_cache(self):
# Empty cache.
with self.assertNumQueries(1):
cts = ContentType.objects.get_for_models(ContentType, FooWithUrl, ProxyModel, ConcreteModel)
self.assertEqual(cts, {
ContentType: ContentType.objects.get_for_model(ContentType),
FooWithUrl: ContentType.objects.get_for_model(FooWithUrl),
ProxyModel: ContentType.objects.get_for_model(ProxyModel),
ConcreteModel: ContentType.objects.get_for_model(ConcreteModel),
})
def test_get_for_models_partial_cache(self):
# Partial cache
ContentType.objects.get_for_model(ContentType)
with self.assertNumQueries(1):
cts = ContentType.objects.get_for_models(ContentType, FooWithUrl)
self.assertEqual(cts, {
ContentType: ContentType.objects.get_for_model(ContentType),
FooWithUrl: ContentType.objects.get_for_model(FooWithUrl),
})
def test_get_for_models_full_cache(self):
# Full cache
ContentType.objects.get_for_model(ContentType)
ContentType.objects.get_for_model(FooWithUrl)
with self.assertNumQueries(0):
cts = ContentType.objects.get_for_models(ContentType, FooWithUrl)
self.assertEqual(cts, {
ContentType: ContentType.objects.get_for_model(ContentType),
FooWithUrl: ContentType.objects.get_for_model(FooWithUrl),
})
@isolate_apps('contenttypes_tests')
def test_get_for_model_create_contenttype(self):
"""
ContentTypeManager.get_for_model() creates the corresponding content
type if it doesn't exist in the database.
"""
class ModelCreatedOnTheFly(models.Model):
name = models.CharField()
ct = ContentType.objects.get_for_model(ModelCreatedOnTheFly)
self.assertEqual(ct.app_label, 'contenttypes_tests')
self.assertEqual(ct.model, 'modelcreatedonthefly')
self.assertEqual(str(ct), 'modelcreatedonthefly')
def test_get_for_concrete_model(self):
"""
Make sure the `for_concrete_model` kwarg correctly works
with concrete, proxy and deferred models
"""
concrete_model_ct = ContentType.objects.get_for_model(ConcreteModel)
self.assertEqual(concrete_model_ct, ContentType.objects.get_for_model(ProxyModel))
self.assertEqual(concrete_model_ct, ContentType.objects.get_for_model(ConcreteModel, for_concrete_model=False))
proxy_model_ct = ContentType.objects.get_for_model(ProxyModel, for_concrete_model=False)
self.assertNotEqual(concrete_model_ct, proxy_model_ct)
        # Make sure deferred models are correctly handled
ConcreteModel.objects.create(name="Concrete")
DeferredConcreteModel = ConcreteModel.objects.only('pk').get().__class__
DeferredProxyModel = ProxyModel.objects.only('pk').get().__class__
self.assertEqual(concrete_model_ct, ContentType.objects.get_for_model(DeferredConcreteModel))
self.assertEqual(
concrete_model_ct,
ContentType.objects.get_for_model(DeferredConcreteModel, for_concrete_model=False)
)
self.assertEqual(concrete_model_ct, ContentType.objects.get_for_model(DeferredProxyModel))
self.assertEqual(
proxy_model_ct,
ContentType.objects.get_for_model(DeferredProxyModel, for_concrete_model=False)
)
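    # Summary of the behaviour checked above (descriptive only, not Django's
    # code): proxy and deferred classes share the ContentType of their concrete
    # model unless for_concrete_model=False is passed, in which case a proxy
    # gets its own ContentType row while a deferred class still resolves to the
    # model it was deferred from.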
def test_get_for_concrete_models(self):
"""
Make sure the `for_concrete_models` kwarg correctly works
with concrete, proxy and deferred models.
"""
concrete_model_ct = ContentType.objects.get_for_model(ConcreteModel)
cts = ContentType.objects.get_for_models(ConcreteModel, ProxyModel)
self.assertEqual(cts, {
ConcreteModel: concrete_model_ct,
ProxyModel: concrete_model_ct,
})
proxy_model_ct = ContentType.objects.get_for_model(ProxyModel, for_concrete_model=False)
cts = ContentType.objects.get_for_models(ConcreteModel, ProxyModel, for_concrete_models=False)
self.assertEqual(cts, {
ConcreteModel: concrete_model_ct,
ProxyModel: proxy_model_ct,
})
        # Make sure deferred models are correctly handled
ConcreteModel.objects.create(name="Concrete")
DeferredConcreteModel = ConcreteModel.objects.only('pk').get().__class__
DeferredProxyModel = ProxyModel.objects.only('pk').get().__class__
cts = ContentType.objects.get_for_models(DeferredConcreteModel, DeferredProxyModel)
self.assertEqual(cts, {
DeferredConcreteModel: concrete_model_ct,
DeferredProxyModel: concrete_model_ct,
})
cts = ContentType.objects.get_for_models(
DeferredConcreteModel, DeferredProxyModel, for_concrete_models=False
)
self.assertEqual(cts, {
DeferredConcreteModel: concrete_model_ct,
DeferredProxyModel: proxy_model_ct,
})
def test_cache_not_shared_between_managers(self):
with self.assertNumQueries(1):
ContentType.objects.get_for_model(ContentType)
with self.assertNumQueries(0):
ContentType.objects.get_for_model(ContentType)
other_manager = ContentTypeManager()
other_manager.model = ContentType
with self.assertNumQueries(1):
other_manager.get_for_model(ContentType)
with self.assertNumQueries(0):
other_manager.get_for_model(ContentType)
def test_missing_model(self):
"""
Displaying content types in admin (or anywhere) doesn't break on
leftover content type records in the DB for which no model is defined
anymore.
"""
ct = ContentType.objects.create(
app_label='contenttypes',
model='OldModel',
)
self.assertEqual(str(ct), 'OldModel')
self.assertIsNone(ct.model_class())
# Stale ContentTypes can be fetched like any other object.
ct_fetched = ContentType.objects.get_for_id(ct.pk)
self.assertIsNone(ct_fetched.model_class())
def test_str(self):
ct = ContentType.objects.get(app_label='contenttypes_tests', model='site')
self.assertEqual(str(ct), 'site')
class TestRouter:
def db_for_read(self, model, **hints):
return 'other'
def db_for_write(self, model, **hints):
return 'default'
@override_settings(DATABASE_ROUTERS=[TestRouter()])
class ContentTypesMultidbTests(TestCase):
databases = {'default', 'other'}
def test_multidb(self):
"""
When using multiple databases, ContentType.objects.get_for_model() uses
db_for_read().
"""
ContentType.objects.clear_cache()
with self.assertNumQueries(0, using='default'), self.assertNumQueries(1, using='other'):
ContentType.objects.get_for_model(Author)
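# The 'other' alias used by TestRouter has to exist in settings.DATABASES for
# these tests to run; a minimal sketch of such a configuration (the values here
# are placeholders, not the settings this suite actually uses):
#
#     DATABASES = {
#         'default': {'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'default.sqlite3'},
#         'other': {'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'other.sqlite3'},
#     }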
|
2fdba58fa7ecd358cbf37926c91f8820c788fb9b787ebcb48483d682ac92987c | """
Regression tests for the Test Client, especially the customized assertions.
"""
import itertools
import os
from django.contrib.auth.models import User
from django.contrib.auth.signals import user_logged_in, user_logged_out
from django.http import HttpResponse
from django.template import (
Context, RequestContext, TemplateSyntaxError, engines,
)
from django.template.response import SimpleTemplateResponse
from django.test import (
Client, SimpleTestCase, TestCase, modify_settings, override_settings,
)
from django.test.client import RedirectCycleError, RequestFactory, encode_file
from django.test.utils import ContextList
from django.urls import NoReverseMatch, reverse
from django.utils.translation import gettext_lazy
from .models import CustomUser
from .views import CustomTestException
class TestDataMixin:
@classmethod
def setUpTestData(cls):
cls.u1 = User.objects.create_user(username='testclient', password='password')
cls.staff = User.objects.create_user(username='staff', password='password', is_staff=True)
@override_settings(ROOT_URLCONF='test_client_regress.urls')
class AssertContainsTests(SimpleTestCase):
def test_contains(self):
"Responses can be inspected for content, including counting repeated substrings"
response = self.client.get('/no_template_view/')
self.assertNotContains(response, 'never')
self.assertContains(response, 'never', 0)
self.assertContains(response, 'once')
self.assertContains(response, 'once', 1)
self.assertContains(response, 'twice')
self.assertContains(response, 'twice', 2)
try:
self.assertContains(response, 'text', status_code=999)
except AssertionError as e:
self.assertIn("Couldn't retrieve content: Response code was 200 (expected 999)", str(e))
try:
self.assertContains(response, 'text', status_code=999, msg_prefix='abc')
except AssertionError as e:
self.assertIn("abc: Couldn't retrieve content: Response code was 200 (expected 999)", str(e))
try:
self.assertNotContains(response, 'text', status_code=999)
except AssertionError as e:
self.assertIn("Couldn't retrieve content: Response code was 200 (expected 999)", str(e))
try:
self.assertNotContains(response, 'text', status_code=999, msg_prefix='abc')
except AssertionError as e:
self.assertIn("abc: Couldn't retrieve content: Response code was 200 (expected 999)", str(e))
try:
self.assertNotContains(response, 'once')
except AssertionError as e:
self.assertIn("Response should not contain 'once'", str(e))
try:
self.assertNotContains(response, 'once', msg_prefix='abc')
except AssertionError as e:
self.assertIn("abc: Response should not contain 'once'", str(e))
try:
self.assertContains(response, 'never', 1)
except AssertionError as e:
self.assertIn("Found 0 instances of 'never' in response (expected 1)", str(e))
try:
self.assertContains(response, 'never', 1, msg_prefix='abc')
except AssertionError as e:
self.assertIn("abc: Found 0 instances of 'never' in response (expected 1)", str(e))
try:
self.assertContains(response, 'once', 0)
except AssertionError as e:
self.assertIn("Found 1 instances of 'once' in response (expected 0)", str(e))
try:
self.assertContains(response, 'once', 0, msg_prefix='abc')
except AssertionError as e:
self.assertIn("abc: Found 1 instances of 'once' in response (expected 0)", str(e))
try:
self.assertContains(response, 'once', 2)
except AssertionError as e:
self.assertIn("Found 1 instances of 'once' in response (expected 2)", str(e))
try:
self.assertContains(response, 'once', 2, msg_prefix='abc')
except AssertionError as e:
self.assertIn("abc: Found 1 instances of 'once' in response (expected 2)", str(e))
try:
self.assertContains(response, 'twice', 1)
except AssertionError as e:
self.assertIn("Found 2 instances of 'twice' in response (expected 1)", str(e))
try:
self.assertContains(response, 'twice', 1, msg_prefix='abc')
except AssertionError as e:
self.assertIn("abc: Found 2 instances of 'twice' in response (expected 1)", str(e))
try:
self.assertContains(response, 'thrice')
except AssertionError as e:
self.assertIn("Couldn't find 'thrice' in response", str(e))
try:
self.assertContains(response, 'thrice', msg_prefix='abc')
except AssertionError as e:
self.assertIn("abc: Couldn't find 'thrice' in response", str(e))
try:
self.assertContains(response, 'thrice', 3)
except AssertionError as e:
self.assertIn("Found 0 instances of 'thrice' in response (expected 3)", str(e))
try:
self.assertContains(response, 'thrice', 3, msg_prefix='abc')
except AssertionError as e:
self.assertIn("abc: Found 0 instances of 'thrice' in response (expected 3)", str(e))
def test_unicode_contains(self):
"Unicode characters can be found in template context"
# Regression test for #10183
r = self.client.get('/check_unicode/')
self.assertContains(r, 'さかき')
self.assertContains(r, b'\xe5\xb3\xa0'.decode())
def test_unicode_not_contains(self):
"Unicode characters can be searched for, and not found in template context"
# Regression test for #10183
r = self.client.get('/check_unicode/')
self.assertNotContains(r, 'はたけ')
self.assertNotContains(r, b'\xe3\x81\xaf\xe3\x81\x9f\xe3\x81\x91'.decode())
def test_binary_contains(self):
r = self.client.get('/check_binary/')
self.assertContains(r, b'%PDF-1.4\r\n%\x93\x8c\x8b\x9e')
with self.assertRaises(AssertionError):
self.assertContains(r, b'%PDF-1.4\r\n%\x93\x8c\x8b\x9e', count=2)
def test_binary_not_contains(self):
r = self.client.get('/check_binary/')
self.assertNotContains(r, b'%ODF-1.4\r\n%\x93\x8c\x8b\x9e')
with self.assertRaises(AssertionError):
self.assertNotContains(r, b'%PDF-1.4\r\n%\x93\x8c\x8b\x9e')
def test_nontext_contains(self):
r = self.client.get('/no_template_view/')
self.assertContains(r, gettext_lazy('once'))
def test_nontext_not_contains(self):
r = self.client.get('/no_template_view/')
self.assertNotContains(r, gettext_lazy('never'))
def test_assert_contains_renders_template_response(self):
"""
An unrendered SimpleTemplateResponse may be used in assertContains().
"""
template = engines['django'].from_string('Hello')
response = SimpleTemplateResponse(template)
self.assertContains(response, 'Hello')
def test_assert_contains_using_non_template_response(self):
""" auto-rendering does not affect responses that aren't
instances (or subclasses) of SimpleTemplateResponse.
Refs #15826.
"""
response = HttpResponse('Hello')
self.assertContains(response, 'Hello')
def test_assert_not_contains_renders_template_response(self):
"""
An unrendered SimpleTemplateResponse may be used in assertNotContains().
"""
template = engines['django'].from_string('Hello')
response = SimpleTemplateResponse(template)
self.assertNotContains(response, 'Bye')
def test_assert_not_contains_using_non_template_response(self):
"""
auto-rendering does not affect responses that aren't instances (or
subclasses) of SimpleTemplateResponse.
"""
response = HttpResponse('Hello')
self.assertNotContains(response, 'Bye')
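    # What assertContains() checks, in rough terms (a paraphrase, not the
    # actual implementation): the response is rendered first if it is an
    # unrendered SimpleTemplateResponse, the status code is compared against
    # status_code (200 by default), and then the number of occurrences of the
    # text in response.content is compared against count when one is given, or
    # asserted to be non-zero otherwise.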
@override_settings(ROOT_URLCONF='test_client_regress.urls')
class AssertTemplateUsedTests(TestDataMixin, TestCase):
def test_no_context(self):
"Template usage assertions work then templates aren't in use"
response = self.client.get('/no_template_view/')
# The no template case doesn't mess with the template assertions
self.assertTemplateNotUsed(response, 'GET Template')
try:
self.assertTemplateUsed(response, 'GET Template')
except AssertionError as e:
self.assertIn("No templates used to render the response", str(e))
try:
self.assertTemplateUsed(response, 'GET Template', msg_prefix='abc')
except AssertionError as e:
self.assertIn("abc: No templates used to render the response", str(e))
with self.assertRaises(AssertionError) as context:
self.assertTemplateUsed(response, 'GET Template', count=2)
self.assertIn(
"No templates used to render the response",
str(context.exception))
def test_single_context(self):
"Template assertions work when there is a single context"
response = self.client.get('/post_view/', {})
try:
self.assertTemplateNotUsed(response, 'Empty GET Template')
except AssertionError as e:
self.assertIn("Template 'Empty GET Template' was used unexpectedly in rendering the response", str(e))
try:
self.assertTemplateNotUsed(response, 'Empty GET Template', msg_prefix='abc')
except AssertionError as e:
self.assertIn("abc: Template 'Empty GET Template' was used unexpectedly in rendering the response", str(e))
try:
self.assertTemplateUsed(response, 'Empty POST Template')
except AssertionError as e:
self.assertIn(
"Template 'Empty POST Template' was not a template used to "
"render the response. Actual template(s) used: Empty GET Template",
str(e)
)
try:
self.assertTemplateUsed(response, 'Empty POST Template', msg_prefix='abc')
except AssertionError as e:
self.assertIn(
"abc: Template 'Empty POST Template' was not a template used "
"to render the response. Actual template(s) used: Empty GET Template",
str(e)
)
with self.assertRaises(AssertionError) as context:
self.assertTemplateUsed(response, 'Empty GET Template', count=2)
self.assertIn(
"Template 'Empty GET Template' was expected to be rendered 2 "
"time(s) but was actually rendered 1 time(s).",
str(context.exception))
with self.assertRaises(AssertionError) as context:
self.assertTemplateUsed(
response, 'Empty GET Template', msg_prefix='abc', count=2)
self.assertIn(
"abc: Template 'Empty GET Template' was expected to be rendered 2 "
"time(s) but was actually rendered 1 time(s).",
str(context.exception))
def test_multiple_context(self):
"Template assertions work when there are multiple contexts"
post_data = {
'text': 'Hello World',
'email': '[email protected]',
'value': 37,
'single': 'b',
'multi': ('b', 'c', 'e')
}
response = self.client.post('/form_view_with_template/', post_data)
self.assertContains(response, 'POST data OK')
try:
self.assertTemplateNotUsed(response, "form_view.html")
except AssertionError as e:
self.assertIn("Template 'form_view.html' was used unexpectedly in rendering the response", str(e))
try:
self.assertTemplateNotUsed(response, 'base.html')
except AssertionError as e:
self.assertIn("Template 'base.html' was used unexpectedly in rendering the response", str(e))
try:
self.assertTemplateUsed(response, "Valid POST Template")
except AssertionError as e:
self.assertIn(
"Template 'Valid POST Template' was not a template used to "
"render the response. Actual template(s) used: form_view.html, base.html",
str(e)
)
with self.assertRaises(AssertionError) as context:
self.assertTemplateUsed(response, 'base.html', count=2)
self.assertIn(
"Template 'base.html' was expected to be rendered 2 "
"time(s) but was actually rendered 1 time(s).",
str(context.exception))
def test_template_rendered_multiple_times(self):
"""Template assertions work when a template is rendered multiple times."""
response = self.client.get('/render_template_multiple_times/')
self.assertTemplateUsed(response, 'base.html', count=2)
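    # assertTemplateUsed()/assertTemplateNotUsed() rely on the test client
    # recording every template_rendered signal fired while the response was
    # being produced; response.templates is the list those assertions inspect.
    # (High-level description only; the bookkeeping lives in the test client.)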
@override_settings(ROOT_URLCONF='test_client_regress.urls')
class AssertRedirectsTests(SimpleTestCase):
def test_redirect_page(self):
"An assertion is raised if the original page couldn't be retrieved as expected"
# This page will redirect with code 301, not 302
response = self.client.get('/permanent_redirect_view/')
try:
self.assertRedirects(response, '/get_view/')
except AssertionError as e:
self.assertIn("Response didn't redirect as expected: Response code was 301 (expected 302)", str(e))
try:
self.assertRedirects(response, '/get_view/', msg_prefix='abc')
except AssertionError as e:
self.assertIn("abc: Response didn't redirect as expected: Response code was 301 (expected 302)", str(e))
def test_lost_query(self):
"An assertion is raised if the redirect location doesn't preserve GET parameters"
response = self.client.get('/redirect_view/', {'var': 'value'})
try:
self.assertRedirects(response, '/get_view/')
except AssertionError as e:
self.assertIn("Response redirected to '/get_view/?var=value', expected '/get_view/'", str(e))
try:
self.assertRedirects(response, '/get_view/', msg_prefix='abc')
except AssertionError as e:
self.assertIn("abc: Response redirected to '/get_view/?var=value', expected '/get_view/'", str(e))
def test_incorrect_target(self):
"An assertion is raised if the response redirects to another target"
response = self.client.get('/permanent_redirect_view/')
try:
# Should redirect to get_view
self.assertRedirects(response, '/some_view/')
except AssertionError as e:
self.assertIn("Response didn't redirect as expected: Response code was 301 (expected 302)", str(e))
def test_target_page(self):
"An assertion is raised if the response redirect target cannot be retrieved as expected"
response = self.client.get('/double_redirect_view/')
try:
# The redirect target responds with a 301 code, not 200
self.assertRedirects(response, 'http://testserver/permanent_redirect_view/')
except AssertionError as e:
self.assertIn(
"Couldn't retrieve redirection page '/permanent_redirect_view/': "
"response code was 301 (expected 200)",
str(e)
)
try:
# The redirect target responds with a 301 code, not 200
self.assertRedirects(response, 'http://testserver/permanent_redirect_view/', msg_prefix='abc')
except AssertionError as e:
self.assertIn(
"abc: Couldn't retrieve redirection page '/permanent_redirect_view/': "
"response code was 301 (expected 200)",
str(e)
)
def test_redirect_chain(self):
"You can follow a redirect chain of multiple redirects"
response = self.client.get('/redirects/further/more/', {}, follow=True)
self.assertRedirects(response, '/no_template_view/', status_code=302, target_status_code=200)
self.assertEqual(len(response.redirect_chain), 1)
self.assertEqual(response.redirect_chain[0], ('/no_template_view/', 302))
def test_multiple_redirect_chain(self):
"You can follow a redirect chain of multiple redirects"
response = self.client.get('/redirects/', {}, follow=True)
self.assertRedirects(response, '/no_template_view/', status_code=302, target_status_code=200)
self.assertEqual(len(response.redirect_chain), 3)
self.assertEqual(response.redirect_chain[0], ('/redirects/further/', 302))
self.assertEqual(response.redirect_chain[1], ('/redirects/further/more/', 302))
self.assertEqual(response.redirect_chain[2], ('/no_template_view/', 302))
def test_redirect_chain_to_non_existent(self):
"You can follow a chain to a nonexistent view."
response = self.client.get('/redirect_to_non_existent_view2/', {}, follow=True)
self.assertRedirects(response, '/non_existent_view/', status_code=302, target_status_code=404)
def test_redirect_chain_to_self(self):
"Redirections to self are caught and escaped"
with self.assertRaises(RedirectCycleError) as context:
self.client.get('/redirect_to_self/', {}, follow=True)
response = context.exception.last_response
# The chain of redirects stops once the cycle is detected.
self.assertRedirects(response, '/redirect_to_self/', status_code=302, target_status_code=302)
self.assertEqual(len(response.redirect_chain), 2)
def test_redirect_to_self_with_changing_query(self):
"Redirections don't loop forever even if query is changing"
with self.assertRaises(RedirectCycleError):
self.client.get('/redirect_to_self_with_changing_query_view/', {'counter': '0'}, follow=True)
def test_circular_redirect(self):
"Circular redirect chains are caught and escaped"
with self.assertRaises(RedirectCycleError) as context:
self.client.get('/circular_redirect_1/', {}, follow=True)
response = context.exception.last_response
# The chain of redirects will get back to the starting point, but stop there.
self.assertRedirects(response, '/circular_redirect_2/', status_code=302, target_status_code=302)
self.assertEqual(len(response.redirect_chain), 4)
def test_redirect_chain_post(self):
"A redirect chain will be followed from an initial POST post"
response = self.client.post('/redirects/', {'nothing': 'to_send'}, follow=True)
self.assertRedirects(response, '/no_template_view/', 302, 200)
self.assertEqual(len(response.redirect_chain), 3)
def test_redirect_chain_head(self):
"A redirect chain will be followed from an initial HEAD request"
response = self.client.head('/redirects/', {'nothing': 'to_send'}, follow=True)
self.assertRedirects(response, '/no_template_view/', 302, 200)
self.assertEqual(len(response.redirect_chain), 3)
def test_redirect_chain_options(self):
"A redirect chain will be followed from an initial OPTIONS request"
response = self.client.options('/redirects/', follow=True)
self.assertRedirects(response, '/no_template_view/', 302, 200)
self.assertEqual(len(response.redirect_chain), 3)
def test_redirect_chain_put(self):
"A redirect chain will be followed from an initial PUT request"
response = self.client.put('/redirects/', follow=True)
self.assertRedirects(response, '/no_template_view/', 302, 200)
self.assertEqual(len(response.redirect_chain), 3)
def test_redirect_chain_delete(self):
"A redirect chain will be followed from an initial DELETE request"
response = self.client.delete('/redirects/', follow=True)
self.assertRedirects(response, '/no_template_view/', 302, 200)
self.assertEqual(len(response.redirect_chain), 3)
@modify_settings(ALLOWED_HOSTS={'append': 'otherserver'})
def test_redirect_to_different_host(self):
"The test client will preserve scheme, host and port changes"
response = self.client.get('/redirect_other_host/', follow=True)
self.assertRedirects(
response, 'https://otherserver:8443/no_template_view/',
status_code=302, target_status_code=200
)
# We can't use is_secure() or get_host()
# because response.request is a dictionary, not an HttpRequest
self.assertEqual(response.request.get('wsgi.url_scheme'), 'https')
self.assertEqual(response.request.get('SERVER_NAME'), 'otherserver')
self.assertEqual(response.request.get('SERVER_PORT'), '8443')
# assertRedirects() can follow redirect to 'otherserver' too.
response = self.client.get('/redirect_other_host/', follow=False)
self.assertRedirects(
response, 'https://otherserver:8443/no_template_view/',
status_code=302, target_status_code=200
)
def test_redirect_chain_on_non_redirect_page(self):
"An assertion is raised if the original page couldn't be retrieved as expected"
# This page will redirect with code 301, not 302
response = self.client.get('/get_view/', follow=True)
try:
self.assertRedirects(response, '/get_view/')
except AssertionError as e:
self.assertIn("Response didn't redirect as expected: Response code was 200 (expected 302)", str(e))
try:
self.assertRedirects(response, '/get_view/', msg_prefix='abc')
except AssertionError as e:
self.assertIn("abc: Response didn't redirect as expected: Response code was 200 (expected 302)", str(e))
def test_redirect_on_non_redirect_page(self):
"An assertion is raised if the original page couldn't be retrieved as expected"
# This page will redirect with code 301, not 302
response = self.client.get('/get_view/')
try:
self.assertRedirects(response, '/get_view/')
except AssertionError as e:
self.assertIn("Response didn't redirect as expected: Response code was 200 (expected 302)", str(e))
try:
self.assertRedirects(response, '/get_view/', msg_prefix='abc')
except AssertionError as e:
self.assertIn("abc: Response didn't redirect as expected: Response code was 200 (expected 302)", str(e))
def test_redirect_scheme(self):
"An assertion is raised if the response doesn't have the scheme specified in expected_url"
# For all possible True/False combinations of follow and secure
for follow, secure in itertools.product([True, False], repeat=2):
# always redirects to https
response = self.client.get('/https_redirect_view/', follow=follow, secure=secure)
# the goal scheme is https
self.assertRedirects(response, 'https://testserver/secure_view/', status_code=302)
with self.assertRaises(AssertionError):
self.assertRedirects(response, 'http://testserver/secure_view/', status_code=302)
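    # assertRedirects(), roughly: it verifies response.status_code against
    # status_code, compares the redirect target (the Location header, or the
    # final URL of response.redirect_chain when follow=True was used) with
    # expected_url including its scheme, and then fetches that target to check
    # it answers with target_status_code. (A paraphrase of the behaviour
    # exercised above, not the exact code.)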
@override_settings(ROOT_URLCONF='test_client_regress.urls')
class AssertFormErrorTests(SimpleTestCase):
def test_unknown_form(self):
"An assertion is raised if the form name is unknown"
post_data = {
'text': 'Hello World',
'email': 'not an email address',
'value': 37,
'single': 'b',
'multi': ('b', 'c', 'e')
}
response = self.client.post('/form_view/', post_data)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "Invalid POST Template")
try:
self.assertFormError(response, 'wrong_form', 'some_field', 'Some error.')
except AssertionError as e:
self.assertIn("The form 'wrong_form' was not used to render the response", str(e))
try:
self.assertFormError(response, 'wrong_form', 'some_field', 'Some error.', msg_prefix='abc')
except AssertionError as e:
self.assertIn("abc: The form 'wrong_form' was not used to render the response", str(e))
def test_unknown_field(self):
"An assertion is raised if the field name is unknown"
post_data = {
'text': 'Hello World',
'email': 'not an email address',
'value': 37,
'single': 'b',
'multi': ('b', 'c', 'e')
}
response = self.client.post('/form_view/', post_data)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "Invalid POST Template")
try:
self.assertFormError(response, 'form', 'some_field', 'Some error.')
except AssertionError as e:
self.assertIn("The form 'form' in context 0 does not contain the field 'some_field'", str(e))
try:
self.assertFormError(response, 'form', 'some_field', 'Some error.', msg_prefix='abc')
except AssertionError as e:
self.assertIn("abc: The form 'form' in context 0 does not contain the field 'some_field'", str(e))
def test_noerror_field(self):
"An assertion is raised if the field doesn't have any errors"
post_data = {
'text': 'Hello World',
'email': 'not an email address',
'value': 37,
'single': 'b',
'multi': ('b', 'c', 'e')
}
response = self.client.post('/form_view/', post_data)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "Invalid POST Template")
try:
self.assertFormError(response, 'form', 'value', 'Some error.')
except AssertionError as e:
self.assertIn("The field 'value' on form 'form' in context 0 contains no errors", str(e))
try:
self.assertFormError(response, 'form', 'value', 'Some error.', msg_prefix='abc')
except AssertionError as e:
self.assertIn("abc: The field 'value' on form 'form' in context 0 contains no errors", str(e))
def test_unknown_error(self):
"An assertion is raised if the field doesn't contain the provided error"
post_data = {
'text': 'Hello World',
'email': 'not an email address',
'value': 37,
'single': 'b',
'multi': ('b', 'c', 'e')
}
response = self.client.post('/form_view/', post_data)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "Invalid POST Template")
try:
self.assertFormError(response, 'form', 'email', 'Some error.')
except AssertionError as e:
self.assertIn(
"The field 'email' on form 'form' in context 0 does not "
"contain the error 'Some error.' (actual errors: "
"['Enter a valid email address.'])",
str(e)
)
try:
self.assertFormError(response, 'form', 'email', 'Some error.', msg_prefix='abc')
except AssertionError as e:
self.assertIn(
"abc: The field 'email' on form 'form' in context 0 does "
"not contain the error 'Some error.' (actual errors: "
"['Enter a valid email address.'])",
str(e)
)
def test_unknown_nonfield_error(self):
"""
An assertion is raised if the form's non field errors doesn't contain
the provided error.
"""
post_data = {
'text': 'Hello World',
'email': 'not an email address',
'value': 37,
'single': 'b',
'multi': ('b', 'c', 'e')
}
response = self.client.post('/form_view/', post_data)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "Invalid POST Template")
try:
self.assertFormError(response, 'form', None, 'Some error.')
except AssertionError as e:
self.assertIn(
"The form 'form' in context 0 does not contain the non-field "
"error 'Some error.' (actual errors: none)",
str(e)
)
try:
self.assertFormError(response, 'form', None, 'Some error.', msg_prefix='abc')
except AssertionError as e:
self.assertIn(
"abc: The form 'form' in context 0 does not contain the "
"non-field error 'Some error.' (actual errors: none)",
str(e)
)
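    # assertFormError(), in outline (descriptive only): it walks every context
    # used to render the response, requires a form bound to the given name,
    # requires the named field on that form (or looks at non_field_errors()
    # when the field is None), and finally checks that the expected message
    # appears in that error list, which is what the failure messages asserted
    # above spell out.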
@override_settings(ROOT_URLCONF='test_client_regress.urls')
class AssertFormsetErrorTests(SimpleTestCase):
msg_prefixes = [("", {}), ("abc: ", {"msg_prefix": "abc"})]
def setUp(self):
"""Makes response object for testing field and non-field errors"""
# For testing field and non-field errors
self.response_form_errors = self.getResponse({
'form-TOTAL_FORMS': '2',
'form-INITIAL_FORMS': '2',
'form-0-text': 'Raise non-field error',
'form-0-email': 'not an email address',
'form-0-value': 37,
'form-0-single': 'b',
'form-0-multi': ('b', 'c', 'e'),
'form-1-text': 'Hello World',
'form-1-email': '[email protected]',
'form-1-value': 37,
'form-1-single': 'b',
'form-1-multi': ('b', 'c', 'e'),
})
# For testing non-form errors
self.response_nonform_errors = self.getResponse({
'form-TOTAL_FORMS': '2',
'form-INITIAL_FORMS': '2',
'form-0-text': 'Hello World',
'form-0-email': '[email protected]',
'form-0-value': 37,
'form-0-single': 'b',
'form-0-multi': ('b', 'c', 'e'),
'form-1-text': 'Hello World',
'form-1-email': '[email protected]',
'form-1-value': 37,
'form-1-single': 'b',
'form-1-multi': ('b', 'c', 'e'),
})
def getResponse(self, post_data):
response = self.client.post('/formset_view/', post_data)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "Invalid POST Template")
return response
def test_unknown_formset(self):
"An assertion is raised if the formset name is unknown"
for prefix, kwargs in self.msg_prefixes:
msg = prefix + "The formset 'wrong_formset' was not used to render the response"
with self.assertRaisesMessage(AssertionError, msg):
self.assertFormsetError(
self.response_form_errors,
'wrong_formset', 0, 'Some_field', 'Some error.', **kwargs
)
def test_unknown_field(self):
"An assertion is raised if the field name is unknown"
for prefix, kwargs in self.msg_prefixes:
msg = prefix + "The formset 'my_formset', form 0 in context 0 does not contain the field 'Some_field'"
with self.assertRaisesMessage(AssertionError, msg):
self.assertFormsetError(
self.response_form_errors,
'my_formset', 0, 'Some_field', 'Some error.', **kwargs
)
def test_no_error_field(self):
"An assertion is raised if the field doesn't have any errors"
for prefix, kwargs in self.msg_prefixes:
msg = prefix + "The field 'value' on formset 'my_formset', form 1 in context 0 contains no errors"
with self.assertRaisesMessage(AssertionError, msg):
self.assertFormsetError(self.response_form_errors, 'my_formset', 1, 'value', 'Some error.', **kwargs)
def test_unknown_error(self):
"An assertion is raised if the field doesn't contain the specified error"
for prefix, kwargs in self.msg_prefixes:
msg = prefix + (
"The field 'email' on formset 'my_formset', form 0 "
"in context 0 does not contain the error 'Some error.' "
"(actual errors: ['Enter a valid email address.'])"
)
with self.assertRaisesMessage(AssertionError, msg):
self.assertFormsetError(self.response_form_errors, 'my_formset', 0, 'email', 'Some error.', **kwargs)
def test_field_error(self):
"No assertion is raised if the field contains the provided error"
error_msg = ['Enter a valid email address.']
for prefix, kwargs in self.msg_prefixes:
self.assertFormsetError(self.response_form_errors, 'my_formset', 0, 'email', error_msg, **kwargs)
def test_no_nonfield_error(self):
"An assertion is raised if the formsets non-field errors doesn't contain any errors."
for prefix, kwargs in self.msg_prefixes:
msg = prefix + "The formset 'my_formset', form 1 in context 0 does not contain any non-field errors."
with self.assertRaisesMessage(AssertionError, msg):
self.assertFormsetError(self.response_form_errors, 'my_formset', 1, None, 'Some error.', **kwargs)
def test_unknown_nonfield_error(self):
"An assertion is raised if the formsets non-field errors doesn't contain the provided error."
for prefix, kwargs in self.msg_prefixes:
msg = prefix + (
"The formset 'my_formset', form 0 in context 0 does not "
"contain the non-field error 'Some error.' (actual errors: "
"['Non-field error.'])"
)
with self.assertRaisesMessage(AssertionError, msg):
self.assertFormsetError(self.response_form_errors, 'my_formset', 0, None, 'Some error.', **kwargs)
def test_nonfield_error(self):
"No assertion is raised if the formsets non-field errors contains the provided error."
for prefix, kwargs in self.msg_prefixes:
self.assertFormsetError(self.response_form_errors, 'my_formset', 0, None, 'Non-field error.', **kwargs)
def test_no_nonform_error(self):
"An assertion is raised if the formsets non-form errors doesn't contain any errors."
for prefix, kwargs in self.msg_prefixes:
msg = prefix + "The formset 'my_formset' in context 0 does not contain any non-form errors."
with self.assertRaisesMessage(AssertionError, msg):
self.assertFormsetError(self.response_form_errors, 'my_formset', None, None, 'Some error.', **kwargs)
def test_unknown_nonform_error(self):
"An assertion is raised if the formsets non-form errors doesn't contain the provided error."
for prefix, kwargs in self.msg_prefixes:
msg = prefix + (
"The formset 'my_formset' in context 0 does not contain the "
"non-form error 'Some error.' (actual errors: ['Forms in a set "
"must have distinct email addresses.'])"
)
with self.assertRaisesMessage(AssertionError, msg):
self.assertFormsetError(
self.response_nonform_errors,
'my_formset', None, None, 'Some error.', **kwargs
)
def test_nonform_error(self):
"No assertion is raised if the formsets non-form errors contains the provided error."
msg = 'Forms in a set must have distinct email addresses.'
for prefix, kwargs in self.msg_prefixes:
self.assertFormsetError(self.response_nonform_errors, 'my_formset', None, None, msg, **kwargs)
@override_settings(ROOT_URLCONF='test_client_regress.urls')
class LoginTests(TestDataMixin, TestCase):
def test_login_different_client(self):
"Using a different test client doesn't violate authentication"
# Create a second client, and log in.
c = Client()
login = c.login(username='testclient', password='password')
self.assertTrue(login, 'Could not log in')
# Get a redirection page with the second client.
response = c.get("/login_protected_redirect_view/")
        # At this point, self.client isn't logged in.
# assertRedirects uses the original client, not the default client.
self.assertRedirects(response, "/get_view/")
@override_settings(
SESSION_ENGINE='test_client_regress.session',
ROOT_URLCONF='test_client_regress.urls',
)
class SessionEngineTests(TestDataMixin, TestCase):
def test_login(self):
"A session engine that modifies the session key can be used to log in"
login = self.client.login(username='testclient', password='password')
self.assertTrue(login, 'Could not log in')
# Try to access a login protected page.
response = self.client.get("/login_protected_view/")
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['user'].username, 'testclient')
@override_settings(ROOT_URLCONF='test_client_regress.urls',)
class URLEscapingTests(SimpleTestCase):
def test_simple_argument_get(self):
"Get a view that has a simple string argument"
response = self.client.get(reverse('arg_view', args=['Slartibartfast']))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'Howdy, Slartibartfast')
def test_argument_with_space_get(self):
"Get a view that has a string argument that requires escaping"
response = self.client.get(reverse('arg_view', args=['Arthur Dent']))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'Hi, Arthur')
def test_simple_argument_post(self):
"Post for a view that has a simple string argument"
response = self.client.post(reverse('arg_view', args=['Slartibartfast']))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'Howdy, Slartibartfast')
def test_argument_with_space_post(self):
"Post for a view that has a string argument that requires escaping"
response = self.client.post(reverse('arg_view', args=['Arthur Dent']))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'Hi, Arthur')
@override_settings(ROOT_URLCONF='test_client_regress.urls')
class ExceptionTests(TestDataMixin, TestCase):
def test_exception_cleared(self):
"#5836 - A stale user exception isn't re-raised by the test client."
login = self.client.login(username='testclient', password='password')
self.assertTrue(login, 'Could not log in')
with self.assertRaises(CustomTestException):
self.client.get("/staff_only/")
# At this point, an exception has been raised, and should be cleared.
# This next operation should be successful; if it isn't we have a problem.
login = self.client.login(username='staff', password='password')
self.assertTrue(login, 'Could not log in')
self.client.get("/staff_only/")
@override_settings(ROOT_URLCONF='test_client_regress.urls')
class TemplateExceptionTests(SimpleTestCase):
@override_settings(TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(os.path.dirname(__file__), 'bad_templates')],
}])
def test_bad_404_template(self):
"Errors found when rendering 404 error templates are re-raised"
with self.assertRaises(TemplateSyntaxError):
self.client.get("/no_such_view/")
# We need two different tests to check URLconf substitution - one to check
# it was changed, and another one (without self.urls) to check it was reverted on
# teardown. This pair of tests relies upon the alphabetical ordering of test execution.
@override_settings(ROOT_URLCONF='test_client_regress.urls')
class UrlconfSubstitutionTests(SimpleTestCase):
def test_urlconf_was_changed(self):
"TestCase can enforce a custom URLconf on a per-test basis"
url = reverse('arg_view', args=['somename'])
self.assertEqual(url, '/arg_view/somename/')
# This test needs to run *after* UrlconfSubstitutionTests; the zz prefix in the
# name is to ensure alphabetical ordering.
class zzUrlconfSubstitutionTests(SimpleTestCase):
def test_urlconf_was_reverted(self):
"""URLconf is reverted to original value after modification in a TestCase
This will not find a match as the default ROOT_URLCONF is empty.
"""
with self.assertRaises(NoReverseMatch):
reverse('arg_view', args=['somename'])
@override_settings(ROOT_URLCONF='test_client_regress.urls')
class ContextTests(TestDataMixin, TestCase):
def test_single_context(self):
"Context variables can be retrieved from a single context"
response = self.client.get("/request_data/", data={'foo': 'whiz'})
self.assertIsInstance(response.context, RequestContext)
self.assertIn('get-foo', response.context)
self.assertEqual(response.context['get-foo'], 'whiz')
self.assertEqual(response.context['data'], 'sausage')
with self.assertRaisesMessage(KeyError, 'does-not-exist'):
response.context['does-not-exist']
def test_inherited_context(self):
"Context variables can be retrieved from a list of contexts"
response = self.client.get("/request_data_extended/", data={'foo': 'whiz'})
self.assertEqual(response.context.__class__, ContextList)
self.assertEqual(len(response.context), 2)
self.assertIn('get-foo', response.context)
self.assertEqual(response.context['get-foo'], 'whiz')
self.assertEqual(response.context['data'], 'bacon')
with self.assertRaises(KeyError) as cm:
response.context['does-not-exist']
self.assertEqual(cm.exception.args[0], 'does-not-exist')
def test_contextlist_keys(self):
c1 = Context()
c1.update({'hello': 'world', 'goodbye': 'john'})
c1.update({'hello': 'dolly', 'dolly': 'parton'})
c2 = Context()
c2.update({'goodbye': 'world', 'python': 'rocks'})
c2.update({'goodbye': 'dolly'})
k = ContextList([c1, c2])
# None, True and False are builtins of BaseContext, and present
# in every Context without needing to be added.
self.assertEqual({'None', 'True', 'False', 'hello', 'goodbye', 'python', 'dolly'}, k.keys())
def test_contextlist_get(self):
c1 = Context({'hello': 'world', 'goodbye': 'john'})
c2 = Context({'goodbye': 'world', 'python': 'rocks'})
k = ContextList([c1, c2])
self.assertEqual(k.get('hello'), 'world')
self.assertEqual(k.get('goodbye'), 'john')
self.assertEqual(k.get('python'), 'rocks')
self.assertEqual(k.get('nonexistent', 'default'), 'default')
def test_15368(self):
# Need to insert a context processor that assumes certain things about
# the request instance. This triggers a bug caused by some ways of
# copying RequestContext.
with self.settings(TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'test_client_regress.context_processors.special',
],
},
}]):
response = self.client.get("/request_context_view/")
self.assertContains(response, 'Path: /request_context_view/')
def test_nested_requests(self):
"""
        response.context is not lost when a view calls another view.
"""
response = self.client.get("/nested_view/")
self.assertIsInstance(response.context, RequestContext)
self.assertEqual(response.context['nested'], 'yes')
@override_settings(ROOT_URLCONF='test_client_regress.urls')
class SessionTests(TestDataMixin, TestCase):
def test_session(self):
"The session isn't lost if a user logs in"
# The session doesn't exist to start.
response = self.client.get('/check_session/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'NO')
# This request sets a session variable.
response = self.client.get('/set_session/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'set_session')
# The session has been modified
response = self.client.get('/check_session/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'YES')
# Log in
login = self.client.login(username='testclient', password='password')
self.assertTrue(login, 'Could not log in')
# Session should still contain the modified value
response = self.client.get('/check_session/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'YES')
def test_session_initiated(self):
session = self.client.session
session['session_var'] = 'foo'
session.save()
response = self.client.get('/check_session/')
self.assertEqual(response.content, b'foo')
def test_logout(self):
"""Logout should work whether the user is logged in or not (#9978)."""
self.client.logout()
login = self.client.login(username='testclient', password='password')
self.assertTrue(login, 'Could not log in')
self.client.logout()
self.client.logout()
def test_logout_with_user(self):
"""Logout should send user_logged_out signal if user was logged in."""
def listener(*args, **kwargs):
listener.executed = True
self.assertEqual(kwargs['sender'], User)
listener.executed = False
user_logged_out.connect(listener)
self.client.login(username='testclient', password='password')
self.client.logout()
user_logged_out.disconnect(listener)
self.assertTrue(listener.executed)
@override_settings(AUTH_USER_MODEL='test_client_regress.CustomUser')
def test_logout_with_custom_user(self):
"""Logout should send user_logged_out signal if custom user was logged in."""
def listener(*args, **kwargs):
self.assertEqual(kwargs['sender'], CustomUser)
listener.executed = True
listener.executed = False
u = CustomUser.custom_objects.create(email='[email protected]')
u.set_password('password')
u.save()
user_logged_out.connect(listener)
self.client.login(username='[email protected]', password='password')
self.client.logout()
user_logged_out.disconnect(listener)
self.assertTrue(listener.executed)
@override_settings(AUTHENTICATION_BACKENDS=(
'django.contrib.auth.backends.ModelBackend',
'test_client_regress.auth_backends.CustomUserBackend'))
def test_logout_with_custom_auth_backend(self):
"Request a logout after logging in with custom authentication backend"
def listener(*args, **kwargs):
self.assertEqual(kwargs['sender'], CustomUser)
listener.executed = True
listener.executed = False
u = CustomUser.custom_objects.create(email='[email protected]')
u.set_password('password')
u.save()
user_logged_out.connect(listener)
self.client.login(username='[email protected]', password='password')
self.client.logout()
user_logged_out.disconnect(listener)
self.assertTrue(listener.executed)
def test_logout_without_user(self):
"""Logout should send signal even if user not authenticated."""
def listener(user, *args, **kwargs):
listener.user = user
listener.executed = True
listener.executed = False
user_logged_out.connect(listener)
self.client.login(username='incorrect', password='password')
self.client.logout()
user_logged_out.disconnect(listener)
self.assertTrue(listener.executed)
self.assertIsNone(listener.user)
def test_login_with_user(self):
"""Login should send user_logged_in signal on successful login."""
def listener(*args, **kwargs):
listener.executed = True
listener.executed = False
user_logged_in.connect(listener)
self.client.login(username='testclient', password='password')
        user_logged_in.disconnect(listener)
self.assertTrue(listener.executed)
def test_login_without_signal(self):
"""Login shouldn't send signal if user wasn't logged in"""
def listener(*args, **kwargs):
listener.executed = True
listener.executed = False
user_logged_in.connect(listener)
self.client.login(username='incorrect', password='password')
user_logged_in.disconnect(listener)
self.assertFalse(listener.executed)
@override_settings(ROOT_URLCONF='test_client_regress.urls')
class RequestMethodTests(SimpleTestCase):
def test_get(self):
"Request a view via request method GET"
response = self.client.get('/request_methods/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'request method: GET')
def test_post(self):
"Request a view via request method POST"
response = self.client.post('/request_methods/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'request method: POST')
def test_head(self):
"Request a view via request method HEAD"
response = self.client.head('/request_methods/')
self.assertEqual(response.status_code, 200)
# A HEAD request doesn't return any content.
self.assertNotEqual(response.content, b'request method: HEAD')
self.assertEqual(response.content, b'')
def test_options(self):
"Request a view via request method OPTIONS"
response = self.client.options('/request_methods/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'request method: OPTIONS')
def test_put(self):
"Request a view via request method PUT"
response = self.client.put('/request_methods/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'request method: PUT')
def test_delete(self):
"Request a view via request method DELETE"
response = self.client.delete('/request_methods/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'request method: DELETE')
def test_patch(self):
"Request a view via request method PATCH"
response = self.client.patch('/request_methods/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'request method: PATCH')
@override_settings(ROOT_URLCONF='test_client_regress.urls')
class RequestMethodStringDataTests(SimpleTestCase):
def test_post(self):
"Request a view with string data via request method POST"
# Regression test for #11371
data = '{"test": "json"}'
response = self.client.post('/request_methods/', data=data, content_type='application/json')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'request method: POST')
def test_put(self):
"Request a view with string data via request method PUT"
# Regression test for #11371
data = '{"test": "json"}'
response = self.client.put('/request_methods/', data=data, content_type='application/json')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'request method: PUT')
def test_patch(self):
"Request a view with string data via request method PATCH"
# Regression test for #17797
data = '{"test": "json"}'
response = self.client.patch('/request_methods/', data=data, content_type='application/json')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'request method: PATCH')
def test_empty_string_data(self):
"Request a view with empty string data via request method GET/POST/HEAD"
# Regression test for #21740
response = self.client.get('/body/', data='', content_type='application/json')
self.assertEqual(response.content, b'')
response = self.client.post('/body/', data='', content_type='application/json')
self.assertEqual(response.content, b'')
response = self.client.head('/body/', data='', content_type='application/json')
self.assertEqual(response.content, b'')
def test_json_bytes(self):
response = self.client.post('/body/', data=b"{'value': 37}", content_type='application/json')
self.assertEqual(response.content, b"{'value': 37}")
def test_json(self):
response = self.client.get('/json_response/')
self.assertEqual(response.json(), {'key': 'value'})
def test_json_structured_suffixes(self):
valid_types = (
'application/vnd.api+json',
'application/vnd.api.foo+json',
'application/json; charset=utf-8',
'application/activity+json',
'application/activity+json; charset=utf-8',
)
for content_type in valid_types:
response = self.client.get('/json_response/', {'content_type': content_type})
self.assertEqual(response['Content-Type'], content_type)
self.assertEqual(response.json(), {'key': 'value'})
def test_json_multiple_access(self):
response = self.client.get('/json_response/')
self.assertIs(response.json(), response.json())
def test_json_wrong_header(self):
response = self.client.get('/body/')
msg = 'Content-Type header is "text/html; charset=utf-8", not "application/json"'
with self.assertRaisesMessage(ValueError, msg):
self.assertEqual(response.json(), {'key': 'value'})
@override_settings(ROOT_URLCONF='test_client_regress.urls',)
class QueryStringTests(SimpleTestCase):
def test_get_like_requests(self):
for method_name in ('get', 'head'):
# A GET-like request can pass a query string as data (#10571)
method = getattr(self.client, method_name)
response = method("/request_data/", data={'foo': 'whiz'})
self.assertEqual(response.context['get-foo'], 'whiz')
# A GET-like request can pass a query string as part of the URL
response = method("/request_data/?foo=whiz")
self.assertEqual(response.context['get-foo'], 'whiz')
# Data provided in the URL to a GET-like request is overridden by actual form data
response = method("/request_data/?foo=whiz", data={'foo': 'bang'})
self.assertEqual(response.context['get-foo'], 'bang')
response = method("/request_data/?foo=whiz", data={'bar': 'bang'})
self.assertIsNone(response.context['get-foo'])
self.assertEqual(response.context['get-bar'], 'bang')
def test_post_like_requests(self):
# A POST-like request can pass a query string as data
response = self.client.post("/request_data/", data={'foo': 'whiz'})
self.assertIsNone(response.context['get-foo'])
self.assertEqual(response.context['post-foo'], 'whiz')
# A POST-like request can pass a query string as part of the URL
response = self.client.post("/request_data/?foo=whiz")
self.assertEqual(response.context['get-foo'], 'whiz')
self.assertIsNone(response.context['post-foo'])
# POST data provided in the URL augments actual form data
response = self.client.post("/request_data/?foo=whiz", data={'foo': 'bang'})
self.assertEqual(response.context['get-foo'], 'whiz')
self.assertEqual(response.context['post-foo'], 'bang')
response = self.client.post("/request_data/?foo=whiz", data={'bar': 'bang'})
self.assertEqual(response.context['get-foo'], 'whiz')
self.assertIsNone(response.context['get-bar'])
self.assertIsNone(response.context['post-foo'])
self.assertEqual(response.context['post-bar'], 'bang')
@override_settings(ROOT_URLCONF='test_client_regress.urls')
class PayloadEncodingTests(SimpleTestCase):
"""Regression tests for #10571."""
def test_simple_payload(self):
"""A simple ASCII-only text can be POSTed."""
text = 'English: mountain pass'
response = self.client.post('/parse_encoded_text/', text, content_type='text/plain')
self.assertEqual(response.content, text.encode())
def test_utf8_payload(self):
"""Non-ASCII data encoded as UTF-8 can be POSTed."""
text = 'dog: собака'
response = self.client.post('/parse_encoded_text/', text, content_type='text/plain; charset=utf-8')
self.assertEqual(response.content, text.encode())
def test_utf16_payload(self):
"""Non-ASCII data encoded as UTF-16 can be POSTed."""
text = 'dog: собака'
response = self.client.post('/parse_encoded_text/', text, content_type='text/plain; charset=utf-16')
self.assertEqual(response.content, text.encode('utf-16'))
def test_non_utf_payload(self):
"""Non-ASCII data as a non-UTF based encoding can be POSTed."""
text = 'dog: собака'
response = self.client.post('/parse_encoded_text/', text, content_type='text/plain; charset=koi8-r')
self.assertEqual(response.content, text.encode('koi8-r'))
class DummyFile:
def __init__(self, filename):
self.name = filename
def read(self):
return b'TEST_FILE_CONTENT'
class UploadedFileEncodingTest(SimpleTestCase):
def test_file_encoding(self):
encoded_file = encode_file('TEST_BOUNDARY', 'TEST_KEY', DummyFile('test_name.bin'))
self.assertEqual(b'--TEST_BOUNDARY', encoded_file[0])
self.assertEqual(b'Content-Disposition: form-data; name="TEST_KEY"; filename="test_name.bin"', encoded_file[1])
self.assertEqual(b'TEST_FILE_CONTENT', encoded_file[-1])
def test_guesses_content_type_on_file_encoding(self):
self.assertEqual(b'Content-Type: application/octet-stream',
encode_file('IGNORE', 'IGNORE', DummyFile("file.bin"))[2])
self.assertEqual(b'Content-Type: text/plain',
encode_file('IGNORE', 'IGNORE', DummyFile("file.txt"))[2])
self.assertIn(encode_file('IGNORE', 'IGNORE', DummyFile("file.zip"))[2], (
b'Content-Type: application/x-compress',
b'Content-Type: application/x-zip',
b'Content-Type: application/x-zip-compressed',
b'Content-Type: application/zip',))
self.assertEqual(b'Content-Type: application/octet-stream',
encode_file('IGNORE', 'IGNORE', DummyFile("file.unknown"))[2])
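    # The guesses asserted above come from the stdlib mimetypes registry, which
    # is platform dependent for .zip (hence the assertIn). A minimal sketch of
    # the underlying lookup (not necessarily the exact call encode_file makes):
    #
    #     import mimetypes
    #     mimetypes.guess_type('file.txt')      # ('text/plain', None)
    #     mimetypes.guess_type('file.unknown')  # (None, None), which falls back
    #                                           # to application/octet-stream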
@override_settings(ROOT_URLCONF='test_client_regress.urls',)
class RequestHeadersTest(SimpleTestCase):
def test_client_headers(self):
"A test client can receive custom headers"
response = self.client.get("/check_headers/", HTTP_X_ARG_CHECK='Testing 123')
self.assertEqual(response.content, b"HTTP_X_ARG_CHECK: Testing 123")
self.assertEqual(response.status_code, 200)
def test_client_headers_redirect(self):
"Test client headers are preserved through redirects"
response = self.client.get("/check_headers_redirect/", follow=True, HTTP_X_ARG_CHECK='Testing 123')
self.assertEqual(response.content, b"HTTP_X_ARG_CHECK: Testing 123")
self.assertRedirects(response, '/check_headers/', status_code=302, target_status_code=200)
@override_settings(ROOT_URLCONF='test_client_regress.urls')
class ReadLimitedStreamTest(SimpleTestCase):
"""
HttpRequest.body, HttpRequest.read(), and HttpRequest.read(BUFFER) have
proper LimitedStream behavior.
Refs #14753, #15785
"""
def test_body_from_empty_request(self):
"""HttpRequest.body on a test client GET request should return
the empty string."""
self.assertEqual(self.client.get("/body/").content, b'')
def test_read_from_empty_request(self):
"""HttpRequest.read() on a test client GET request should return the
empty string."""
self.assertEqual(self.client.get("/read_all/").content, b'')
def test_read_numbytes_from_empty_request(self):
"""HttpRequest.read(LARGE_BUFFER) on a test client GET request should
return the empty string."""
self.assertEqual(self.client.get("/read_buffer/").content, b'')
def test_read_from_nonempty_request(self):
"""HttpRequest.read() on a test client PUT request with some payload
should return that payload."""
payload = b'foobar'
self.assertEqual(self.client.put("/read_all/", data=payload, content_type='text/plain').content, payload)
def test_read_numbytes_from_nonempty_request(self):
"""HttpRequest.read(LARGE_BUFFER) on a test client PUT request with
some payload should return that payload."""
payload = b'foobar'
self.assertEqual(self.client.put("/read_buffer/", data=payload, content_type='text/plain').content, payload)
@override_settings(ROOT_URLCONF='test_client_regress.urls')
class RequestFactoryStateTest(SimpleTestCase):
"""Regression tests for #15929."""
# These tests are checking that certain middleware don't change certain
# global state. Alternatively, from the point of view of a test, they are
# ensuring test isolation behavior. So, unusually, it doesn't make sense to
# run the tests individually, and if any are failing it is confusing to run
# them with any other set of tests.
def common_test_that_should_always_pass(self):
request = RequestFactory().get('/')
request.session = {}
self.assertFalse(hasattr(request, 'user'))
def test_request(self):
self.common_test_that_should_always_pass()
def test_request_after_client(self):
# apart from the next line the three tests are identical
self.client.get('/')
self.common_test_that_should_always_pass()
def test_request_after_client_2(self):
# This test is executed after the previous one
self.common_test_that_should_always_pass()
@override_settings(ROOT_URLCONF='test_client_regress.urls')
class RequestFactoryEnvironmentTests(SimpleTestCase):
"""
Regression tests for #8551 and #17067: ensure that environment variables
are set correctly in RequestFactory.
"""
def test_should_set_correct_env_variables(self):
request = RequestFactory().get('/path/')
self.assertEqual(request.META.get('REMOTE_ADDR'), '127.0.0.1')
self.assertEqual(request.META.get('SERVER_NAME'), 'testserver')
self.assertEqual(request.META.get('SERVER_PORT'), '80')
self.assertEqual(request.META.get('SERVER_PROTOCOL'), 'HTTP/1.1')
self.assertEqual(request.META.get('SCRIPT_NAME') + request.META.get('PATH_INFO'), '/path/')
def test_cookies(self):
factory = RequestFactory()
factory.cookies.load('A="B"; C="D"; Path=/; Version=1')
request = factory.get('/')
self.assertEqual(request.META['HTTP_COOKIE'], 'A="B"; C="D"')
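# Illustrative sketch (not part of the original suite): RequestFactoryEnvironmentTests
# above relies on RequestFactory filling in WSGI-style defaults such as REMOTE_ADDR and
# SERVER_NAME. The helper below shows one hedged way to inspect those defaults; its name
# and the exact set of keys returned are assumptions made for illustration only.
def _inspect_request_factory_defaults(path='/path/'):
    from django.test import RequestFactory
    request = RequestFactory().get(path)
    # Collect the environment values the tests above assert on.
    return {
        key: request.META.get(key)
        for key in ('REMOTE_ADDR', 'SERVER_NAME', 'SERVER_PORT', 'SERVER_PROTOCOL', 'PATH_INFO')
    }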
|
da93938f9df74d141c17726af6c595d04c7ba22322172f1c7bd744b8f9c25ef6 | from django.urls import include, path
from django.views.generic import RedirectView
from . import views
urlpatterns = [
path('', include('test_client.urls')),
path('no_template_view/', views.no_template_view),
path('staff_only/', views.staff_only_view),
path('get_view/', views.get_view),
path('request_data/', views.request_data),
path('request_data_extended/', views.request_data, {'template': 'extended.html', 'data': 'bacon'}),
path('arg_view/<name>/', views.view_with_argument, name='arg_view'),
path('nested_view/', views.nested_view, name='nested_view'),
path('login_protected_redirect_view/', views.login_protected_redirect_view),
path('redirects/', RedirectView.as_view(url='/redirects/further/')),
path('redirects/further/', RedirectView.as_view(url='/redirects/further/more/')),
path('redirects/further/more/', RedirectView.as_view(url='/no_template_view/')),
path('redirect_to_non_existent_view/', RedirectView.as_view(url='/non_existent_view/')),
path('redirect_to_non_existent_view2/', RedirectView.as_view(url='/redirect_to_non_existent_view/')),
path('redirect_to_self/', RedirectView.as_view(url='/redirect_to_self/')),
path('redirect_to_self_with_changing_query_view/', views.redirect_to_self_with_changing_query_view),
path('circular_redirect_1/', RedirectView.as_view(url='/circular_redirect_2/')),
path('circular_redirect_2/', RedirectView.as_view(url='/circular_redirect_3/')),
path('circular_redirect_3/', RedirectView.as_view(url='/circular_redirect_1/')),
path('redirect_other_host/', RedirectView.as_view(url='https://otherserver:8443/no_template_view/')),
path('set_session/', views.set_session_view),
path('check_session/', views.check_session_view),
path('request_methods/', views.request_methods_view),
path('check_unicode/', views.return_unicode),
path('check_binary/', views.return_undecodable_binary),
path('json_response/', views.return_json_response),
path('parse_encoded_text/', views.return_text_file),
path('check_headers/', views.check_headers),
path('check_headers_redirect/', RedirectView.as_view(url='/check_headers/')),
path('body/', views.body),
path('read_all/', views.read_all),
path('read_buffer/', views.read_buffer),
path('request_context_view/', views.request_context_view),
path('render_template_multiple_times/', views.render_template_multiple_times),
]
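# Hedged illustration (not part of the original URLconf): the RedirectView chain above
# (/redirects/ -> /redirects/further/ -> /redirects/further/more/ -> /no_template_view/)
# is what the test client follows elsewhere in this app with follow=True. A sketch of
# that usage, with an assumed helper name:
def _sketch_follow_redirect_chain(client):
    response = client.get('/redirects/', follow=True)
    # redirect_chain lists every intermediate (url, status_code) hop that was followed.
    return response.redirect_chain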
|
57fbbd6301c766fd8fca1dc3b74c6e834538e9eff4e31cc99ab1f77470375509 | from operator import attrgetter
from django.test import TestCase
from .models import Person
class RecursiveM2MTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.a, cls.b, cls.c, cls.d = [
Person.objects.create(name=name)
for name in ["Anne", "Bill", "Chuck", "David"]
]
# Anne is friends with Bill and Chuck
cls.a.friends.add(cls.b, cls.c)
# David is friends with Anne and Chuck - add in reverse direction
cls.d.friends.add(cls.a, cls.c)
def test_recursive_m2m_all(self):
# Who is friends with Anne?
self.assertQuerysetEqual(
self.a.friends.all(), [
"Bill",
"Chuck",
"David"
],
attrgetter("name"),
ordered=False
)
# Who is friends with Bill?
self.assertQuerysetEqual(
self.b.friends.all(), [
"Anne",
],
attrgetter("name")
)
# Who is friends with Chuck?
self.assertQuerysetEqual(
self.c.friends.all(), [
"Anne",
"David"
],
attrgetter("name"),
ordered=False
)
# Who is friends with David?
self.assertQuerysetEqual(
self.d.friends.all(), [
"Anne",
"Chuck",
],
attrgetter("name"),
ordered=False
)
def test_recursive_m2m_reverse_add(self):
# Bill is already friends with Anne - add Anne again, but in the
# reverse direction
self.b.friends.add(self.a)
# Who is friends with Anne?
self.assertQuerysetEqual(
self.a.friends.all(), [
"Bill",
"Chuck",
"David",
],
attrgetter("name"),
ordered=False
)
# Who is friends with Bill?
self.assertQuerysetEqual(
self.b.friends.all(), [
"Anne",
],
attrgetter("name")
)
def test_recursive_m2m_remove(self):
# Remove Anne from Bill's friends
self.b.friends.remove(self.a)
# Who is friends with Anne?
self.assertQuerysetEqual(
self.a.friends.all(), [
"Chuck",
"David",
],
attrgetter("name"),
ordered=False
)
# Who is friends with Bill?
self.assertQuerysetEqual(
self.b.friends.all(), []
)
def test_recursive_m2m_clear(self):
# Clear Anne's group of friends
self.a.friends.clear()
# Who is friends with Anne?
self.assertQuerysetEqual(
self.a.friends.all(), []
)
# Reverse relationships should also be gone
# Who is friends with Chuck?
self.assertQuerysetEqual(
self.c.friends.all(), [
"David",
],
attrgetter("name")
)
# Who is friends with David?
self.assertQuerysetEqual(
self.d.friends.all(), [
"Chuck",
],
attrgetter("name")
)
def test_recursive_m2m_add_via_related_name(self):
# David is idolized by Anne and Chuck - add in reverse direction
self.d.stalkers.add(self.a)
# Who are Anne's idols?
self.assertQuerysetEqual(
self.a.idols.all(), [
"David",
],
attrgetter("name"),
ordered=False
)
# Who is stalking Anne?
self.assertQuerysetEqual(
self.a.stalkers.all(), [],
attrgetter("name")
)
def test_recursive_m2m_add_in_both_directions(self):
"""Adding the same relation twice results in a single relation."""
# Ann idolizes David
self.a.idols.add(self.d)
# David is idolized by Anne
self.d.stalkers.add(self.a)
# Who are Anne's idols?
self.assertQuerysetEqual(
self.a.idols.all(), [
"David",
],
attrgetter("name"),
ordered=False
)
# As the assertQuerysetEqual uses a set for comparison,
# check we've only got David listed once
self.assertEqual(self.a.idols.all().count(), 1)
def test_recursive_m2m_related_to_self(self):
# Ann idolizes herself
self.a.idols.add(self.a)
# Who are Anne's idols?
self.assertQuerysetEqual(
self.a.idols.all(), [
"Anne",
],
attrgetter("name"),
ordered=False
)
# Who is stalking Anne?
self.assertQuerysetEqual(
self.a.stalkers.all(), [
"Anne",
],
attrgetter("name")
)
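# Hedged illustration (not part of the original tests): the behaviour exercised above is
# that ``friends`` is a symmetrical self-referential ManyToManyField (one add() is visible
# from both sides) while ``idols``/``stalkers`` are the two ends of a non-symmetrical one.
# The helper below sketches that contract with the imported Person model; its name is an
# assumption for illustration only.
def _sketch_recursive_m2m_contract():
    anne = Person.objects.create(name='Anne')
    bill = Person.objects.create(name='Bill')
    # Symmetrical: adding in one direction creates the reverse relation too.
    anne.friends.add(bill)
    assert bill.friends.filter(pk=anne.pk).exists()
    # Non-symmetrical: ``stalkers`` is the reverse accessor of ``idols``.
    anne.idols.add(bill)
    assert bill.stalkers.filter(pk=anne.pk).exists()
    assert not bill.idols.filter(pk=anne.pk).exists()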
|
7af3d246775bc7fdae4b12b742b94bface58e7fd0e41a36efac942cb2198f0c4 | from django.core.exceptions import FieldError
from django.db.models import Count, F, Max
from django.test import TestCase
from .models import A, B, Bar, D, DataPoint, Foo, RelatedPoint
class SimpleTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.a1 = A.objects.create()
cls.a2 = A.objects.create()
for x in range(20):
B.objects.create(a=cls.a1)
D.objects.create(a=cls.a1)
def test_nonempty_update(self):
"""
Update changes the right number of rows for a nonempty queryset
"""
num_updated = self.a1.b_set.update(y=100)
self.assertEqual(num_updated, 20)
cnt = B.objects.filter(y=100).count()
self.assertEqual(cnt, 20)
def test_empty_update(self):
"""
Update changes the right number of rows for an empty queryset
"""
num_updated = self.a2.b_set.update(y=100)
self.assertEqual(num_updated, 0)
cnt = B.objects.filter(y=100).count()
self.assertEqual(cnt, 0)
def test_nonempty_update_with_inheritance(self):
"""
Update changes the right number of rows for a nonempty queryset
when the update affects only a base table
"""
num_updated = self.a1.d_set.update(y=100)
self.assertEqual(num_updated, 20)
cnt = D.objects.filter(y=100).count()
self.assertEqual(cnt, 20)
def test_empty_update_with_inheritance(self):
"""
Update changes the right number of rows for an empty queryset
when the update affects only a base table
"""
num_updated = self.a2.d_set.update(y=100)
self.assertEqual(num_updated, 0)
cnt = D.objects.filter(y=100).count()
self.assertEqual(cnt, 0)
def test_foreign_key_update_with_id(self):
"""
Update works using <field>_id for foreign keys
"""
num_updated = self.a1.d_set.update(a_id=self.a2)
self.assertEqual(num_updated, 20)
self.assertEqual(self.a2.d_set.count(), 20)
class AdvancedTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.d0 = DataPoint.objects.create(name="d0", value="apple")
cls.d2 = DataPoint.objects.create(name="d2", value="banana")
cls.d3 = DataPoint.objects.create(name="d3", value="banana")
cls.r1 = RelatedPoint.objects.create(name="r1", data=cls.d3)
def test_update(self):
"""
Objects are updated by first filtering the candidates into a queryset
and then calling the update() method. It executes immediately and
returns the number of rows matched.
"""
resp = DataPoint.objects.filter(value="apple").update(name="d1")
self.assertEqual(resp, 1)
resp = DataPoint.objects.filter(value="apple")
self.assertEqual(list(resp), [self.d0])
def test_update_multiple_objects(self):
"""
We can update multiple objects at once.
"""
resp = DataPoint.objects.filter(value='banana').update(value='pineapple')
self.assertEqual(resp, 2)
self.assertEqual(DataPoint.objects.get(name="d2").value, 'pineapple')
def test_update_fk(self):
"""
Foreign key fields can also be updated, although you can only update
the object referred to, not anything inside the related object.
"""
resp = RelatedPoint.objects.filter(name="r1").update(data=self.d0)
self.assertEqual(resp, 1)
resp = RelatedPoint.objects.filter(data__name="d0")
self.assertEqual(list(resp), [self.r1])
def test_update_multiple_fields(self):
"""
Multiple fields can be updated at once
"""
resp = DataPoint.objects.filter(value="apple").update(
value="fruit", another_value="peach")
self.assertEqual(resp, 1)
d = DataPoint.objects.get(name="d0")
self.assertEqual(d.value, 'fruit')
self.assertEqual(d.another_value, 'peach')
def test_update_all(self):
"""
In the rare case you want to update every instance of a model, update()
is also a manager method.
"""
self.assertEqual(DataPoint.objects.update(value='thing'), 3)
resp = DataPoint.objects.values('value').distinct()
self.assertEqual(list(resp), [{'value': 'thing'}])
def test_update_slice_fail(self):
"""
We do not support update on already sliced query sets.
"""
method = DataPoint.objects.all()[:2].update
msg = 'Cannot update a query once a slice has been taken.'
with self.assertRaisesMessage(AssertionError, msg):
method(another_value='another thing')
def test_update_respects_to_field(self):
"""
Update of an FK field which specifies a to_field works.
"""
a_foo = Foo.objects.create(target='aaa')
b_foo = Foo.objects.create(target='bbb')
bar = Bar.objects.create(foo=a_foo)
self.assertEqual(bar.foo_id, a_foo.target)
bar_qs = Bar.objects.filter(pk=bar.pk)
self.assertEqual(bar_qs[0].foo_id, a_foo.target)
bar_qs.update(foo=b_foo)
self.assertEqual(bar_qs[0].foo_id, b_foo.target)
def test_update_m2m_field(self):
msg = (
'Cannot update model field '
'<django.db.models.fields.related.ManyToManyField: m2m_foo> '
'(only non-relations and foreign keys permitted).'
)
with self.assertRaisesMessage(FieldError, msg):
Bar.objects.update(m2m_foo='whatever')
def test_update_annotated_queryset(self):
"""
Update of a queryset that's been annotated.
"""
# Trivial annotated update
qs = DataPoint.objects.annotate(alias=F('value'))
self.assertEqual(qs.update(another_value='foo'), 3)
# Update where annotation is used for filtering
qs = DataPoint.objects.annotate(alias=F('value')).filter(alias='apple')
self.assertEqual(qs.update(another_value='foo'), 1)
# Update where annotation is used in update parameters
qs = DataPoint.objects.annotate(alias=F('value'))
self.assertEqual(qs.update(another_value=F('alias')), 3)
# Update where aggregation annotation is used in update parameters
qs = DataPoint.objects.annotate(max=Max('value'))
with self.assertRaisesMessage(FieldError, 'Aggregate functions are not allowed in this query'):
qs.update(another_value=F('max'))
def test_update_annotated_multi_table_queryset(self):
"""
Update of a queryset that's been annotated and involves multiple tables.
"""
# Trivial annotated update
qs = DataPoint.objects.annotate(related_count=Count('relatedpoint'))
self.assertEqual(qs.update(value='Foo'), 3)
# Update where annotation is used for filtering
qs = DataPoint.objects.annotate(related_count=Count('relatedpoint'))
self.assertEqual(qs.filter(related_count=1).update(value='Foo'), 1)
# Update where annotation is used in update parameters
# #26539 - This isn't forbidden but also doesn't generate proper SQL
# qs = RelatedPoint.objects.annotate(data_name=F('data__name'))
# updated = qs.update(name=F('data_name'))
# self.assertEqual(updated, 1)
# Update where aggregation annotation is used in update parameters
qs = RelatedPoint.objects.annotate(max=Max('data__value'))
with self.assertRaisesMessage(FieldError, 'Aggregate functions are not allowed in this query'):
qs.update(name=F('max'))
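# Hedged illustration (not part of the original tests): a compact sketch of the
# QuerySet.update() behaviour exercised above -- update() issues a single SQL UPDATE,
# returns the number of rows matched, and accepts F() expressions that reference other
# columns of the same table. The helper name is an assumption for illustration only.
def _sketch_queryset_update():
    DataPoint.objects.create(name='s1', value='apple')
    DataPoint.objects.create(name='s2', value='banana')
    # update() returns the number of rows matched by the filter.
    matched = DataPoint.objects.filter(value='apple').update(another_value='fruit')
    assert matched == 1
    # F() lets one column be set from another in the same statement.
    DataPoint.objects.update(another_value=F('value'))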
|
c70d4b0c32744ac2d56dfc41f11a254dc6a0d0254b4ba247913fa090c549bb8f | import sys
import threading
import time
from unittest import skipIf, skipUnless
from django.db import (
DatabaseError, Error, IntegrityError, OperationalError, connection,
transaction,
)
from django.test import (
TransactionTestCase, skipIfDBFeature, skipUnlessDBFeature,
)
from .models import Reporter
@skipUnlessDBFeature('uses_savepoints')
class AtomicTests(TransactionTestCase):
"""
Tests for the atomic decorator and context manager.
The tests make assertions on internal attributes because there isn't a
robust way to ask the database for its current transaction state.
Since the decorator syntax is converted into a context manager (see the
implementation), there are only a few basic tests with the decorator
syntax and the bulk of the tests use the context manager syntax.
"""
available_apps = ['transactions']
def test_decorator_syntax_commit(self):
@transaction.atomic
def make_reporter():
Reporter.objects.create(first_name="Tintin")
make_reporter()
self.assertQuerysetEqual(Reporter.objects.all(), ['<Reporter: Tintin>'])
def test_decorator_syntax_rollback(self):
@transaction.atomic
def make_reporter():
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
with self.assertRaisesMessage(Exception, "Oops"):
make_reporter()
self.assertQuerysetEqual(Reporter.objects.all(), [])
def test_alternate_decorator_syntax_commit(self):
@transaction.atomic()
def make_reporter():
Reporter.objects.create(first_name="Tintin")
make_reporter()
self.assertQuerysetEqual(Reporter.objects.all(), ['<Reporter: Tintin>'])
def test_alternate_decorator_syntax_rollback(self):
@transaction.atomic()
def make_reporter():
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
with self.assertRaisesMessage(Exception, "Oops"):
make_reporter()
self.assertQuerysetEqual(Reporter.objects.all(), [])
def test_commit(self):
with transaction.atomic():
Reporter.objects.create(first_name="Tintin")
self.assertQuerysetEqual(Reporter.objects.all(), ['<Reporter: Tintin>'])
def test_rollback(self):
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic():
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
self.assertQuerysetEqual(Reporter.objects.all(), [])
def test_nested_commit_commit(self):
with transaction.atomic():
Reporter.objects.create(first_name="Tintin")
with transaction.atomic():
Reporter.objects.create(first_name="Archibald", last_name="Haddock")
self.assertQuerysetEqual(
Reporter.objects.all(),
['<Reporter: Archibald Haddock>', '<Reporter: Tintin>']
)
def test_nested_commit_rollback(self):
with transaction.atomic():
Reporter.objects.create(first_name="Tintin")
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic():
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
self.assertQuerysetEqual(Reporter.objects.all(), ['<Reporter: Tintin>'])
def test_nested_rollback_commit(self):
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic():
Reporter.objects.create(last_name="Tintin")
with transaction.atomic():
Reporter.objects.create(last_name="Haddock")
raise Exception("Oops, that's his first name")
self.assertQuerysetEqual(Reporter.objects.all(), [])
def test_nested_rollback_rollback(self):
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic():
Reporter.objects.create(last_name="Tintin")
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic():
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
raise Exception("Oops, that's his first name")
self.assertQuerysetEqual(Reporter.objects.all(), [])
def test_merged_commit_commit(self):
with transaction.atomic():
Reporter.objects.create(first_name="Tintin")
with transaction.atomic(savepoint=False):
Reporter.objects.create(first_name="Archibald", last_name="Haddock")
self.assertQuerysetEqual(
Reporter.objects.all(),
['<Reporter: Archibald Haddock>', '<Reporter: Tintin>']
)
def test_merged_commit_rollback(self):
with transaction.atomic():
Reporter.objects.create(first_name="Tintin")
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic(savepoint=False):
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
# Writes in the outer block are rolled back too.
self.assertQuerysetEqual(Reporter.objects.all(), [])
def test_merged_rollback_commit(self):
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic():
Reporter.objects.create(last_name="Tintin")
with transaction.atomic(savepoint=False):
Reporter.objects.create(last_name="Haddock")
raise Exception("Oops, that's his first name")
self.assertQuerysetEqual(Reporter.objects.all(), [])
def test_merged_rollback_rollback(self):
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic():
Reporter.objects.create(last_name="Tintin")
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic(savepoint=False):
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
raise Exception("Oops, that's his first name")
self.assertQuerysetEqual(Reporter.objects.all(), [])
def test_reuse_commit_commit(self):
atomic = transaction.atomic()
with atomic:
Reporter.objects.create(first_name="Tintin")
with atomic:
Reporter.objects.create(first_name="Archibald", last_name="Haddock")
self.assertQuerysetEqual(Reporter.objects.all(), ['<Reporter: Archibald Haddock>', '<Reporter: Tintin>'])
def test_reuse_commit_rollback(self):
atomic = transaction.atomic()
with atomic:
Reporter.objects.create(first_name="Tintin")
with self.assertRaisesMessage(Exception, "Oops"):
with atomic:
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
self.assertQuerysetEqual(Reporter.objects.all(), ['<Reporter: Tintin>'])
def test_reuse_rollback_commit(self):
atomic = transaction.atomic()
with self.assertRaisesMessage(Exception, "Oops"):
with atomic:
Reporter.objects.create(last_name="Tintin")
with atomic:
Reporter.objects.create(last_name="Haddock")
raise Exception("Oops, that's his first name")
self.assertQuerysetEqual(Reporter.objects.all(), [])
def test_reuse_rollback_rollback(self):
atomic = transaction.atomic()
with self.assertRaisesMessage(Exception, "Oops"):
with atomic:
Reporter.objects.create(last_name="Tintin")
with self.assertRaisesMessage(Exception, "Oops"):
with atomic:
Reporter.objects.create(first_name="Haddock")
raise Exception("Oops, that's his last name")
raise Exception("Oops, that's his first name")
self.assertQuerysetEqual(Reporter.objects.all(), [])
def test_force_rollback(self):
with transaction.atomic():
Reporter.objects.create(first_name="Tintin")
# atomic block shouldn't roll back, but force it.
self.assertFalse(transaction.get_rollback())
transaction.set_rollback(True)
self.assertQuerysetEqual(Reporter.objects.all(), [])
def test_prevent_rollback(self):
with transaction.atomic():
Reporter.objects.create(first_name="Tintin")
sid = transaction.savepoint()
# trigger a database error inside an inner atomic without savepoint
with self.assertRaises(DatabaseError):
with transaction.atomic(savepoint=False):
with connection.cursor() as cursor:
cursor.execute(
"SELECT no_such_col FROM transactions_reporter")
# prevent atomic from rolling back since we're recovering manually
self.assertTrue(transaction.get_rollback())
transaction.set_rollback(False)
transaction.savepoint_rollback(sid)
self.assertQuerysetEqual(Reporter.objects.all(), ['<Reporter: Tintin>'])
class AtomicInsideTransactionTests(AtomicTests):
"""All basic tests for atomic should also pass within an existing transaction."""
def setUp(self):
self.atomic = transaction.atomic()
self.atomic.__enter__()
def tearDown(self):
self.atomic.__exit__(*sys.exc_info())
@skipIfDBFeature('autocommits_when_autocommit_is_off')
class AtomicWithoutAutocommitTests(AtomicTests):
"""All basic tests for atomic should also pass when autocommit is turned off."""
def setUp(self):
transaction.set_autocommit(False)
def tearDown(self):
# The tests access the database after exercising 'atomic', initiating
# a transaction; a rollback is required before restoring autocommit.
transaction.rollback()
transaction.set_autocommit(True)
@skipUnlessDBFeature('uses_savepoints')
class AtomicMergeTests(TransactionTestCase):
"""Test merging transactions with savepoint=False."""
available_apps = ['transactions']
def test_merged_outer_rollback(self):
with transaction.atomic():
Reporter.objects.create(first_name="Tintin")
with transaction.atomic(savepoint=False):
Reporter.objects.create(first_name="Archibald", last_name="Haddock")
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic(savepoint=False):
Reporter.objects.create(first_name="Calculus")
raise Exception("Oops, that's his last name")
# The third insert couldn't be rolled back. Temporarily mark the
# connection as not needing rollback to check it.
self.assertTrue(transaction.get_rollback())
transaction.set_rollback(False)
self.assertEqual(Reporter.objects.count(), 3)
transaction.set_rollback(True)
# The second insert couldn't be rolled back. Temporarily mark the
# connection as not needing rollback to check it.
self.assertTrue(transaction.get_rollback())
transaction.set_rollback(False)
self.assertEqual(Reporter.objects.count(), 3)
transaction.set_rollback(True)
# The first block has a savepoint and must roll back.
self.assertQuerysetEqual(Reporter.objects.all(), [])
def test_merged_inner_savepoint_rollback(self):
with transaction.atomic():
Reporter.objects.create(first_name="Tintin")
with transaction.atomic():
Reporter.objects.create(first_name="Archibald", last_name="Haddock")
with self.assertRaisesMessage(Exception, "Oops"):
with transaction.atomic(savepoint=False):
Reporter.objects.create(first_name="Calculus")
raise Exception("Oops, that's his last name")
# The third insert couldn't be rolled back. Temporarily mark the
# connection as not needing rollback to check it.
self.assertTrue(transaction.get_rollback())
transaction.set_rollback(False)
self.assertEqual(Reporter.objects.count(), 3)
transaction.set_rollback(True)
# The second block has a savepoint and must roll back.
self.assertEqual(Reporter.objects.count(), 1)
self.assertQuerysetEqual(Reporter.objects.all(), ['<Reporter: Tintin>'])
@skipUnlessDBFeature('uses_savepoints')
class AtomicErrorsTests(TransactionTestCase):
available_apps = ['transactions']
forbidden_atomic_msg = "This is forbidden when an 'atomic' block is active."
def test_atomic_prevents_setting_autocommit(self):
autocommit = transaction.get_autocommit()
with transaction.atomic():
with self.assertRaisesMessage(transaction.TransactionManagementError, self.forbidden_atomic_msg):
transaction.set_autocommit(not autocommit)
# Make sure autocommit wasn't changed.
self.assertEqual(connection.autocommit, autocommit)
def test_atomic_prevents_calling_transaction_methods(self):
with transaction.atomic():
with self.assertRaisesMessage(transaction.TransactionManagementError, self.forbidden_atomic_msg):
transaction.commit()
with self.assertRaisesMessage(transaction.TransactionManagementError, self.forbidden_atomic_msg):
transaction.rollback()
def test_atomic_prevents_queries_in_broken_transaction(self):
r1 = Reporter.objects.create(first_name="Archibald", last_name="Haddock")
with transaction.atomic():
r2 = Reporter(first_name="Cuthbert", last_name="Calculus", id=r1.id)
with self.assertRaises(IntegrityError):
r2.save(force_insert=True)
# The transaction is marked as needing rollback.
msg = (
"An error occurred in the current transaction. You can't "
"execute queries until the end of the 'atomic' block."
)
with self.assertRaisesMessage(transaction.TransactionManagementError, msg):
r2.save(force_update=True)
self.assertEqual(Reporter.objects.get(pk=r1.pk).last_name, "Haddock")
@skipIfDBFeature('atomic_transactions')
def test_atomic_allows_queries_after_fixing_transaction(self):
r1 = Reporter.objects.create(first_name="Archibald", last_name="Haddock")
with transaction.atomic():
r2 = Reporter(first_name="Cuthbert", last_name="Calculus", id=r1.id)
with self.assertRaises(IntegrityError):
r2.save(force_insert=True)
# Mark the transaction as no longer needing rollback.
transaction.set_rollback(False)
r2.save(force_update=True)
self.assertEqual(Reporter.objects.get(pk=r1.pk).last_name, "Calculus")
@skipUnlessDBFeature('test_db_allows_multiple_connections')
def test_atomic_prevents_queries_in_broken_transaction_after_client_close(self):
with transaction.atomic():
Reporter.objects.create(first_name="Archibald", last_name="Haddock")
connection.close()
# The connection is closed and the transaction is marked as
# needing rollback. This will raise an InterfaceError on databases
# that refuse to create cursors on closed connections (PostgreSQL)
# and a TransactionManagementError on other databases.
with self.assertRaises(Error):
Reporter.objects.create(first_name="Cuthbert", last_name="Calculus")
# The connection is usable again.
self.assertEqual(Reporter.objects.count(), 0)
@skipUnless(connection.vendor == 'mysql', "MySQL-specific behaviors")
class AtomicMySQLTests(TransactionTestCase):
available_apps = ['transactions']
@skipIf(threading is None, "Test requires threading")
def test_implicit_savepoint_rollback(self):
"""MySQL implicitly rolls back savepoints when it deadlocks (#22291)."""
Reporter.objects.create(id=1)
Reporter.objects.create(id=2)
main_thread_ready = threading.Event()
def other_thread():
try:
with transaction.atomic():
Reporter.objects.select_for_update().get(id=1)
main_thread_ready.wait()
# 1) This line locks... (see below for 2)
Reporter.objects.exclude(id=1).update(id=2)
finally:
# This is the thread-local connection, not the main connection.
connection.close()
other_thread = threading.Thread(target=other_thread)
other_thread.start()
with self.assertRaisesMessage(OperationalError, 'Deadlock found'):
# Double atomic to enter a transaction and create a savepoint.
with transaction.atomic():
with transaction.atomic():
Reporter.objects.select_for_update().get(id=2)
main_thread_ready.set()
# The two threads can't be synchronized with an event here
# because the other thread locks. Sleep for a little while.
time.sleep(1)
# 2) ... and this line deadlocks. (see above for 1)
Reporter.objects.exclude(id=2).update(id=1)
other_thread.join()
class AtomicMiscTests(TransactionTestCase):
available_apps = ['transactions']
def test_wrap_callable_instance(self):
"""#20028 -- Atomic must support wrapping callable instances."""
class Callable:
def __call__(self):
pass
# Must not raise an exception
transaction.atomic(Callable())
@skipUnlessDBFeature('can_release_savepoints')
def test_atomic_does_not_leak_savepoints_on_failure(self):
"""#23074 -- Savepoints must be released after rollback."""
# Expect an error when rolling back a savepoint that doesn't exist.
# Done outside of the transaction block to ensure proper recovery.
with self.assertRaises(Error):
# Start a plain transaction.
with transaction.atomic():
# Swallow the intentional error raised in the sub-transaction.
with self.assertRaisesMessage(Exception, "Oops"):
# Start a sub-transaction with a savepoint.
with transaction.atomic():
sid = connection.savepoint_ids[-1]
raise Exception("Oops")
# This is expected to fail because the savepoint no longer exists.
connection.savepoint_rollback(sid)
def test_mark_for_rollback_on_error_in_transaction(self):
with transaction.atomic(savepoint=False):
# Swallow the intentional error raised.
with self.assertRaisesMessage(Exception, "Oops"):
# Wrap in `mark_for_rollback_on_error` to check if the transaction is marked broken.
with transaction.mark_for_rollback_on_error():
# Ensure that we are still in a good state.
self.assertFalse(transaction.get_rollback())
raise Exception("Oops")
# Ensure that `mark_for_rollback_on_error` marked the transaction as broken …
self.assertTrue(transaction.get_rollback())
# … and further queries fail.
msg = "You can't execute queries until the end of the 'atomic' block."
with self.assertRaisesMessage(transaction.TransactionManagementError, msg):
Reporter.objects.create()
# Transaction errors are reset at the end of a transaction, so this should just work.
Reporter.objects.create()
def test_mark_for_rollback_on_error_in_autocommit(self):
self.assertTrue(transaction.get_autocommit())
# Swallow the intentional error raised.
with self.assertRaisesMessage(Exception, "Oops"):
# Wrap in `mark_for_rollback_on_error` to check if the transaction is marked broken.
with transaction.mark_for_rollback_on_error():
# Ensure that we are still in a good state.
self.assertFalse(transaction.get_connection().needs_rollback)
raise Exception("Oops")
# Ensure that `mark_for_rollback_on_error` did not mark the transaction
# as broken, since we are in autocommit mode …
self.assertFalse(transaction.get_connection().needs_rollback)
# … and further queries work nicely.
Reporter.objects.create()
@skipIfDBFeature('autocommits_when_autocommit_is_off')
class NonAutocommitTests(TransactionTestCase):
available_apps = []
def test_orm_query_after_error_and_rollback(self):
"""
ORM queries are allowed after an error and a rollback in non-autocommit
mode (#27504).
"""
transaction.set_autocommit(False)
r1 = Reporter.objects.create(first_name='Archibald', last_name='Haddock')
r2 = Reporter(first_name='Cuthbert', last_name='Calculus', id=r1.id)
with self.assertRaises(IntegrityError):
r2.save(force_insert=True)
transaction.rollback()
Reporter.objects.last()
def test_orm_query_without_autocommit(self):
"""#24921 -- ORM queries must be possible after set_autocommit(False)."""
transaction.set_autocommit(False)
try:
Reporter.objects.create(first_name="Tintin")
finally:
transaction.rollback()
transaction.set_autocommit(True)
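# Hedged illustration (not part of the original suite): a minimal sketch of the two
# spellings of ``atomic`` that the AtomicTests docstring refers to. The decorator form is
# the context-manager form wrapped around the function body, and an inner block opened
# with savepoint=False merges into the surrounding transaction instead of creating a
# savepoint. The function names are assumptions for illustration only.
@transaction.atomic
def _sketch_atomic_decorator():
    Reporter.objects.create(first_name='Example')
def _sketch_atomic_context_manager():
    with transaction.atomic():  # outer transaction
        Reporter.objects.create(first_name='Outer')
        with transaction.atomic(savepoint=False):  # merged block: no savepoint, shares the outer fate
            Reporter.objects.create(first_name='Inner')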
|
3ef6e4b3bd216f7ae4f31ce7840d40df2b95856f5866c8a716cc2522331779b6 | # Unit tests for cache framework
# Uses whatever cache backend is set in the test settings file.
import copy
import io
import os
import pickle
import re
import shutil
import tempfile
import threading
import time
import unittest
from unittest import mock
from django.conf import settings
from django.core import management, signals
from django.core.cache import (
DEFAULT_CACHE_ALIAS, CacheKeyWarning, cache, caches,
)
from django.core.cache.utils import make_template_fragment_key
from django.db import close_old_connections, connection, connections
from django.http import (
HttpRequest, HttpResponse, HttpResponseNotModified, StreamingHttpResponse,
)
from django.middleware.cache import (
CacheMiddleware, FetchFromCacheMiddleware, UpdateCacheMiddleware,
)
from django.middleware.csrf import CsrfViewMiddleware
from django.template import engines
from django.template.context_processors import csrf
from django.template.response import TemplateResponse
from django.test import (
RequestFactory, SimpleTestCase, TestCase, TransactionTestCase,
override_settings,
)
from django.test.signals import setting_changed
from django.utils import timezone, translation
from django.utils.cache import (
get_cache_key, learn_cache_key, patch_cache_control, patch_vary_headers,
)
from django.views.decorators.cache import cache_control, cache_page
from .models import Poll, expensive_calculation
# functions/classes for complex data type tests
def f():
return 42
class C:
def m(n):
return 24
class Unpicklable:
def __getstate__(self):
raise pickle.PickleError()
KEY_ERRORS_WITH_MEMCACHED_MSG = (
'Cache key contains characters that will cause errors if used with '
'memcached: %r'
)
@override_settings(CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
}
})
class DummyCacheTests(SimpleTestCase):
# The Dummy cache backend doesn't really behave like a test backend,
# so it has its own test case.
def test_simple(self):
"Dummy cache backend ignores cache set calls"
cache.set("key", "value")
self.assertIsNone(cache.get("key"))
def test_add(self):
"Add doesn't do anything in dummy cache backend"
cache.add("addkey1", "value")
result = cache.add("addkey1", "newvalue")
self.assertTrue(result)
self.assertIsNone(cache.get("addkey1"))
def test_non_existent(self):
"Nonexistent keys aren't found in the dummy cache backend"
self.assertIsNone(cache.get("does_not_exist"))
self.assertEqual(cache.get("does_not_exist", "bang!"), "bang!")
def test_get_many(self):
"get_many returns nothing for the dummy cache backend"
cache.set_many({'a': 'a', 'b': 'b', 'c': 'c', 'd': 'd'})
self.assertEqual(cache.get_many(['a', 'c', 'd']), {})
self.assertEqual(cache.get_many(['a', 'b', 'e']), {})
def test_get_many_invalid_key(self):
with self.assertWarns(CacheKeyWarning, msg=KEY_ERRORS_WITH_MEMCACHED_MSG % 'key with spaces'):
cache.get_many(['key with spaces'])
def test_delete(self):
"Cache deletion is transparently ignored on the dummy cache backend"
cache.set_many({'key1': 'spam', 'key2': 'eggs'})
self.assertIsNone(cache.get("key1"))
cache.delete("key1")
self.assertIsNone(cache.get("key1"))
self.assertIsNone(cache.get("key2"))
def test_has_key(self):
"The has_key method doesn't ever return True for the dummy cache backend"
cache.set("hello1", "goodbye1")
self.assertFalse(cache.has_key("hello1"))
self.assertFalse(cache.has_key("goodbye1"))
def test_in(self):
"The in operator doesn't ever return True for the dummy cache backend"
cache.set("hello2", "goodbye2")
self.assertNotIn("hello2", cache)
self.assertNotIn("goodbye2", cache)
def test_incr(self):
"Dummy cache values can't be incremented"
cache.set('answer', 42)
with self.assertRaises(ValueError):
cache.incr('answer')
with self.assertRaises(ValueError):
cache.incr('does_not_exist')
def test_decr(self):
"Dummy cache values can't be decremented"
cache.set('answer', 42)
with self.assertRaises(ValueError):
cache.decr('answer')
with self.assertRaises(ValueError):
cache.decr('does_not_exist')
def test_touch(self):
"""Dummy cache can't do touch()."""
self.assertIs(cache.touch('whatever'), False)
def test_data_types(self):
"All data types are ignored equally by the dummy cache"
stuff = {
'string': 'this is a string',
'int': 42,
'list': [1, 2, 3, 4],
'tuple': (1, 2, 3, 4),
'dict': {'A': 1, 'B': 2},
'function': f,
'class': C,
}
cache.set("stuff", stuff)
self.assertIsNone(cache.get("stuff"))
def test_expiration(self):
"Expiration has no effect on the dummy cache"
cache.set('expire1', 'very quickly', 1)
cache.set('expire2', 'very quickly', 1)
cache.set('expire3', 'very quickly', 1)
time.sleep(2)
self.assertIsNone(cache.get("expire1"))
cache.add("expire2", "newvalue")
self.assertIsNone(cache.get("expire2"))
self.assertFalse(cache.has_key("expire3"))
def test_unicode(self):
"Unicode values are ignored by the dummy cache"
stuff = {
'ascii': 'ascii_value',
'unicode_ascii': 'Iñtërnâtiônàlizætiøn1',
'Iñtërnâtiônàlizætiøn': 'Iñtërnâtiônàlizætiøn2',
'ascii2': {'x': 1}
}
for (key, value) in stuff.items():
with self.subTest(key=key):
cache.set(key, value)
self.assertIsNone(cache.get(key))
def test_set_many(self):
"set_many does nothing for the dummy cache backend"
self.assertEqual(cache.set_many({'a': 1, 'b': 2}), [])
self.assertEqual(cache.set_many({'a': 1, 'b': 2}, timeout=2, version='1'), [])
def test_set_many_invalid_key(self):
with self.assertWarns(CacheKeyWarning, msg=KEY_ERRORS_WITH_MEMCACHED_MSG % 'key with spaces'):
cache.set_many({'key with spaces': 'foo'})
def test_delete_many(self):
"delete_many does nothing for the dummy cache backend"
cache.delete_many(['a', 'b'])
def test_delete_many_invalid_key(self):
with self.assertWarns(CacheKeyWarning, msg=KEY_ERRORS_WITH_MEMCACHED_MSG % 'key with spaces'):
cache.delete_many({'key with spaces': 'foo'})
def test_clear(self):
"clear does nothing for the dummy cache backend"
cache.clear()
def test_incr_version(self):
"Dummy cache versions can't be incremented"
cache.set('answer', 42)
with self.assertRaises(ValueError):
cache.incr_version('answer')
with self.assertRaises(ValueError):
cache.incr_version('does_not_exist')
def test_decr_version(self):
"Dummy cache versions can't be decremented"
cache.set('answer', 42)
with self.assertRaises(ValueError):
cache.decr_version('answer')
with self.assertRaises(ValueError):
cache.decr_version('does_not_exist')
def test_get_or_set(self):
self.assertEqual(cache.get_or_set('mykey', 'default'), 'default')
self.assertEqual(cache.get_or_set('mykey', None), None)
def test_get_or_set_callable(self):
def my_callable():
return 'default'
self.assertEqual(cache.get_or_set('mykey', my_callable), 'default')
self.assertEqual(cache.get_or_set('mykey', my_callable()), 'default')
def custom_key_func(key, key_prefix, version):
"A customized cache key function"
return 'CUSTOM-' + '-'.join([key_prefix, str(version), key])
_caches_setting_base = {
'default': {},
'prefix': {'KEY_PREFIX': 'cacheprefix{}'.format(os.getpid())},
'v2': {'VERSION': 2},
'custom_key': {'KEY_FUNCTION': custom_key_func},
'custom_key2': {'KEY_FUNCTION': 'cache.tests.custom_key_func'},
'cull': {'OPTIONS': {'MAX_ENTRIES': 30}},
'zero_cull': {'OPTIONS': {'CULL_FREQUENCY': 0, 'MAX_ENTRIES': 30}},
}
def caches_setting_for_tests(base=None, exclude=None, **params):
# `base` is used to pull in the memcached config from the original settings,
# `exclude` is a set of cache names denoting which `_caches_setting_base` keys
# should be omitted.
# `params` are test-specific overrides and `_caches_setting_base` is the
# base config for the tests.
# This results in the following search order:
# params -> _caches_setting_base -> base
base = base or {}
exclude = exclude or set()
setting = {k: base.copy() for k in _caches_setting_base if k not in exclude}
for key, cache_params in setting.items():
cache_params.update(_caches_setting_base[key])
cache_params.update(params)
return setting
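# Usage sketch (hedged; the concrete test classes that combine this helper with
# override_settings appear later in the module and are not shown here): every alias in
# _caches_setting_base is pointed at the backend under test, e.g. the local-memory
# backend below, while per-alias settings such as KEY_PREFIX stay layered on top.
# _SKETCH_CACHES is an illustrative name, not something the real tests define.
_SKETCH_CACHES = caches_setting_for_tests(
    BACKEND='django.core.cache.backends.locmem.LocMemCache',
)
# A test class derived from BaseCacheTests would then be decorated with
# override_settings(CACHES=_SKETCH_CACHES).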
class BaseCacheTests:
# A common set of tests to apply to all cache backends
factory = RequestFactory()
def tearDown(self):
cache.clear()
def test_simple(self):
# Simple cache set/get works
cache.set("key", "value")
self.assertEqual(cache.get("key"), "value")
def test_add(self):
# A key can be added to a cache
cache.add("addkey1", "value")
result = cache.add("addkey1", "newvalue")
self.assertFalse(result)
self.assertEqual(cache.get("addkey1"), "value")
def test_prefix(self):
# Keys with the same name must not conflict between caches that share a backend but use different prefixes
cache.set('somekey', 'value')
# should not be set in the prefixed cache
self.assertFalse(caches['prefix'].has_key('somekey'))
caches['prefix'].set('somekey', 'value2')
self.assertEqual(cache.get('somekey'), 'value')
self.assertEqual(caches['prefix'].get('somekey'), 'value2')
def test_non_existent(self):
"""Nonexistent cache keys return as None/default."""
self.assertIsNone(cache.get("does_not_exist"))
self.assertEqual(cache.get("does_not_exist", "bang!"), "bang!")
def test_get_many(self):
# Multiple cache keys can be returned using get_many
cache.set_many({'a': 'a', 'b': 'b', 'c': 'c', 'd': 'd'})
self.assertEqual(cache.get_many(['a', 'c', 'd']), {'a': 'a', 'c': 'c', 'd': 'd'})
self.assertEqual(cache.get_many(['a', 'b', 'e']), {'a': 'a', 'b': 'b'})
self.assertEqual(cache.get_many(iter(['a', 'b', 'e'])), {'a': 'a', 'b': 'b'})
def test_delete(self):
# Cache keys can be deleted
cache.set_many({'key1': 'spam', 'key2': 'eggs'})
self.assertEqual(cache.get("key1"), "spam")
cache.delete("key1")
self.assertIsNone(cache.get("key1"))
self.assertEqual(cache.get("key2"), "eggs")
def test_has_key(self):
# The cache can be inspected for cache keys
cache.set("hello1", "goodbye1")
self.assertTrue(cache.has_key("hello1"))
self.assertFalse(cache.has_key("goodbye1"))
cache.set("no_expiry", "here", None)
self.assertTrue(cache.has_key("no_expiry"))
def test_in(self):
# The in operator can be used to inspect cache contents
cache.set("hello2", "goodbye2")
self.assertIn("hello2", cache)
self.assertNotIn("goodbye2", cache)
def test_incr(self):
# Cache values can be incremented
cache.set('answer', 41)
self.assertEqual(cache.incr('answer'), 42)
self.assertEqual(cache.get('answer'), 42)
self.assertEqual(cache.incr('answer', 10), 52)
self.assertEqual(cache.get('answer'), 52)
self.assertEqual(cache.incr('answer', -10), 42)
with self.assertRaises(ValueError):
cache.incr('does_not_exist')
def test_decr(self):
# Cache values can be decremented
cache.set('answer', 43)
self.assertEqual(cache.decr('answer'), 42)
self.assertEqual(cache.get('answer'), 42)
self.assertEqual(cache.decr('answer', 10), 32)
self.assertEqual(cache.get('answer'), 32)
self.assertEqual(cache.decr('answer', -10), 42)
with self.assertRaises(ValueError):
cache.decr('does_not_exist')
def test_close(self):
self.assertTrue(hasattr(cache, 'close'))
cache.close()
def test_data_types(self):
# Many different data types can be cached
stuff = {
'string': 'this is a string',
'int': 42,
'list': [1, 2, 3, 4],
'tuple': (1, 2, 3, 4),
'dict': {'A': 1, 'B': 2},
'function': f,
'class': C,
}
cache.set("stuff", stuff)
self.assertEqual(cache.get("stuff"), stuff)
def test_cache_read_for_model_instance(self):
# Don't want fields with callable as default to be called on cache read
expensive_calculation.num_runs = 0
Poll.objects.all().delete()
my_poll = Poll.objects.create(question="Well?")
self.assertEqual(Poll.objects.count(), 1)
pub_date = my_poll.pub_date
cache.set('question', my_poll)
cached_poll = cache.get('question')
self.assertEqual(cached_poll.pub_date, pub_date)
# We only want the default expensive calculation run once
self.assertEqual(expensive_calculation.num_runs, 1)
def test_cache_write_for_model_instance_with_deferred(self):
# Don't want fields with callable as default to be called on cache write
expensive_calculation.num_runs = 0
Poll.objects.all().delete()
Poll.objects.create(question="What?")
self.assertEqual(expensive_calculation.num_runs, 1)
defer_qs = Poll.objects.all().defer('question')
self.assertEqual(defer_qs.count(), 1)
self.assertEqual(expensive_calculation.num_runs, 1)
cache.set('deferred_queryset', defer_qs)
# cache set should not re-evaluate default functions
self.assertEqual(expensive_calculation.num_runs, 1)
def test_cache_read_for_model_instance_with_deferred(self):
# Don't want fields with callable as default to be called on cache read
expensive_calculation.num_runs = 0
Poll.objects.all().delete()
Poll.objects.create(question="What?")
self.assertEqual(expensive_calculation.num_runs, 1)
defer_qs = Poll.objects.all().defer('question')
self.assertEqual(defer_qs.count(), 1)
cache.set('deferred_queryset', defer_qs)
self.assertEqual(expensive_calculation.num_runs, 1)
runs_before_cache_read = expensive_calculation.num_runs
cache.get('deferred_queryset')
# We only want the default expensive calculation run on creation and set
self.assertEqual(expensive_calculation.num_runs, runs_before_cache_read)
def test_expiration(self):
# Cache values can be set to expire
cache.set('expire1', 'very quickly', 1)
cache.set('expire2', 'very quickly', 1)
cache.set('expire3', 'very quickly', 1)
time.sleep(2)
self.assertIsNone(cache.get("expire1"))
cache.add("expire2", "newvalue")
self.assertEqual(cache.get("expire2"), "newvalue")
self.assertFalse(cache.has_key("expire3"))
def test_touch(self):
# cache.touch() updates the timeout.
cache.set('expire1', 'very quickly', timeout=1)
self.assertIs(cache.touch('expire1', timeout=4), True)
time.sleep(2)
self.assertTrue(cache.has_key('expire1'))
time.sleep(3)
self.assertFalse(cache.has_key('expire1'))
# cache.touch() works without the timeout argument.
cache.set('expire1', 'very quickly', timeout=1)
self.assertIs(cache.touch('expire1'), True)
time.sleep(2)
self.assertTrue(cache.has_key('expire1'))
self.assertIs(cache.touch('nonexistent'), False)
def test_unicode(self):
# Unicode values can be cached
stuff = {
'ascii': 'ascii_value',
'unicode_ascii': 'Iñtërnâtiônàlizætiøn1',
'Iñtërnâtiônàlizætiøn': 'Iñtërnâtiônàlizætiøn2',
'ascii2': {'x': 1}
}
# Test `set`
for (key, value) in stuff.items():
with self.subTest(key=key):
cache.set(key, value)
self.assertEqual(cache.get(key), value)
# Test `add`
for (key, value) in stuff.items():
with self.subTest(key=key):
cache.delete(key)
cache.add(key, value)
self.assertEqual(cache.get(key), value)
# Test `set_many`
for (key, value) in stuff.items():
cache.delete(key)
cache.set_many(stuff)
for (key, value) in stuff.items():
with self.subTest(key=key):
self.assertEqual(cache.get(key), value)
def test_binary_string(self):
# Binary strings should be cacheable
from zlib import compress, decompress
value = 'value_to_be_compressed'
compressed_value = compress(value.encode())
# Test set
cache.set('binary1', compressed_value)
compressed_result = cache.get('binary1')
self.assertEqual(compressed_value, compressed_result)
self.assertEqual(value, decompress(compressed_result).decode())
# Test add
cache.add('binary1-add', compressed_value)
compressed_result = cache.get('binary1-add')
self.assertEqual(compressed_value, compressed_result)
self.assertEqual(value, decompress(compressed_result).decode())
# Test set_many
cache.set_many({'binary1-set_many': compressed_value})
compressed_result = cache.get('binary1-set_many')
self.assertEqual(compressed_value, compressed_result)
self.assertEqual(value, decompress(compressed_result).decode())
def test_set_many(self):
# Multiple keys can be set using set_many
cache.set_many({"key1": "spam", "key2": "eggs"})
self.assertEqual(cache.get("key1"), "spam")
self.assertEqual(cache.get("key2"), "eggs")
def test_set_many_returns_empty_list_on_success(self):
"""set_many() returns an empty list when all keys are inserted."""
failing_keys = cache.set_many({'key1': 'spam', 'key2': 'eggs'})
self.assertEqual(failing_keys, [])
def test_set_many_expiration(self):
# set_many takes a second ``timeout`` parameter
cache.set_many({"key1": "spam", "key2": "eggs"}, 1)
time.sleep(2)
self.assertIsNone(cache.get("key1"))
self.assertIsNone(cache.get("key2"))
def test_delete_many(self):
# Multiple keys can be deleted using delete_many
cache.set_many({'key1': 'spam', 'key2': 'eggs', 'key3': 'ham'})
cache.delete_many(["key1", "key2"])
self.assertIsNone(cache.get("key1"))
self.assertIsNone(cache.get("key2"))
self.assertEqual(cache.get("key3"), "ham")
def test_clear(self):
# The cache can be emptied using clear
cache.set_many({'key1': 'spam', 'key2': 'eggs'})
cache.clear()
self.assertIsNone(cache.get("key1"))
self.assertIsNone(cache.get("key2"))
def test_long_timeout(self):
"""
Follow memcached's convention where a timeout greater than 30 days is
treated as an absolute expiration timestamp instead of a relative
offset (#12399).
"""
cache.set('key1', 'eggs', 60 * 60 * 24 * 30 + 1) # 30 days + 1 second
self.assertEqual(cache.get('key1'), 'eggs')
cache.add('key2', 'ham', 60 * 60 * 24 * 30 + 1)
self.assertEqual(cache.get('key2'), 'ham')
cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, 60 * 60 * 24 * 30 + 1)
self.assertEqual(cache.get('key3'), 'sausage')
self.assertEqual(cache.get('key4'), 'lobster bisque')
def test_forever_timeout(self):
"""
Passing None as the timeout results in a value that is cached forever
"""
cache.set('key1', 'eggs', None)
self.assertEqual(cache.get('key1'), 'eggs')
cache.add('key2', 'ham', None)
self.assertEqual(cache.get('key2'), 'ham')
added = cache.add('key1', 'new eggs', None)
self.assertIs(added, False)
self.assertEqual(cache.get('key1'), 'eggs')
cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, None)
self.assertEqual(cache.get('key3'), 'sausage')
self.assertEqual(cache.get('key4'), 'lobster bisque')
cache.set('key5', 'belgian fries', timeout=1)
cache.touch('key5', timeout=None)
time.sleep(2)
self.assertEqual(cache.get('key5'), 'belgian fries')
def test_zero_timeout(self):
"""
Passing zero as the timeout results in a value that is not cached
"""
cache.set('key1', 'eggs', 0)
self.assertIsNone(cache.get('key1'))
cache.add('key2', 'ham', 0)
self.assertIsNone(cache.get('key2'))
cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, 0)
self.assertIsNone(cache.get('key3'))
self.assertIsNone(cache.get('key4'))
cache.set('key5', 'belgian fries', timeout=5)
cache.touch('key5', timeout=0)
self.assertIsNone(cache.get('key5'))
def test_float_timeout(self):
# Make sure a timeout given as a float doesn't crash anything.
cache.set("key1", "spam", 100.2)
self.assertEqual(cache.get("key1"), "spam")
def _perform_cull_test(self, cull_cache, initial_count, final_count):
# Create initial cache key entries. This will overflow the cache,
# causing a cull.
for i in range(1, initial_count):
cull_cache.set('cull%d' % i, 'value', 1000)
count = 0
# Count how many keys are left in the cache.
for i in range(1, initial_count):
if cull_cache.has_key('cull%d' % i):
count += 1
self.assertEqual(count, final_count)
def test_cull(self):
self._perform_cull_test(caches['cull'], 50, 29)
def test_zero_cull(self):
self._perform_cull_test(caches['zero_cull'], 50, 19)
def _perform_invalid_key_test(self, key, expected_warning):
"""
All the builtin backends (except memcached, see below) should warn on
keys that would be refused by memcached. This encourages portable
caching code without making it too difficult to use production backends
with more liberal key rules. Refs #6447.
"""
# Mimic a custom ``make_key`` method being defined, since the default
# key function never triggers the warnings below.
def func(key, *args):
return key
old_func = cache.key_func
cache.key_func = func
try:
with self.assertWarnsMessage(CacheKeyWarning, expected_warning):
cache.set(key, 'value')
finally:
cache.key_func = old_func
def test_invalid_key_characters(self):
# memcached doesn't allow whitespace or control characters in keys.
key = 'key with spaces and 清'
self._perform_invalid_key_test(key, KEY_ERRORS_WITH_MEMCACHED_MSG % key)
def test_invalid_key_length(self):
# memcached limits key length to 250.
key = ('a' * 250) + '清'
expected_warning = (
'Cache key will cause errors if used with memcached: '
'%r (longer than %s)' % (key, 250)
)
self._perform_invalid_key_test(key, expected_warning)
def test_cache_versioning_get_set(self):
# set, using default version = 1
cache.set('answer1', 42)
self.assertEqual(cache.get('answer1'), 42)
self.assertEqual(cache.get('answer1', version=1), 42)
self.assertIsNone(cache.get('answer1', version=2))
self.assertIsNone(caches['v2'].get('answer1'))
self.assertEqual(caches['v2'].get('answer1', version=1), 42)
self.assertIsNone(caches['v2'].get('answer1', version=2))
# set, default version = 1, but manually override version = 2
cache.set('answer2', 42, version=2)
self.assertIsNone(cache.get('answer2'))
self.assertIsNone(cache.get('answer2', version=1))
self.assertEqual(cache.get('answer2', version=2), 42)
self.assertEqual(caches['v2'].get('answer2'), 42)
self.assertIsNone(caches['v2'].get('answer2', version=1))
self.assertEqual(caches['v2'].get('answer2', version=2), 42)
# v2 set, using default version = 2
caches['v2'].set('answer3', 42)
self.assertIsNone(cache.get('answer3'))
self.assertIsNone(cache.get('answer3', version=1))
self.assertEqual(cache.get('answer3', version=2), 42)
self.assertEqual(caches['v2'].get('answer3'), 42)
self.assertIsNone(caches['v2'].get('answer3', version=1))
self.assertEqual(caches['v2'].get('answer3', version=2), 42)
# v2 set, default version = 2, but manually override version = 1
caches['v2'].set('answer4', 42, version=1)
self.assertEqual(cache.get('answer4'), 42)
self.assertEqual(cache.get('answer4', version=1), 42)
self.assertIsNone(cache.get('answer4', version=2))
self.assertIsNone(caches['v2'].get('answer4'))
self.assertEqual(caches['v2'].get('answer4', version=1), 42)
self.assertIsNone(caches['v2'].get('answer4', version=2))
def test_cache_versioning_add(self):
# add, default version = 1, but manually override version = 2
cache.add('answer1', 42, version=2)
self.assertIsNone(cache.get('answer1', version=1))
self.assertEqual(cache.get('answer1', version=2), 42)
cache.add('answer1', 37, version=2)
self.assertIsNone(cache.get('answer1', version=1))
self.assertEqual(cache.get('answer1', version=2), 42)
cache.add('answer1', 37, version=1)
self.assertEqual(cache.get('answer1', version=1), 37)
self.assertEqual(cache.get('answer1', version=2), 42)
# v2 add, using default version = 2
caches['v2'].add('answer2', 42)
self.assertIsNone(cache.get('answer2', version=1))
self.assertEqual(cache.get('answer2', version=2), 42)
caches['v2'].add('answer2', 37)
self.assertIsNone(cache.get('answer2', version=1))
self.assertEqual(cache.get('answer2', version=2), 42)
caches['v2'].add('answer2', 37, version=1)
self.assertEqual(cache.get('answer2', version=1), 37)
self.assertEqual(cache.get('answer2', version=2), 42)
# v2 add, default version = 2, but manually override version = 1
caches['v2'].add('answer3', 42, version=1)
self.assertEqual(cache.get('answer3', version=1), 42)
self.assertIsNone(cache.get('answer3', version=2))
caches['v2'].add('answer3', 37, version=1)
self.assertEqual(cache.get('answer3', version=1), 42)
self.assertIsNone(cache.get('answer3', version=2))
caches['v2'].add('answer3', 37)
self.assertEqual(cache.get('answer3', version=1), 42)
self.assertEqual(cache.get('answer3', version=2), 37)
def test_cache_versioning_has_key(self):
cache.set('answer1', 42)
# has_key
self.assertTrue(cache.has_key('answer1'))
self.assertTrue(cache.has_key('answer1', version=1))
self.assertFalse(cache.has_key('answer1', version=2))
self.assertFalse(caches['v2'].has_key('answer1'))
self.assertTrue(caches['v2'].has_key('answer1', version=1))
self.assertFalse(caches['v2'].has_key('answer1', version=2))
def test_cache_versioning_delete(self):
cache.set('answer1', 37, version=1)
cache.set('answer1', 42, version=2)
cache.delete('answer1')
self.assertIsNone(cache.get('answer1', version=1))
self.assertEqual(cache.get('answer1', version=2), 42)
cache.set('answer2', 37, version=1)
cache.set('answer2', 42, version=2)
cache.delete('answer2', version=2)
self.assertEqual(cache.get('answer2', version=1), 37)
self.assertIsNone(cache.get('answer2', version=2))
cache.set('answer3', 37, version=1)
cache.set('answer3', 42, version=2)
caches['v2'].delete('answer3')
self.assertEqual(cache.get('answer3', version=1), 37)
self.assertIsNone(cache.get('answer3', version=2))
cache.set('answer4', 37, version=1)
cache.set('answer4', 42, version=2)
caches['v2'].delete('answer4', version=1)
self.assertIsNone(cache.get('answer4', version=1))
self.assertEqual(cache.get('answer4', version=2), 42)
def test_cache_versioning_incr_decr(self):
cache.set('answer1', 37, version=1)
cache.set('answer1', 42, version=2)
cache.incr('answer1')
self.assertEqual(cache.get('answer1', version=1), 38)
self.assertEqual(cache.get('answer1', version=2), 42)
cache.decr('answer1')
self.assertEqual(cache.get('answer1', version=1), 37)
self.assertEqual(cache.get('answer1', version=2), 42)
cache.set('answer2', 37, version=1)
cache.set('answer2', 42, version=2)
cache.incr('answer2', version=2)
self.assertEqual(cache.get('answer2', version=1), 37)
self.assertEqual(cache.get('answer2', version=2), 43)
cache.decr('answer2', version=2)
self.assertEqual(cache.get('answer2', version=1), 37)
self.assertEqual(cache.get('answer2', version=2), 42)
cache.set('answer3', 37, version=1)
cache.set('answer3', 42, version=2)
caches['v2'].incr('answer3')
self.assertEqual(cache.get('answer3', version=1), 37)
self.assertEqual(cache.get('answer3', version=2), 43)
caches['v2'].decr('answer3')
self.assertEqual(cache.get('answer3', version=1), 37)
self.assertEqual(cache.get('answer3', version=2), 42)
cache.set('answer4', 37, version=1)
cache.set('answer4', 42, version=2)
caches['v2'].incr('answer4', version=1)
self.assertEqual(cache.get('answer4', version=1), 38)
self.assertEqual(cache.get('answer4', version=2), 42)
caches['v2'].decr('answer4', version=1)
self.assertEqual(cache.get('answer4', version=1), 37)
self.assertEqual(cache.get('answer4', version=2), 42)
def test_cache_versioning_get_set_many(self):
# set, using default version = 1
cache.set_many({'ford1': 37, 'arthur1': 42})
self.assertEqual(cache.get_many(['ford1', 'arthur1']), {'ford1': 37, 'arthur1': 42})
self.assertEqual(cache.get_many(['ford1', 'arthur1'], version=1), {'ford1': 37, 'arthur1': 42})
self.assertEqual(cache.get_many(['ford1', 'arthur1'], version=2), {})
self.assertEqual(caches['v2'].get_many(['ford1', 'arthur1']), {})
self.assertEqual(caches['v2'].get_many(['ford1', 'arthur1'], version=1), {'ford1': 37, 'arthur1': 42})
self.assertEqual(caches['v2'].get_many(['ford1', 'arthur1'], version=2), {})
# set, default version = 1, but manually override version = 2
cache.set_many({'ford2': 37, 'arthur2': 42}, version=2)
self.assertEqual(cache.get_many(['ford2', 'arthur2']), {})
self.assertEqual(cache.get_many(['ford2', 'arthur2'], version=1), {})
self.assertEqual(cache.get_many(['ford2', 'arthur2'], version=2), {'ford2': 37, 'arthur2': 42})
self.assertEqual(caches['v2'].get_many(['ford2', 'arthur2']), {'ford2': 37, 'arthur2': 42})
self.assertEqual(caches['v2'].get_many(['ford2', 'arthur2'], version=1), {})
self.assertEqual(caches['v2'].get_many(['ford2', 'arthur2'], version=2), {'ford2': 37, 'arthur2': 42})
# v2 set, using default version = 2
caches['v2'].set_many({'ford3': 37, 'arthur3': 42})
self.assertEqual(cache.get_many(['ford3', 'arthur3']), {})
self.assertEqual(cache.get_many(['ford3', 'arthur3'], version=1), {})
self.assertEqual(cache.get_many(['ford3', 'arthur3'], version=2), {'ford3': 37, 'arthur3': 42})
self.assertEqual(caches['v2'].get_many(['ford3', 'arthur3']), {'ford3': 37, 'arthur3': 42})
self.assertEqual(caches['v2'].get_many(['ford3', 'arthur3'], version=1), {})
self.assertEqual(caches['v2'].get_many(['ford3', 'arthur3'], version=2), {'ford3': 37, 'arthur3': 42})
# v2 set, default version = 2, but manually override version = 1
caches['v2'].set_many({'ford4': 37, 'arthur4': 42}, version=1)
self.assertEqual(cache.get_many(['ford4', 'arthur4']), {'ford4': 37, 'arthur4': 42})
self.assertEqual(cache.get_many(['ford4', 'arthur4'], version=1), {'ford4': 37, 'arthur4': 42})
self.assertEqual(cache.get_many(['ford4', 'arthur4'], version=2), {})
self.assertEqual(caches['v2'].get_many(['ford4', 'arthur4']), {})
self.assertEqual(caches['v2'].get_many(['ford4', 'arthur4'], version=1), {'ford4': 37, 'arthur4': 42})
self.assertEqual(caches['v2'].get_many(['ford4', 'arthur4'], version=2), {})
def test_incr_version(self):
cache.set('answer', 42, version=2)
self.assertIsNone(cache.get('answer'))
self.assertIsNone(cache.get('answer', version=1))
self.assertEqual(cache.get('answer', version=2), 42)
self.assertIsNone(cache.get('answer', version=3))
self.assertEqual(cache.incr_version('answer', version=2), 3)
self.assertIsNone(cache.get('answer'))
self.assertIsNone(cache.get('answer', version=1))
self.assertIsNone(cache.get('answer', version=2))
self.assertEqual(cache.get('answer', version=3), 42)
caches['v2'].set('answer2', 42)
self.assertEqual(caches['v2'].get('answer2'), 42)
self.assertIsNone(caches['v2'].get('answer2', version=1))
self.assertEqual(caches['v2'].get('answer2', version=2), 42)
self.assertIsNone(caches['v2'].get('answer2', version=3))
self.assertEqual(caches['v2'].incr_version('answer2'), 3)
self.assertIsNone(caches['v2'].get('answer2'))
self.assertIsNone(caches['v2'].get('answer2', version=1))
self.assertIsNone(caches['v2'].get('answer2', version=2))
self.assertEqual(caches['v2'].get('answer2', version=3), 42)
with self.assertRaises(ValueError):
cache.incr_version('does_not_exist')
def test_decr_version(self):
cache.set('answer', 42, version=2)
self.assertIsNone(cache.get('answer'))
self.assertIsNone(cache.get('answer', version=1))
self.assertEqual(cache.get('answer', version=2), 42)
self.assertEqual(cache.decr_version('answer', version=2), 1)
self.assertEqual(cache.get('answer'), 42)
self.assertEqual(cache.get('answer', version=1), 42)
self.assertIsNone(cache.get('answer', version=2))
caches['v2'].set('answer2', 42)
self.assertEqual(caches['v2'].get('answer2'), 42)
self.assertIsNone(caches['v2'].get('answer2', version=1))
self.assertEqual(caches['v2'].get('answer2', version=2), 42)
self.assertEqual(caches['v2'].decr_version('answer2'), 1)
self.assertIsNone(caches['v2'].get('answer2'))
self.assertEqual(caches['v2'].get('answer2', version=1), 42)
self.assertIsNone(caches['v2'].get('answer2', version=2))
with self.assertRaises(ValueError):
cache.decr_version('does_not_exist', version=2)
def test_custom_key_func(self):
# Two caches with different key functions aren't visible to each other
cache.set('answer1', 42)
self.assertEqual(cache.get('answer1'), 42)
self.assertIsNone(caches['custom_key'].get('answer1'))
self.assertIsNone(caches['custom_key2'].get('answer1'))
caches['custom_key'].set('answer2', 42)
self.assertIsNone(cache.get('answer2'))
self.assertEqual(caches['custom_key'].get('answer2'), 42)
self.assertEqual(caches['custom_key2'].get('answer2'), 42)
def test_cache_write_unpicklable_object(self):
update_middleware = UpdateCacheMiddleware()
update_middleware.cache = cache
fetch_middleware = FetchFromCacheMiddleware()
fetch_middleware.cache = cache
request = self.factory.get('/cache/test')
request._cache_update_cache = True
get_cache_data = FetchFromCacheMiddleware().process_request(request)
self.assertIsNone(get_cache_data)
response = HttpResponse()
content = 'Testing cookie serialization.'
response.content = content
response.set_cookie('foo', 'bar')
update_middleware.process_response(request, response)
get_cache_data = fetch_middleware.process_request(request)
self.assertIsNotNone(get_cache_data)
self.assertEqual(get_cache_data.content, content.encode())
self.assertEqual(get_cache_data.cookies, response.cookies)
update_middleware.process_response(request, get_cache_data)
get_cache_data = fetch_middleware.process_request(request)
self.assertIsNotNone(get_cache_data)
self.assertEqual(get_cache_data.content, content.encode())
self.assertEqual(get_cache_data.cookies, response.cookies)
def test_add_fail_on_pickleerror(self):
# Shouldn't fail silently if trying to cache an unpicklable type.
with self.assertRaises(pickle.PickleError):
cache.add('unpicklable', Unpicklable())
def test_set_fail_on_pickleerror(self):
with self.assertRaises(pickle.PickleError):
cache.set('unpicklable', Unpicklable())
def test_get_or_set(self):
self.assertIsNone(cache.get('projector'))
self.assertEqual(cache.get_or_set('projector', 42), 42)
self.assertEqual(cache.get('projector'), 42)
self.assertEqual(cache.get_or_set('null', None), None)
def test_get_or_set_callable(self):
def my_callable():
return 'value'
self.assertEqual(cache.get_or_set('mykey', my_callable), 'value')
self.assertEqual(cache.get_or_set('mykey', my_callable()), 'value')
def test_get_or_set_callable_returning_none(self):
self.assertIsNone(cache.get_or_set('mykey', lambda: None))
# Previous get_or_set() doesn't store None in the cache.
self.assertEqual(cache.get('mykey', 'default'), 'default')
def test_get_or_set_version(self):
msg = "get_or_set() missing 1 required positional argument: 'default'"
cache.get_or_set('brian', 1979, version=2)
with self.assertRaisesMessage(TypeError, msg):
cache.get_or_set('brian')
with self.assertRaisesMessage(TypeError, msg):
cache.get_or_set('brian', version=1)
self.assertIsNone(cache.get('brian', version=1))
self.assertEqual(cache.get_or_set('brian', 42, version=1), 42)
self.assertEqual(cache.get_or_set('brian', 1979, version=2), 1979)
self.assertIsNone(cache.get('brian', version=3))
def test_get_or_set_racing(self):
with mock.patch('%s.%s' % (settings.CACHES['default']['BACKEND'], 'add')) as cache_add:
# Simulate cache.add() failing to add a value. In that case, the
# default value should be returned.
cache_add.return_value = False
self.assertEqual(cache.get_or_set('key', 'default'), 'default')
@override_settings(CACHES=caches_setting_for_tests(
BACKEND='django.core.cache.backends.db.DatabaseCache',
# Spaces are used in the table name to ensure quoting/escaping is working
LOCATION='test cache table'
))
class DBCacheTests(BaseCacheTests, TransactionTestCase):
available_apps = ['cache']
def setUp(self):
        # The super call needs to happen first for the settings override.
super().setUp()
self.create_table()
def tearDown(self):
# The super call needs to happen first because it uses the database.
super().tearDown()
self.drop_table()
def create_table(self):
management.call_command('createcachetable', verbosity=0)
def drop_table(self):
with connection.cursor() as cursor:
table_name = connection.ops.quote_name('test cache table')
cursor.execute('DROP TABLE %s' % table_name)
def test_get_many_num_queries(self):
cache.set_many({'a': 1, 'b': 2})
cache.set('expired', 'expired', 0.01)
with self.assertNumQueries(1):
self.assertEqual(cache.get_many(['a', 'b']), {'a': 1, 'b': 2})
time.sleep(0.02)
with self.assertNumQueries(2):
self.assertEqual(cache.get_many(['a', 'b', 'expired']), {'a': 1, 'b': 2})
def test_delete_many_num_queries(self):
cache.set_many({'a': 1, 'b': 2, 'c': 3})
with self.assertNumQueries(1):
cache.delete_many(['a', 'b', 'c'])
def test_zero_cull(self):
self._perform_cull_test(caches['zero_cull'], 50, 18)
def test_second_call_doesnt_crash(self):
out = io.StringIO()
management.call_command('createcachetable', stdout=out)
self.assertEqual(out.getvalue(), "Cache table 'test cache table' already exists.\n" * len(settings.CACHES))
@override_settings(CACHES=caches_setting_for_tests(
BACKEND='django.core.cache.backends.db.DatabaseCache',
# Use another table name to avoid the 'table already exists' message.
LOCATION='createcachetable_dry_run_mode'
))
def test_createcachetable_dry_run_mode(self):
out = io.StringIO()
management.call_command('createcachetable', dry_run=True, stdout=out)
output = out.getvalue()
self.assertTrue(output.startswith("CREATE TABLE"))
def test_createcachetable_with_table_argument(self):
"""
Delete and recreate cache table with legacy behavior (explicitly
specifying the table name).
"""
self.drop_table()
out = io.StringIO()
management.call_command(
'createcachetable',
'test cache table',
verbosity=2,
stdout=out,
)
self.assertEqual(out.getvalue(), "Cache table 'test cache table' created.\n")
@override_settings(USE_TZ=True)
class DBCacheWithTimeZoneTests(DBCacheTests):
pass
class DBCacheRouter:
"""A router that puts the cache table on the 'other' database."""
def db_for_read(self, model, **hints):
if model._meta.app_label == 'django_cache':
return 'other'
return None
def db_for_write(self, model, **hints):
if model._meta.app_label == 'django_cache':
return 'other'
return None
def allow_migrate(self, db, app_label, **hints):
if app_label == 'django_cache':
return db == 'other'
return None
@override_settings(
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.db.DatabaseCache',
'LOCATION': 'my_cache_table',
},
},
)
class CreateCacheTableForDBCacheTests(TestCase):
databases = {'default', 'other'}
@override_settings(DATABASE_ROUTERS=[DBCacheRouter()])
def test_createcachetable_observes_database_router(self):
# cache table should not be created on 'default'
with self.assertNumQueries(0, using='default'):
management.call_command('createcachetable', database='default', verbosity=0)
# cache table should be created on 'other'
# Queries:
# 1: check table doesn't already exist
# 2: create savepoint (if transactional DDL is supported)
# 3: create the table
# 4: create the index
# 5: release savepoint (if transactional DDL is supported)
num = 5 if connections['other'].features.can_rollback_ddl else 3
with self.assertNumQueries(num, using='other'):
management.call_command('createcachetable', database='other', verbosity=0)
class PicklingSideEffect:
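    """
    Helper whose __getstate__ records whether the cache's lock is held at
    pickling time, so tests can assert that pickling happens outside the lock.
    """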
def __init__(self, cache):
self.cache = cache
self.locked = False
def __getstate__(self):
self.locked = self.cache._lock.locked()
return {}
limit_locmem_entries = override_settings(CACHES=caches_setting_for_tests(
BACKEND='django.core.cache.backends.locmem.LocMemCache',
OPTIONS={'MAX_ENTRIES': 9},
))
@override_settings(CACHES=caches_setting_for_tests(
BACKEND='django.core.cache.backends.locmem.LocMemCache',
))
class LocMemCacheTests(BaseCacheTests, TestCase):
def setUp(self):
super().setUp()
# LocMem requires a hack to make the other caches
# share a data store with the 'normal' cache.
caches['prefix']._cache = cache._cache
caches['prefix']._expire_info = cache._expire_info
caches['v2']._cache = cache._cache
caches['v2']._expire_info = cache._expire_info
caches['custom_key']._cache = cache._cache
caches['custom_key']._expire_info = cache._expire_info
caches['custom_key2']._cache = cache._cache
caches['custom_key2']._expire_info = cache._expire_info
@override_settings(CACHES={
'default': {'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'},
'other': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'other'
},
})
def test_multiple_caches(self):
"Multiple locmem caches are isolated"
cache.set('value', 42)
self.assertEqual(caches['default'].get('value'), 42)
self.assertIsNone(caches['other'].get('value'))
def test_locking_on_pickle(self):
"""#20613/#18541 -- Ensures pickling is done outside of the lock."""
bad_obj = PicklingSideEffect(cache)
cache.set('set', bad_obj)
self.assertFalse(bad_obj.locked, "Cache was locked during pickling")
cache.add('add', bad_obj)
self.assertFalse(bad_obj.locked, "Cache was locked during pickling")
def test_incr_decr_timeout(self):
"""incr/decr does not modify expiry time (matches memcached behavior)"""
key = 'value'
_key = cache.make_key(key)
cache.set(key, 1, timeout=cache.default_timeout * 10)
expire = cache._expire_info[_key]
cache.incr(key)
self.assertEqual(expire, cache._expire_info[_key])
cache.decr(key)
self.assertEqual(expire, cache._expire_info[_key])
@limit_locmem_entries
def test_lru_get(self):
"""get() moves cache keys."""
for key in range(9):
cache.set(key, key, timeout=None)
for key in range(6):
self.assertEqual(cache.get(key), key)
cache.set(9, 9, timeout=None)
for key in range(6):
self.assertEqual(cache.get(key), key)
for key in range(6, 9):
self.assertIsNone(cache.get(key))
self.assertEqual(cache.get(9), 9)
@limit_locmem_entries
def test_lru_set(self):
"""set() moves cache keys."""
for key in range(9):
cache.set(key, key, timeout=None)
for key in range(3, 9):
cache.set(key, key, timeout=None)
cache.set(9, 9, timeout=None)
for key in range(3, 10):
self.assertEqual(cache.get(key), key)
for key in range(3):
self.assertIsNone(cache.get(key))
@limit_locmem_entries
def test_lru_incr(self):
"""incr() moves cache keys."""
for key in range(9):
cache.set(key, key, timeout=None)
for key in range(6):
cache.incr(key)
cache.set(9, 9, timeout=None)
for key in range(6):
self.assertEqual(cache.get(key), key + 1)
for key in range(6, 9):
self.assertIsNone(cache.get(key))
self.assertEqual(cache.get(9), 9)
# memcached backend isn't guaranteed to be available.
# To check the memcached backend, the test settings file will
# need to contain at least one cache backend setting that points at
# your memcache server.
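# A minimal example of such a setting (illustrative values; the alias and
# LOCATION are assumptions and must point at your own memcached server):
#
#     CACHES = {
#         'default': {
#             'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
#             'LOCATION': '127.0.0.1:11211',
#         },
#     }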
configured_caches = {}
for _cache_params in settings.CACHES.values():
configured_caches[_cache_params['BACKEND']] = _cache_params
MemcachedCache_params = configured_caches.get('django.core.cache.backends.memcached.MemcachedCache')
PyLibMCCache_params = configured_caches.get('django.core.cache.backends.memcached.PyLibMCCache')
# The memcached backends don't support cull-related options like `MAX_ENTRIES`.
memcached_excluded_caches = {'cull', 'zero_cull'}
class BaseMemcachedTests(BaseCacheTests):
# By default it's assumed that the client doesn't clean up connections
# properly, in which case the backend must do so after each request.
should_disconnect_on_close = True
def test_location_multiple_servers(self):
locations = [
['server1.tld', 'server2:11211'],
'server1.tld;server2:11211',
'server1.tld,server2:11211',
]
for location in locations:
with self.subTest(location=location):
params = {'BACKEND': self.base_params['BACKEND'], 'LOCATION': location}
with self.settings(CACHES={'default': params}):
self.assertEqual(cache._servers, ['server1.tld', 'server2:11211'])
def test_invalid_key_characters(self):
"""
On memcached, we don't introduce a duplicate key validation
        step (for speed reasons); we just let the memcached API
library raise its own exception on bad keys. Refs #6447.
In order to be memcached-API-library agnostic, we only assert
that a generic exception of some kind is raised.
"""
# memcached does not allow whitespace or control characters in keys
# when using the ascii protocol.
with self.assertRaises(Exception):
cache.set('key with spaces', 'value')
def test_invalid_key_length(self):
# memcached limits key length to 250
with self.assertRaises(Exception):
cache.set('a' * 251, 'value')
def test_default_never_expiring_timeout(self):
# Regression test for #22845
with self.settings(CACHES=caches_setting_for_tests(
base=self.base_params,
exclude=memcached_excluded_caches,
TIMEOUT=None)):
cache.set('infinite_foo', 'bar')
self.assertEqual(cache.get('infinite_foo'), 'bar')
def test_default_far_future_timeout(self):
# Regression test for #22845
with self.settings(CACHES=caches_setting_for_tests(
base=self.base_params,
exclude=memcached_excluded_caches,
# 60*60*24*365, 1 year
TIMEOUT=31536000)):
cache.set('future_foo', 'bar')
self.assertEqual(cache.get('future_foo'), 'bar')
def test_cull(self):
        # culling isn't implemented; memcached deals with it.
pass
def test_zero_cull(self):
        # culling isn't implemented; memcached deals with it.
pass
def test_memcached_deletes_key_on_failed_set(self):
# By default memcached allows objects up to 1MB. For the cache_db session
# backend to always use the current session, memcached needs to delete
# the old key if it fails to set.
# pylibmc doesn't seem to have SERVER_MAX_VALUE_LENGTH as far as I can
# tell from a quick check of its source code. This is falling back to
# the default value exposed by python-memcached on my system.
max_value_length = getattr(cache._lib, 'SERVER_MAX_VALUE_LENGTH', 1048576)
cache.set('small_value', 'a')
self.assertEqual(cache.get('small_value'), 'a')
large_value = 'a' * (max_value_length + 1)
try:
cache.set('small_value', large_value)
except Exception:
# Some clients (e.g. pylibmc) raise when the value is too large,
# while others (e.g. python-memcached) intentionally return True
# indicating success. This test is primarily checking that the key
# was deleted, so the return/exception behavior for the set()
# itself is not important.
pass
# small_value should be deleted, or set if configured to accept larger values
value = cache.get('small_value')
self.assertTrue(value is None or value == large_value)
def test_close(self):
# For clients that don't manage their connections properly, the
# connection is closed when the request is complete.
signals.request_finished.disconnect(close_old_connections)
try:
with mock.patch.object(cache._lib.Client, 'disconnect_all', autospec=True) as mock_disconnect:
signals.request_finished.send(self.__class__)
self.assertIs(mock_disconnect.called, self.should_disconnect_on_close)
finally:
signals.request_finished.connect(close_old_connections)
def test_set_many_returns_failing_keys(self):
def fail_set_multi(mapping, *args, **kwargs):
return mapping.keys()
with mock.patch('%s.Client.set_multi' % self.client_library_name, side_effect=fail_set_multi):
failing_keys = cache.set_many({'key': 'value'})
self.assertEqual(failing_keys, ['key'])
@unittest.skipUnless(MemcachedCache_params, "MemcachedCache backend not configured")
@override_settings(CACHES=caches_setting_for_tests(
base=MemcachedCache_params,
exclude=memcached_excluded_caches,
))
class MemcachedCacheTests(BaseMemcachedTests, TestCase):
base_params = MemcachedCache_params
client_library_name = 'memcache'
def test_memcached_uses_highest_pickle_version(self):
# Regression test for #19810
for cache_key in settings.CACHES:
with self.subTest(cache_key=cache_key):
self.assertEqual(caches[cache_key]._cache.pickleProtocol, pickle.HIGHEST_PROTOCOL)
@override_settings(CACHES=caches_setting_for_tests(
base=MemcachedCache_params,
exclude=memcached_excluded_caches,
OPTIONS={'server_max_value_length': 9999},
))
def test_memcached_options(self):
self.assertEqual(cache._cache.server_max_value_length, 9999)
@unittest.skipUnless(PyLibMCCache_params, "PyLibMCCache backend not configured")
@override_settings(CACHES=caches_setting_for_tests(
base=PyLibMCCache_params,
exclude=memcached_excluded_caches,
))
class PyLibMCCacheTests(BaseMemcachedTests, TestCase):
base_params = PyLibMCCache_params
client_library_name = 'pylibmc'
# libmemcached manages its own connections.
should_disconnect_on_close = False
# By default, pylibmc/libmemcached don't verify keys client-side and so
# this test triggers a server-side bug that causes later tests to fail
# (#19914). The `verify_keys` behavior option could be set to True (which
# would avoid triggering the server-side bug), however this test would
# still fail due to https://github.com/lericson/pylibmc/issues/219.
@unittest.skip("triggers a memcached-server bug, causing subsequent tests to fail")
def test_invalid_key_characters(self):
pass
@override_settings(CACHES=caches_setting_for_tests(
base=PyLibMCCache_params,
exclude=memcached_excluded_caches,
OPTIONS={
'binary': True,
'behaviors': {'tcp_nodelay': True},
},
))
def test_pylibmc_options(self):
self.assertTrue(cache._cache.binary)
self.assertEqual(cache._cache.behaviors['tcp_nodelay'], int(True))
@override_settings(CACHES=caches_setting_for_tests(
BACKEND='django.core.cache.backends.filebased.FileBasedCache',
))
class FileBasedCacheTests(BaseCacheTests, TestCase):
"""
Specific test cases for the file-based cache.
"""
def setUp(self):
super().setUp()
self.dirname = tempfile.mkdtemp()
        # The cache LOCATION cannot be modified through override_settings /
        # modify_settings, so the settings are manipulated directly here and
        # the setting_changed signal is triggered manually.
for cache_params in settings.CACHES.values():
cache_params.update({'LOCATION': self.dirname})
setting_changed.send(self.__class__, setting='CACHES', enter=False)
def tearDown(self):
super().tearDown()
# Call parent first, as cache.clear() may recreate cache base directory
shutil.rmtree(self.dirname)
def test_ignores_non_cache_files(self):
fname = os.path.join(self.dirname, 'not-a-cache-file')
with open(fname, 'w'):
os.utime(fname, None)
cache.clear()
self.assertTrue(os.path.exists(fname),
'Expected cache.clear to ignore non cache files')
os.remove(fname)
def test_clear_does_not_remove_cache_dir(self):
cache.clear()
self.assertTrue(os.path.exists(self.dirname),
'Expected cache.clear to keep the cache dir')
def test_creates_cache_dir_if_nonexistent(self):
os.rmdir(self.dirname)
cache.set('foo', 'bar')
        self.assertTrue(os.path.exists(self.dirname))
def test_get_ignores_enoent(self):
cache.set('foo', 'bar')
os.unlink(cache._key_to_file('foo'))
# Returns the default instead of erroring.
self.assertEqual(cache.get('foo', 'baz'), 'baz')
def test_get_does_not_ignore_non_filenotfound_exceptions(self):
with mock.patch('builtins.open', side_effect=IOError):
with self.assertRaises(IOError):
cache.get('foo')
def test_empty_cache_file_considered_expired(self):
cache_file = cache._key_to_file('foo')
with open(cache_file, 'wb') as fh:
fh.write(b'')
with open(cache_file, 'rb') as fh:
self.assertIs(cache._is_expired(fh), True)
@override_settings(CACHES={
'default': {
'BACKEND': 'cache.liberal_backend.CacheClass',
},
})
class CustomCacheKeyValidationTests(SimpleTestCase):
"""
Tests for the ability to mixin a custom ``validate_key`` method to
a custom cache backend that otherwise inherits from a builtin
backend, and override the default key validation. Refs #6447.
"""
def test_custom_key_validation(self):
# this key is both longer than 250 characters, and has spaces
key = 'some key with spaces' * 15
val = 'a value'
cache.set(key, val)
self.assertEqual(cache.get(key), val)
@override_settings(
CACHES={
'default': {
'BACKEND': 'cache.closeable_cache.CacheClass',
}
}
)
class CacheClosingTests(SimpleTestCase):
def test_close(self):
self.assertFalse(cache.closed)
signals.request_finished.send(self.__class__)
self.assertTrue(cache.closed)
DEFAULT_MEMORY_CACHES_SETTINGS = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'unique-snowflake',
}
}
NEVER_EXPIRING_CACHES_SETTINGS = copy.deepcopy(DEFAULT_MEMORY_CACHES_SETTINGS)
NEVER_EXPIRING_CACHES_SETTINGS['default']['TIMEOUT'] = None
class DefaultNonExpiringCacheKeyTests(SimpleTestCase):
"""
    Caches configured in settings with TIMEOUT=None will set non-expiring
    keys.
"""
def setUp(self):
# The 5 minute (300 seconds) default expiration time for keys is
# defined in the implementation of the initializer method of the
# BaseCache type.
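        # A rough sketch of where that default comes from (paraphrased, not
        # the exact source):
        #
        #     class BaseCache:
        #         def __init__(self, params):
        #             timeout = params.get('timeout', params.get('TIMEOUT', 300))
        #             ...
        #             self.default_timeout = timeout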
self.DEFAULT_TIMEOUT = caches[DEFAULT_CACHE_ALIAS].default_timeout
def tearDown(self):
        del self.DEFAULT_TIMEOUT
def test_default_expiration_time_for_keys_is_5_minutes(self):
"""The default expiration time of a cache key is 5 minutes.
This value is defined in
django.core.cache.backends.base.BaseCache.__init__().
"""
self.assertEqual(300, self.DEFAULT_TIMEOUT)
def test_caches_with_unset_timeout_has_correct_default_timeout(self):
"""Caches that have the TIMEOUT parameter undefined in the default
settings will use the default 5 minute timeout.
"""
cache = caches[DEFAULT_CACHE_ALIAS]
self.assertEqual(self.DEFAULT_TIMEOUT, cache.default_timeout)
@override_settings(CACHES=NEVER_EXPIRING_CACHES_SETTINGS)
def test_caches_set_with_timeout_as_none_has_correct_default_timeout(self):
"""Memory caches that have the TIMEOUT parameter set to `None` in the
        default settings will have `None` as the default timeout.
This means "no timeout".
"""
cache = caches[DEFAULT_CACHE_ALIAS]
self.assertIsNone(cache.default_timeout)
self.assertIsNone(cache.get_backend_timeout())
@override_settings(CACHES=DEFAULT_MEMORY_CACHES_SETTINGS)
def test_caches_with_unset_timeout_set_expiring_key(self):
"""Memory caches that have the TIMEOUT parameter unset will set cache
        keys with the default 5 minute timeout.
"""
key = "my-key"
value = "my-value"
cache = caches[DEFAULT_CACHE_ALIAS]
cache.set(key, value)
cache_key = cache.make_key(key)
self.assertIsNotNone(cache._expire_info[cache_key])
@override_settings(CACHES=NEVER_EXPIRING_CACHES_SETTINGS)
def test_caches_set_with_timeout_as_none_set_non_expiring_key(self):
"""Memory caches that have the TIMEOUT parameter set to `None` will set
        a non-expiring key by default.
"""
key = "another-key"
value = "another-value"
cache = caches[DEFAULT_CACHE_ALIAS]
cache.set(key, value)
cache_key = cache.make_key(key)
self.assertIsNone(cache._expire_info[cache_key])
@override_settings(
CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix',
CACHE_MIDDLEWARE_SECONDS=1,
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
},
USE_I18N=False,
ALLOWED_HOSTS=['.example.com'],
)
class CacheUtils(SimpleTestCase):
"""TestCase for django.utils.cache functions."""
host = 'www.example.com'
path = '/cache/test/'
factory = RequestFactory(HTTP_HOST=host)
def tearDown(self):
cache.clear()
def _get_request_cache(self, method='GET', query_string=None, update_cache=None):
request = self._get_request(self.host, self.path,
method, query_string=query_string)
request._cache_update_cache = True if not update_cache else update_cache
return request
def _set_cache(self, request, msg):
response = HttpResponse()
response.content = msg
return UpdateCacheMiddleware().process_response(request, response)
def test_patch_vary_headers(self):
headers = (
# Initial vary, new headers, resulting vary.
(None, ('Accept-Encoding',), 'Accept-Encoding'),
('Accept-Encoding', ('accept-encoding',), 'Accept-Encoding'),
('Accept-Encoding', ('ACCEPT-ENCODING',), 'Accept-Encoding'),
('Cookie', ('Accept-Encoding',), 'Cookie, Accept-Encoding'),
('Cookie, Accept-Encoding', ('Accept-Encoding',), 'Cookie, Accept-Encoding'),
('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
(None, ('Accept-Encoding', 'COOKIE'), 'Accept-Encoding, COOKIE'),
            ('Cookie,     Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
('Cookie , Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
)
for initial_vary, newheaders, resulting_vary in headers:
with self.subTest(initial_vary=initial_vary, newheaders=newheaders):
response = HttpResponse()
if initial_vary is not None:
response['Vary'] = initial_vary
patch_vary_headers(response, newheaders)
self.assertEqual(response['Vary'], resulting_vary)
def test_get_cache_key(self):
request = self.factory.get(self.path)
response = HttpResponse()
# Expect None if no headers have been set yet.
self.assertIsNone(get_cache_key(request))
# Set headers to an empty list.
learn_cache_key(request, response)
self.assertEqual(
get_cache_key(request),
'views.decorators.cache.cache_page.settingsprefix.GET.'
'18a03f9c9649f7d684af5db3524f5c99.d41d8cd98f00b204e9800998ecf8427e'
)
# A specified key_prefix is taken into account.
key_prefix = 'localprefix'
learn_cache_key(request, response, key_prefix=key_prefix)
self.assertEqual(
get_cache_key(request, key_prefix=key_prefix),
'views.decorators.cache.cache_page.localprefix.GET.'
'18a03f9c9649f7d684af5db3524f5c99.d41d8cd98f00b204e9800998ecf8427e'
)
def test_get_cache_key_with_query(self):
request = self.factory.get(self.path, {'test': 1})
response = HttpResponse()
# Expect None if no headers have been set yet.
self.assertIsNone(get_cache_key(request))
# Set headers to an empty list.
learn_cache_key(request, response)
# The querystring is taken into account.
self.assertEqual(
get_cache_key(request),
'views.decorators.cache.cache_page.settingsprefix.GET.'
'beaf87a9a99ee81c673ea2d67ccbec2a.d41d8cd98f00b204e9800998ecf8427e'
)
def test_cache_key_varies_by_url(self):
"""
get_cache_key keys differ by fully-qualified URL instead of path
"""
request1 = self.factory.get(self.path, HTTP_HOST='sub-1.example.com')
learn_cache_key(request1, HttpResponse())
request2 = self.factory.get(self.path, HTTP_HOST='sub-2.example.com')
learn_cache_key(request2, HttpResponse())
self.assertNotEqual(get_cache_key(request1), get_cache_key(request2))
def test_learn_cache_key(self):
request = self.factory.head(self.path)
response = HttpResponse()
response['Vary'] = 'Pony'
# Make sure that the Vary header is added to the key hash
learn_cache_key(request, response)
self.assertEqual(
get_cache_key(request),
'views.decorators.cache.cache_page.settingsprefix.GET.'
'18a03f9c9649f7d684af5db3524f5c99.d41d8cd98f00b204e9800998ecf8427e'
)
def test_patch_cache_control(self):
tests = (
# Initial Cache-Control, kwargs to patch_cache_control, expected Cache-Control parts
(None, {'private': True}, {'private'}),
('', {'private': True}, {'private'}),
# Test whether private/public attributes are mutually exclusive
('private', {'private': True}, {'private'}),
('private', {'public': True}, {'public'}),
('public', {'public': True}, {'public'}),
('public', {'private': True}, {'private'}),
('must-revalidate,max-age=60,private', {'public': True}, {'must-revalidate', 'max-age=60', 'public'}),
('must-revalidate,max-age=60,public', {'private': True}, {'must-revalidate', 'max-age=60', 'private'}),
('must-revalidate,max-age=60', {'public': True}, {'must-revalidate', 'max-age=60', 'public'}),
)
cc_delim_re = re.compile(r'\s*,\s*')
for initial_cc, newheaders, expected_cc in tests:
with self.subTest(initial_cc=initial_cc, newheaders=newheaders):
response = HttpResponse()
if initial_cc is not None:
response['Cache-Control'] = initial_cc
patch_cache_control(response, **newheaders)
parts = set(cc_delim_re.split(response['Cache-Control']))
self.assertEqual(parts, expected_cc)
@override_settings(
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'KEY_PREFIX': 'cacheprefix',
},
},
)
class PrefixedCacheUtils(CacheUtils):
pass
@override_settings(
CACHE_MIDDLEWARE_SECONDS=60,
CACHE_MIDDLEWARE_KEY_PREFIX='test',
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
},
)
class CacheHEADTest(SimpleTestCase):
path = '/cache/test/'
factory = RequestFactory()
def tearDown(self):
cache.clear()
def _set_cache(self, request, msg):
response = HttpResponse()
response.content = msg
return UpdateCacheMiddleware().process_response(request, response)
def test_head_caches_correctly(self):
test_content = 'test content'
request = self.factory.head(self.path)
request._cache_update_cache = True
self._set_cache(request, test_content)
request = self.factory.head(self.path)
request._cache_update_cache = True
get_cache_data = FetchFromCacheMiddleware().process_request(request)
self.assertIsNotNone(get_cache_data)
self.assertEqual(test_content.encode(), get_cache_data.content)
def test_head_with_cached_get(self):
test_content = 'test content'
request = self.factory.get(self.path)
request._cache_update_cache = True
self._set_cache(request, test_content)
request = self.factory.head(self.path)
get_cache_data = FetchFromCacheMiddleware().process_request(request)
self.assertIsNotNone(get_cache_data)
self.assertEqual(test_content.encode(), get_cache_data.content)
@override_settings(
CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix',
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
},
LANGUAGES=[
('en', 'English'),
('es', 'Spanish'),
],
)
class CacheI18nTest(SimpleTestCase):
path = '/cache/test/'
factory = RequestFactory()
def tearDown(self):
cache.clear()
@override_settings(USE_I18N=True, USE_L10N=False, USE_TZ=False)
def test_cache_key_i18n_translation(self):
request = self.factory.get(self.path)
lang = translation.get_language()
response = HttpResponse()
key = learn_cache_key(request, response)
self.assertIn(lang, key, "Cache keys should include the language name when translation is active")
key2 = get_cache_key(request)
self.assertEqual(key, key2)
def check_accept_language_vary(self, accept_language, vary, reference_key):
request = self.factory.get(self.path)
request.META['HTTP_ACCEPT_LANGUAGE'] = accept_language
request.META['HTTP_ACCEPT_ENCODING'] = 'gzip;q=1.0, identity; q=0.5, *;q=0'
response = HttpResponse()
response['Vary'] = vary
key = learn_cache_key(request, response)
key2 = get_cache_key(request)
self.assertEqual(key, reference_key)
self.assertEqual(key2, reference_key)
@override_settings(USE_I18N=True, USE_L10N=False, USE_TZ=False)
def test_cache_key_i18n_translation_accept_language(self):
lang = translation.get_language()
self.assertEqual(lang, 'en')
request = self.factory.get(self.path)
request.META['HTTP_ACCEPT_ENCODING'] = 'gzip;q=1.0, identity; q=0.5, *;q=0'
response = HttpResponse()
response['Vary'] = 'accept-encoding'
key = learn_cache_key(request, response)
self.assertIn(lang, key, "Cache keys should include the language name when translation is active")
self.check_accept_language_vary(
'en-us',
'cookie, accept-language, accept-encoding',
key
)
self.check_accept_language_vary(
'en-US',
'cookie, accept-encoding, accept-language',
key
)
self.check_accept_language_vary(
'en-US,en;q=0.8',
'accept-encoding, accept-language, cookie',
key
)
self.check_accept_language_vary(
'en-US,en;q=0.8,ko;q=0.6',
'accept-language, cookie, accept-encoding',
key
)
self.check_accept_language_vary(
'ko-kr,ko;q=0.8,en-us;q=0.5,en;q=0.3 ',
'accept-encoding, cookie, accept-language',
key
)
self.check_accept_language_vary(
'ko-KR,ko;q=0.8,en-US;q=0.6,en;q=0.4',
'accept-language, accept-encoding, cookie',
key
)
self.check_accept_language_vary(
'ko;q=1.0,en;q=0.5',
'cookie, accept-language, accept-encoding',
key
)
self.check_accept_language_vary(
'ko, en',
'cookie, accept-encoding, accept-language',
key
)
self.check_accept_language_vary(
'ko-KR, en-US',
'accept-encoding, accept-language, cookie',
key
)
@override_settings(USE_I18N=False, USE_L10N=True, USE_TZ=False)
def test_cache_key_i18n_formatting(self):
request = self.factory.get(self.path)
lang = translation.get_language()
response = HttpResponse()
key = learn_cache_key(request, response)
self.assertIn(lang, key, "Cache keys should include the language name when formatting is active")
key2 = get_cache_key(request)
self.assertEqual(key, key2)
@override_settings(USE_I18N=False, USE_L10N=False, USE_TZ=True)
def test_cache_key_i18n_timezone(self):
request = self.factory.get(self.path)
tz = timezone.get_current_timezone_name()
response = HttpResponse()
key = learn_cache_key(request, response)
self.assertIn(tz, key, "Cache keys should include the time zone name when time zones are active")
key2 = get_cache_key(request)
self.assertEqual(key, key2)
@override_settings(USE_I18N=False, USE_L10N=False)
def test_cache_key_no_i18n(self):
request = self.factory.get(self.path)
lang = translation.get_language()
tz = timezone.get_current_timezone_name()
response = HttpResponse()
key = learn_cache_key(request, response)
self.assertNotIn(lang, key, "Cache keys shouldn't include the language name when i18n isn't active")
self.assertNotIn(tz, key, "Cache keys shouldn't include the time zone name when i18n isn't active")
@override_settings(
CACHE_MIDDLEWARE_KEY_PREFIX="test",
CACHE_MIDDLEWARE_SECONDS=60,
USE_I18N=True,
)
def test_middleware(self):
def set_cache(request, lang, msg):
translation.activate(lang)
response = HttpResponse()
response.content = msg
return UpdateCacheMiddleware().process_response(request, response)
        # cache with non-empty request.GET
request = self.factory.get(self.path, {'foo': 'bar', 'other': 'true'})
request._cache_update_cache = True
get_cache_data = FetchFromCacheMiddleware().process_request(request)
# first access, cache must return None
self.assertIsNone(get_cache_data)
response = HttpResponse()
content = 'Check for cache with QUERY_STRING'
response.content = content
UpdateCacheMiddleware().process_response(request, response)
get_cache_data = FetchFromCacheMiddleware().process_request(request)
# cache must return content
self.assertIsNotNone(get_cache_data)
self.assertEqual(get_cache_data.content, content.encode())
# different QUERY_STRING, cache must be empty
request = self.factory.get(self.path, {'foo': 'bar', 'somethingelse': 'true'})
request._cache_update_cache = True
get_cache_data = FetchFromCacheMiddleware().process_request(request)
self.assertIsNone(get_cache_data)
# i18n tests
en_message = "Hello world!"
es_message = "Hola mundo!"
request = self.factory.get(self.path)
request._cache_update_cache = True
set_cache(request, 'en', en_message)
get_cache_data = FetchFromCacheMiddleware().process_request(request)
# The cache can be recovered
self.assertIsNotNone(get_cache_data)
self.assertEqual(get_cache_data.content, en_message.encode())
# change the session language and set content
request = self.factory.get(self.path)
request._cache_update_cache = True
set_cache(request, 'es', es_message)
        # change the language again
translation.activate('en')
# retrieve the content from cache
get_cache_data = FetchFromCacheMiddleware().process_request(request)
self.assertEqual(get_cache_data.content, en_message.encode())
        # change the language again
translation.activate('es')
get_cache_data = FetchFromCacheMiddleware().process_request(request)
self.assertEqual(get_cache_data.content, es_message.encode())
# reset the language
translation.deactivate()
@override_settings(
CACHE_MIDDLEWARE_KEY_PREFIX="test",
CACHE_MIDDLEWARE_SECONDS=60,
)
def test_middleware_doesnt_cache_streaming_response(self):
request = self.factory.get(self.path)
get_cache_data = FetchFromCacheMiddleware().process_request(request)
self.assertIsNone(get_cache_data)
content = ['Check for cache with streaming content.']
response = StreamingHttpResponse(content)
UpdateCacheMiddleware().process_response(request, response)
get_cache_data = FetchFromCacheMiddleware().process_request(request)
self.assertIsNone(get_cache_data)
@override_settings(
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'KEY_PREFIX': 'cacheprefix'
},
},
)
class PrefixedCacheI18nTest(CacheI18nTest):
pass
def hello_world_view(request, value):
return HttpResponse('Hello World %s' % value)
def csrf_view(request):
return HttpResponse(csrf(request)['csrf_token'])
@override_settings(
CACHE_MIDDLEWARE_ALIAS='other',
CACHE_MIDDLEWARE_KEY_PREFIX='middlewareprefix',
CACHE_MIDDLEWARE_SECONDS=30,
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
'other': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'other',
'TIMEOUT': '1',
},
},
)
class CacheMiddlewareTest(SimpleTestCase):
factory = RequestFactory()
def setUp(self):
self.default_cache = caches['default']
self.other_cache = caches['other']
def tearDown(self):
self.default_cache.clear()
self.other_cache.clear()
super().tearDown()
def test_constructor(self):
"""
        Ensure the constructor correctly distinguishes between using
        CacheMiddleware as middleware and using it as a view decorator, and
        sets its attributes appropriately.
"""
# If no arguments are passed in construction, it's being used as middleware.
middleware = CacheMiddleware()
# Now test object attributes against values defined in setUp above
self.assertEqual(middleware.cache_timeout, 30)
self.assertEqual(middleware.key_prefix, 'middlewareprefix')
self.assertEqual(middleware.cache_alias, 'other')
# If arguments are being passed in construction, it's being used as a decorator.
# First, test with "defaults":
as_view_decorator = CacheMiddleware(cache_alias=None, key_prefix=None)
self.assertEqual(as_view_decorator.cache_timeout, 30) # Timeout value for 'default' cache, i.e. 30
self.assertEqual(as_view_decorator.key_prefix, '')
# Value of DEFAULT_CACHE_ALIAS from django.core.cache
self.assertEqual(as_view_decorator.cache_alias, 'default')
# Next, test with custom values:
as_view_decorator_with_custom = CacheMiddleware(cache_timeout=60, cache_alias='other', key_prefix='foo')
self.assertEqual(as_view_decorator_with_custom.cache_timeout, 60)
self.assertEqual(as_view_decorator_with_custom.key_prefix, 'foo')
self.assertEqual(as_view_decorator_with_custom.cache_alias, 'other')
def test_middleware(self):
middleware = CacheMiddleware()
prefix_middleware = CacheMiddleware(key_prefix='prefix1')
timeout_middleware = CacheMiddleware(cache_timeout=1)
request = self.factory.get('/view/')
# Put the request through the request middleware
result = middleware.process_request(request)
self.assertIsNone(result)
response = hello_world_view(request, '1')
# Now put the response through the response middleware
response = middleware.process_response(request, response)
# Repeating the request should result in a cache hit
result = middleware.process_request(request)
self.assertIsNotNone(result)
self.assertEqual(result.content, b'Hello World 1')
# The same request through a different middleware won't hit
result = prefix_middleware.process_request(request)
self.assertIsNone(result)
# The same request with a timeout _will_ hit
result = timeout_middleware.process_request(request)
self.assertIsNotNone(result)
self.assertEqual(result.content, b'Hello World 1')
def test_view_decorator(self):
# decorate the same view with different cache decorators
default_view = cache_page(3)(hello_world_view)
default_with_prefix_view = cache_page(3, key_prefix='prefix1')(hello_world_view)
explicit_default_view = cache_page(3, cache='default')(hello_world_view)
explicit_default_with_prefix_view = cache_page(3, cache='default', key_prefix='prefix1')(hello_world_view)
other_view = cache_page(1, cache='other')(hello_world_view)
other_with_prefix_view = cache_page(1, cache='other', key_prefix='prefix2')(hello_world_view)
request = self.factory.get('/view/')
# Request the view once
response = default_view(request, '1')
self.assertEqual(response.content, b'Hello World 1')
# Request again -- hit the cache
response = default_view(request, '2')
self.assertEqual(response.content, b'Hello World 1')
# Requesting the same view with the explicit cache should yield the same result
response = explicit_default_view(request, '3')
self.assertEqual(response.content, b'Hello World 1')
# Requesting with a prefix will hit a different cache key
response = explicit_default_with_prefix_view(request, '4')
self.assertEqual(response.content, b'Hello World 4')
# Hitting the same view again gives a cache hit
response = explicit_default_with_prefix_view(request, '5')
self.assertEqual(response.content, b'Hello World 4')
# And going back to the implicit cache will hit the same cache
response = default_with_prefix_view(request, '6')
self.assertEqual(response.content, b'Hello World 4')
# Requesting from an alternate cache won't hit cache
response = other_view(request, '7')
self.assertEqual(response.content, b'Hello World 7')
# But a repeated hit will hit cache
response = other_view(request, '8')
self.assertEqual(response.content, b'Hello World 7')
# And prefixing the alternate cache yields yet another cache entry
response = other_with_prefix_view(request, '9')
self.assertEqual(response.content, b'Hello World 9')
# But if we wait a couple of seconds...
time.sleep(2)
# ... the default cache will still hit
caches['default']
response = default_view(request, '11')
self.assertEqual(response.content, b'Hello World 1')
# ... the default cache with a prefix will still hit
response = default_with_prefix_view(request, '12')
self.assertEqual(response.content, b'Hello World 4')
# ... the explicit default cache will still hit
response = explicit_default_view(request, '13')
self.assertEqual(response.content, b'Hello World 1')
# ... the explicit default cache with a prefix will still hit
response = explicit_default_with_prefix_view(request, '14')
self.assertEqual(response.content, b'Hello World 4')
        # ... but a rapidly expiring cache won't hit
response = other_view(request, '15')
self.assertEqual(response.content, b'Hello World 15')
        # ... even if it has a prefix
response = other_with_prefix_view(request, '16')
self.assertEqual(response.content, b'Hello World 16')
def test_cached_control_private_not_cached(self):
"""Responses with 'Cache-Control: private' are not cached."""
view_with_private_cache = cache_page(3)(cache_control(private=True)(hello_world_view))
request = self.factory.get('/view/')
response = view_with_private_cache(request, '1')
self.assertEqual(response.content, b'Hello World 1')
response = view_with_private_cache(request, '2')
self.assertEqual(response.content, b'Hello World 2')
def test_sensitive_cookie_not_cached(self):
"""
Django must prevent caching of responses that set a user-specific (and
maybe security sensitive) cookie in response to a cookie-less request.
"""
csrf_middleware = CsrfViewMiddleware()
cache_middleware = CacheMiddleware()
request = self.factory.get('/view/')
self.assertIsNone(cache_middleware.process_request(request))
csrf_middleware.process_view(request, csrf_view, (), {})
response = csrf_view(request)
response = csrf_middleware.process_response(request, response)
response = cache_middleware.process_response(request, response)
# Inserting a CSRF cookie in a cookie-less request prevented caching.
self.assertIsNone(cache_middleware.process_request(request))
def test_304_response_has_http_caching_headers_but_not_cached(self):
original_view = mock.Mock(return_value=HttpResponseNotModified())
view = cache_page(2)(original_view)
request = self.factory.get('/view/')
        # The 304 response isn't cached, so the view is called again on the second request.
view(request).close()
response = view(request)
response.close()
self.assertEqual(original_view.call_count, 2)
self.assertIsInstance(response, HttpResponseNotModified)
self.assertIn('Cache-Control', response)
self.assertIn('Expires', response)
@override_settings(
CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix',
CACHE_MIDDLEWARE_SECONDS=1,
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
},
USE_I18N=False,
)
class TestWithTemplateResponse(SimpleTestCase):
"""
Tests various headers w/ TemplateResponse.
Most are probably redundant since they manipulate the same object
    anyway, but the ETag header is 'special' because it relies on the
content being complete (which is not necessarily always the case
with a TemplateResponse)
"""
path = '/cache/test/'
factory = RequestFactory()
def tearDown(self):
cache.clear()
def test_patch_vary_headers(self):
headers = (
# Initial vary, new headers, resulting vary.
(None, ('Accept-Encoding',), 'Accept-Encoding'),
('Accept-Encoding', ('accept-encoding',), 'Accept-Encoding'),
('Accept-Encoding', ('ACCEPT-ENCODING',), 'Accept-Encoding'),
('Cookie', ('Accept-Encoding',), 'Cookie, Accept-Encoding'),
('Cookie, Accept-Encoding', ('Accept-Encoding',), 'Cookie, Accept-Encoding'),
('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
(None, ('Accept-Encoding', 'COOKIE'), 'Accept-Encoding, COOKIE'),
            ('Cookie,     Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
('Cookie , Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
)
for initial_vary, newheaders, resulting_vary in headers:
with self.subTest(initial_vary=initial_vary, newheaders=newheaders):
template = engines['django'].from_string("This is a test")
response = TemplateResponse(HttpRequest(), template)
if initial_vary is not None:
response['Vary'] = initial_vary
patch_vary_headers(response, newheaders)
self.assertEqual(response['Vary'], resulting_vary)
def test_get_cache_key(self):
request = self.factory.get(self.path)
template = engines['django'].from_string("This is a test")
response = TemplateResponse(HttpRequest(), template)
key_prefix = 'localprefix'
# Expect None if no headers have been set yet.
self.assertIsNone(get_cache_key(request))
# Set headers to an empty list.
learn_cache_key(request, response)
self.assertEqual(
get_cache_key(request),
'views.decorators.cache.cache_page.settingsprefix.GET.'
'58a0a05c8a5620f813686ff969c26853.d41d8cd98f00b204e9800998ecf8427e'
)
# A specified key_prefix is taken into account.
learn_cache_key(request, response, key_prefix=key_prefix)
self.assertEqual(
get_cache_key(request, key_prefix=key_prefix),
'views.decorators.cache.cache_page.localprefix.GET.'
'58a0a05c8a5620f813686ff969c26853.d41d8cd98f00b204e9800998ecf8427e'
)
def test_get_cache_key_with_query(self):
request = self.factory.get(self.path, {'test': 1})
template = engines['django'].from_string("This is a test")
response = TemplateResponse(HttpRequest(), template)
# Expect None if no headers have been set yet.
self.assertIsNone(get_cache_key(request))
# Set headers to an empty list.
learn_cache_key(request, response)
# The querystring is taken into account.
self.assertEqual(
get_cache_key(request),
'views.decorators.cache.cache_page.settingsprefix.GET.'
'0f1c2d56633c943073c4569d9a9502fe.d41d8cd98f00b204e9800998ecf8427e'
)
class TestMakeTemplateFragmentKey(SimpleTestCase):
def test_without_vary_on(self):
key = make_template_fragment_key('a.fragment')
self.assertEqual(key, 'template.cache.a.fragment.d41d8cd98f00b204e9800998ecf8427e')
def test_with_one_vary_on(self):
key = make_template_fragment_key('foo', ['abc'])
self.assertEqual(key, 'template.cache.foo.900150983cd24fb0d6963f7d28e17f72')
def test_with_many_vary_on(self):
key = make_template_fragment_key('bar', ['abc', 'def'])
self.assertEqual(key, 'template.cache.bar.4b35f12ab03cec09beec4c21b2d2fa88')
def test_proper_escaping(self):
key = make_template_fragment_key('spam', ['abc:def%'])
self.assertEqual(key, 'template.cache.spam.f27688177baec990cdf3fbd9d9c3f469')
class CacheHandlerTest(SimpleTestCase):
def test_same_instance(self):
"""
Attempting to retrieve the same alias should yield the same instance.
"""
cache1 = caches['default']
cache2 = caches['default']
self.assertIs(cache1, cache2)
def test_per_thread(self):
"""
Requesting the same alias from separate threads should yield separate
instances.
"""
c = []
def runner():
c.append(caches['default'])
for x in range(2):
t = threading.Thread(target=runner)
t.start()
t.join()
self.assertIsNot(c[0], c[1])
|
a75ba111b3ae5ab84c9e1704cfeba3d5cdc6943254c40100f6ca7300dc5a6ed3 | import datetime
import unittest
from unittest import mock
from urllib.parse import quote_plus
from django.test import SimpleTestCase
from django.utils.encoding import (
DjangoUnicodeDecodeError, escape_uri_path, filepath_to_uri, force_bytes,
force_text, get_system_encoding, iri_to_uri, smart_bytes, smart_text,
uri_to_iri,
)
from django.utils.functional import SimpleLazyObject
from django.utils.translation import gettext_lazy
class TestEncodingUtils(SimpleTestCase):
def test_force_text_exception(self):
"""
Broken __str__ actually raises an error.
"""
class MyString:
def __str__(self):
return b'\xc3\xb6\xc3\xa4\xc3\xbc'
# str(s) raises a TypeError if the result is not a text type.
with self.assertRaises(TypeError):
force_text(MyString())
def test_force_text_lazy(self):
s = SimpleLazyObject(lambda: 'x')
self.assertIs(type(force_text(s)), str)
def test_force_text_DjangoUnicodeDecodeError(self):
msg = (
"'utf-8' codec can't decode byte 0xff in position 0: invalid "
"start byte. You passed in b'\\xff' (<class 'bytes'>)"
)
with self.assertRaisesMessage(DjangoUnicodeDecodeError, msg):
force_text(b'\xff')
def test_force_bytes_exception(self):
"""
force_bytes knows how to convert to bytes an exception
containing non-ASCII characters in its args.
"""
error_msg = "This is an exception, voilà"
exc = ValueError(error_msg)
self.assertEqual(force_bytes(exc), error_msg.encode())
self.assertEqual(force_bytes(exc, encoding='ascii', errors='ignore'), b'This is an exception, voil')
def test_force_bytes_strings_only(self):
today = datetime.date.today()
self.assertEqual(force_bytes(today, strings_only=True), today)
def test_force_bytes_encoding(self):
error_msg = 'This is an exception, voilà'.encode()
result = force_bytes(error_msg, encoding='ascii', errors='ignore')
self.assertEqual(result, b'This is an exception, voil')
def test_force_bytes_memory_view(self):
data = b'abc'
result = force_bytes(memoryview(data))
# Type check is needed because memoryview(bytes) == bytes.
self.assertIs(type(result), bytes)
self.assertEqual(result, data)
def test_smart_bytes(self):
class Test:
def __str__(self):
return 'ŠĐĆŽćžšđ'
lazy_func = gettext_lazy('x')
self.assertIs(smart_bytes(lazy_func), lazy_func)
self.assertEqual(smart_bytes(Test()), b'\xc5\xa0\xc4\x90\xc4\x86\xc5\xbd\xc4\x87\xc5\xbe\xc5\xa1\xc4\x91')
self.assertEqual(smart_bytes(1), b'1')
self.assertEqual(smart_bytes('foo'), b'foo')
def test_smart_text(self):
class Test:
def __str__(self):
return 'ŠĐĆŽćžšđ'
lazy_func = gettext_lazy('x')
self.assertIs(smart_text(lazy_func), lazy_func)
self.assertEqual(smart_text(Test()), '\u0160\u0110\u0106\u017d\u0107\u017e\u0161\u0111')
self.assertEqual(smart_text(1), '1')
self.assertEqual(smart_text('foo'), 'foo')
def test_get_default_encoding(self):
with mock.patch('locale.getdefaultlocale', side_effect=Exception):
self.assertEqual(get_system_encoding(), 'ascii')
class TestRFC3987IEncodingUtils(unittest.TestCase):
def test_filepath_to_uri(self):
self.assertEqual(filepath_to_uri(None), None)
self.assertEqual(filepath_to_uri('upload\\чубака.mp4'), 'upload/%D1%87%D1%83%D0%B1%D0%B0%D0%BA%D0%B0.mp4')
def test_iri_to_uri(self):
cases = [
# Valid UTF-8 sequences are encoded.
('red%09rosé#red', 'red%09ros%C3%A9#red'),
('/blog/for/Jürgen Münster/', '/blog/for/J%C3%BCrgen%20M%C3%BCnster/'),
('locations/%s' % quote_plus('Paris & Orléans'), 'locations/Paris+%26+Orl%C3%A9ans'),
# Reserved chars remain unescaped.
('%&', '%&'),
('red&♥ros%#red', 'red&%E2%99%A5ros%#red'),
(gettext_lazy('red&♥ros%#red'), 'red&%E2%99%A5ros%#red'),
]
for iri, uri in cases:
self.assertEqual(iri_to_uri(iri), uri)
# Test idempotency.
self.assertEqual(iri_to_uri(iri_to_uri(iri)), uri)
def test_uri_to_iri(self):
cases = [
(None, None),
# Valid UTF-8 sequences are decoded.
('/%e2%89%Ab%E2%99%a5%E2%89%aB/', '/≫♥≫/'),
('/%E2%99%A5%E2%99%A5/?utf8=%E2%9C%93', '/♥♥/?utf8=✓'),
('/%41%5a%6B/', '/AZk/'),
# Reserved and non-URL valid ASCII chars are not decoded.
('/%25%20%02%41%7b/', '/%25%20%02A%7b/'),
# Broken UTF-8 sequences remain escaped.
('/%AAd%AAj%AAa%AAn%AAg%AAo%AA/', '/%AAd%AAj%AAa%AAn%AAg%AAo%AA/'),
('/%E2%99%A5%E2%E2%99%A5/', '/♥%E2♥/'),
('/%E2%99%A5%E2%99%E2%99%A5/', '/♥%E2%99♥/'),
('/%E2%E2%99%A5%E2%99%A5%99/', '/%E2♥♥%99/'),
('/%E2%99%A5%E2%99%A5/?utf8=%9C%93%E2%9C%93%9C%93', '/♥♥/?utf8=%9C%93✓%9C%93'),
]
for uri, iri in cases:
self.assertEqual(uri_to_iri(uri), iri)
# Test idempotency.
self.assertEqual(uri_to_iri(uri_to_iri(uri)), iri)
def test_complementarity(self):
cases = [
('/blog/for/J%C3%BCrgen%20M%C3%BCnster/', '/blog/for/J\xfcrgen%20M\xfcnster/'),
('%&', '%&'),
('red&%E2%99%A5ros%#red', 'red&♥ros%#red'),
('/%E2%99%A5%E2%99%A5/', '/♥♥/'),
('/%E2%99%A5%E2%99%A5/?utf8=%E2%9C%93', '/♥♥/?utf8=✓'),
('/%25%20%02%7b/', '/%25%20%02%7b/'),
('/%AAd%AAj%AAa%AAn%AAg%AAo%AA/', '/%AAd%AAj%AAa%AAn%AAg%AAo%AA/'),
('/%E2%99%A5%E2%E2%99%A5/', '/♥%E2♥/'),
('/%E2%99%A5%E2%99%E2%99%A5/', '/♥%E2%99♥/'),
('/%E2%E2%99%A5%E2%99%A5%99/', '/%E2♥♥%99/'),
('/%E2%99%A5%E2%99%A5/?utf8=%9C%93%E2%9C%93%9C%93', '/♥♥/?utf8=%9C%93✓%9C%93'),
]
for uri, iri in cases:
self.assertEqual(iri_to_uri(uri_to_iri(uri)), uri)
self.assertEqual(uri_to_iri(iri_to_uri(iri)), iri)
def test_escape_uri_path(self):
self.assertEqual(
escape_uri_path('/;some/=awful/?path/:with/@lots/&of/+awful/chars'),
'/%3Bsome/%3Dawful/%3Fpath/:with/@lots/&of/+awful/chars'
)
self.assertEqual(escape_uri_path('/foo#bar'), '/foo%23bar')
self.assertEqual(escape_uri_path('/foo?bar'), '/foo%3Fbar')
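# Illustrative sketch (not part of the original suite): iri_to_uri() percent-encodes
# non-ASCII characters and is idempotent, as asserted in test_iri_to_uri above. The
# expected value below is taken directly from that test's cases.
def _iri_to_uri_sketch():
    iri = '/blog/for/Jürgen Münster/'
    assert iri_to_uri(iri) == '/blog/for/J%C3%BCrgen%20M%C3%BCnster/'
    assert iri_to_uri(iri_to_uri(iri)) == iri_to_uri(iri)  # idempotent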
|
e71fdab3a5bc021e61386024fc8bbaa18e1c4a7038144047a58378845ca58bb3 | import os
from datetime import datetime
from django.test import SimpleTestCase
from django.utils.functional import lazystr
from django.utils.html import (
conditional_escape, escape, escapejs, format_html, html_safe, json_script,
linebreaks, smart_urlquote, strip_spaces_between_tags, strip_tags, urlize,
)
from django.utils.safestring import mark_safe
class TestUtilsHtml(SimpleTestCase):
def check_output(self, function, value, output=None):
"""
function(value) equals output. If output is None, function(value)
equals value.
"""
if output is None:
output = value
self.assertEqual(function(value), output)
def test_escape(self):
items = (
('&', '&'),
('<', '<'),
('>', '>'),
('"', '"'),
("'", '''),
)
# Substitution patterns for testing the above items.
patterns = ("%s", "asdf%sfdsa", "%s1", "1%sb")
for value, output in items:
with self.subTest(value=value, output=output):
for pattern in patterns:
with self.subTest(value=value, output=output, pattern=pattern):
self.check_output(escape, pattern % value, pattern % output)
self.check_output(escape, lazystr(pattern % value), pattern % output)
# Check repeated values.
self.check_output(escape, value * 2, output * 2)
# Verify it doesn't double replace &.
self.check_output(escape, '<&', '<&')
def test_format_html(self):
self.assertEqual(
format_html(
"{} {} {third} {fourth}",
"< Dangerous >",
mark_safe("<b>safe</b>"),
third="< dangerous again",
fourth=mark_safe("<i>safe again</i>"),
),
"< Dangerous > <b>safe</b> < dangerous again <i>safe again</i>"
)
def test_linebreaks(self):
items = (
("para1\n\npara2\r\rpara3", "<p>para1</p>\n\n<p>para2</p>\n\n<p>para3</p>"),
("para1\nsub1\rsub2\n\npara2", "<p>para1<br>sub1<br>sub2</p>\n\n<p>para2</p>"),
("para1\r\n\r\npara2\rsub1\r\rpara4", "<p>para1</p>\n\n<p>para2<br>sub1</p>\n\n<p>para4</p>"),
("para1\tmore\n\npara2", "<p>para1\tmore</p>\n\n<p>para2</p>"),
)
for value, output in items:
with self.subTest(value=value, output=output):
self.check_output(linebreaks, value, output)
self.check_output(linebreaks, lazystr(value), output)
def test_strip_tags(self):
items = (
('<p>See: 'é is an apostrophe followed by e acute</p>',
'See: 'é is an apostrophe followed by e acute'),
('<adf>a', 'a'),
('</adf>a', 'a'),
('<asdf><asdf>e', 'e'),
('hi, <f x', 'hi, <f x'),
('234<235, right?', '234<235, right?'),
('a4<a5 right?', 'a4<a5 right?'),
('b7>b2!', 'b7>b2!'),
('</fe', '</fe'),
('<x>b<y>', 'b'),
('a<p onclick="alert(\'<test>\')">b</p>c', 'abc'),
('a<p a >b</p>c', 'abc'),
('d<a:b c:d>e</p>f', 'def'),
('<strong>foo</strong><a href="http://example.com">bar</a>', 'foobar'),
# caused infinite loop on Pythons not patched with
# https://bugs.python.org/issue20288
('&gotcha&#;<>', '&gotcha&#;<>'),
('<sc<!-- -->ript>test<<!-- -->/script>', 'ript>test'),
('<script>alert()</script>&h', 'alert()h'),
)
for value, output in items:
with self.subTest(value=value, output=output):
self.check_output(strip_tags, value, output)
self.check_output(strip_tags, lazystr(value), output)
def test_strip_tags_files(self):
# Test with more lengthy content (also catching performance regressions)
for filename in ('strip_tags1.html', 'strip_tags2.txt'):
with self.subTest(filename=filename):
path = os.path.join(os.path.dirname(__file__), 'files', filename)
with open(path, 'r') as fp:
content = fp.read()
start = datetime.now()
stripped = strip_tags(content)
elapsed = datetime.now() - start
self.assertEqual(elapsed.seconds, 0)
self.assertIn("Please try again.", stripped)
self.assertNotIn('<', stripped)
def test_strip_spaces_between_tags(self):
# Strings that should come out untouched.
items = (' <adf>', '<adf> ', ' </adf> ', ' <f> x</f>')
for value in items:
with self.subTest(value=value):
self.check_output(strip_spaces_between_tags, value)
self.check_output(strip_spaces_between_tags, lazystr(value))
# Strings that have spaces to strip.
items = (
('<d> </d>', '<d></d>'),
('<p>hello </p>\n<p> world</p>', '<p>hello </p><p> world</p>'),
('\n<p>\t</p>\n<p> </p>\n', '\n<p></p><p></p>\n'),
)
for value, output in items:
with self.subTest(value=value, output=output):
self.check_output(strip_spaces_between_tags, value, output)
self.check_output(strip_spaces_between_tags, lazystr(value), output)
def test_escapejs(self):
items = (
('"double quotes" and \'single quotes\'', '\\u0022double quotes\\u0022 and \\u0027single quotes\\u0027'),
(r'\ : backslashes, too', '\\u005C : backslashes, too'),
(
'and lots of whitespace: \r\n\t\v\f\b',
'and lots of whitespace: \\u000D\\u000A\\u0009\\u000B\\u000C\\u0008'
),
(r'<script>and this</script>', '\\u003Cscript\\u003Eand this\\u003C/script\\u003E'),
(
'paragraph separator:\u2029and line separator:\u2028',
'paragraph separator:\\u2029and line separator:\\u2028'
),
('`', '\\u0060'),
)
for value, output in items:
with self.subTest(value=value, output=output):
self.check_output(escapejs, value, output)
self.check_output(escapejs, lazystr(value), output)
def test_json_script(self):
tests = (
# "<", ">" and "&" are quoted inside JSON strings
(('&<>', '<script id="test_id" type="application/json">"\\u0026\\u003C\\u003E"</script>')),
# "<", ">" and "&" are quoted inside JSON objects
(
{'a': '<script>test&ing</script>'},
'<script id="test_id" type="application/json">'
'{"a": "\\u003Cscript\\u003Etest\\u0026ing\\u003C/script\\u003E"}</script>'
),
# Lazy strings are quoted
(lazystr('&<>'), '<script id="test_id" type="application/json">"\\u0026\\u003C\\u003E"</script>'),
(
{'a': lazystr('<script>test&ing</script>')},
'<script id="test_id" type="application/json">'
'{"a": "\\u003Cscript\\u003Etest\\u0026ing\\u003C/script\\u003E"}</script>'
),
)
for arg, expected in tests:
with self.subTest(arg=arg):
self.assertEqual(json_script(arg, 'test_id'), expected)
def test_smart_urlquote(self):
items = (
('http://öäü.com/', 'http://xn--4ca9at.com/'),
('http://öäü.com/öäü/', 'http://xn--4ca9at.com/%C3%B6%C3%A4%C3%BC/'),
# Everything unsafe is quoted, !*'();:@&=+$,/?#[]~ is considered
# safe as per RFC.
('http://example.com/path/öäü/', 'http://example.com/path/%C3%B6%C3%A4%C3%BC/'),
('http://example.com/%C3%B6/ä/', 'http://example.com/%C3%B6/%C3%A4/'),
('http://example.com/?x=1&y=2+3&z=', 'http://example.com/?x=1&y=2+3&z='),
('http://example.com/?x=<>"\'', 'http://example.com/?x=%3C%3E%22%27'),
('http://example.com/?q=http://example.com/?x=1%26q=django',
'http://example.com/?q=http%3A%2F%2Fexample.com%2F%3Fx%3D1%26q%3Ddjango'),
('http://example.com/?q=http%3A%2F%2Fexample.com%2F%3Fx%3D1%26q%3Ddjango',
'http://example.com/?q=http%3A%2F%2Fexample.com%2F%3Fx%3D1%26q%3Ddjango'),
('http://.www.f oo.bar/', 'http://.www.f%20oo.bar/'),
)
# IDNs are properly quoted
for value, output in items:
with self.subTest(value=value, output=output):
self.assertEqual(smart_urlquote(value), output)
def test_conditional_escape(self):
s = '<h1>interop</h1>'
self.assertEqual(conditional_escape(s), '<h1>interop</h1>')
self.assertEqual(conditional_escape(mark_safe(s)), s)
self.assertEqual(conditional_escape(lazystr(mark_safe(s))), s)
def test_html_safe(self):
@html_safe
class HtmlClass:
def __str__(self):
return "<h1>I'm a html class!</h1>"
html_obj = HtmlClass()
self.assertTrue(hasattr(HtmlClass, '__html__'))
self.assertTrue(hasattr(html_obj, '__html__'))
self.assertEqual(str(html_obj), html_obj.__html__())
def test_html_safe_subclass(self):
class BaseClass:
def __html__(self):
# defines __html__ on its own
return 'some html content'
def __str__(self):
return 'some non html content'
@html_safe
class Subclass(BaseClass):
def __str__(self):
# overrides __str__ and is marked as html_safe
return 'some html safe content'
subclass_obj = Subclass()
self.assertEqual(str(subclass_obj), subclass_obj.__html__())
def test_html_safe_defines_html_error(self):
msg = "can't apply @html_safe to HtmlClass because it defines __html__()."
with self.assertRaisesMessage(ValueError, msg):
@html_safe
class HtmlClass:
def __html__(self):
return "<h1>I'm a html class!</h1>"
def test_html_safe_doesnt_define_str(self):
msg = "can't apply @html_safe to HtmlClass because it doesn't define __str__()."
with self.assertRaisesMessage(ValueError, msg):
@html_safe
class HtmlClass:
pass
def test_urlize(self):
tests = (
(
'Search for google.com/?q=! and see.',
'Search for <a href="http://google.com/?q=">google.com/?q=</a>! and see.'
),
(
lazystr('Search for google.com/?q=!'),
'Search for <a href="http://google.com/?q=">google.com/?q=</a>!'
),
('[email protected]', '<a href="mailto:[email protected]">[email protected]</a>'),
)
for value, output in tests:
with self.subTest(value=value):
self.assertEqual(urlize(value), output)
def test_urlize_unchanged_inputs(self):
tests = (
('a' + '@a' * 50000) + 'a', # simple_email_re catastrophic test
('a' + '.' * 1000000) + 'a', # trailing_punctuation catastrophic test
'foo@',
'@foo.com',
'[email protected]',
'foo@localhost',
'foo@localhost.',
)
for value in tests:
with self.subTest(value=value):
self.assertEqual(urlize(value), value)
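# Illustrative sketch (not part of the original suite): escape() always escapes,
# while conditional_escape() leaves values already marked safe untouched, matching
# test_conditional_escape above. Only helpers imported in this module are used.
def _escape_sketch():
    raw = '<h1>interop</h1>'
    assert escape(raw) == '<h1>interop</h1>'
    assert conditional_escape(mark_safe(raw)) == raw  # safe values pass through unchanged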
|
fe89e3e0c3f85b6caee406d73f44100b42cab025b625f2d75ede97a8155988a4 | from django.template import Context, Template
from django.test import SimpleTestCase
from django.utils import html
from django.utils.functional import lazy, lazystr
from django.utils.safestring import SafeData, mark_safe
class customescape(str):
def __html__(self):
# implement specific and obviously wrong escaping
# in order to be able to tell for sure when it runs
return self.replace('<', '<<').replace('>', '>>')
class SafeStringTest(SimpleTestCase):
def assertRenderEqual(self, tpl, expected, **context):
context = Context(context)
tpl = Template(tpl)
self.assertEqual(tpl.render(context), expected)
def test_mark_safe(self):
s = mark_safe('a&b')
self.assertRenderEqual('{{ s }}', 'a&b', s=s)
self.assertRenderEqual('{{ s|force_escape }}', 'a&b', s=s)
def test_mark_safe_str(self):
"""
Calling str() on a SafeText instance doesn't lose the safe status.
"""
s = mark_safe('a&b')
self.assertIsInstance(str(s), type(s))
def test_mark_safe_object_implementing_dunder_html(self):
e = customescape('<a&b>')
s = mark_safe(e)
self.assertIs(s, e)
self.assertRenderEqual('{{ s }}', '<<a&b>>', s=s)
self.assertRenderEqual('{{ s|force_escape }}', '<a&b>', s=s)
def test_mark_safe_lazy(self):
s = lazystr('a&b')
self.assertIsInstance(mark_safe(s), SafeData)
self.assertRenderEqual('{{ s }}', 'a&b', s=mark_safe(s))
def test_mark_safe_object_implementing_dunder_str(self):
class Obj:
def __str__(self):
return '<obj>'
s = mark_safe(Obj())
self.assertRenderEqual('{{ s }}', '<obj>', s=s)
def test_mark_safe_result_implements_dunder_html(self):
self.assertEqual(mark_safe('a&b').__html__(), 'a&b')
def test_mark_safe_lazy_result_implements_dunder_html(self):
self.assertEqual(mark_safe(lazystr('a&b')).__html__(), 'a&b')
def test_add_lazy_safe_text_and_safe_text(self):
s = html.escape(lazystr('a'))
s += mark_safe('&b')
self.assertRenderEqual('{{ s }}', 'a&b', s=s)
s = html.escapejs(lazystr('a'))
s += mark_safe('&b')
self.assertRenderEqual('{{ s }}', 'a&b', s=s)
def test_mark_safe_as_decorator(self):
"""
mark_safe used as a decorator leaves the result of a function
unchanged.
"""
def clean_string_provider():
return '<html><body>dummy</body></html>'
self.assertEqual(mark_safe(clean_string_provider)(), clean_string_provider())
def test_mark_safe_decorator_does_not_affect_dunder_html(self):
"""
mark_safe doesn't affect a callable that has an __html__() method.
"""
class SafeStringContainer:
def __html__(self):
return '<html></html>'
self.assertIs(mark_safe(SafeStringContainer), SafeStringContainer)
def test_mark_safe_decorator_does_not_affect_promises(self):
"""
mark_safe doesn't affect lazy strings (Promise objects).
"""
def html_str():
return '<html></html>'
lazy_str = lazy(html_str, str)()
self.assertEqual(mark_safe(lazy_str), html_str())
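# Illustrative sketch (not part of the original suite): mark_safe() wraps a plain
# string in SafeData and the result exposes __html__(), matching
# test_mark_safe_result_implements_dunder_html and test_mark_safe_str above.
def _mark_safe_sketch():
    s = mark_safe('a&b')
    assert isinstance(s, SafeData)
    assert s.__html__() == 'a&b'
    assert str(s) == 'a&b'  # str() keeps the value and the safe status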
|
83d450064b19e26e563cfeecb0a99e12ae9dbba79219328c01c7f593a3242de6 | from decimal import Decimal
from sys import float_info
from django.test import SimpleTestCase
from django.utils.numberformat import format as nformat
class TestNumberFormat(SimpleTestCase):
def test_format_number(self):
self.assertEqual(nformat(1234, '.'), '1234')
self.assertEqual(nformat(1234.2, '.'), '1234.2')
self.assertEqual(nformat(1234, '.', decimal_pos=2), '1234.00')
self.assertEqual(nformat(1234, '.', grouping=2, thousand_sep=','), '1234')
self.assertEqual(nformat(1234, '.', grouping=2, thousand_sep=',', force_grouping=True), '12,34')
self.assertEqual(nformat(-1234.33, '.', decimal_pos=1), '-1234.3')
# The use_l10n parameter can force thousand grouping behavior.
with self.settings(USE_THOUSAND_SEPARATOR=True, USE_L10N=True):
self.assertEqual(nformat(1234, '.', grouping=3, thousand_sep=',', use_l10n=False), '1234')
with self.settings(USE_THOUSAND_SEPARATOR=True, USE_L10N=False):
self.assertEqual(nformat(1234, '.', grouping=3, thousand_sep=',', use_l10n=True), '1,234')
def test_format_string(self):
self.assertEqual(nformat('1234', '.'), '1234')
self.assertEqual(nformat('1234.2', '.'), '1234.2')
self.assertEqual(nformat('1234', '.', decimal_pos=2), '1234.00')
self.assertEqual(nformat('1234', '.', grouping=2, thousand_sep=','), '1234')
self.assertEqual(nformat('1234', '.', grouping=2, thousand_sep=',', force_grouping=True), '12,34')
self.assertEqual(nformat('-1234.33', '.', decimal_pos=1), '-1234.3')
self.assertEqual(nformat('10000', '.', grouping=3, thousand_sep='comma', force_grouping=True), '10comma000')
def test_large_number(self):
most_max = (
'{}179769313486231570814527423731704356798070567525844996'
'598917476803157260780028538760589558632766878171540458953'
'514382464234321326889464182768467546703537516986049910576'
'551282076245490090389328944075868508455133942304583236903'
'222948165808559332123348274797826204144723168738177180919'
'29988125040402618412485836{}'
)
most_max2 = (
'{}35953862697246314162905484746340871359614113505168999'
'31978349536063145215600570775211791172655337563430809179'
'07028764928468642653778928365536935093407075033972099821'
'15310256415249098018077865788815173701691026788460916647'
'38064458963316171186642466965495956524082894463374763543'
'61838599762500808052368249716736'
)
int_max = int(float_info.max)
self.assertEqual(nformat(int_max, '.'), most_max.format('', '8'))
self.assertEqual(nformat(int_max + 1, '.'), most_max.format('', '9'))
self.assertEqual(nformat(int_max * 2, '.'), most_max2.format(''))
self.assertEqual(nformat(0 - int_max, '.'), most_max.format('-', '8'))
self.assertEqual(nformat(-1 - int_max, '.'), most_max.format('-', '9'))
self.assertEqual(nformat(-2 * int_max, '.'), most_max2.format('-'))
def test_float_numbers(self):
# A float without a fractional part (3.) results in a ".0" when no
# decimal_pos is given. Contrast that with the Decimal('3.') case in
# test_decimal_numbers which doesn't return a fractional part.
self.assertEqual(nformat(3., '.'), '3.0')
def test_decimal_numbers(self):
self.assertEqual(nformat(Decimal('1234'), '.'), '1234')
self.assertEqual(nformat(Decimal('1234.2'), '.'), '1234.2')
self.assertEqual(nformat(Decimal('1234'), '.', decimal_pos=2), '1234.00')
self.assertEqual(nformat(Decimal('1234'), '.', grouping=2, thousand_sep=','), '1234')
self.assertEqual(nformat(Decimal('1234'), '.', grouping=2, thousand_sep=',', force_grouping=True), '12,34')
self.assertEqual(nformat(Decimal('-1234.33'), '.', decimal_pos=1), '-1234.3')
self.assertEqual(nformat(Decimal('0.00000001'), '.', decimal_pos=8), '0.00000001')
self.assertEqual(nformat(Decimal('9e-19'), '.', decimal_pos=2), '0.00')
self.assertEqual(nformat(Decimal('.00000000000099'), '.', decimal_pos=0), '0')
self.assertEqual(
nformat(Decimal('1e16'), '.', thousand_sep=',', grouping=3, force_grouping=True),
'10,000,000,000,000,000'
)
self.assertEqual(
nformat(Decimal('1e16'), '.', decimal_pos=2, thousand_sep=',', grouping=3, force_grouping=True),
'10,000,000,000,000,000.00'
)
self.assertEqual(nformat(Decimal('3.'), '.'), '3')
self.assertEqual(nformat(Decimal('3.0'), '.'), '3.0')
def test_decimal_subclass(self):
class EuroDecimal(Decimal):
"""
Wrapper for Decimal which prefixes each amount with the € symbol.
"""
def __format__(self, specifier, **kwargs):
amount = super().__format__(specifier, **kwargs)
return '€ {}'.format(amount)
price = EuroDecimal('1.23')
self.assertEqual(nformat(price, ','), '€ 1,23')
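# Illustrative sketch (not part of the original suite): thousand grouping only takes
# effect when force_grouping=True (or localized grouping is enabled via use_l10n),
# matching test_format_number above. The expected strings come from those assertions.
def _nformat_grouping_sketch():
    assert nformat(1234, '.', grouping=2, thousand_sep=',') == '1234'
    assert nformat(1234, '.', grouping=2, thousand_sep=',', force_grouping=True) == '12,34'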
|
5ad60b39431d7561d55319bbeb78df9ba419238434f1a54eea810b0ade301613 | import unittest
from django.test import SimpleTestCase
from django.utils.functional import cached_property, lazy
from django.utils.version import PY36
class FunctionalTests(SimpleTestCase):
def test_lazy(self):
t = lazy(lambda: tuple(range(3)), list, tuple)
for a, b in zip(t(), range(3)):
self.assertEqual(a, b)
def test_lazy_base_class(self):
"""lazy also finds base class methods in the proxy object"""
class Base:
def base_method(self):
pass
class Klazz(Base):
pass
t = lazy(lambda: Klazz(), Klazz)()
self.assertIn('base_method', dir(t))
def test_lazy_base_class_override(self):
"""lazy finds the correct (overridden) method implementation"""
class Base:
def method(self):
return 'Base'
class Klazz(Base):
def method(self):
return 'Klazz'
t = lazy(lambda: Klazz(), Base)()
self.assertEqual(t.method(), 'Klazz')
def test_lazy_object_to_string(self):
class Klazz:
def __str__(self):
return "Î am ā Ǩlâzz."
def __bytes__(self):
return b"\xc3\x8e am \xc4\x81 binary \xc7\xa8l\xc3\xa2zz."
t = lazy(lambda: Klazz(), Klazz)()
self.assertEqual(str(t), "Î am ā Ǩlâzz.")
self.assertEqual(bytes(t), b"\xc3\x8e am \xc4\x81 binary \xc7\xa8l\xc3\xa2zz.")
def assertCachedPropertyWorks(self, attr, Class):
with self.subTest(attr=attr):
def get(source):
return getattr(source, attr)
obj = Class()
class SubClass(Class):
pass
subobj = SubClass()
# Docstring is preserved.
self.assertEqual(get(Class).__doc__, 'Here is the docstring...')
self.assertEqual(get(SubClass).__doc__, 'Here is the docstring...')
# It's cached.
self.assertEqual(get(obj), get(obj))
self.assertEqual(get(subobj), get(subobj))
# The correct value is returned.
self.assertEqual(get(obj)[0], 1)
self.assertEqual(get(subobj)[0], 1)
# State isn't shared between instances.
obj2 = Class()
subobj2 = SubClass()
self.assertNotEqual(get(obj), get(obj2))
self.assertNotEqual(get(subobj), get(subobj2))
# It behaves like a property when there's no instance.
self.assertIsInstance(get(Class), cached_property)
self.assertIsInstance(get(SubClass), cached_property)
# 'other_value' doesn't become a property.
self.assertTrue(callable(obj.other_value))
self.assertTrue(callable(subobj.other_value))
def test_cached_property(self):
"""cached_property caches its value and behaves like a property."""
class Class:
@cached_property
def value(self):
"""Here is the docstring..."""
return 1, object()
@cached_property
def __foo__(self):
"""Here is the docstring..."""
return 1, object()
def other_value(self):
"""Here is the docstring..."""
return 1, object()
other = cached_property(other_value, name='other')
attrs = ['value', 'other', '__foo__']
for attr in attrs:
self.assertCachedPropertyWorks(attr, Class)
@unittest.skipUnless(PY36, '__set_name__ is new in Python 3.6')
def test_cached_property_auto_name(self):
"""
cached_property caches its value and behaves like a property
on mangled methods or when the name kwarg isn't set.
"""
class Class:
@cached_property
def __value(self):
"""Here is the docstring..."""
return 1, object()
def other_value(self):
"""Here is the docstring..."""
return 1, object()
other = cached_property(other_value)
other2 = cached_property(other_value, name='different_name')
attrs = ['_Class__value', 'other']
for attr in attrs:
self.assertCachedPropertyWorks(attr, Class)
# An explicit name is ignored.
obj = Class()
obj.other2
self.assertFalse(hasattr(obj, 'different_name'))
@unittest.skipUnless(PY36, '__set_name__ is new in Python 3.6')
def test_cached_property_reuse_different_names(self):
"""Disallow this case because the decorated function wouldn't be cached."""
with self.assertRaises(RuntimeError) as ctx:
class ReusedCachedProperty:
@cached_property
def a(self):
pass
b = a
self.assertEqual(
str(ctx.exception.__context__),
str(TypeError(
"Cannot assign the same cached_property to two different "
"names ('a' and 'b')."
))
)
@unittest.skipUnless(PY36, '__set_name__ is new in Python 3.6')
def test_cached_property_reuse_same_name(self):
"""
Reusing a cached_property on different classes under the same name is
allowed.
"""
counter = 0
@cached_property
def _cp(_self):
nonlocal counter
counter += 1
return counter
class A:
cp = _cp
class B:
cp = _cp
a = A()
b = B()
self.assertEqual(a.cp, 1)
self.assertEqual(b.cp, 2)
self.assertEqual(a.cp, 1)
@unittest.skipUnless(PY36, '__set_name__ is new in Python 3.6')
def test_cached_property_set_name_not_called(self):
cp = cached_property(lambda s: None)
class Foo:
pass
Foo.cp = cp
msg = 'Cannot use cached_property instance without calling __set_name__() on it.'
with self.assertRaisesMessage(TypeError, msg):
Foo().cp
@unittest.skipIf(PY36, '__set_name__ is new in Python 3.6')
def test_cached_property_mangled_error(self):
msg = (
'cached_property does not work with mangled methods on '
'Python < 3.6 without the appropriate `name` argument.'
)
with self.assertRaisesMessage(ValueError, msg):
@cached_property
def __value(self):
pass
with self.assertRaisesMessage(ValueError, msg):
def func(self):
pass
cached_property(func, name='__value')
@unittest.skipIf(PY36, '__set_name__ is new in Python 3.6')
def test_cached_property_name_validation(self):
msg = "%s can't be used as the name of a cached_property."
with self.assertRaisesMessage(ValueError, msg % "'<lambda>'"):
cached_property(lambda x: None)
with self.assertRaisesMessage(ValueError, msg % 42):
cached_property(str, name=42)
def test_lazy_equality(self):
"""
== and != work correctly for Promises.
"""
lazy_a = lazy(lambda: 4, int)
lazy_b = lazy(lambda: 4, int)
lazy_c = lazy(lambda: 5, int)
self.assertEqual(lazy_a(), lazy_b())
self.assertNotEqual(lazy_b(), lazy_c())
def test_lazy_repr_text(self):
original_object = 'Lazy translation text'
lazy_obj = lazy(lambda: original_object, str)
self.assertEqual(repr(original_object), repr(lazy_obj()))
def test_lazy_repr_int(self):
original_object = 15
lazy_obj = lazy(lambda: original_object, int)
self.assertEqual(repr(original_object), repr(lazy_obj()))
def test_lazy_repr_bytes(self):
original_object = b'J\xc3\xbcst a str\xc3\xadng'
lazy_obj = lazy(lambda: original_object, bytes)
self.assertEqual(repr(original_object), repr(lazy_obj()))
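# Illustrative sketch (not part of the original suite): cached_property computes the
# value once per instance and stores it on that instance, without sharing state
# between instances, as exercised in assertCachedPropertyWorks above.
class _CachedPropertySketch:
    @cached_property
    def value(self):
        """Computed once, then cached on the instance."""
        return object()

def _cached_property_sketch():
    obj = _CachedPropertySketch()
    first = obj.value
    assert first is obj.value                             # cached per instance
    assert _CachedPropertySketch().value is not first     # not shared across instances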
|
4b98909c6549ccf74f37b9908f6ff0d70e5cbc7912c948a9cd89b111635f4e55 | import contextlib
import os
import py_compile
import shutil
import sys
import tempfile
import threading
import time
import zipfile
from importlib import import_module
from pathlib import Path
from unittest import mock, skip
from django.apps.registry import Apps
from django.test import SimpleTestCase
from django.test.utils import extend_sys_path
from django.utils import autoreload
from django.utils.autoreload import WatchmanUnavailable
class TestIterModulesAndFiles(SimpleTestCase):
def import_and_cleanup(self, name):
import_module(name)
self.addCleanup(lambda: sys.path_importer_cache.clear())
self.addCleanup(lambda: sys.modules.pop(name, None))
def clear_autoreload_caches(self):
autoreload.iter_modules_and_files.cache_clear()
def assertFileFound(self, filename):
# Some temp directories are symlinks. Python resolves these fully while
# importing.
resolved_filename = filename.resolve()
self.clear_autoreload_caches()
# Test uncached access
self.assertIn(resolved_filename, list(autoreload.iter_all_python_module_files()))
# Test cached access
self.assertIn(resolved_filename, list(autoreload.iter_all_python_module_files()))
self.assertEqual(autoreload.iter_modules_and_files.cache_info().hits, 1)
def assertFileNotFound(self, filename):
resolved_filename = filename.resolve()
self.clear_autoreload_caches()
# Test uncached access
self.assertNotIn(resolved_filename, list(autoreload.iter_all_python_module_files()))
# Test cached access
self.assertNotIn(resolved_filename, list(autoreload.iter_all_python_module_files()))
self.assertEqual(autoreload.iter_modules_and_files.cache_info().hits, 1)
def temporary_file(self, filename):
dirname = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, dirname)
return Path(dirname) / filename
def test_paths_are_pathlib_instances(self):
for filename in autoreload.iter_all_python_module_files():
self.assertIsInstance(filename, Path)
def test_file_added(self):
"""
When a file is added, it's returned by iter_all_python_module_files().
"""
filename = self.temporary_file('test_deleted_removed_module.py')
filename.touch()
with extend_sys_path(str(filename.parent)):
self.import_and_cleanup('test_deleted_removed_module')
self.assertFileFound(filename.absolute())
def test_check_errors(self):
"""
When a file containing an error is imported in a function wrapped by
check_errors(), gen_filenames() returns it.
"""
filename = self.temporary_file('test_syntax_error.py')
filename.write_text("Ceci n'est pas du Python.")
with extend_sys_path(str(filename.parent)):
with self.assertRaises(SyntaxError):
autoreload.check_errors(import_module)('test_syntax_error')
self.assertFileFound(filename)
def test_check_errors_catches_all_exceptions(self):
"""
Since Python may raise arbitrary exceptions when importing code,
check_errors() must catch Exception, not just some subclasses.
"""
filename = self.temporary_file('test_exception.py')
filename.write_text('raise Exception')
with extend_sys_path(str(filename.parent)):
with self.assertRaises(Exception):
autoreload.check_errors(import_module)('test_exception')
self.assertFileFound(filename)
def test_zip_reload(self):
"""
Modules imported from zipped files have their archive location included
in the result.
"""
zip_file = self.temporary_file('zip_import.zip')
with zipfile.ZipFile(str(zip_file), 'w', zipfile.ZIP_DEFLATED) as zipf:
zipf.writestr('test_zipped_file.py', '')
with extend_sys_path(str(zip_file)):
self.import_and_cleanup('test_zipped_file')
self.assertFileFound(zip_file)
def test_bytecode_conversion_to_source(self):
""".pyc and .pyo files are included in the files list."""
filename = self.temporary_file('test_compiled.py')
filename.touch()
compiled_file = Path(py_compile.compile(str(filename), str(filename.with_suffix('.pyc'))))
filename.unlink()
with extend_sys_path(str(compiled_file.parent)):
self.import_and_cleanup('test_compiled')
self.assertFileFound(compiled_file)
class TestCommonRoots(SimpleTestCase):
def test_common_roots(self):
paths = (
Path('/first/second'),
Path('/first/second/third'),
Path('/first/'),
Path('/root/first/'),
)
results = autoreload.common_roots(paths)
self.assertCountEqual(results, [Path('/first/'), Path('/root/first/')])
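# Illustrative sketch (not part of the original suite): common_roots() collapses a
# set of paths down to the smallest set of ancestors that covers them, as asserted
# in test_common_roots above. The input here is a hypothetical example.
def _common_roots_sketch():
    roots = autoreload.common_roots([Path('/a/b'), Path('/a/b/c'), Path('/a')])
    assert set(roots) == {Path('/a')}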
class TestSysPathDirectories(SimpleTestCase):
def setUp(self):
self._directory = tempfile.TemporaryDirectory()
self.directory = Path(self._directory.name).resolve().absolute()
self.file = self.directory / 'test'
self.file.touch()
def tearDown(self):
self._directory.cleanup()
def test_sys_paths_with_directories(self):
with extend_sys_path(str(self.file)):
paths = list(autoreload.sys_path_directories())
self.assertIn(self.file.parent, paths)
def test_sys_paths_non_existing(self):
nonexistant_file = Path(self.directory.name) / 'does_not_exist'
with extend_sys_path(str(nonexistant_file)):
paths = list(autoreload.sys_path_directories())
self.assertNotIn(nonexistant_file, paths)
self.assertNotIn(nonexistant_file.parent, paths)
def test_sys_paths_absolute(self):
paths = list(autoreload.sys_path_directories())
self.assertTrue(all(p.is_absolute() for p in paths))
def test_sys_paths_directories(self):
with extend_sys_path(str(self.directory)):
paths = list(autoreload.sys_path_directories())
self.assertIn(self.directory, paths)
class GetReloaderTests(SimpleTestCase):
@mock.patch('django.utils.autoreload.WatchmanReloader')
def test_watchman_unavailable(self, mocked_watchman):
mocked_watchman.check_availability.side_effect = WatchmanUnavailable
self.assertIsInstance(autoreload.get_reloader(), autoreload.StatReloader)
@mock.patch.object(autoreload.WatchmanReloader, 'check_availability')
def test_watchman_available(self, mocked_available):
# If WatchmanUnavailable isn't raised, Watchman will be chosen.
mocked_available.return_value = None
result = autoreload.get_reloader()
self.assertIsInstance(result, autoreload.WatchmanReloader)
class RunWithReloaderTests(SimpleTestCase):
@mock.patch.dict(os.environ, {autoreload.DJANGO_AUTORELOAD_ENV: 'true'})
@mock.patch('django.utils.autoreload.get_reloader')
def test_swallows_keyboard_interrupt(self, mocked_get_reloader):
mocked_get_reloader.side_effect = KeyboardInterrupt()
autoreload.run_with_reloader(lambda: None) # No exception
@mock.patch.dict(os.environ, {autoreload.DJANGO_AUTORELOAD_ENV: 'false'})
@mock.patch('django.utils.autoreload.restart_with_reloader')
def test_calls_sys_exit(self, mocked_restart_reloader):
mocked_restart_reloader.return_value = 1
with self.assertRaises(SystemExit) as exc:
autoreload.run_with_reloader(lambda: None)
self.assertEqual(exc.exception.code, 1)
@mock.patch.dict(os.environ, {autoreload.DJANGO_AUTORELOAD_ENV: 'true'})
@mock.patch('django.utils.autoreload.start_django')
@mock.patch('django.utils.autoreload.get_reloader')
def test_calls_start_django(self, mocked_reloader, mocked_start_django):
mocked_reloader.return_value = mock.sentinel.RELOADER
autoreload.run_with_reloader(mock.sentinel.METHOD)
self.assertEqual(mocked_start_django.call_count, 1)
self.assertSequenceEqual(
mocked_start_django.call_args[0],
[mock.sentinel.RELOADER, mock.sentinel.METHOD]
)
class StartDjangoTests(SimpleTestCase):
@mock.patch('django.utils.autoreload.StatReloader')
def test_watchman_becomes_unavailable(self, mocked_stat):
mocked_stat.should_stop.return_value = True
fake_reloader = mock.MagicMock()
fake_reloader.should_stop = False
fake_reloader.run.side_effect = autoreload.WatchmanUnavailable()
autoreload.start_django(fake_reloader, lambda: None)
self.assertEqual(mocked_stat.call_count, 1)
@mock.patch('django.utils.autoreload.ensure_echo_on')
def test_echo_on_called(self, mocked_echo):
fake_reloader = mock.MagicMock()
autoreload.start_django(fake_reloader, lambda: None)
self.assertEqual(mocked_echo.call_count, 1)
@mock.patch('django.utils.autoreload.check_errors')
def test_check_errors_called(self, mocked_check_errors):
fake_method = mock.MagicMock(return_value=None)
fake_reloader = mock.MagicMock()
autoreload.start_django(fake_reloader, fake_method)
self.assertCountEqual(mocked_check_errors.call_args[0], [fake_method])
@mock.patch('threading.Thread')
@mock.patch('django.utils.autoreload.check_errors')
def test_starts_thread_with_args(self, mocked_check_errors, mocked_thread):
fake_reloader = mock.MagicMock()
fake_main_func = mock.MagicMock()
fake_thread = mock.MagicMock()
mocked_check_errors.return_value = fake_main_func
mocked_thread.return_value = fake_thread
autoreload.start_django(fake_reloader, fake_main_func, 123, abc=123)
self.assertEqual(mocked_thread.call_count, 1)
self.assertEqual(
mocked_thread.call_args[1],
{'target': fake_main_func, 'args': (123,), 'kwargs': {'abc': 123}}
)
self.assertSequenceEqual(fake_thread.setDaemon.call_args[0], [True])
self.assertTrue(fake_thread.start.called)
class TestCheckErrors(SimpleTestCase):
def test_mutates_error_files(self):
fake_method = mock.MagicMock(side_effect=RuntimeError())
wrapped = autoreload.check_errors(fake_method)
with mock.patch.object(autoreload, '_error_files') as mocked_error_files:
with self.assertRaises(RuntimeError):
wrapped()
self.assertEqual(mocked_error_files.append.call_count, 1)
class TestRaiseLastException(SimpleTestCase):
@mock.patch('django.utils.autoreload._exception', None)
def test_no_exception(self):
# Should raise no exception if _exception is None
autoreload.raise_last_exception()
def test_raises_exception(self):
class MyException(Exception):
pass
# Create an exception
try:
raise MyException('Test Message')
except MyException:
exc_info = sys.exc_info()
with mock.patch('django.utils.autoreload._exception', exc_info):
with self.assertRaises(MyException, msg='Test Message'):
autoreload.raise_last_exception()
class RestartWithReloaderTests(SimpleTestCase):
executable = '/usr/bin/python'
def patch_autoreload(self, argv):
patch_call = mock.patch('django.utils.autoreload.subprocess.call', return_value=0)
patches = [
mock.patch('django.utils.autoreload.sys.argv', argv),
mock.patch('django.utils.autoreload.sys.executable', self.executable),
mock.patch('django.utils.autoreload.sys.warnoptions', ['all']),
]
for p in patches:
p.start()
self.addCleanup(p.stop)
mock_call = patch_call.start()
self.addCleanup(patch_call.stop)
return mock_call
def test_manage_py(self):
argv = ['./manage.py', 'runserver']
mock_call = self.patch_autoreload(argv)
autoreload.restart_with_reloader()
self.assertEqual(mock_call.call_count, 1)
self.assertEqual(mock_call.call_args[0][0], [self.executable, '-Wall'] + argv)
def test_python_m_django(self):
main = '/usr/lib/pythonX.Y/site-packages/django/__main__.py'
argv = [main, 'runserver']
mock_call = self.patch_autoreload(argv)
with mock.patch('django.__main__.__file__', main):
autoreload.restart_with_reloader()
self.assertEqual(mock_call.call_count, 1)
self.assertEqual(mock_call.call_args[0][0], [self.executable, '-Wall', '-m', 'django'] + argv[1:])
class ReloaderTests(SimpleTestCase):
RELOADER_CLS = None
def setUp(self):
self._tempdir = tempfile.TemporaryDirectory()
self.tempdir = Path(self._tempdir.name).resolve().absolute()
self.existing_file = self.ensure_file(self.tempdir / 'test.py')
self.nonexistant_file = (self.tempdir / 'does_not_exist.py').absolute()
self.reloader = self.RELOADER_CLS()
def tearDown(self):
self._tempdir.cleanup()
self.reloader.stop()
def ensure_file(self, path):
path.parent.mkdir(exist_ok=True, parents=True)
path.touch()
# On Linux and Windows updating the mtime of a file using touch() will set a timestamp
# value that is in the past, as the time value for the last kernel tick is used rather
# than getting the correct absolute time.
# To make testing simpler, set the mtime to the observed time when this function
# is called.
self.set_mtime(path, time.time())
return path.absolute()
def set_mtime(self, fp, value):
os.utime(str(fp), (value, value))
def increment_mtime(self, fp, by=1):
current_time = time.time()
self.set_mtime(fp, current_time + by)
@contextlib.contextmanager
def tick_twice(self):
ticker = self.reloader.tick()
next(ticker)
yield
next(ticker)
class IntegrationTests:
@mock.patch('django.utils.autoreload.BaseReloader.notify_file_changed')
@mock.patch('django.utils.autoreload.iter_all_python_module_files', return_value=frozenset())
def test_file(self, mocked_modules, notify_mock):
self.reloader.watch_file(self.existing_file)
with self.tick_twice():
self.increment_mtime(self.existing_file)
self.assertEqual(notify_mock.call_count, 1)
self.assertCountEqual(notify_mock.call_args[0], [self.existing_file])
@mock.patch('django.utils.autoreload.BaseReloader.notify_file_changed')
@mock.patch('django.utils.autoreload.iter_all_python_module_files', return_value=frozenset())
def test_nonexistant_file(self, mocked_modules, notify_mock):
self.reloader.watch_file(self.nonexistant_file)
with self.tick_twice():
self.ensure_file(self.nonexistant_file)
self.assertEqual(notify_mock.call_count, 1)
self.assertCountEqual(notify_mock.call_args[0], [self.nonexistant_file])
@mock.patch('django.utils.autoreload.BaseReloader.notify_file_changed')
@mock.patch('django.utils.autoreload.iter_all_python_module_files', return_value=frozenset())
def test_nonexistant_file_in_non_existing_directory(self, mocked_modules, notify_mock):
non_existing_directory = self.tempdir / 'non_existing_dir'
nonexistant_file = non_existing_directory / 'test'
self.reloader.watch_file(nonexistant_file)
with self.tick_twice():
self.ensure_file(nonexistant_file)
self.assertEqual(notify_mock.call_count, 1)
self.assertCountEqual(notify_mock.call_args[0], [nonexistant_file])
@mock.patch('django.utils.autoreload.BaseReloader.notify_file_changed')
@mock.patch('django.utils.autoreload.iter_all_python_module_files', return_value=frozenset())
def test_glob(self, mocked_modules, notify_mock):
non_py_file = self.ensure_file(self.tempdir / 'non_py_file')
self.reloader.watch_dir(self.tempdir, '*.py')
with self.tick_twice():
self.increment_mtime(non_py_file)
self.increment_mtime(self.existing_file)
self.assertEqual(notify_mock.call_count, 1)
self.assertCountEqual(notify_mock.call_args[0], [self.existing_file])
@mock.patch('django.utils.autoreload.BaseReloader.notify_file_changed')
@mock.patch('django.utils.autoreload.iter_all_python_module_files', return_value=frozenset())
def test_glob_non_existing_directory(self, mocked_modules, notify_mock):
non_existing_directory = self.tempdir / 'does_not_exist'
nonexistant_file = non_existing_directory / 'test.py'
self.reloader.watch_dir(non_existing_directory, '*.py')
with self.tick_twice():
self.ensure_file(nonexistant_file)
self.set_mtime(nonexistant_file, time.time())
self.assertEqual(notify_mock.call_count, 1)
self.assertCountEqual(notify_mock.call_args[0], [nonexistant_file])
@mock.patch('django.utils.autoreload.BaseReloader.notify_file_changed')
@mock.patch('django.utils.autoreload.iter_all_python_module_files', return_value=frozenset())
def test_multiple_globs(self, mocked_modules, notify_mock):
self.ensure_file(self.tempdir / 'x.test')
self.reloader.watch_dir(self.tempdir, '*.py')
self.reloader.watch_dir(self.tempdir, '*.test')
with self.tick_twice():
self.increment_mtime(self.existing_file)
self.assertEqual(notify_mock.call_count, 1)
self.assertCountEqual(notify_mock.call_args[0], [self.existing_file])
@mock.patch('django.utils.autoreload.BaseReloader.notify_file_changed')
@mock.patch('django.utils.autoreload.iter_all_python_module_files', return_value=frozenset())
def test_overlapping_globs(self, mocked_modules, notify_mock):
self.reloader.watch_dir(self.tempdir, '*.py')
self.reloader.watch_dir(self.tempdir, '*.p*')
with self.tick_twice():
self.increment_mtime(self.existing_file)
self.assertEqual(notify_mock.call_count, 1)
self.assertCountEqual(notify_mock.call_args[0], [self.existing_file])
@mock.patch('django.utils.autoreload.BaseReloader.notify_file_changed')
@mock.patch('django.utils.autoreload.iter_all_python_module_files', return_value=frozenset())
def test_glob_recursive(self, mocked_modules, notify_mock):
non_py_file = self.ensure_file(self.tempdir / 'dir' / 'non_py_file')
py_file = self.ensure_file(self.tempdir / 'dir' / 'file.py')
self.reloader.watch_dir(self.tempdir, '**/*.py')
with self.tick_twice():
self.increment_mtime(non_py_file)
self.increment_mtime(py_file)
self.assertEqual(notify_mock.call_count, 1)
self.assertCountEqual(notify_mock.call_args[0], [py_file])
@mock.patch('django.utils.autoreload.BaseReloader.notify_file_changed')
@mock.patch('django.utils.autoreload.iter_all_python_module_files', return_value=frozenset())
def test_multiple_recursive_globs(self, mocked_modules, notify_mock):
non_py_file = self.ensure_file(self.tempdir / 'dir' / 'test.txt')
py_file = self.ensure_file(self.tempdir / 'dir' / 'file.py')
self.reloader.watch_dir(self.tempdir, '**/*.txt')
self.reloader.watch_dir(self.tempdir, '**/*.py')
with self.tick_twice():
self.increment_mtime(non_py_file)
self.increment_mtime(py_file)
self.assertEqual(notify_mock.call_count, 2)
self.assertCountEqual(notify_mock.call_args_list, [mock.call(py_file), mock.call(non_py_file)])
@mock.patch('django.utils.autoreload.BaseReloader.notify_file_changed')
@mock.patch('django.utils.autoreload.iter_all_python_module_files', return_value=frozenset())
def test_nested_glob_recursive(self, mocked_modules, notify_mock):
inner_py_file = self.ensure_file(self.tempdir / 'dir' / 'file.py')
self.reloader.watch_dir(self.tempdir, '**/*.py')
self.reloader.watch_dir(inner_py_file.parent, '**/*.py')
with self.tick_twice():
self.increment_mtime(inner_py_file)
self.assertEqual(notify_mock.call_count, 1)
self.assertCountEqual(notify_mock.call_args[0], [inner_py_file])
@mock.patch('django.utils.autoreload.BaseReloader.notify_file_changed')
@mock.patch('django.utils.autoreload.iter_all_python_module_files', return_value=frozenset())
def test_overlapping_glob_recursive(self, mocked_modules, notify_mock):
py_file = self.ensure_file(self.tempdir / 'dir' / 'file.py')
self.reloader.watch_dir(self.tempdir, '**/*.p*')
self.reloader.watch_dir(self.tempdir, '**/*.py*')
with self.tick_twice():
self.increment_mtime(py_file)
self.assertEqual(notify_mock.call_count, 1)
self.assertCountEqual(notify_mock.call_args[0], [py_file])
class BaseReloaderTests(ReloaderTests):
RELOADER_CLS = autoreload.BaseReloader
def test_watch_without_absolute(self):
with self.assertRaisesMessage(ValueError, 'test.py must be absolute.'):
self.reloader.watch_file('test.py')
def test_watch_with_single_file(self):
self.reloader.watch_file(self.existing_file)
watched_files = list(self.reloader.watched_files())
self.assertIn(self.existing_file, watched_files)
def test_watch_with_glob(self):
self.reloader.watch_dir(self.tempdir, '*.py')
watched_files = list(self.reloader.watched_files())
self.assertIn(self.existing_file, watched_files)
def test_watch_files_with_recursive_glob(self):
inner_file = self.ensure_file(self.tempdir / 'test' / 'test.py')
self.reloader.watch_dir(self.tempdir, '**/*.py')
watched_files = list(self.reloader.watched_files())
self.assertIn(self.existing_file, watched_files)
self.assertIn(inner_file, watched_files)
def test_run_loop_catches_stopiteration(self):
def mocked_tick():
yield
with mock.patch.object(self.reloader, 'tick', side_effect=mocked_tick) as tick:
self.reloader.run_loop()
self.assertEqual(tick.call_count, 1)
def test_run_loop_stop_and_return(self):
def mocked_tick(*args):
yield
self.reloader.stop()
return # Raises StopIteration
with mock.patch.object(self.reloader, 'tick', side_effect=mocked_tick) as tick:
self.reloader.run_loop()
self.assertEqual(tick.call_count, 1)
def test_wait_for_apps_ready_checks_for_exception(self):
app_reg = Apps()
app_reg.ready_event.set()
# thread.is_alive() is False if it's not started.
dead_thread = threading.Thread()
self.assertFalse(self.reloader.wait_for_apps_ready(app_reg, dead_thread))
def test_wait_for_apps_ready_without_exception(self):
app_reg = Apps()
app_reg.ready_event.set()
thread = mock.MagicMock()
thread.is_alive.return_value = True
self.assertTrue(self.reloader.wait_for_apps_ready(app_reg, thread))
def skip_unless_watchman_available():
try:
autoreload.WatchmanReloader.check_availability()
except WatchmanUnavailable as e:
return skip('Watchman unavailable: %s' % e)
return lambda func: func
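# skip_unless_watchman_available() is a decorator factory: it probes Watchman when
# the class below is decorated (i.e. at import time) and returns either unittest's
# skip() decorator or a pass-through lambda.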
@skip_unless_watchman_available()
class WatchmanReloaderTests(ReloaderTests, IntegrationTests):
RELOADER_CLS = autoreload.WatchmanReloader
def test_watch_glob_ignores_non_existing_directories_two_levels(self):
with mock.patch.object(self.reloader, '_subscribe') as mocked_subscribe:
self.reloader._watch_glob(self.tempdir / 'does_not_exist' / 'more', ['*'])
self.assertFalse(mocked_subscribe.called)
def test_watch_glob_uses_existing_parent_directories(self):
with mock.patch.object(self.reloader, '_subscribe') as mocked_subscribe:
self.reloader._watch_glob(self.tempdir / 'does_not_exist', ['*'])
self.assertSequenceEqual(
mocked_subscribe.call_args[0],
[
self.tempdir, 'glob-parent-does_not_exist:%s' % self.tempdir,
['anyof', ['match', 'does_not_exist/*', 'wholename']]
]
)
def test_watch_glob_multiple_patterns(self):
with mock.patch.object(self.reloader, '_subscribe') as mocked_subscribe:
self.reloader._watch_glob(self.tempdir, ['*', '*.py'])
self.assertSequenceEqual(
mocked_subscribe.call_args[0],
[
self.tempdir, 'glob:%s' % self.tempdir,
['anyof', ['match', '*', 'wholename'], ['match', '*.py', 'wholename']]
]
)
def test_watched_roots_contains_files(self):
paths = self.reloader.watched_roots([self.existing_file])
self.assertIn(self.existing_file.parent, paths)
def test_watched_roots_contains_directory_globs(self):
self.reloader.watch_dir(self.tempdir, '*.py')
paths = self.reloader.watched_roots([])
self.assertIn(self.tempdir, paths)
def test_watched_roots_contains_sys_path(self):
with extend_sys_path(str(self.tempdir)):
paths = self.reloader.watched_roots([])
self.assertIn(self.tempdir, paths)
def test_check_server_status(self):
self.assertTrue(self.reloader.check_server_status())
def test_check_server_status_raises_error(self):
with mock.patch.object(self.reloader.client, 'query') as mocked_query:
mocked_query.side_effect = Exception()
with self.assertRaises(autoreload.WatchmanUnavailable):
self.reloader.check_server_status()
@mock.patch('pywatchman.client')
def test_check_availability(self, mocked_client):
mocked_client().capabilityCheck.side_effect = Exception()
with self.assertRaisesMessage(WatchmanUnavailable, 'Cannot connect to the watchman service'):
self.RELOADER_CLS.check_availability()
@mock.patch('pywatchman.client')
def test_check_availability_lower_version(self, mocked_client):
mocked_client().capabilityCheck.return_value = {'version': '4.8.10'}
with self.assertRaisesMessage(WatchmanUnavailable, 'Watchman 4.9 or later is required.'):
self.RELOADER_CLS.check_availability()
def test_pywatchman_not_available(self):
with mock.patch.object(autoreload, 'pywatchman') as mocked:
mocked.__bool__.return_value = False
with self.assertRaisesMessage(WatchmanUnavailable, 'pywatchman not installed.'):
self.RELOADER_CLS.check_availability()
def test_update_watches_raises_exceptions(self):
class TestException(Exception):
pass
with mock.patch.object(self.reloader, '_update_watches') as mocked_watches:
with mock.patch.object(self.reloader, 'check_server_status') as mocked_server_status:
mocked_watches.side_effect = TestException()
mocked_server_status.return_value = True
with self.assertRaises(TestException):
self.reloader.update_watches()
self.assertIsInstance(mocked_server_status.call_args[0][0], TestException)
class StatReloaderTests(ReloaderTests, IntegrationTests):
RELOADER_CLS = autoreload.StatReloader
def setUp(self):
super().setUp()
# Shorten the sleep time to speed up tests.
self.reloader.SLEEP_TIME = 0.01
def test_snapshot_files_ignores_missing_files(self):
with mock.patch.object(self.reloader, 'watched_files', return_value=[self.nonexistant_file]):
self.assertEqual(dict(self.reloader.snapshot_files()), {})
def test_snapshot_files_updates(self):
with mock.patch.object(self.reloader, 'watched_files', return_value=[self.existing_file]):
snapshot1 = dict(self.reloader.snapshot_files())
self.assertIn(self.existing_file, snapshot1)
self.increment_mtime(self.existing_file)
snapshot2 = dict(self.reloader.snapshot_files())
self.assertNotEqual(snapshot1[self.existing_file], snapshot2[self.existing_file])
def test_does_not_fire_without_changes(self):
with mock.patch.object(self.reloader, 'watched_files', return_value=[self.existing_file]), \
mock.patch.object(self.reloader, 'notify_file_changed') as notifier:
mtime = self.existing_file.stat().st_mtime
initial_snapshot = {self.existing_file: mtime}
second_snapshot = self.reloader.loop_files(initial_snapshot, time.time())
self.assertEqual(second_snapshot, {})
notifier.assert_not_called()
def test_fires_when_created(self):
with mock.patch.object(self.reloader, 'watched_files', return_value=[self.nonexistant_file]), \
mock.patch.object(self.reloader, 'notify_file_changed') as notifier:
self.nonexistant_file.touch()
mtime = self.nonexistant_file.stat().st_mtime
second_snapshot = self.reloader.loop_files({}, mtime - 1)
self.assertCountEqual(second_snapshot.keys(), [self.nonexistant_file])
notifier.assert_called_once_with(self.nonexistant_file)
def test_fires_with_changes(self):
with mock.patch.object(self.reloader, 'watched_files', return_value=[self.existing_file]), \
mock.patch.object(self.reloader, 'notify_file_changed') as notifier:
initial_snapshot = {self.existing_file: 1}
second_snapshot = self.reloader.loop_files(initial_snapshot, time.time())
notifier.assert_called_once_with(self.existing_file)
self.assertCountEqual(second_snapshot.keys(), [self.existing_file])
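# The tests above exercise the stat-based strategy: StatReloader repeatedly
# snapshots the mtimes of its watched files, and notify_file_changed() fires only
# for entries whose mtime differs from the previous snapshot.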
|
bc00311c668ea03dafbafa07f753ef886b220a0133104a2788ba9e931d476b3e | """
Tests for stuff in django.utils.datastructures.
"""
import copy
from django.test import SimpleTestCase
from django.utils.datastructures import (
CaseInsensitiveMapping, DictWrapper, ImmutableList, MultiValueDict,
MultiValueDictKeyError, OrderedSet,
)
class OrderedSetTests(SimpleTestCase):
def test_bool(self):
# Refs #23664
s = OrderedSet()
self.assertFalse(s)
s.add(1)
self.assertTrue(s)
def test_len(self):
s = OrderedSet()
self.assertEqual(len(s), 0)
s.add(1)
s.add(2)
s.add(2)
self.assertEqual(len(s), 2)
class MultiValueDictTests(SimpleTestCase):
def test_multivaluedict(self):
d = MultiValueDict({'name': ['Adrian', 'Simon'], 'position': ['Developer']})
self.assertEqual(d['name'], 'Simon')
self.assertEqual(d.get('name'), 'Simon')
self.assertEqual(d.getlist('name'), ['Adrian', 'Simon'])
self.assertEqual(
sorted(d.items()),
[('name', 'Simon'), ('position', 'Developer')]
)
self.assertEqual(
sorted(d.lists()),
[('name', ['Adrian', 'Simon']), ('position', ['Developer'])]
)
with self.assertRaises(MultiValueDictKeyError) as cm:
d.__getitem__('lastname')
self.assertEqual(str(cm.exception), "'lastname'")
self.assertIsNone(d.get('lastname'))
self.assertEqual(d.get('lastname', 'nonexistent'), 'nonexistent')
self.assertEqual(d.getlist('lastname'), [])
self.assertEqual(d.getlist('doesnotexist', ['Adrian', 'Simon']), ['Adrian', 'Simon'])
d.setlist('lastname', ['Holovaty', 'Willison'])
self.assertEqual(d.getlist('lastname'), ['Holovaty', 'Willison'])
self.assertEqual(sorted(d.values()), ['Developer', 'Simon', 'Willison'])
def test_appendlist(self):
d = MultiValueDict()
d.appendlist('name', 'Adrian')
d.appendlist('name', 'Simon')
self.assertEqual(d.getlist('name'), ['Adrian', 'Simon'])
def test_copy(self):
for copy_func in [copy.copy, lambda d: d.copy()]:
d1 = MultiValueDict({
"developers": ["Carl", "Fred"]
})
self.assertEqual(d1["developers"], "Fred")
d2 = copy_func(d1)
d2.update({"developers": "Groucho"})
self.assertEqual(d2["developers"], "Groucho")
self.assertEqual(d1["developers"], "Fred")
d1 = MultiValueDict({
"key": [[]]
})
self.assertEqual(d1["key"], [])
d2 = copy_func(d1)
d2["key"].append("Penguin")
self.assertEqual(d1["key"], ["Penguin"])
self.assertEqual(d2["key"], ["Penguin"])
def test_dict_translation(self):
mvd = MultiValueDict({
'devs': ['Bob', 'Joe'],
'pm': ['Rory'],
})
d = mvd.dict()
self.assertEqual(sorted(d), sorted(mvd))
for key in mvd:
self.assertEqual(d[key], mvd[key])
self.assertEqual({}, MultiValueDict().dict())
def test_getlist_doesnt_mutate(self):
x = MultiValueDict({'a': ['1', '2'], 'b': ['3']})
values = x.getlist('a')
values += x.getlist('b')
self.assertEqual(x.getlist('a'), ['1', '2'])
def test_internal_getlist_does_mutate(self):
x = MultiValueDict({'a': ['1', '2'], 'b': ['3']})
values = x._getlist('a')
values += x._getlist('b')
self.assertEqual(x._getlist('a'), ['1', '2', '3'])
def test_getlist_default(self):
x = MultiValueDict({'a': [1]})
MISSING = object()
values = x.getlist('b', default=MISSING)
self.assertIs(values, MISSING)
def test_getlist_none_empty_values(self):
x = MultiValueDict({'a': None, 'b': []})
self.assertIsNone(x.getlist('a'))
self.assertEqual(x.getlist('b'), [])
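# Illustrative sketch (not part of the original suite): indexing a MultiValueDict
# returns the last value stored for a key, while getlist() returns every value,
# matching test_multivaluedict above.
def _multivaluedict_sketch():
    d = MultiValueDict({'name': ['Adrian', 'Simon'], 'position': ['Developer']})
    assert d['name'] == 'Simon'
    assert d.getlist('name') == ['Adrian', 'Simon']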
class ImmutableListTests(SimpleTestCase):
def test_sort(self):
d = ImmutableList(range(10))
# AttributeError: ImmutableList object is immutable.
with self.assertRaisesMessage(AttributeError, 'ImmutableList object is immutable.'):
d.sort()
self.assertEqual(repr(d), '(0, 1, 2, 3, 4, 5, 6, 7, 8, 9)')
def test_custom_warning(self):
d = ImmutableList(range(10), warning="Object is immutable!")
self.assertEqual(d[1], 1)
# AttributeError: Object is immutable!
with self.assertRaisesMessage(AttributeError, 'Object is immutable!'):
d.__setitem__(1, 'test')
class DictWrapperTests(SimpleTestCase):
def test_dictwrapper(self):
def f(x):
return "*%s" % x
d = DictWrapper({'a': 'a'}, f, 'xx_')
self.assertEqual(
"Normal: %(a)s. Modified: %(xx_a)s" % d,
'Normal: a. Modified: *a'
)
class CaseInsensitiveMappingTests(SimpleTestCase):
def setUp(self):
self.dict1 = CaseInsensitiveMapping({
'Accept': 'application/json',
'content-type': 'text/html',
})
def test_create_with_invalid_values(self):
msg = 'dictionary update sequence element #1 has length 4; 2 is required'
with self.assertRaisesMessage(ValueError, msg):
CaseInsensitiveMapping([('Key1', 'Val1'), 'Key2'])
def test_create_with_invalid_key(self):
msg = 'Element key 1 invalid, only strings are allowed'
with self.assertRaisesMessage(ValueError, msg):
CaseInsensitiveMapping([(1, '2')])
def test_list(self):
self.assertEqual(sorted(list(self.dict1)), sorted(['Accept', 'content-type']))
def test_dict(self):
self.assertEqual(dict(self.dict1), {'Accept': 'application/json', 'content-type': 'text/html'})
def test_repr(self):
dict1 = CaseInsensitiveMapping({'Accept': 'application/json'})
dict2 = CaseInsensitiveMapping({'content-type': 'text/html'})
self.assertEqual(repr(dict1), repr({'Accept': 'application/json'}))
self.assertEqual(repr(dict2), repr({'content-type': 'text/html'}))
def test_str(self):
dict1 = CaseInsensitiveMapping({'Accept': 'application/json'})
dict2 = CaseInsensitiveMapping({'content-type': 'text/html'})
self.assertEqual(str(dict1), str({'Accept': 'application/json'}))
self.assertEqual(str(dict2), str({'content-type': 'text/html'}))
def test_equal(self):
self.assertEqual(self.dict1, {'Accept': 'application/json', 'content-type': 'text/html'})
self.assertNotEqual(self.dict1, {'accept': 'application/jso', 'Content-Type': 'text/html'})
self.assertNotEqual(self.dict1, 'string')
def test_items(self):
other = {'Accept': 'application/json', 'content-type': 'text/html'}
self.assertEqual(sorted(self.dict1.items()), sorted(other.items()))
def test_copy(self):
copy = self.dict1.copy()
self.assertIs(copy, self.dict1)
self.assertEqual(copy, self.dict1)
def test_getitem(self):
self.assertEqual(self.dict1['Accept'], 'application/json')
self.assertEqual(self.dict1['accept'], 'application/json')
self.assertEqual(self.dict1['aCCept'], 'application/json')
self.assertEqual(self.dict1['content-type'], 'text/html')
self.assertEqual(self.dict1['Content-Type'], 'text/html')
self.assertEqual(self.dict1['Content-type'], 'text/html')
def test_in(self):
self.assertIn('Accept', self.dict1)
self.assertIn('accept', self.dict1)
self.assertIn('aCCept', self.dict1)
self.assertIn('content-type', self.dict1)
self.assertIn('Content-Type', self.dict1)
def test_del(self):
self.assertIn('Accept', self.dict1)
msg = "'CaseInsensitiveMapping' object does not support item deletion"
with self.assertRaisesMessage(TypeError, msg):
del self.dict1['Accept']
self.assertIn('Accept', self.dict1)
def test_set(self):
self.assertEqual(len(self.dict1), 2)
msg = "'CaseInsensitiveMapping' object does not support item assignment"
with self.assertRaisesMessage(TypeError, msg):
self.dict1['New Key'] = 1
self.assertEqual(len(self.dict1), 2)
|
cfd4d9bae1475dca4ce2985cf9f10d5359ba7c1f69ad06618101f99e94500ea7 | import unittest
from datetime import datetime
from django.test import SimpleTestCase
from django.utils.datastructures import MultiValueDict
from django.utils.http import (
base36_to_int, escape_leading_slashes, http_date, int_to_base36,
is_safe_url, is_same_domain, parse_etags, parse_http_date, quote_etag,
urlencode, urlquote, urlquote_plus, urlsafe_base64_decode,
urlsafe_base64_encode, urlunquote, urlunquote_plus,
)
class URLEncodeTests(SimpleTestCase):
cannot_encode_none_msg = (
'Cannot encode None in a query string. Did you mean to pass an '
'empty string or omit the value?'
)
def test_tuples(self):
self.assertEqual(urlencode((('a', 1), ('b', 2), ('c', 3))), 'a=1&b=2&c=3')
def test_dict(self):
result = urlencode({'a': 1, 'b': 2, 'c': 3})
# Dictionaries are treated as unordered.
self.assertIn(result, [
'a=1&b=2&c=3',
'a=1&c=3&b=2',
'b=2&a=1&c=3',
'b=2&c=3&a=1',
'c=3&a=1&b=2',
'c=3&b=2&a=1',
])
def test_dict_containing_sequence_not_doseq(self):
self.assertEqual(urlencode({'a': [1, 2]}, doseq=False), 'a=%5B%271%27%2C+%272%27%5D')
def test_dict_containing_sequence_doseq(self):
self.assertEqual(urlencode({'a': [1, 2]}, doseq=True), 'a=1&a=2')
def test_dict_containing_empty_sequence_doseq(self):
self.assertEqual(urlencode({'a': []}, doseq=True), '')
def test_multivaluedict(self):
result = urlencode(MultiValueDict({
'name': ['Adrian', 'Simon'],
'position': ['Developer'],
}), doseq=True)
# MultiValueDicts are similarly unordered.
self.assertIn(result, [
'name=Adrian&name=Simon&position=Developer',
'position=Developer&name=Adrian&name=Simon',
])
def test_dict_with_bytes_values(self):
self.assertEqual(urlencode({'a': b'abc'}, doseq=True), 'a=abc')
def test_dict_with_sequence_of_bytes(self):
self.assertEqual(urlencode({'a': [b'spam', b'eggs', b'bacon']}, doseq=True), 'a=spam&a=eggs&a=bacon')
def test_dict_with_bytearray(self):
self.assertEqual(urlencode({'a': bytearray(range(2))}, doseq=True), 'a=0&a=1')
self.assertEqual(urlencode({'a': bytearray(range(2))}, doseq=False), 'a=%5B%270%27%2C+%271%27%5D')
def test_generator(self):
def gen():
yield from range(2)
self.assertEqual(urlencode({'a': gen()}, doseq=True), 'a=0&a=1')
self.assertEqual(urlencode({'a': gen()}, doseq=False), 'a=%5B%270%27%2C+%271%27%5D')
def test_none(self):
with self.assertRaisesMessage(TypeError, self.cannot_encode_none_msg):
urlencode({'a': None})
def test_none_in_sequence(self):
with self.assertRaisesMessage(TypeError, self.cannot_encode_none_msg):
urlencode({'a': [None]}, doseq=True)
def test_none_in_generator(self):
def gen():
yield None
with self.assertRaisesMessage(TypeError, self.cannot_encode_none_msg):
urlencode({'a': gen()}, doseq=True)
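# A minimal sketch restating the doseq contract covered by the tests above (no new
# behaviour is claimed): with doseq=True each element of a sequence value becomes
# its own key=value pair, while doseq=False percent-encodes str() of the sequence.
#
#     urlencode({'a': [1, 2]}, doseq=True)    # 'a=1&a=2'
#     urlencode({'a': [1, 2]}, doseq=False)   # 'a=%5B%271%27%2C+%272%27%5D'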
class Base36IntTests(SimpleTestCase):
def test_roundtrip(self):
for n in [0, 1, 1000, 1000000]:
self.assertEqual(n, base36_to_int(int_to_base36(n)))
def test_negative_input(self):
with self.assertRaisesMessage(ValueError, 'Negative base36 conversion input.'):
int_to_base36(-1)
def test_to_base36_errors(self):
for n in ['1', 'foo', {1: 2}, (1, 2, 3), 3.141]:
with self.assertRaises(TypeError):
int_to_base36(n)
def test_invalid_literal(self):
for n in ['#', ' ']:
with self.assertRaisesMessage(ValueError, "invalid literal for int() with base 36: '%s'" % n):
base36_to_int(n)
def test_input_too_large(self):
with self.assertRaisesMessage(ValueError, 'Base36 input too large'):
base36_to_int('1' * 14)
def test_to_int_errors(self):
for n in [123, {1: 2}, (1, 2, 3), 3.141]:
with self.assertRaises(TypeError):
base36_to_int(n)
def test_values(self):
for n, b36 in [(0, '0'), (1, '1'), (42, '16'), (818469960, 'django')]:
self.assertEqual(int_to_base36(n), b36)
self.assertEqual(base36_to_int(b36), n)
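# Worked example (illustrative only) of the mapping checked above: base 36 uses
# digits 0-9 followed by a-z, so 42 == 1 * 36 + 6 encodes as '16', and '16'
# decodes back to 42 via int('16', 36).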
class IsSafeURLTests(unittest.TestCase):
def test_bad_urls(self):
bad_urls = (
'http://example.com',
'http:///example.com',
'https://example.com',
'ftp://example.com',
r'\\example.com',
r'\\\example.com',
r'/\\/example.com',
r'\\\example.com',
r'\\example.com',
r'\\//example.com',
r'/\/example.com',
r'\/example.com',
r'/\example.com',
'http:///example.com',
r'http:/\//example.com',
r'http:\/example.com',
r'http:/\example.com',
'javascript:alert("XSS")',
'\njavascript:alert(x)',
'\x08//example.com',
r'http://otherserver\@example.com',
r'http:\\testserver\@example.com',
r'http://testserver\me:[email protected]',
r'http://testserver\@example.com',
r'http:\\testserver\confirm\[email protected]',
'http:999999999',
'ftp:9999999999',
'\n',
'http://[2001:cdba:0000:0000:0000:0000:3257:9652/',
'http://2001:cdba:0000:0000:0000:0000:3257:9652]/',
)
for bad_url in bad_urls:
with self.subTest(url=bad_url):
self.assertIs(is_safe_url(bad_url, allowed_hosts={'testserver', 'testserver2'}), False)
def test_good_urls(self):
good_urls = (
'/view/?param=http://example.com',
'/view/?param=https://example.com',
'/view?param=ftp://example.com',
'view/?param=//example.com',
'https://testserver/',
'HTTPS://testserver/',
'//testserver/',
'http://testserver/[email protected]',
'/url%20with%20spaces/',
'path/http:2222222222',
)
for good_url in good_urls:
with self.subTest(url=good_url):
self.assertIs(is_safe_url(good_url, allowed_hosts={'otherserver', 'testserver'}), True)
def test_basic_auth(self):
# Valid basic auth credentials are allowed.
self.assertIs(is_safe_url(r'http://user:pass@testserver/', allowed_hosts={'user:pass@testserver'}), True)
def test_no_allowed_hosts(self):
# A path without host is allowed.
self.assertIs(is_safe_url('/confirm/[email protected]', allowed_hosts=None), True)
# Basic auth without host is not allowed.
self.assertIs(is_safe_url(r'http://testserver\@example.com', allowed_hosts=None), False)
def test_allowed_hosts_str(self):
self.assertIs(is_safe_url('http://good.com/good', allowed_hosts='good.com'), True)
self.assertIs(is_safe_url('http://good.co/evil', allowed_hosts='good.com'), False)
def test_secure_param_https_urls(self):
secure_urls = (
'https://example.com/p',
'HTTPS://example.com/p',
'/view/?param=http://example.com',
)
for url in secure_urls:
with self.subTest(url=url):
self.assertIs(is_safe_url(url, allowed_hosts={'example.com'}, require_https=True), True)
def test_secure_param_non_https_urls(self):
insecure_urls = (
'http://example.com/p',
'ftp://example.com/p',
'//example.com/p',
)
for url in insecure_urls:
with self.subTest(url=url):
self.assertIs(is_safe_url(url, allowed_hosts={'example.com'}, require_https=True), False)
class URLSafeBase64Tests(unittest.TestCase):
def test_roundtrip(self):
bytestring = b'foo'
encoded = urlsafe_base64_encode(bytestring)
decoded = urlsafe_base64_decode(encoded)
self.assertEqual(bytestring, decoded)
class URLQuoteTests(unittest.TestCase):
def test_quote(self):
self.assertEqual(urlquote('Paris & Orl\xe9ans'), 'Paris%20%26%20Orl%C3%A9ans')
self.assertEqual(urlquote('Paris & Orl\xe9ans', safe="&"), 'Paris%20&%20Orl%C3%A9ans')
def test_unquote(self):
self.assertEqual(urlunquote('Paris%20%26%20Orl%C3%A9ans'), 'Paris & Orl\xe9ans')
self.assertEqual(urlunquote('Paris%20&%20Orl%C3%A9ans'), 'Paris & Orl\xe9ans')
def test_quote_plus(self):
self.assertEqual(urlquote_plus('Paris & Orl\xe9ans'), 'Paris+%26+Orl%C3%A9ans')
self.assertEqual(urlquote_plus('Paris & Orl\xe9ans', safe="&"), 'Paris+&+Orl%C3%A9ans')
def test_unquote_plus(self):
self.assertEqual(urlunquote_plus('Paris+%26+Orl%C3%A9ans'), 'Paris & Orl\xe9ans')
self.assertEqual(urlunquote_plus('Paris+&+Orl%C3%A9ans'), 'Paris & Orl\xe9ans')
class IsSameDomainTests(unittest.TestCase):
def test_good(self):
for pair in (
('example.com', 'example.com'),
('example.com', '.example.com'),
('foo.example.com', '.example.com'),
('example.com:8888', 'example.com:8888'),
('example.com:8888', '.example.com:8888'),
('foo.example.com:8888', '.example.com:8888'),
):
self.assertIs(is_same_domain(*pair), True)
def test_bad(self):
for pair in (
('example2.com', 'example.com'),
('foo.example.com', 'example.com'),
('example.com:9999', 'example.com:8888'),
('foo.example.com:8888', ''),
):
self.assertIs(is_same_domain(*pair), False)
class ETagProcessingTests(unittest.TestCase):
def test_parsing(self):
self.assertEqual(
parse_etags(r'"" , "etag", "e\\tag", W/"weak"'),
['""', '"etag"', r'"e\\tag"', 'W/"weak"']
)
self.assertEqual(parse_etags('*'), ['*'])
# Ignore RFC 2616 ETags that are invalid according to RFC 7232.
self.assertEqual(parse_etags(r'"etag", "e\"t\"ag"'), ['"etag"'])
def test_quoting(self):
self.assertEqual(quote_etag('etag'), '"etag"') # unquoted
self.assertEqual(quote_etag('"etag"'), '"etag"') # quoted
self.assertEqual(quote_etag('W/"etag"'), 'W/"etag"') # quoted, weak
class HttpDateProcessingTests(unittest.TestCase):
def test_http_date(self):
t = 1167616461.0
self.assertEqual(http_date(t), 'Mon, 01 Jan 2007 01:54:21 GMT')
def test_parsing_rfc1123(self):
parsed = parse_http_date('Sun, 06 Nov 1994 08:49:37 GMT')
self.assertEqual(datetime.utcfromtimestamp(parsed), datetime(1994, 11, 6, 8, 49, 37))
def test_parsing_rfc850(self):
parsed = parse_http_date('Sunday, 06-Nov-94 08:49:37 GMT')
self.assertEqual(datetime.utcfromtimestamp(parsed), datetime(1994, 11, 6, 8, 49, 37))
def test_parsing_asctime(self):
parsed = parse_http_date('Sun Nov 6 08:49:37 1994')
self.assertEqual(datetime.utcfromtimestamp(parsed), datetime(1994, 11, 6, 8, 49, 37))
def test_parsing_year_less_than_70(self):
parsed = parse_http_date('Sun Nov 6 08:49:37 0050')
self.assertEqual(datetime.utcfromtimestamp(parsed), datetime(2050, 11, 6, 8, 49, 37))
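# Note (inferred from the test above and its name, not from the implementation):
# small year values appear to be disambiguated with a cutoff at 70, so 0050 is
# read as 2050 while the two-digit 94 in the RFC 850 test is read as 1994.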
class EscapeLeadingSlashesTests(unittest.TestCase):
def test(self):
tests = (
('//example.com', '/%2Fexample.com'),
('//', '/%2F'),
)
for url, expected in tests:
with self.subTest(url=url):
self.assertEqual(escape_leading_slashes(url), expected)
|
4ea54d2bef691673bfa069e61a7e3db7cd1515d9c070a893600c8aaa6130c428 | import json
import sys
from django.test import SimpleTestCase
from django.utils import text
from django.utils.functional import lazystr
from django.utils.text import format_lazy
from django.utils.translation import gettext_lazy, override
IS_WIDE_BUILD = (len('\U0001F4A9') == 1)
class TestUtilsText(SimpleTestCase):
def test_get_text_list(self):
self.assertEqual(text.get_text_list(['a', 'b', 'c', 'd']), 'a, b, c or d')
self.assertEqual(text.get_text_list(['a', 'b', 'c'], 'and'), 'a, b and c')
self.assertEqual(text.get_text_list(['a', 'b'], 'and'), 'a and b')
self.assertEqual(text.get_text_list(['a']), 'a')
self.assertEqual(text.get_text_list([]), '')
with override('ar'):
self.assertEqual(text.get_text_list(['a', 'b', 'c']), "a، b أو c")
def test_smart_split(self):
testdata = [
('This is "a person" test.',
['This', 'is', '"a person"', 'test.']),
('This is "a person\'s" test.',
['This', 'is', '"a person\'s"', 'test.']),
('This is "a person\\"s" test.',
['This', 'is', '"a person\\"s"', 'test.']),
('"a \'one',
['"a', "'one"]),
('all friends\' tests',
['all', 'friends\'', 'tests']),
('url search_page words="something else"',
['url', 'search_page', 'words="something else"']),
("url search_page words='something else'",
['url', 'search_page', "words='something else'"]),
('url search_page words "something else"',
['url', 'search_page', 'words', '"something else"']),
('url search_page words-"something else"',
['url', 'search_page', 'words-"something else"']),
('url search_page words=hello',
['url', 'search_page', 'words=hello']),
('url search_page words="something else',
['url', 'search_page', 'words="something', 'else']),
("cut:','|cut:' '",
["cut:','|cut:' '"]),
(lazystr("a b c d"), # Test for #20231
['a', 'b', 'c', 'd']),
]
for test, expected in testdata:
self.assertEqual(list(text.smart_split(test)), expected)
def test_truncate_chars(self):
truncator = text.Truncator('The quick brown fox jumped over the lazy dog.')
        self.assertEqual('The quick brown fox jumped over the lazy dog.', truncator.chars(100))
        self.assertEqual('The quick brown fox …', truncator.chars(21))
        self.assertEqual('The quick brown fo.....', truncator.chars(23, '.....'))
        self.assertEqual('.....', truncator.chars(4, '.....'))
nfc = text.Truncator('o\xfco\xfco\xfco\xfc')
nfd = text.Truncator('ou\u0308ou\u0308ou\u0308ou\u0308')
self.assertEqual('oüoüoüoü', nfc.chars(8))
self.assertEqual('oüoüoüoü', nfd.chars(8))
self.assertEqual('oü…', nfc.chars(3))
self.assertEqual('oü…', nfd.chars(3))
# Ensure the final length is calculated correctly when there are
# combining characters with no precomposed form, and that combining
# characters are not split up.
truncator = text.Truncator('-B\u030AB\u030A----8')
self.assertEqual('-B\u030A…', truncator.chars(3))
self.assertEqual('-B\u030AB\u030A-…', truncator.chars(5))
self.assertEqual('-B\u030AB\u030A----8', truncator.chars(8))
# Ensure the length of the end text is correctly calculated when it
# contains combining characters with no precomposed form.
truncator = text.Truncator('-----')
self.assertEqual('---B\u030A', truncator.chars(4, 'B\u030A'))
self.assertEqual('-----', truncator.chars(5, 'B\u030A'))
# Make a best effort to shorten to the desired length, but requesting
# a length shorter than the ellipsis shouldn't break
self.assertEqual('…', text.Truncator('asdf').chars(0))
# lazy strings are handled correctly
self.assertEqual(text.Truncator(lazystr('The quick brown fox')).chars(10), 'The quick…')
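    # Background for the NFC/NFD assertions above (illustrative, not normative):
    # 'o\xfc' stores "ü" precomposed while 'ou\u0308' stores it as "u" plus a
    # combining diaeresis; Truncator treats both as the same user-visible text,
    # so the two equal-looking inputs truncate to equal-looking outputs.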
def test_truncate_words(self):
truncator = text.Truncator('The quick brown fox jumped over the lazy dog.')
self.assertEqual('The quick brown fox jumped over the lazy dog.', truncator.words(10))
self.assertEqual('The quick brown fox…', truncator.words(4))
self.assertEqual('The quick brown fox[snip]', truncator.words(4, '[snip]'))
# lazy strings are handled correctly
truncator = text.Truncator(lazystr('The quick brown fox jumped over the lazy dog.'))
self.assertEqual('The quick brown fox…', truncator.words(4))
def test_truncate_html_words(self):
truncator = text.Truncator(
'<p id="par"><strong><em>The quick brown fox jumped over the lazy dog.</em></strong></p>'
)
self.assertEqual(
'<p id="par"><strong><em>The quick brown fox jumped over the lazy dog.</em></strong></p>',
truncator.words(10, html=True)
)
self.assertEqual(
'<p id="par"><strong><em>The quick brown fox…</em></strong></p>',
truncator.words(4, html=True)
)
self.assertEqual(
'<p id="par"><strong><em>The quick brown fox....</em></strong></p>',
truncator.words(4, '....', html=True)
)
self.assertEqual(
'<p id="par"><strong><em>The quick brown fox</em></strong></p>',
truncator.words(4, '', html=True)
)
# Test with new line inside tag
truncator = text.Truncator(
'<p>The quick <a href="xyz.html"\n id="mylink">brown fox</a> jumped over the lazy dog.</p>'
)
self.assertEqual(
'<p>The quick <a href="xyz.html"\n id="mylink">brown…</a></p>',
truncator.words(3, html=True)
)
# Test self-closing tags
truncator = text.Truncator('<br/>The <hr />quick brown fox jumped over the lazy dog.')
self.assertEqual('<br/>The <hr />quick brown…', truncator.words(3, html=True))
truncator = text.Truncator('<br>The <hr/>quick <em>brown fox</em> jumped over the lazy dog.')
self.assertEqual('<br>The <hr/>quick <em>brown…</em>', truncator.words(3, html=True))
# Test html entities
        truncator = text.Truncator('<i>Buenos días! ¿Cómo está?</i>')
        self.assertEqual('<i>Buenos días! ¿Cómo…</i>', truncator.words(3, html=True))
        truncator = text.Truncator('<p>I <3 python, what about you?</p>')
        self.assertEqual('<p>I <3 python…</p>', truncator.words(3, html=True))
re_tag_catastrophic_test = ('</a' + '\t' * 50000) + '//>'
truncator = text.Truncator(re_tag_catastrophic_test)
self.assertEqual(re_tag_catastrophic_test, truncator.words(500, html=True))
def test_wrap(self):
digits = '1234 67 9'
self.assertEqual(text.wrap(digits, 100), '1234 67 9')
self.assertEqual(text.wrap(digits, 9), '1234 67 9')
self.assertEqual(text.wrap(digits, 8), '1234 67\n9')
self.assertEqual(text.wrap('short\na long line', 7), 'short\na long\nline')
self.assertEqual(text.wrap('do-not-break-long-words please? ok', 8), 'do-not-break-long-words\nplease?\nok')
long_word = 'l%sng' % ('o' * 20)
self.assertEqual(text.wrap(long_word, 20), long_word)
self.assertEqual(text.wrap('a %s word' % long_word, 10), 'a\n%s\nword' % long_word)
self.assertEqual(text.wrap(lazystr(digits), 100), '1234 67 9')
def test_normalize_newlines(self):
self.assertEqual(text.normalize_newlines("abc\ndef\rghi\r\n"), "abc\ndef\nghi\n")
self.assertEqual(text.normalize_newlines("\n\r\r\n\r"), "\n\n\n\n")
self.assertEqual(text.normalize_newlines("abcdefghi"), "abcdefghi")
self.assertEqual(text.normalize_newlines(""), "")
self.assertEqual(text.normalize_newlines(lazystr("abc\ndef\rghi\r\n")), "abc\ndef\nghi\n")
def test_phone2numeric(self):
numeric = text.phone2numeric('0800 flowers')
self.assertEqual(numeric, '0800 3569377')
lazy_numeric = lazystr(text.phone2numeric('0800 flowers'))
self.assertEqual(lazy_numeric, '0800 3569377')
def test_slugify(self):
items = (
# given - expected - unicode?
('Hello, World!', 'hello-world', False),
('spam & eggs', 'spam-eggs', False),
('spam & ıçüş', 'spam-ıçüş', True),
('foo ıç bar', 'foo-ıç-bar', True),
(' foo ıç bar', 'foo-ıç-bar', True),
('你好', '你好', True),
)
for value, output, is_unicode in items:
self.assertEqual(text.slugify(value, allow_unicode=is_unicode), output)
# interning the result may be useful, e.g. when fed to Path.
self.assertEqual(sys.intern(text.slugify('a')), 'a')
def test_unescape_entities(self):
items = [
('', ''),
('foo', 'foo'),
            ('&', '&'),
            ('&', '&'),
            ('&', '&'),
            ('foo & bar', 'foo & bar'),
            ('foo & bar', 'foo & bar'),
]
for value, output in items:
self.assertEqual(text.unescape_entities(value), output)
self.assertEqual(text.unescape_entities(lazystr(value)), output)
def test_unescape_string_literal(self):
items = [
('"abc"', 'abc'),
("'abc'", 'abc'),
('"a \"bc\""', 'a "bc"'),
("'\'ab\' c'", "'ab' c"),
]
for value, output in items:
self.assertEqual(text.unescape_string_literal(value), output)
self.assertEqual(text.unescape_string_literal(lazystr(value)), output)
def test_get_valid_filename(self):
filename = "^&'@{}[],$=!-#()%+~_123.txt"
self.assertEqual(text.get_valid_filename(filename), "-_123.txt")
self.assertEqual(text.get_valid_filename(lazystr(filename)), "-_123.txt")
def test_compress_sequence(self):
data = [{'key': i} for i in range(10)]
seq = list(json.JSONEncoder().iterencode(data))
seq = [s.encode() for s in seq]
actual_length = len(b''.join(seq))
out = text.compress_sequence(seq)
compressed_length = len(b''.join(out))
        self.assertLess(compressed_length, actual_length)
def test_format_lazy(self):
self.assertEqual('django/test', format_lazy('{}/{}', 'django', lazystr('test')))
self.assertEqual('django/test', format_lazy('{0}/{1}', *('django', 'test')))
self.assertEqual('django/test', format_lazy('{a}/{b}', **{'a': 'django', 'b': 'test'}))
self.assertEqual('django/test', format_lazy('{a[0]}/{a[1]}', a=('django', 'test')))
t = {}
s = format_lazy('{0[a]}-{p[a]}', t, p=t)
t['a'] = lazystr('django')
self.assertEqual('django-django', s)
t['a'] = 'update'
self.assertEqual('update-update', s)
# The format string can be lazy. (string comes from contrib.admin)
s = format_lazy(
gettext_lazy("Added {name} \"{object}\"."),
name='article', object='My first try',
)
with override('fr'):
self.assertEqual('Ajout de article «\xa0My first try\xa0».', s)
|
9f2175b02e97a3ae479097b7e3a7cadaba4fe9fae04d741d23f9dc3f4b5372fa | import threading
from datetime import datetime, timedelta
from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist
from django.db import DEFAULT_DB_ALIAS, DatabaseError, connections
from django.db.models.manager import BaseManager
from django.db.models.query import EmptyQuerySet, QuerySet
from django.test import (
SimpleTestCase, TestCase, TransactionTestCase, skipUnlessDBFeature,
)
from django.utils.translation import gettext_lazy
from .models import Article, ArticleSelectOnSave, FeaturedArticle, SelfRef
class ModelInstanceCreationTests(TestCase):
def test_object_is_not_written_to_database_until_save_was_called(self):
a = Article(
id=None,
headline='Parrot programs in Python',
pub_date=datetime(2005, 7, 28),
)
self.assertIsNone(a.id)
self.assertEqual(Article.objects.all().count(), 0)
# Save it into the database. You have to call save() explicitly.
a.save()
self.assertIsNotNone(a.id)
self.assertEqual(Article.objects.all().count(), 1)
def test_can_initialize_model_instance_using_positional_arguments(self):
"""
You can initialize a model instance using positional arguments,
which should match the field order as defined in the model.
"""
a = Article(None, 'Second article', datetime(2005, 7, 29))
a.save()
self.assertEqual(a.headline, 'Second article')
self.assertEqual(a.pub_date, datetime(2005, 7, 29, 0, 0))
def test_can_create_instance_using_kwargs(self):
a = Article(
id=None,
headline='Third article',
pub_date=datetime(2005, 7, 30),
)
a.save()
self.assertEqual(a.headline, 'Third article')
self.assertEqual(a.pub_date, datetime(2005, 7, 30, 0, 0))
def test_autofields_generate_different_values_for_each_instance(self):
a1 = Article.objects.create(headline='First', pub_date=datetime(2005, 7, 30, 0, 0))
a2 = Article.objects.create(headline='First', pub_date=datetime(2005, 7, 30, 0, 0))
a3 = Article.objects.create(headline='First', pub_date=datetime(2005, 7, 30, 0, 0))
self.assertNotEqual(a3.id, a1.id)
self.assertNotEqual(a3.id, a2.id)
def test_can_mix_and_match_position_and_kwargs(self):
# You can also mix and match position and keyword arguments, but
# be sure not to duplicate field information.
a = Article(None, 'Fourth article', pub_date=datetime(2005, 7, 31))
a.save()
self.assertEqual(a.headline, 'Fourth article')
def test_cannot_create_instance_with_invalid_kwargs(self):
with self.assertRaisesMessage(TypeError, "Article() got an unexpected keyword argument 'foo'"):
Article(
id=None,
headline='Some headline',
pub_date=datetime(2005, 7, 31),
foo='bar',
)
def test_can_leave_off_value_for_autofield_and_it_gets_value_on_save(self):
"""
You can leave off the value for an AutoField when creating an
object, because it'll get filled in automatically when you save().
"""
a = Article(headline='Article 5', pub_date=datetime(2005, 7, 31))
a.save()
self.assertEqual(a.headline, 'Article 5')
self.assertIsNotNone(a.id)
def test_leaving_off_a_field_with_default_set_the_default_will_be_saved(self):
a = Article(pub_date=datetime(2005, 7, 31))
a.save()
self.assertEqual(a.headline, 'Default headline')
def test_for_datetimefields_saves_as_much_precision_as_was_given(self):
"""as much precision in *seconds*"""
a1 = Article(
headline='Article 7',
pub_date=datetime(2005, 7, 31, 12, 30),
)
a1.save()
self.assertEqual(Article.objects.get(id__exact=a1.id).pub_date, datetime(2005, 7, 31, 12, 30))
a2 = Article(
headline='Article 8',
pub_date=datetime(2005, 7, 31, 12, 30, 45),
)
a2.save()
self.assertEqual(Article.objects.get(id__exact=a2.id).pub_date, datetime(2005, 7, 31, 12, 30, 45))
def test_saving_an_object_again_does_not_create_a_new_object(self):
a = Article(headline='original', pub_date=datetime(2014, 5, 16))
a.save()
current_id = a.id
a.save()
self.assertEqual(a.id, current_id)
a.headline = 'Updated headline'
a.save()
self.assertEqual(a.id, current_id)
def test_querysets_checking_for_membership(self):
headlines = [
'Parrot programs in Python', 'Second article', 'Third article']
some_pub_date = datetime(2014, 5, 16, 12, 1)
for headline in headlines:
Article(headline=headline, pub_date=some_pub_date).save()
a = Article(headline='Some headline', pub_date=some_pub_date)
a.save()
# You can use 'in' to test for membership...
self.assertIn(a, Article.objects.all())
# ... but there will often be more efficient ways if that is all you need:
self.assertTrue(Article.objects.filter(id=a.id).exists())
class ModelTest(TestCase):
def test_objects_attribute_is_only_available_on_the_class_itself(self):
with self.assertRaisesMessage(AttributeError, "Manager isn't accessible via Article instances"):
getattr(Article(), "objects",)
self.assertFalse(hasattr(Article(), 'objects'))
self.assertTrue(hasattr(Article, 'objects'))
def test_queryset_delete_removes_all_items_in_that_queryset(self):
headlines = [
'An article', 'Article One', 'Amazing article', 'Boring article']
some_pub_date = datetime(2014, 5, 16, 12, 1)
for headline in headlines:
Article(headline=headline, pub_date=some_pub_date).save()
self.assertQuerysetEqual(
Article.objects.all().order_by('headline'),
["<Article: Amazing article>",
"<Article: An article>",
"<Article: Article One>",
"<Article: Boring article>"]
)
Article.objects.filter(headline__startswith='A').delete()
self.assertQuerysetEqual(Article.objects.all().order_by('headline'), ["<Article: Boring article>"])
def test_not_equal_and_equal_operators_behave_as_expected_on_instances(self):
some_pub_date = datetime(2014, 5, 16, 12, 1)
a1 = Article.objects.create(headline='First', pub_date=some_pub_date)
a2 = Article.objects.create(headline='Second', pub_date=some_pub_date)
self.assertNotEqual(a1, a2)
self.assertEqual(a1, Article.objects.get(id__exact=a1.id))
self.assertNotEqual(Article.objects.get(id__exact=a1.id), Article.objects.get(id__exact=a2.id))
def test_microsecond_precision(self):
a9 = Article(
headline='Article 9',
pub_date=datetime(2005, 7, 31, 12, 30, 45, 180),
)
a9.save()
self.assertEqual(Article.objects.get(pk=a9.pk).pub_date, datetime(2005, 7, 31, 12, 30, 45, 180))
def test_manually_specify_primary_key(self):
# You can manually specify the primary key when creating a new object.
a101 = Article(
id=101,
headline='Article 101',
pub_date=datetime(2005, 7, 31, 12, 30, 45),
)
a101.save()
a101 = Article.objects.get(pk=101)
self.assertEqual(a101.headline, 'Article 101')
def test_create_method(self):
# You can create saved objects in a single step
a10 = Article.objects.create(
headline="Article 10",
pub_date=datetime(2005, 7, 31, 12, 30, 45),
)
self.assertEqual(Article.objects.get(headline="Article 10"), a10)
def test_year_lookup_edge_case(self):
# Edge-case test: A year lookup should retrieve all objects in
# the given year, including Jan. 1 and Dec. 31.
Article.objects.create(
headline='Article 11',
pub_date=datetime(2008, 1, 1),
)
Article.objects.create(
headline='Article 12',
pub_date=datetime(2008, 12, 31, 23, 59, 59, 999999),
)
self.assertQuerysetEqual(
Article.objects.filter(pub_date__year=2008),
["<Article: Article 11>", "<Article: Article 12>"]
)
def test_unicode_data(self):
# Unicode data works, too.
a = Article(
headline='\u6797\u539f \u3081\u3050\u307f',
pub_date=datetime(2005, 7, 28),
)
a.save()
self.assertEqual(Article.objects.get(pk=a.id).headline, '\u6797\u539f \u3081\u3050\u307f')
def test_hash_function(self):
# Model instances have a hash function, so they can be used in sets
# or as dictionary keys. Two models compare as equal if their primary
# keys are equal.
a10 = Article.objects.create(
headline="Article 10",
pub_date=datetime(2005, 7, 31, 12, 30, 45),
)
a11 = Article.objects.create(
headline='Article 11',
pub_date=datetime(2008, 1, 1),
)
a12 = Article.objects.create(
headline='Article 12',
pub_date=datetime(2008, 12, 31, 23, 59, 59, 999999),
)
s = {a10, a11, a12}
self.assertIn(Article.objects.get(headline='Article 11'), s)
def test_extra_method_select_argument_with_dashes_and_values(self):
# The 'select' argument to extra() supports names with dashes in
# them, as long as you use values().
Article.objects.bulk_create([
Article(headline='Article 10', pub_date=datetime(2005, 7, 31, 12, 30, 45)),
Article(headline='Article 11', pub_date=datetime(2008, 1, 1)),
Article(headline='Article 12', pub_date=datetime(2008, 12, 31, 23, 59, 59, 999999)),
])
dicts = Article.objects.filter(
pub_date__year=2008).extra(
select={'dashed-value': '1'}).values('headline', 'dashed-value')
self.assertEqual(
[sorted(d.items()) for d in dicts],
[[('dashed-value', 1), ('headline', 'Article 11')], [('dashed-value', 1), ('headline', 'Article 12')]]
)
def test_extra_method_select_argument_with_dashes(self):
# If you use 'select' with extra() and names containing dashes on a
# query that's *not* a values() query, those extra 'select' values
# will silently be ignored.
Article.objects.bulk_create([
Article(headline='Article 10', pub_date=datetime(2005, 7, 31, 12, 30, 45)),
Article(headline='Article 11', pub_date=datetime(2008, 1, 1)),
Article(headline='Article 12', pub_date=datetime(2008, 12, 31, 23, 59, 59, 999999)),
])
articles = Article.objects.filter(
pub_date__year=2008).extra(select={'dashed-value': '1', 'undashedvalue': '2'})
self.assertEqual(articles[0].undashedvalue, 2)
def test_create_relation_with_gettext_lazy(self):
"""
gettext_lazy objects work when saving model instances
through various methods. Refs #10498.
"""
notlazy = 'test'
lazy = gettext_lazy(notlazy)
Article.objects.create(headline=lazy, pub_date=datetime.now())
article = Article.objects.get()
self.assertEqual(article.headline, notlazy)
# test that assign + save works with Promise objects
article.headline = lazy
article.save()
self.assertEqual(article.headline, notlazy)
# test .update()
Article.objects.update(headline=lazy)
article = Article.objects.get()
self.assertEqual(article.headline, notlazy)
        # Also test bulk_create().
Article.objects.all().delete()
Article.objects.bulk_create([Article(headline=lazy, pub_date=datetime.now())])
article = Article.objects.get()
self.assertEqual(article.headline, notlazy)
def test_emptyqs(self):
msg = "EmptyQuerySet can't be instantiated"
with self.assertRaisesMessage(TypeError, msg):
EmptyQuerySet()
self.assertIsInstance(Article.objects.none(), EmptyQuerySet)
self.assertNotIsInstance('', EmptyQuerySet)
def test_emptyqs_values(self):
# test for #15959
Article.objects.create(headline='foo', pub_date=datetime.now())
with self.assertNumQueries(0):
qs = Article.objects.none().values_list('pk')
self.assertIsInstance(qs, EmptyQuerySet)
self.assertEqual(len(qs), 0)
def test_emptyqs_customqs(self):
# A hacky test for custom QuerySet subclass - refs #17271
Article.objects.create(headline='foo', pub_date=datetime.now())
class CustomQuerySet(QuerySet):
def do_something(self):
return 'did something'
qs = Article.objects.all()
qs.__class__ = CustomQuerySet
qs = qs.none()
with self.assertNumQueries(0):
self.assertEqual(len(qs), 0)
self.assertIsInstance(qs, EmptyQuerySet)
self.assertEqual(qs.do_something(), 'did something')
def test_emptyqs_values_order(self):
# Tests for ticket #17712
Article.objects.create(headline='foo', pub_date=datetime.now())
with self.assertNumQueries(0):
self.assertEqual(len(Article.objects.none().values_list('id').order_by('id')), 0)
with self.assertNumQueries(0):
self.assertEqual(len(Article.objects.none().filter(
id__in=Article.objects.values_list('id', flat=True))), 0)
@skipUnlessDBFeature('can_distinct_on_fields')
def test_emptyqs_distinct(self):
# Tests for #19426
Article.objects.create(headline='foo', pub_date=datetime.now())
with self.assertNumQueries(0):
self.assertEqual(len(Article.objects.none().distinct('headline', 'pub_date')), 0)
def test_ticket_20278(self):
sr = SelfRef.objects.create()
with self.assertRaises(ObjectDoesNotExist):
SelfRef.objects.get(selfref=sr)
def test_eq(self):
self.assertEqual(Article(id=1), Article(id=1))
self.assertNotEqual(Article(id=1), object())
self.assertNotEqual(object(), Article(id=1))
a = Article()
self.assertEqual(a, a)
self.assertNotEqual(Article(), a)
def test_hash(self):
# Value based on PK
self.assertEqual(hash(Article(id=1)), hash(1))
msg = 'Model instances without primary key value are unhashable'
with self.assertRaisesMessage(TypeError, msg):
# No PK value -> unhashable (because save() would then change
# hash)
hash(Article())
def test_delete_and_access_field(self):
# Accessing a field after it's deleted from a model reloads its value.
pub_date = datetime.now()
article = Article.objects.create(headline='foo', pub_date=pub_date)
new_pub_date = article.pub_date + timedelta(days=10)
article.headline = 'bar'
article.pub_date = new_pub_date
del article.headline
with self.assertNumQueries(1):
self.assertEqual(article.headline, 'foo')
# Fields that weren't deleted aren't reloaded.
self.assertEqual(article.pub_date, new_pub_date)
class ModelLookupTest(TestCase):
@classmethod
def setUpTestData(cls):
# Create an Article.
cls.a = Article(
id=None,
headline='Swallow programs in Python',
pub_date=datetime(2005, 7, 28),
)
# Save it into the database. You have to call save() explicitly.
cls.a.save()
def test_all_lookup(self):
# Change values by changing the attributes, then calling save().
self.a.headline = 'Parrot programs in Python'
self.a.save()
# Article.objects.all() returns all the articles in the database.
self.assertQuerysetEqual(Article.objects.all(), ['<Article: Parrot programs in Python>'])
def test_rich_lookup(self):
# Django provides a rich database lookup API.
self.assertEqual(Article.objects.get(id__exact=self.a.id), self.a)
self.assertEqual(Article.objects.get(headline__startswith='Swallow'), self.a)
self.assertEqual(Article.objects.get(pub_date__year=2005), self.a)
self.assertEqual(Article.objects.get(pub_date__year=2005, pub_date__month=7), self.a)
self.assertEqual(Article.objects.get(pub_date__year=2005, pub_date__month=7, pub_date__day=28), self.a)
self.assertEqual(Article.objects.get(pub_date__week_day=5), self.a)
def test_equal_lookup(self):
# The "__exact" lookup type can be omitted, as a shortcut.
self.assertEqual(Article.objects.get(id=self.a.id), self.a)
self.assertEqual(Article.objects.get(headline='Swallow programs in Python'), self.a)
self.assertQuerysetEqual(
Article.objects.filter(pub_date__year=2005),
['<Article: Swallow programs in Python>'],
)
self.assertQuerysetEqual(
Article.objects.filter(pub_date__year=2004),
[],
)
self.assertQuerysetEqual(
Article.objects.filter(pub_date__year=2005, pub_date__month=7),
['<Article: Swallow programs in Python>'],
)
self.assertQuerysetEqual(
Article.objects.filter(pub_date__week_day=5),
['<Article: Swallow programs in Python>'],
)
self.assertQuerysetEqual(
Article.objects.filter(pub_date__week_day=6),
[],
)
def test_does_not_exist(self):
# Django raises an Article.DoesNotExist exception for get() if the
# parameters don't match any object.
with self.assertRaisesMessage(ObjectDoesNotExist, "Article matching query does not exist."):
Article.objects.get(id__exact=2000,)
        # To avoid dict-ordering-related errors, check only one lookup
        # per assertion.
with self.assertRaises(ObjectDoesNotExist):
Article.objects.get(pub_date__year=2005, pub_date__month=8)
with self.assertRaisesMessage(ObjectDoesNotExist, "Article matching query does not exist."):
Article.objects.get(pub_date__week_day=6,)
def test_lookup_by_primary_key(self):
# Lookup by a primary key is the most common case, so Django
# provides a shortcut for primary-key exact lookups.
# The following is identical to articles.get(id=a.id).
self.assertEqual(Article.objects.get(pk=self.a.id), self.a)
# pk can be used as a shortcut for the primary key name in any query.
self.assertQuerysetEqual(Article.objects.filter(pk__in=[self.a.id]), ["<Article: Swallow programs in Python>"])
# Model instances of the same type and same ID are considered equal.
a = Article.objects.get(pk=self.a.id)
b = Article.objects.get(pk=self.a.id)
self.assertEqual(a, b)
def test_too_many(self):
# Create a very similar object
a = Article(
id=None,
headline='Swallow bites Python',
pub_date=datetime(2005, 7, 28),
)
a.save()
self.assertEqual(Article.objects.count(), 2)
# Django raises an Article.MultipleObjectsReturned exception if the
# lookup matches more than one object
msg = "get() returned more than one Article -- it returned 2!"
with self.assertRaisesMessage(MultipleObjectsReturned, msg):
Article.objects.get(headline__startswith='Swallow',)
with self.assertRaisesMessage(MultipleObjectsReturned, msg):
Article.objects.get(pub_date__year=2005,)
with self.assertRaisesMessage(MultipleObjectsReturned, msg):
Article.objects.get(pub_date__year=2005, pub_date__month=7)
class ConcurrentSaveTests(TransactionTestCase):
available_apps = ['basic']
@skipUnlessDBFeature('test_db_allows_multiple_connections')
def test_concurrent_delete_with_save(self):
"""
Test fetching, deleting and finally saving an object - we should get
an insert in this case.
"""
a = Article.objects.create(headline='foo', pub_date=datetime.now())
exceptions = []
def deleter():
try:
# Do not delete a directly - doing so alters its state.
Article.objects.filter(pk=a.pk).delete()
except Exception as e:
exceptions.append(e)
finally:
connections[DEFAULT_DB_ALIAS].close()
        t = threading.Thread(target=deleter)
        t.start()
        t.join()
        self.assertEqual(len(exceptions), 0)
a.save()
self.assertEqual(Article.objects.get(pk=a.pk).headline, 'foo')
class ManagerTest(SimpleTestCase):
QUERYSET_PROXY_METHODS = [
'none',
'count',
'dates',
'datetimes',
'distinct',
'extra',
'get',
'get_or_create',
'update_or_create',
'create',
'bulk_create',
'bulk_update',
'filter',
'aggregate',
'annotate',
'complex_filter',
'exclude',
'in_bulk',
'iterator',
'earliest',
'latest',
'first',
'last',
'order_by',
'select_for_update',
'select_related',
'prefetch_related',
'values',
'values_list',
'update',
'reverse',
'defer',
'only',
'using',
'exists',
'explain',
'_insert',
'_update',
'raw',
'union',
'intersection',
'difference',
]
def test_manager_methods(self):
"""
This test ensures that the correct set of methods from `QuerySet`
are copied onto `Manager`.
It's particularly useful to prevent accidentally leaking new methods
into `Manager`. New `QuerySet` methods that should also be copied onto
`Manager` will need to be added to `ManagerTest.QUERYSET_PROXY_METHODS`.
"""
self.assertEqual(
sorted(BaseManager._get_queryset_methods(QuerySet)),
sorted(self.QUERYSET_PROXY_METHODS),
)
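# A minimal sketch (assumed usage, not part of this suite) of how the proxying
# checked above is normally consumed: Manager.from_queryset() builds a manager
# class whose methods are copied from a QuerySet subclass. ArticleQuerySet and
# published() below are hypothetical names.
#
#     class ArticleQuerySet(QuerySet):
#         def published(self):
#             return self.filter(pub_date__lte=datetime.now())
#
#     class PublishedManager(BaseManager.from_queryset(ArticleQuerySet)):
#         pass
#     # Article.objects.published() would then be available on such a manager.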
class SelectOnSaveTests(TestCase):
def test_select_on_save(self):
a1 = Article.objects.create(pub_date=datetime.now())
with self.assertNumQueries(1):
a1.save()
asos = ArticleSelectOnSave.objects.create(pub_date=datetime.now())
with self.assertNumQueries(2):
asos.save()
with self.assertNumQueries(1):
asos.save(force_update=True)
Article.objects.all().delete()
with self.assertRaisesMessage(DatabaseError, 'Forced update did not affect any rows.'):
with self.assertNumQueries(1):
asos.save(force_update=True)
def test_select_on_save_lying_update(self):
"""
select_on_save works correctly if the database doesn't return correct
information about matched rows from UPDATE.
"""
# Change the manager to not return "row matched" for update().
# We are going to change the Article's _base_manager class
# dynamically. This is a bit of a hack, but it seems hard to
        # test this properly otherwise. We change Article's manager (rather than
        # ArticleSelectOnSave's) because proxy models use their parent model's
        # _base_manager.
orig_class = Article._base_manager._queryset_class
class FakeQuerySet(QuerySet):
# Make sure the _update method below is in fact called.
called = False
def _update(self, *args, **kwargs):
FakeQuerySet.called = True
super()._update(*args, **kwargs)
return 0
try:
Article._base_manager._queryset_class = FakeQuerySet
asos = ArticleSelectOnSave.objects.create(pub_date=datetime.now())
with self.assertNumQueries(3):
asos.save()
self.assertTrue(FakeQuerySet.called)
# This is not wanted behavior, but this is how Django has always
# behaved for databases that do not return correct information
# about matched rows for UPDATE.
with self.assertRaisesMessage(DatabaseError, 'Forced update did not affect any rows.'):
asos.save(force_update=True)
msg = (
"An error occurred in the current transaction. You can't "
"execute queries until the end of the 'atomic' block."
)
with self.assertRaisesMessage(DatabaseError, msg):
asos.save(update_fields=['pub_date'])
finally:
Article._base_manager._queryset_class = orig_class
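# For context (an assumption about the .models module, hedged accordingly): the
# behaviour exercised by SelectOnSaveTests is opted into per model via the Meta
# option select_on_save, roughly along these lines:
#
#     class ArticleSelectOnSave(Article):
#         class Meta:
#             proxy = True
#             select_on_save = True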
class ModelRefreshTests(TestCase):
def test_refresh(self):
a = Article.objects.create(pub_date=datetime.now())
Article.objects.create(pub_date=datetime.now())
Article.objects.filter(pk=a.pk).update(headline='new headline')
with self.assertNumQueries(1):
a.refresh_from_db()
self.assertEqual(a.headline, 'new headline')
orig_pub_date = a.pub_date
new_pub_date = a.pub_date + timedelta(10)
Article.objects.update(headline='new headline 2', pub_date=new_pub_date)
with self.assertNumQueries(1):
a.refresh_from_db(fields=['headline'])
self.assertEqual(a.headline, 'new headline 2')
self.assertEqual(a.pub_date, orig_pub_date)
with self.assertNumQueries(1):
a.refresh_from_db()
self.assertEqual(a.pub_date, new_pub_date)
def test_unknown_kwarg(self):
s = SelfRef.objects.create()
msg = "refresh_from_db() got an unexpected keyword argument 'unknown_kwarg'"
with self.assertRaisesMessage(TypeError, msg):
s.refresh_from_db(unknown_kwarg=10)
def test_lookup_in_fields(self):
s = SelfRef.objects.create()
msg = 'Found "__" in fields argument. Relations and transforms are not allowed in fields.'
with self.assertRaisesMessage(ValueError, msg):
s.refresh_from_db(fields=['foo__bar'])
def test_refresh_fk(self):
s1 = SelfRef.objects.create()
s2 = SelfRef.objects.create()
s3 = SelfRef.objects.create(selfref=s1)
s3_copy = SelfRef.objects.get(pk=s3.pk)
s3_copy.selfref.touched = True
s3.selfref = s2
s3.save()
with self.assertNumQueries(1):
s3_copy.refresh_from_db()
with self.assertNumQueries(1):
# The old related instance was thrown away (the selfref_id has
# changed). It needs to be reloaded on access, so one query
# executed.
self.assertFalse(hasattr(s3_copy.selfref, 'touched'))
self.assertEqual(s3_copy.selfref, s2)
def test_refresh_null_fk(self):
s1 = SelfRef.objects.create()
s2 = SelfRef.objects.create(selfref=s1)
s2.selfref = None
s2.refresh_from_db()
self.assertEqual(s2.selfref, s1)
def test_refresh_unsaved(self):
pub_date = datetime.now()
a = Article.objects.create(pub_date=pub_date)
a2 = Article(id=a.pk)
with self.assertNumQueries(1):
a2.refresh_from_db()
self.assertEqual(a2.pub_date, pub_date)
self.assertEqual(a2._state.db, "default")
def test_refresh_fk_on_delete_set_null(self):
a = Article.objects.create(
headline='Parrot programs in Python',
pub_date=datetime(2005, 7, 28),
)
s1 = SelfRef.objects.create(article=a)
a.delete()
s1.refresh_from_db()
self.assertIsNone(s1.article_id)
self.assertIsNone(s1.article)
def test_refresh_no_fields(self):
a = Article.objects.create(pub_date=datetime.now())
with self.assertNumQueries(0):
a.refresh_from_db(fields=[])
def test_refresh_clears_reverse_related(self):
"""refresh_from_db() clear cached reverse relations."""
article = Article.objects.create(
headline='Parrot programs in Python',
pub_date=datetime(2005, 7, 28),
)
self.assertFalse(hasattr(article, 'featured'))
FeaturedArticle.objects.create(article_id=article.pk)
article.refresh_from_db()
self.assertTrue(hasattr(article, 'featured'))
def test_refresh_clears_one_to_one_field(self):
article = Article.objects.create(
headline='Parrot programs in Python',
pub_date=datetime(2005, 7, 28),
)
featured = FeaturedArticle.objects.create(article_id=article.pk)
self.assertEqual(featured.article.headline, 'Parrot programs in Python')
article.headline = 'Parrot programs in Python 2.0'
article.save()
featured.refresh_from_db()
self.assertEqual(featured.article.headline, 'Parrot programs in Python 2.0')
def test_prefetched_cache_cleared(self):
a = Article.objects.create(pub_date=datetime(2005, 7, 28))
s = SelfRef.objects.create(article=a)
# refresh_from_db() without fields=[...]
a1_prefetched = Article.objects.prefetch_related('selfref_set').first()
self.assertCountEqual(a1_prefetched.selfref_set.all(), [s])
s.article = None
s.save()
# Relation is cleared and prefetch cache is stale.
self.assertCountEqual(a1_prefetched.selfref_set.all(), [s])
a1_prefetched.refresh_from_db()
# Cache was cleared and new results are available.
self.assertCountEqual(a1_prefetched.selfref_set.all(), [])
# refresh_from_db() with fields=[...]
a2_prefetched = Article.objects.prefetch_related('selfref_set').first()
self.assertCountEqual(a2_prefetched.selfref_set.all(), [])
s.article = a
s.save()
# Relation is added and prefetch cache is stale.
self.assertCountEqual(a2_prefetched.selfref_set.all(), [])
a2_prefetched.refresh_from_db(fields=['selfref_set'])
# Cache was cleared and new results are available.
self.assertCountEqual(a2_prefetched.selfref_set.all(), [s])
|
39ae666717fc819d286891cc8c6dd888d9073156302db4822cea853fda2d1173 | """
Tests for the Django test runner.
"""
import unittest
from unittest import mock
from admin_scripts.tests import AdminScriptTestCase
from django import db
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.management import call_command
from django.core.management.base import SystemCheckError
from django.test import TransactionTestCase, skipUnlessDBFeature, testcases
from django.test.runner import DiscoverRunner
from django.test.testcases import connections_support_transactions
from django.test.utils import dependency_ordered
from .models import B, Person, Through
class DependencyOrderingTests(unittest.TestCase):
def test_simple_dependencies(self):
raw = [
('s1', ('s1_db', ['alpha'])),
('s2', ('s2_db', ['bravo'])),
('s3', ('s3_db', ['charlie'])),
]
dependencies = {
'alpha': ['charlie'],
'bravo': ['charlie'],
}
ordered = dependency_ordered(raw, dependencies=dependencies)
ordered_sigs = [sig for sig, value in ordered]
self.assertIn('s1', ordered_sigs)
self.assertIn('s2', ordered_sigs)
self.assertIn('s3', ordered_sigs)
self.assertLess(ordered_sigs.index('s3'), ordered_sigs.index('s1'))
self.assertLess(ordered_sigs.index('s3'), ordered_sigs.index('s2'))
def test_chained_dependencies(self):
raw = [
('s1', ('s1_db', ['alpha'])),
('s2', ('s2_db', ['bravo'])),
('s3', ('s3_db', ['charlie'])),
]
dependencies = {
'alpha': ['bravo'],
'bravo': ['charlie'],
}
ordered = dependency_ordered(raw, dependencies=dependencies)
ordered_sigs = [sig for sig, value in ordered]
self.assertIn('s1', ordered_sigs)
self.assertIn('s2', ordered_sigs)
self.assertIn('s3', ordered_sigs)
# Explicit dependencies
self.assertLess(ordered_sigs.index('s2'), ordered_sigs.index('s1'))
self.assertLess(ordered_sigs.index('s3'), ordered_sigs.index('s2'))
# Implied dependencies
self.assertLess(ordered_sigs.index('s3'), ordered_sigs.index('s1'))
def test_multiple_dependencies(self):
raw = [
('s1', ('s1_db', ['alpha'])),
('s2', ('s2_db', ['bravo'])),
('s3', ('s3_db', ['charlie'])),
('s4', ('s4_db', ['delta'])),
]
dependencies = {
'alpha': ['bravo', 'delta'],
'bravo': ['charlie'],
'delta': ['charlie'],
}
ordered = dependency_ordered(raw, dependencies=dependencies)
ordered_sigs = [sig for sig, aliases in ordered]
self.assertIn('s1', ordered_sigs)
self.assertIn('s2', ordered_sigs)
self.assertIn('s3', ordered_sigs)
self.assertIn('s4', ordered_sigs)
# Explicit dependencies
self.assertLess(ordered_sigs.index('s2'), ordered_sigs.index('s1'))
self.assertLess(ordered_sigs.index('s4'), ordered_sigs.index('s1'))
self.assertLess(ordered_sigs.index('s3'), ordered_sigs.index('s2'))
self.assertLess(ordered_sigs.index('s3'), ordered_sigs.index('s4'))
# Implicit dependencies
self.assertLess(ordered_sigs.index('s3'), ordered_sigs.index('s1'))
def test_circular_dependencies(self):
raw = [
('s1', ('s1_db', ['alpha'])),
('s2', ('s2_db', ['bravo'])),
]
dependencies = {
'bravo': ['alpha'],
'alpha': ['bravo'],
}
with self.assertRaises(ImproperlyConfigured):
dependency_ordered(raw, dependencies=dependencies)
def test_own_alias_dependency(self):
raw = [
('s1', ('s1_db', ['alpha', 'bravo']))
]
dependencies = {
'alpha': ['bravo']
}
with self.assertRaises(ImproperlyConfigured):
dependency_ordered(raw, dependencies=dependencies)
# reordering aliases shouldn't matter
raw = [
('s1', ('s1_db', ['bravo', 'alpha']))
]
with self.assertRaises(ImproperlyConfigured):
dependency_ordered(raw, dependencies=dependencies)
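# Shape of the data fed to dependency_ordered() in the tests above (restated for
# clarity, no new behaviour): each raw item is (signature, (db_name, aliases)) and
# `dependencies` maps an alias to the aliases it depends on; the returned list
# keeps the same pairs, reordered so that dependencies come before dependents.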
class MockTestRunner:
def __init__(self, *args, **kwargs):
pass
MockTestRunner.run_tests = mock.Mock(return_value=[])
class ManageCommandTests(unittest.TestCase):
def test_custom_test_runner(self):
call_command('test', 'sites',
testrunner='test_runner.tests.MockTestRunner')
MockTestRunner.run_tests.assert_called_with(('sites',))
def test_bad_test_runner(self):
with self.assertRaises(AttributeError):
call_command('test', 'sites', testrunner='test_runner.NonexistentRunner')
class CustomTestRunnerOptionsSettingsTests(AdminScriptTestCase):
"""
Custom runners can add command line arguments. The runner is specified
through a settings file.
"""
def setUp(self):
settings = {
'TEST_RUNNER': '\'test_runner.runner.CustomOptionsTestRunner\'',
}
self.write_settings('settings.py', sdict=settings)
def tearDown(self):
self.remove_settings('settings.py')
def test_default_options(self):
args = ['test', '--settings=test_project.settings']
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertOutput(out, '1:2:3')
def test_default_and_given_options(self):
args = ['test', '--settings=test_project.settings', '--option_b=foo']
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertOutput(out, '1:foo:3')
def test_option_name_and_value_separated(self):
args = ['test', '--settings=test_project.settings', '--option_b', 'foo']
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertOutput(out, '1:foo:3')
def test_all_options_given(self):
args = ['test', '--settings=test_project.settings', '--option_a=bar',
'--option_b=foo', '--option_c=31337']
out, err = self.run_django_admin(args)
self.assertNoOutput(err)
self.assertOutput(out, 'bar:foo:31337')
class CustomTestRunnerOptionsCmdlineTests(AdminScriptTestCase):
"""
Custom runners can add command line arguments when the runner is specified
using --testrunner.
"""
def setUp(self):
self.write_settings('settings.py')
def tearDown(self):
self.remove_settings('settings.py')
def test_testrunner_option(self):
args = [
'test', '--testrunner', 'test_runner.runner.CustomOptionsTestRunner',
'--option_a=bar', '--option_b=foo', '--option_c=31337'
]
out, err = self.run_django_admin(args, 'test_project.settings')
self.assertNoOutput(err)
self.assertOutput(out, 'bar:foo:31337')
def test_testrunner_equals(self):
args = [
'test', '--testrunner=test_runner.runner.CustomOptionsTestRunner',
'--option_a=bar', '--option_b=foo', '--option_c=31337'
]
out, err = self.run_django_admin(args, 'test_project.settings')
self.assertNoOutput(err)
self.assertOutput(out, 'bar:foo:31337')
def test_no_testrunner(self):
args = ['test', '--testrunner']
out, err = self.run_django_admin(args, 'test_project.settings')
self.assertIn('usage', err)
self.assertNotIn('Traceback', err)
self.assertNoOutput(out)
class Ticket17477RegressionTests(AdminScriptTestCase):
def setUp(self):
self.write_settings('settings.py')
def tearDown(self):
self.remove_settings('settings.py')
def test_ticket_17477(self):
"""'manage.py help test' works after r16352."""
args = ['help', 'test']
out, err = self.run_manage(args)
self.assertNoOutput(err)
class SQLiteInMemoryTestDbs(TransactionTestCase):
available_apps = ['test_runner']
databases = {'default', 'other'}
@unittest.skipUnless(all(db.connections[conn].vendor == 'sqlite' for conn in db.connections),
"This is an sqlite-specific issue")
def test_transaction_support(self):
# Assert connections mocking is appropriately applied by preventing
# any attempts at calling create_test_db on the global connection
# objects.
for connection in db.connections.all():
create_test_db = mock.patch.object(
connection.creation,
'create_test_db',
side_effect=AssertionError("Global connection object shouldn't be manipulated.")
)
create_test_db.start()
self.addCleanup(create_test_db.stop)
for option_key, option_value in (
('NAME', ':memory:'), ('TEST', {'NAME': ':memory:'})):
tested_connections = db.ConnectionHandler({
'default': {
'ENGINE': 'django.db.backends.sqlite3',
option_key: option_value,
},
'other': {
'ENGINE': 'django.db.backends.sqlite3',
option_key: option_value,
},
})
with mock.patch('django.test.utils.connections', new=tested_connections):
other = tested_connections['other']
DiscoverRunner(verbosity=0).setup_databases()
msg = (
"DATABASES setting '%s' option set to sqlite3's ':memory:' value "
"shouldn't interfere with transaction support detection." % option_key
)
# Transaction support is properly initialized for the 'other' DB.
self.assertTrue(other.features.supports_transactions, msg)
# And all the DBs report that they support transactions.
self.assertTrue(connections_support_transactions(), msg)
class DummyBackendTest(unittest.TestCase):
def test_setup_databases(self):
"""
setup_databases() doesn't fail with dummy database backend.
"""
tested_connections = db.ConnectionHandler({})
with mock.patch('django.test.utils.connections', new=tested_connections):
runner_instance = DiscoverRunner(verbosity=0)
old_config = runner_instance.setup_databases()
runner_instance.teardown_databases(old_config)
class AliasedDefaultTestSetupTest(unittest.TestCase):
def test_setup_aliased_default_database(self):
"""
        setup_databases() doesn't fail when 'default' is aliased.
"""
tested_connections = db.ConnectionHandler({
'default': {
'NAME': 'dummy'
},
'aliased': {
'NAME': 'dummy'
}
})
with mock.patch('django.test.utils.connections', new=tested_connections):
runner_instance = DiscoverRunner(verbosity=0)
old_config = runner_instance.setup_databases()
runner_instance.teardown_databases(old_config)
class SetupDatabasesTests(unittest.TestCase):
def setUp(self):
self.runner_instance = DiscoverRunner(verbosity=0)
def test_setup_aliased_databases(self):
tested_connections = db.ConnectionHandler({
'default': {
'ENGINE': 'django.db.backends.dummy',
'NAME': 'dbname',
},
'other': {
'ENGINE': 'django.db.backends.dummy',
'NAME': 'dbname',
}
})
with mock.patch('django.db.backends.dummy.base.DatabaseWrapper.creation_class') as mocked_db_creation:
with mock.patch('django.test.utils.connections', new=tested_connections):
old_config = self.runner_instance.setup_databases()
self.runner_instance.teardown_databases(old_config)
mocked_db_creation.return_value.destroy_test_db.assert_called_once_with('dbname', 0, False)
def test_destroy_test_db_restores_db_name(self):
tested_connections = db.ConnectionHandler({
'default': {
'ENGINE': settings.DATABASES[db.DEFAULT_DB_ALIAS]["ENGINE"],
'NAME': 'xxx_test_database',
},
})
# Using the real current name as old_name to not mess with the test suite.
old_name = settings.DATABASES[db.DEFAULT_DB_ALIAS]["NAME"]
with mock.patch('django.db.connections', new=tested_connections):
tested_connections['default'].creation.destroy_test_db(old_name, verbosity=0, keepdb=True)
self.assertEqual(tested_connections['default'].settings_dict["NAME"], old_name)
def test_serialization(self):
tested_connections = db.ConnectionHandler({
'default': {
'ENGINE': 'django.db.backends.dummy',
},
})
with mock.patch('django.db.backends.dummy.base.DatabaseWrapper.creation_class') as mocked_db_creation:
with mock.patch('django.test.utils.connections', new=tested_connections):
self.runner_instance.setup_databases()
mocked_db_creation.return_value.create_test_db.assert_called_once_with(
verbosity=0, autoclobber=False, serialize=True, keepdb=False
)
def test_serialized_off(self):
tested_connections = db.ConnectionHandler({
'default': {
'ENGINE': 'django.db.backends.dummy',
'TEST': {'SERIALIZE': False},
},
})
with mock.patch('django.db.backends.dummy.base.DatabaseWrapper.creation_class') as mocked_db_creation:
with mock.patch('django.test.utils.connections', new=tested_connections):
self.runner_instance.setup_databases()
mocked_db_creation.return_value.create_test_db.assert_called_once_with(
verbosity=0, autoclobber=False, serialize=False, keepdb=False
)
@skipUnlessDBFeature('supports_sequence_reset')
class AutoIncrementResetTest(TransactionTestCase):
"""
    Creating the same models in different test methods yields the same PK
    values, since the sequences are reset before each test method.
"""
available_apps = ['test_runner']
reset_sequences = True
def _test(self):
# Regular model
p = Person.objects.create(first_name='Jack', last_name='Smith')
self.assertEqual(p.pk, 1)
# Auto-created many-to-many through model
p.friends.add(Person.objects.create(first_name='Jacky', last_name='Smith'))
self.assertEqual(p.friends.through.objects.first().pk, 1)
# Many-to-many through model
b = B.objects.create()
t = Through.objects.create(person=p, b=b)
self.assertEqual(t.pk, 1)
def test_autoincrement_reset1(self):
self._test()
def test_autoincrement_reset2(self):
self._test()
class EmptyDefaultDatabaseTest(unittest.TestCase):
def test_empty_default_database(self):
"""
An empty default database in settings does not raise an ImproperlyConfigured
error when running a unit test that does not use a database.
"""
testcases.connections = db.ConnectionHandler({'default': {}})
connection = testcases.connections[db.utils.DEFAULT_DB_ALIAS]
self.assertEqual(connection.settings_dict['ENGINE'], 'django.db.backends.dummy')
connections_support_transactions()
class RunTestsExceptionHandlingTests(unittest.TestCase):
def test_run_checks_raises(self):
"""
Teardown functions are run when run_checks() raises SystemCheckError.
"""
with mock.patch('django.test.runner.DiscoverRunner.setup_test_environment'), \
mock.patch('django.test.runner.DiscoverRunner.setup_databases'), \
mock.patch('django.test.runner.DiscoverRunner.build_suite'), \
mock.patch('django.test.runner.DiscoverRunner.run_checks', side_effect=SystemCheckError), \
mock.patch('django.test.runner.DiscoverRunner.teardown_databases') as teardown_databases, \
mock.patch('django.test.runner.DiscoverRunner.teardown_test_environment') as teardown_test_environment:
runner = DiscoverRunner(verbosity=0, interactive=False)
with self.assertRaises(SystemCheckError):
runner.run_tests(['test_runner_apps.sample.tests_sample.TestDjangoTestCase'])
self.assertTrue(teardown_databases.called)
self.assertTrue(teardown_test_environment.called)
def test_run_checks_raises_and_teardown_raises(self):
"""
SystemCheckError is surfaced when run_checks() raises SystemCheckError
        and teardown_databases() raises ValueError.
"""
with mock.patch('django.test.runner.DiscoverRunner.setup_test_environment'), \
mock.patch('django.test.runner.DiscoverRunner.setup_databases'), \
mock.patch('django.test.runner.DiscoverRunner.build_suite'), \
mock.patch('django.test.runner.DiscoverRunner.run_checks', side_effect=SystemCheckError), \
mock.patch('django.test.runner.DiscoverRunner.teardown_databases', side_effect=ValueError) \
as teardown_databases, \
mock.patch('django.test.runner.DiscoverRunner.teardown_test_environment') as teardown_test_environment:
runner = DiscoverRunner(verbosity=0, interactive=False)
with self.assertRaises(SystemCheckError):
runner.run_tests(['test_runner_apps.sample.tests_sample.TestDjangoTestCase'])
self.assertTrue(teardown_databases.called)
self.assertFalse(teardown_test_environment.called)
def test_run_checks_passes_and_teardown_raises(self):
"""
Exceptions on teardown are surfaced if no exceptions happen during
run_checks().
"""
with mock.patch('django.test.runner.DiscoverRunner.setup_test_environment'), \
mock.patch('django.test.runner.DiscoverRunner.setup_databases'), \
mock.patch('django.test.runner.DiscoverRunner.build_suite'), \
mock.patch('django.test.runner.DiscoverRunner.run_checks'), \
mock.patch('django.test.runner.DiscoverRunner.teardown_databases', side_effect=ValueError) \
as teardown_databases, \
mock.patch('django.test.runner.DiscoverRunner.teardown_test_environment') as teardown_test_environment:
runner = DiscoverRunner(verbosity=0, interactive=False)
with self.assertRaises(ValueError):
# Suppress the output when running TestDjangoTestCase.
with mock.patch('sys.stderr'):
runner.run_tests(['test_runner_apps.sample.tests_sample.TestDjangoTestCase'])
self.assertTrue(teardown_databases.called)
self.assertFalse(teardown_test_environment.called)
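# A minimal sketch (not part of the original suite) of the programmatic entry
# point that the mocks above stand in for; it assumes a configured settings
# module and that the test_runner_apps sample tests are importable.
def _run_tests_sketch():
    runner = DiscoverRunner(verbosity=0, interactive=False)
    # run_tests() builds the suite, sets up test databases, runs the tests, and
    # returns the number of failing tests; the teardown hooks asserted on above
    # are invoked internally, even when run_checks() raises.
    return runner.run_tests(['test_runner_apps.sample.tests_sample.TestDjangoTestCase'])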
|
9b5a77de146b1679d310daf732c9a4a904905eccf44df2f8739f8e86118a7cd3 | import os
from argparse import ArgumentParser
from contextlib import contextmanager
from unittest import TestSuite, TextTestRunner, defaultTestLoader
from django.db import connections
from django.test import SimpleTestCase
from django.test.runner import DiscoverRunner
from django.test.utils import captured_stdout
@contextmanager
def change_cwd(directory):
current_dir = os.path.abspath(os.path.dirname(__file__))
new_dir = os.path.join(current_dir, directory)
old_cwd = os.getcwd()
os.chdir(new_dir)
try:
yield
finally:
os.chdir(old_cwd)
class DiscoverRunnerTests(SimpleTestCase):
def test_init_debug_mode(self):
runner = DiscoverRunner()
self.assertFalse(runner.debug_mode)
def test_add_arguments_debug_mode(self):
parser = ArgumentParser()
DiscoverRunner.add_arguments(parser)
ns = parser.parse_args([])
self.assertFalse(ns.debug_mode)
ns = parser.parse_args(["--debug-mode"])
self.assertTrue(ns.debug_mode)
def test_dotted_test_module(self):
count = DiscoverRunner().build_suite(
['test_runner_apps.sample.tests_sample'],
).countTestCases()
self.assertEqual(count, 4)
def test_dotted_test_class_vanilla_unittest(self):
count = DiscoverRunner().build_suite(
['test_runner_apps.sample.tests_sample.TestVanillaUnittest'],
).countTestCases()
self.assertEqual(count, 1)
def test_dotted_test_class_django_testcase(self):
count = DiscoverRunner().build_suite(
['test_runner_apps.sample.tests_sample.TestDjangoTestCase'],
).countTestCases()
self.assertEqual(count, 1)
def test_dotted_test_method_django_testcase(self):
count = DiscoverRunner().build_suite(
['test_runner_apps.sample.tests_sample.TestDjangoTestCase.test_sample'],
).countTestCases()
self.assertEqual(count, 1)
def test_pattern(self):
count = DiscoverRunner(
pattern="*_tests.py",
).build_suite(['test_runner_apps.sample']).countTestCases()
self.assertEqual(count, 1)
def test_file_path(self):
with change_cwd(".."):
count = DiscoverRunner().build_suite(
['test_runner_apps/sample/'],
).countTestCases()
self.assertEqual(count, 5)
def test_empty_label(self):
"""
If the test label is empty, discovery should happen on the current
working directory.
"""
with change_cwd("."):
suite = DiscoverRunner().build_suite([])
self.assertEqual(
suite._tests[0].id().split(".")[0],
os.path.basename(os.getcwd()),
)
def test_empty_test_case(self):
count = DiscoverRunner().build_suite(
['test_runner_apps.sample.tests_sample.EmptyTestCase'],
).countTestCases()
self.assertEqual(count, 0)
def test_discovery_on_package(self):
count = DiscoverRunner().build_suite(
['test_runner_apps.sample.tests'],
).countTestCases()
self.assertEqual(count, 1)
def test_ignore_adjacent(self):
"""
When given a dotted path to a module, unittest discovery searches
not just the module, but also the directory containing the module.
This results in tests from adjacent modules being run when they
should not. The discover runner avoids this behavior.
"""
count = DiscoverRunner().build_suite(
['test_runner_apps.sample.empty'],
).countTestCases()
self.assertEqual(count, 0)
def test_testcase_ordering(self):
with change_cwd(".."):
suite = DiscoverRunner().build_suite(['test_runner_apps/sample/'])
self.assertEqual(
suite._tests[0].__class__.__name__,
'TestDjangoTestCase',
msg="TestDjangoTestCase should be the first test case")
self.assertEqual(
suite._tests[1].__class__.__name__,
'TestZimpleTestCase',
msg="TestZimpleTestCase should be the second test case")
# All others can follow in unspecified order, including doctests
self.assertIn('DocTestCase', [t.__class__.__name__ for t in suite._tests[2:]])
def test_duplicates_ignored(self):
"""
Tests shouldn't be discovered twice when discovering on overlapping paths.
"""
base_app = 'forms_tests'
sub_app = 'forms_tests.field_tests'
with self.modify_settings(INSTALLED_APPS={'append': sub_app}):
single = DiscoverRunner().build_suite([base_app]).countTestCases()
dups = DiscoverRunner().build_suite([base_app, sub_app]).countTestCases()
self.assertEqual(single, dups)
def test_reverse(self):
"""
Reverse should reorder tests while maintaining the grouping specified
by ``DiscoverRunner.reorder_by``.
"""
runner = DiscoverRunner(reverse=True)
suite = runner.build_suite(
test_labels=('test_runner_apps.sample', 'test_runner_apps.simple'))
self.assertIn('test_runner_apps.simple', next(iter(suite)).id(),
msg="Test labels should be reversed.")
suite = runner.build_suite(test_labels=('test_runner_apps.simple',))
suite = tuple(suite)
self.assertIn('DjangoCase', suite[0].id(),
msg="Test groups should not be reversed.")
self.assertIn('SimpleCase', suite[4].id(),
msg="Test groups order should be preserved.")
self.assertIn('DjangoCase2', suite[0].id(),
msg="Django test cases should be reversed.")
self.assertIn('SimpleCase2', suite[4].id(),
msg="Simple test cases should be reversed.")
self.assertIn('UnittestCase2', suite[8].id(),
msg="Unittest test cases should be reversed.")
self.assertIn('test_2', suite[0].id(),
msg="Methods of Django cases should be reversed.")
self.assertIn('test_2', suite[4].id(),
msg="Methods of simple cases should be reversed.")
self.assertIn('test_2', suite[8].id(),
msg="Methods of unittest cases should be reversed.")
def test_overridable_get_test_runner_kwargs(self):
self.assertIsInstance(DiscoverRunner().get_test_runner_kwargs(), dict)
def test_overridable_test_suite(self):
self.assertEqual(DiscoverRunner().test_suite, TestSuite)
def test_overridable_test_runner(self):
self.assertEqual(DiscoverRunner().test_runner, TextTestRunner)
def test_overridable_test_loader(self):
self.assertEqual(DiscoverRunner().test_loader, defaultTestLoader)
def test_tags(self):
runner = DiscoverRunner(tags=['core'])
self.assertEqual(runner.build_suite(['test_runner_apps.tagged.tests']).countTestCases(), 1)
runner = DiscoverRunner(tags=['fast'])
self.assertEqual(runner.build_suite(['test_runner_apps.tagged.tests']).countTestCases(), 2)
runner = DiscoverRunner(tags=['slow'])
self.assertEqual(runner.build_suite(['test_runner_apps.tagged.tests']).countTestCases(), 2)
def test_exclude_tags(self):
runner = DiscoverRunner(tags=['fast'], exclude_tags=['core'])
self.assertEqual(runner.build_suite(['test_runner_apps.tagged.tests']).countTestCases(), 1)
runner = DiscoverRunner(tags=['fast'], exclude_tags=['slow'])
self.assertEqual(runner.build_suite(['test_runner_apps.tagged.tests']).countTestCases(), 0)
runner = DiscoverRunner(exclude_tags=['slow'])
self.assertEqual(runner.build_suite(['test_runner_apps.tagged.tests']).countTestCases(), 0)
def test_tag_inheritance(self):
def count_tests(**kwargs):
suite = DiscoverRunner(**kwargs).build_suite(['test_runner_apps.tagged.tests_inheritance'])
return suite.countTestCases()
self.assertEqual(count_tests(tags=['foo']), 4)
self.assertEqual(count_tests(tags=['bar']), 2)
self.assertEqual(count_tests(tags=['baz']), 2)
self.assertEqual(count_tests(tags=['foo'], exclude_tags=['bar']), 2)
self.assertEqual(count_tests(tags=['foo'], exclude_tags=['bar', 'baz']), 1)
self.assertEqual(count_tests(exclude_tags=['foo']), 0)
def test_included_tags_displayed(self):
runner = DiscoverRunner(tags=['foo', 'bar'], verbosity=2)
with captured_stdout() as stdout:
runner.build_suite(['test_runner_apps.tagged.tests'])
self.assertIn('Including test tag(s): bar, foo.\n', stdout.getvalue())
def test_excluded_tags_displayed(self):
runner = DiscoverRunner(exclude_tags=['foo', 'bar'], verbosity=3)
with captured_stdout() as stdout:
runner.build_suite(['test_runner_apps.tagged.tests'])
self.assertIn('Excluding test tag(s): bar, foo.\n', stdout.getvalue())
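# A minimal sketch (with hypothetical names, not the real test_runner_apps.tagged
# fixtures) of how the tags filtered above are attached to tests: the tag
# decorator from django.test records the tag names on the class or method.
from django.test import tag
@tag('fast', 'core')
class TaggedSketchTests(SimpleTestCase):
    @tag('slow')
    def test_tagged_sketch(self):
        # Class-level and method-level tags both count when the runner filters
        # with tags=... / exclude_tags=....
        self.assertEqual(type(self).tags, {'fast', 'core'})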
class DiscoverRunnerGetDatabasesTests(SimpleTestCase):
runner = DiscoverRunner(verbosity=2)
skip_msg = 'Skipping setup of unused database(s): '
def get_databases(self, test_labels):
suite = self.runner.build_suite(test_labels)
with captured_stdout() as stdout:
databases = self.runner.get_databases(suite)
return databases, stdout.getvalue()
def test_mixed(self):
databases, output = self.get_databases(['test_runner_apps.databases.tests'])
self.assertEqual(databases, set(connections))
self.assertNotIn(self.skip_msg, output)
def test_all(self):
databases, output = self.get_databases(['test_runner_apps.databases.tests.AllDatabasesTests'])
self.assertEqual(databases, set(connections))
self.assertNotIn(self.skip_msg, output)
def test_default_and_other(self):
databases, output = self.get_databases([
'test_runner_apps.databases.tests.DefaultDatabaseTests',
'test_runner_apps.databases.tests.OtherDatabaseTests',
])
self.assertEqual(databases, set(connections))
self.assertNotIn(self.skip_msg, output)
def test_default_only(self):
databases, output = self.get_databases(['test_runner_apps.databases.tests.DefaultDatabaseTests'])
self.assertEqual(databases, {'default'})
self.assertIn(self.skip_msg + 'other', output)
def test_other_only(self):
databases, output = self.get_databases(['test_runner_apps.databases.tests.OtherDatabaseTests'])
self.assertEqual(databases, {'other'})
self.assertIn(self.skip_msg + 'default', output)
def test_no_databases_required(self):
databases, output = self.get_databases(['test_runner_apps.databases.tests.NoDatabaseTests'])
self.assertEqual(databases, set())
self.assertIn(self.skip_msg + 'default, other', output)
|
8b715bbace4db65718da98c5fa582668d66f1a2f6e67c3910f1233344a8f0a4b | from unittest import mock
from django.db import connection, transaction
from django.test import TestCase, skipUnlessDBFeature
from django.test.utils import CaptureQueriesContext
from .models import (
Article, InheritedArticleA, InheritedArticleB, NullablePublicationThrough,
NullableTargetArticle, Publication,
)
class ManyToManyTests(TestCase):
def setUp(self):
# Create a couple of Publications.
self.p1 = Publication.objects.create(title='The Python Journal')
self.p2 = Publication.objects.create(title='Science News')
self.p3 = Publication.objects.create(title='Science Weekly')
self.p4 = Publication.objects.create(title='Highlights for Children')
self.a1 = Article.objects.create(headline='Django lets you build Web apps easily')
self.a1.publications.add(self.p1)
self.a2 = Article.objects.create(headline='NASA uses Python')
self.a2.publications.add(self.p1, self.p2, self.p3, self.p4)
self.a3 = Article.objects.create(headline='NASA finds intelligent life on Earth')
self.a3.publications.add(self.p2)
self.a4 = Article.objects.create(headline='Oxygen-free diet works wonders')
self.a4.publications.add(self.p2)
def test_add(self):
# Create an Article.
a5 = Article(headline='Django lets you create Web apps easily')
# You can't associate it with a Publication until it's been saved.
msg = (
'"<Article: Django lets you create Web apps easily>" needs to have '
'a value for field "id" before this many-to-many relationship can be used.'
)
with self.assertRaisesMessage(ValueError, msg):
getattr(a5, 'publications')
# Save it!
a5.save()
# Associate the Article with a Publication.
a5.publications.add(self.p1)
self.assertQuerysetEqual(a5.publications.all(), ['<Publication: The Python Journal>'])
# Create another Article, and set it to appear in both Publications.
a6 = Article(headline='ESA uses Python')
a6.save()
a6.publications.add(self.p1, self.p2)
a6.publications.add(self.p3)
# Adding a second time is OK
a6.publications.add(self.p3)
self.assertQuerysetEqual(
a6.publications.all(),
[
'<Publication: Science News>',
'<Publication: Science Weekly>',
'<Publication: The Python Journal>',
]
)
# Adding an object of the wrong type raises TypeError
with self.assertRaisesMessage(TypeError, "'Publication' instance expected, got <Article"):
with transaction.atomic():
a6.publications.add(a5)
# Add a Publication directly via publications.add by using keyword arguments.
a6.publications.create(title='Highlights for Adults')
self.assertQuerysetEqual(
a6.publications.all(),
[
'<Publication: Highlights for Adults>',
'<Publication: Science News>',
'<Publication: Science Weekly>',
'<Publication: The Python Journal>',
]
)
def test_reverse_add(self):
# Adding via the 'other' end of an m2m
a5 = Article(headline='NASA finds intelligent life on Mars')
a5.save()
self.p2.article_set.add(a5)
self.assertQuerysetEqual(
self.p2.article_set.all(),
[
'<Article: NASA finds intelligent life on Earth>',
'<Article: NASA finds intelligent life on Mars>',
'<Article: NASA uses Python>',
'<Article: Oxygen-free diet works wonders>',
]
)
self.assertQuerysetEqual(a5.publications.all(), ['<Publication: Science News>'])
# Adding via the other end using keywords
self.p2.article_set.create(headline='Carbon-free diet works wonders')
self.assertQuerysetEqual(
self.p2.article_set.all(),
[
'<Article: Carbon-free diet works wonders>',
'<Article: NASA finds intelligent life on Earth>',
'<Article: NASA finds intelligent life on Mars>',
'<Article: NASA uses Python>',
'<Article: Oxygen-free diet works wonders>',
])
a6 = self.p2.article_set.all()[3]
self.assertQuerysetEqual(
a6.publications.all(),
[
'<Publication: Highlights for Children>',
'<Publication: Science News>',
'<Publication: Science Weekly>',
'<Publication: The Python Journal>',
]
)
def test_related_sets(self):
# Article objects have access to their related Publication objects.
self.assertQuerysetEqual(self.a1.publications.all(), ['<Publication: The Python Journal>'])
self.assertQuerysetEqual(
self.a2.publications.all(),
[
'<Publication: Highlights for Children>',
'<Publication: Science News>',
'<Publication: Science Weekly>',
'<Publication: The Python Journal>',
]
)
# Publication objects have access to their related Article objects.
self.assertQuerysetEqual(
self.p2.article_set.all(),
[
'<Article: NASA finds intelligent life on Earth>',
'<Article: NASA uses Python>',
'<Article: Oxygen-free diet works wonders>',
]
)
self.assertQuerysetEqual(
self.p1.article_set.all(),
[
'<Article: Django lets you build Web apps easily>',
'<Article: NASA uses Python>',
]
)
self.assertQuerysetEqual(
Publication.objects.get(id=self.p4.id).article_set.all(),
['<Article: NASA uses Python>']
)
def test_selects(self):
# We can perform kwarg queries across m2m relationships
self.assertQuerysetEqual(
Article.objects.filter(publications__id__exact=self.p1.id),
[
'<Article: Django lets you build Web apps easily>',
'<Article: NASA uses Python>',
])
self.assertQuerysetEqual(
Article.objects.filter(publications__pk=self.p1.id),
[
'<Article: Django lets you build Web apps easily>',
'<Article: NASA uses Python>',
]
)
self.assertQuerysetEqual(
Article.objects.filter(publications=self.p1.id),
[
'<Article: Django lets you build Web apps easily>',
'<Article: NASA uses Python>',
]
)
self.assertQuerysetEqual(
Article.objects.filter(publications=self.p1),
[
'<Article: Django lets you build Web apps easily>',
'<Article: NASA uses Python>',
]
)
self.assertQuerysetEqual(
Article.objects.filter(publications__title__startswith="Science"),
[
'<Article: NASA finds intelligent life on Earth>',
'<Article: NASA uses Python>',
'<Article: NASA uses Python>',
'<Article: Oxygen-free diet works wonders>',
]
)
self.assertQuerysetEqual(
Article.objects.filter(publications__title__startswith="Science").distinct(),
[
'<Article: NASA finds intelligent life on Earth>',
'<Article: NASA uses Python>',
'<Article: Oxygen-free diet works wonders>',
]
)
# The count() function respects distinct() as well.
self.assertEqual(Article.objects.filter(publications__title__startswith="Science").count(), 4)
self.assertEqual(Article.objects.filter(publications__title__startswith="Science").distinct().count(), 3)
self.assertQuerysetEqual(
Article.objects.filter(publications__in=[self.p1.id, self.p2.id]).distinct(),
[
'<Article: Django lets you build Web apps easily>',
'<Article: NASA finds intelligent life on Earth>',
'<Article: NASA uses Python>',
'<Article: Oxygen-free diet works wonders>',
])
self.assertQuerysetEqual(
Article.objects.filter(publications__in=[self.p1.id, self.p2]).distinct(),
[
'<Article: Django lets you build Web apps easily>',
'<Article: NASA finds intelligent life on Earth>',
'<Article: NASA uses Python>',
'<Article: Oxygen-free diet works wonders>',
]
)
self.assertQuerysetEqual(
Article.objects.filter(publications__in=[self.p1, self.p2]).distinct(),
[
'<Article: Django lets you build Web apps easily>',
'<Article: NASA finds intelligent life on Earth>',
'<Article: NASA uses Python>',
'<Article: Oxygen-free diet works wonders>',
]
)
# Excluding a related item works as you would expect, too (although the SQL
# involved is a little complex).
self.assertQuerysetEqual(
Article.objects.exclude(publications=self.p2),
['<Article: Django lets you build Web apps easily>']
)
def test_reverse_selects(self):
# Reverse m2m queries are supported (i.e., starting at the table that
# doesn't have a ManyToManyField).
python_journal = ['<Publication: The Python Journal>']
self.assertQuerysetEqual(Publication.objects.filter(id__exact=self.p1.id), python_journal)
self.assertQuerysetEqual(Publication.objects.filter(pk=self.p1.id), python_journal)
self.assertQuerysetEqual(
Publication.objects.filter(article__headline__startswith="NASA"),
[
'<Publication: Highlights for Children>',
'<Publication: Science News>',
'<Publication: Science News>',
'<Publication: Science Weekly>',
'<Publication: The Python Journal>',
])
self.assertQuerysetEqual(Publication.objects.filter(article__id__exact=self.a1.id), python_journal)
self.assertQuerysetEqual(Publication.objects.filter(article__pk=self.a1.id), python_journal)
self.assertQuerysetEqual(Publication.objects.filter(article=self.a1.id), python_journal)
self.assertQuerysetEqual(Publication.objects.filter(article=self.a1), python_journal)
self.assertQuerysetEqual(
Publication.objects.filter(article__in=[self.a1.id, self.a2.id]).distinct(),
[
'<Publication: Highlights for Children>',
'<Publication: Science News>',
'<Publication: Science Weekly>',
'<Publication: The Python Journal>',
])
self.assertQuerysetEqual(
Publication.objects.filter(article__in=[self.a1.id, self.a2]).distinct(),
[
'<Publication: Highlights for Children>',
'<Publication: Science News>',
'<Publication: Science Weekly>',
'<Publication: The Python Journal>',
])
self.assertQuerysetEqual(
Publication.objects.filter(article__in=[self.a1, self.a2]).distinct(),
[
'<Publication: Highlights for Children>',
'<Publication: Science News>',
'<Publication: Science Weekly>',
'<Publication: The Python Journal>',
])
def test_delete(self):
# If we delete a Publication, its Articles won't be able to access it.
self.p1.delete()
self.assertQuerysetEqual(
Publication.objects.all(),
[
'<Publication: Highlights for Children>',
'<Publication: Science News>',
'<Publication: Science Weekly>',
]
)
self.assertQuerysetEqual(self.a1.publications.all(), [])
# If we delete an Article, its Publications won't be able to access it.
self.a2.delete()
self.assertQuerysetEqual(
Article.objects.all(),
[
'<Article: Django lets you build Web apps easily>',
'<Article: NASA finds intelligent life on Earth>',
'<Article: Oxygen-free diet works wonders>',
]
)
self.assertQuerysetEqual(
self.p2.article_set.all(),
[
'<Article: NASA finds intelligent life on Earth>',
'<Article: Oxygen-free diet works wonders>',
]
)
def test_bulk_delete(self):
# Bulk delete some Publications - references to deleted publications should go
Publication.objects.filter(title__startswith='Science').delete()
self.assertQuerysetEqual(
Publication.objects.all(),
[
'<Publication: Highlights for Children>',
'<Publication: The Python Journal>',
]
)
self.assertQuerysetEqual(
Article.objects.all(),
[
'<Article: Django lets you build Web apps easily>',
'<Article: NASA finds intelligent life on Earth>',
'<Article: NASA uses Python>',
'<Article: Oxygen-free diet works wonders>',
]
)
self.assertQuerysetEqual(
self.a2.publications.all(),
[
'<Publication: Highlights for Children>',
'<Publication: The Python Journal>',
]
)
# Bulk delete some articles - references to deleted objects should go
q = Article.objects.filter(headline__startswith='Django')
self.assertQuerysetEqual(q, ['<Article: Django lets you build Web apps easily>'])
q.delete()
# After the delete, the QuerySet cache needs to be cleared,
# and the referenced objects should be gone
self.assertQuerysetEqual(q, [])
self.assertQuerysetEqual(self.p1.article_set.all(), ['<Article: NASA uses Python>'])
def test_remove(self):
        # Removing a publication from an article:
self.assertQuerysetEqual(
self.p2.article_set.all(),
[
'<Article: NASA finds intelligent life on Earth>',
'<Article: NASA uses Python>',
'<Article: Oxygen-free diet works wonders>',
]
)
self.a4.publications.remove(self.p2)
self.assertQuerysetEqual(
self.p2.article_set.all(),
[
'<Article: NASA finds intelligent life on Earth>',
'<Article: NASA uses Python>',
]
)
self.assertQuerysetEqual(self.a4.publications.all(), [])
# And from the other end
self.p2.article_set.remove(self.a3)
self.assertQuerysetEqual(self.p2.article_set.all(), ['<Article: NASA uses Python>'])
self.assertQuerysetEqual(self.a3.publications.all(), [])
def test_set(self):
self.p2.article_set.set([self.a4, self.a3])
self.assertQuerysetEqual(
self.p2.article_set.all(),
[
'<Article: NASA finds intelligent life on Earth>',
'<Article: Oxygen-free diet works wonders>',
]
)
self.assertQuerysetEqual(self.a4.publications.all(), ['<Publication: Science News>'])
self.a4.publications.set([self.p3.id])
self.assertQuerysetEqual(self.p2.article_set.all(), ['<Article: NASA finds intelligent life on Earth>'])
self.assertQuerysetEqual(self.a4.publications.all(), ['<Publication: Science Weekly>'])
self.p2.article_set.set([])
self.assertQuerysetEqual(self.p2.article_set.all(), [])
self.a4.publications.set([])
self.assertQuerysetEqual(self.a4.publications.all(), [])
self.p2.article_set.set([self.a4, self.a3], clear=True)
self.assertQuerysetEqual(
self.p2.article_set.all(),
[
'<Article: NASA finds intelligent life on Earth>',
'<Article: Oxygen-free diet works wonders>',
]
)
self.assertQuerysetEqual(self.a4.publications.all(), ['<Publication: Science News>'])
self.a4.publications.set([self.p3.id], clear=True)
self.assertQuerysetEqual(self.p2.article_set.all(), ['<Article: NASA finds intelligent life on Earth>'])
self.assertQuerysetEqual(self.a4.publications.all(), ['<Publication: Science Weekly>'])
self.p2.article_set.set([], clear=True)
self.assertQuerysetEqual(self.p2.article_set.all(), [])
self.a4.publications.set([], clear=True)
self.assertQuerysetEqual(self.a4.publications.all(), [])
def test_assign_forward(self):
msg = (
"Direct assignment to the reverse side of a many-to-many set is "
"prohibited. Use article_set.set() instead."
)
with self.assertRaisesMessage(TypeError, msg):
self.p2.article_set = [self.a4, self.a3]
def test_assign_reverse(self):
msg = (
"Direct assignment to the forward side of a many-to-many "
"set is prohibited. Use publications.set() instead."
)
with self.assertRaisesMessage(TypeError, msg):
self.a1.publications = [self.p1, self.p2]
def test_assign(self):
# Relation sets can be assigned using set().
self.p2.article_set.set([self.a4, self.a3])
self.assertQuerysetEqual(
self.p2.article_set.all(), [
'<Article: NASA finds intelligent life on Earth>',
'<Article: Oxygen-free diet works wonders>',
]
)
self.assertQuerysetEqual(self.a4.publications.all(), ['<Publication: Science News>'])
self.a4.publications.set([self.p3.id])
self.assertQuerysetEqual(self.p2.article_set.all(), ['<Article: NASA finds intelligent life on Earth>'])
self.assertQuerysetEqual(self.a4.publications.all(), ['<Publication: Science Weekly>'])
        # An alternative to calling clear() is to set an empty set.
self.p2.article_set.set([])
self.assertQuerysetEqual(self.p2.article_set.all(), [])
self.a4.publications.set([])
self.assertQuerysetEqual(self.a4.publications.all(), [])
def test_assign_ids(self):
# Relation sets can also be set using primary key values
self.p2.article_set.set([self.a4.id, self.a3.id])
self.assertQuerysetEqual(
self.p2.article_set.all(),
[
'<Article: NASA finds intelligent life on Earth>',
'<Article: Oxygen-free diet works wonders>',
]
)
self.assertQuerysetEqual(self.a4.publications.all(), ['<Publication: Science News>'])
self.a4.publications.set([self.p3.id])
self.assertQuerysetEqual(self.p2.article_set.all(), ['<Article: NASA finds intelligent life on Earth>'])
self.assertQuerysetEqual(self.a4.publications.all(), ['<Publication: Science Weekly>'])
def test_forward_assign_with_queryset(self):
# Querysets used in m2m assignments are pre-evaluated so their value
# isn't affected by the clearing operation in ManyRelatedManager.set()
# (#19816).
self.a1.publications.set([self.p1, self.p2])
qs = self.a1.publications.filter(title='The Python Journal')
self.a1.publications.set(qs)
self.assertEqual(1, self.a1.publications.count())
self.assertEqual(1, qs.count())
def test_reverse_assign_with_queryset(self):
# Querysets used in M2M assignments are pre-evaluated so their value
# isn't affected by the clearing operation in ManyRelatedManager.set()
# (#19816).
self.p1.article_set.set([self.a1, self.a2])
qs = self.p1.article_set.filter(headline='Django lets you build Web apps easily')
self.p1.article_set.set(qs)
self.assertEqual(1, self.p1.article_set.count())
self.assertEqual(1, qs.count())
def test_clear(self):
# Relation sets can be cleared:
self.p2.article_set.clear()
self.assertQuerysetEqual(self.p2.article_set.all(), [])
self.assertQuerysetEqual(self.a4.publications.all(), [])
# And you can clear from the other end
self.p2.article_set.add(self.a3, self.a4)
self.assertQuerysetEqual(
self.p2.article_set.all(),
[
'<Article: NASA finds intelligent life on Earth>',
'<Article: Oxygen-free diet works wonders>',
]
)
self.assertQuerysetEqual(self.a4.publications.all(), ['<Publication: Science News>'])
self.a4.publications.clear()
self.assertQuerysetEqual(self.a4.publications.all(), [])
self.assertQuerysetEqual(self.p2.article_set.all(), ['<Article: NASA finds intelligent life on Earth>'])
def test_clear_after_prefetch(self):
a4 = Article.objects.prefetch_related('publications').get(id=self.a4.id)
self.assertQuerysetEqual(a4.publications.all(), ['<Publication: Science News>'])
a4.publications.clear()
self.assertQuerysetEqual(a4.publications.all(), [])
def test_remove_after_prefetch(self):
a4 = Article.objects.prefetch_related('publications').get(id=self.a4.id)
self.assertQuerysetEqual(a4.publications.all(), ['<Publication: Science News>'])
a4.publications.remove(self.p2)
self.assertQuerysetEqual(a4.publications.all(), [])
def test_add_after_prefetch(self):
a4 = Article.objects.prefetch_related('publications').get(id=self.a4.id)
self.assertEqual(a4.publications.count(), 1)
a4.publications.add(self.p1)
self.assertEqual(a4.publications.count(), 2)
def test_set_after_prefetch(self):
a4 = Article.objects.prefetch_related('publications').get(id=self.a4.id)
self.assertEqual(a4.publications.count(), 1)
a4.publications.set([self.p2, self.p1])
self.assertEqual(a4.publications.count(), 2)
a4.publications.set([self.p1])
self.assertEqual(a4.publications.count(), 1)
def test_add_then_remove_after_prefetch(self):
a4 = Article.objects.prefetch_related('publications').get(id=self.a4.id)
self.assertEqual(a4.publications.count(), 1)
a4.publications.add(self.p1)
self.assertEqual(a4.publications.count(), 2)
a4.publications.remove(self.p1)
self.assertQuerysetEqual(a4.publications.all(), ['<Publication: Science News>'])
def test_inherited_models_selects(self):
"""
#24156 - Objects from child models where the parent's m2m field uses
related_name='+' should be retrieved correctly.
"""
a = InheritedArticleA.objects.create()
b = InheritedArticleB.objects.create()
a.publications.add(self.p1, self.p2)
self.assertQuerysetEqual(
a.publications.all(),
[
'<Publication: Science News>',
'<Publication: The Python Journal>',
])
self.assertQuerysetEqual(b.publications.all(), [])
b.publications.add(self.p3)
self.assertQuerysetEqual(
a.publications.all(),
[
'<Publication: Science News>',
'<Publication: The Python Journal>',
]
)
self.assertQuerysetEqual(b.publications.all(), ['<Publication: Science Weekly>'])
class ManyToManyQueryTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.article = Article.objects.create(headline='Django lets you build Web apps easily')
cls.nullable_target_article = NullableTargetArticle.objects.create(headline='The python is good')
NullablePublicationThrough.objects.create(article=cls.nullable_target_article, publication=None)
@skipUnlessDBFeature('supports_foreign_keys')
def test_count_join_optimization(self):
with CaptureQueriesContext(connection) as query:
self.article.publications.count()
self.assertNotIn('JOIN', query[0]['sql'])
self.assertEqual(self.nullable_target_article.publications.count(), 0)
def test_count_join_optimization_disabled(self):
with mock.patch.object(connection.features, 'supports_foreign_keys', False), \
CaptureQueriesContext(connection) as query:
self.article.publications.count()
self.assertIn('JOIN', query[0]['sql'])
@skipUnlessDBFeature('supports_foreign_keys')
def test_exists_join_optimization(self):
with CaptureQueriesContext(connection) as query:
self.article.publications.exists()
self.assertNotIn('JOIN', query[0]['sql'])
self.assertIs(self.nullable_target_article.publications.exists(), False)
def test_exists_join_optimization_disabled(self):
with mock.patch.object(connection.features, 'supports_foreign_keys', False), \
CaptureQueriesContext(connection) as query:
self.article.publications.exists()
self.assertIn('JOIN', query[0]['sql'])
|
dc0373ce789ae2b5a136d561952bc922066066aa7ae0b349bc65374e5f46a67f | """
Many-to-many relationships
To define a many-to-many relationship, use ``ManyToManyField()``.
In this example, an ``Article`` can be published in multiple ``Publication``
objects, and a ``Publication`` has multiple ``Article`` objects.
"""
from django.db import models
class Publication(models.Model):
title = models.CharField(max_length=30)
class Meta:
ordering = ('title',)
def __str__(self):
return self.title
class Tag(models.Model):
id = models.BigAutoField(primary_key=True)
name = models.CharField(max_length=50)
def __str__(self):
return self.name
class Article(models.Model):
headline = models.CharField(max_length=100)
    # Pass the field name as an explicit string to make sure the intermediary
    # model is created correctly. Refs #20207.
publications = models.ManyToManyField(Publication, name='publications')
tags = models.ManyToManyField(Tag, related_name='tags')
class Meta:
ordering = ('headline',)
def __str__(self):
return self.headline
# Models to test correct related_name inheritance
class AbstractArticle(models.Model):
class Meta:
abstract = True
publications = models.ManyToManyField(Publication, name='publications', related_name='+')
class InheritedArticleA(AbstractArticle):
pass
class InheritedArticleB(AbstractArticle):
pass
class NullableTargetArticle(models.Model):
headline = models.CharField(max_length=100)
publications = models.ManyToManyField(Publication, through='NullablePublicationThrough')
class NullablePublicationThrough(models.Model):
article = models.ForeignKey(NullableTargetArticle, models.CASCADE)
publication = models.ForeignKey(Publication, models.CASCADE, null=True)
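# A minimal usage sketch (illustrative only, never called by the test suite);
# it assumes the models above are migrated and mirrors operations exercised by
# the tests in this package.
def _many_to_many_usage_sketch():
    p = Publication.objects.create(title='The Python Journal')
    a = Article.objects.create(headline='Django lets you build Web apps easily')
    # add() inserts a row into the automatically created intermediary table.
    a.publications.add(p)
    # set() replaces the whole relation in one call; an empty list clears it.
    a.publications.set([p])
    # The relation can be traversed in queries from either side.
    Article.objects.filter(publications__title__startswith='The')
    Publication.objects.filter(article__headline__startswith='Django')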
|
f8262bca7cb02d18c2e7cf0a4657cc17fd238fda90e7b3fb049dad6fae820a94 | from io import BytesIO
from itertools import chain
from urllib.parse import urlencode
from django.core.exceptions import DisallowedHost
from django.core.handlers.wsgi import LimitedStream, WSGIRequest
from django.http import HttpRequest, RawPostDataException, UnreadablePostError
from django.http.request import HttpHeaders, split_domain_port
from django.test import RequestFactory, SimpleTestCase, override_settings
from django.test.client import FakePayload
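# A minimal sketch (illustrative only) of the hand-built WSGI environ the tests
# below construct repeatedly: REQUEST_METHOD and wsgi.input are enough for a
# bare request, and FakePayload stands in for the client body stream when a
# POST body is needed.
def _wsgi_request_sketch():
    payload = FakePayload('name=value')
    return WSGIRequest({
        'REQUEST_METHOD': 'POST',
        'CONTENT_TYPE': 'application/x-www-form-urlencoded',
        'CONTENT_LENGTH': len(payload),
        'wsgi.input': payload,
    })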
class RequestsTests(SimpleTestCase):
def test_httprequest(self):
request = HttpRequest()
self.assertEqual(list(request.GET), [])
self.assertEqual(list(request.POST), [])
self.assertEqual(list(request.COOKIES), [])
self.assertEqual(list(request.META), [])
# .GET and .POST should be QueryDicts
self.assertEqual(request.GET.urlencode(), '')
self.assertEqual(request.POST.urlencode(), '')
# and FILES should be MultiValueDict
self.assertEqual(request.FILES.getlist('foo'), [])
self.assertIsNone(request.content_type)
self.assertIsNone(request.content_params)
def test_httprequest_full_path(self):
request = HttpRequest()
request.path = '/;some/?awful/=path/foo:bar/'
request.path_info = '/prefix' + request.path
request.META['QUERY_STRING'] = ';some=query&+query=string'
expected = '/%3Bsome/%3Fawful/%3Dpath/foo:bar/?;some=query&+query=string'
self.assertEqual(request.get_full_path(), expected)
self.assertEqual(request.get_full_path_info(), '/prefix' + expected)
def test_httprequest_full_path_with_query_string_and_fragment(self):
request = HttpRequest()
request.path = '/foo#bar'
request.path_info = '/prefix' + request.path
request.META['QUERY_STRING'] = 'baz#quux'
self.assertEqual(request.get_full_path(), '/foo%23bar?baz#quux')
self.assertEqual(request.get_full_path_info(), '/prefix/foo%23bar?baz#quux')
def test_httprequest_repr(self):
request = HttpRequest()
request.path = '/somepath/'
request.method = 'GET'
request.GET = {'get-key': 'get-value'}
request.POST = {'post-key': 'post-value'}
request.COOKIES = {'post-key': 'post-value'}
request.META = {'post-key': 'post-value'}
self.assertEqual(repr(request), "<HttpRequest: GET '/somepath/'>")
def test_httprequest_repr_invalid_method_and_path(self):
request = HttpRequest()
self.assertEqual(repr(request), "<HttpRequest>")
request = HttpRequest()
request.method = "GET"
self.assertEqual(repr(request), "<HttpRequest>")
request = HttpRequest()
request.path = ""
self.assertEqual(repr(request), "<HttpRequest>")
def test_wsgirequest(self):
request = WSGIRequest({
'PATH_INFO': 'bogus',
'REQUEST_METHOD': 'bogus',
'CONTENT_TYPE': 'text/html; charset=utf8',
'wsgi.input': BytesIO(b''),
})
self.assertEqual(list(request.GET), [])
self.assertEqual(list(request.POST), [])
self.assertEqual(list(request.COOKIES), [])
self.assertEqual(
set(request.META),
{'PATH_INFO', 'REQUEST_METHOD', 'SCRIPT_NAME', 'CONTENT_TYPE', 'wsgi.input'}
)
self.assertEqual(request.META['PATH_INFO'], 'bogus')
self.assertEqual(request.META['REQUEST_METHOD'], 'bogus')
self.assertEqual(request.META['SCRIPT_NAME'], '')
self.assertEqual(request.content_type, 'text/html')
self.assertEqual(request.content_params, {'charset': 'utf8'})
def test_wsgirequest_with_script_name(self):
"""
The request's path is correctly assembled, regardless of whether or
not the SCRIPT_NAME has a trailing slash (#20169).
"""
# With trailing slash
request = WSGIRequest({
'PATH_INFO': '/somepath/',
'SCRIPT_NAME': '/PREFIX/',
'REQUEST_METHOD': 'get',
'wsgi.input': BytesIO(b''),
})
self.assertEqual(request.path, '/PREFIX/somepath/')
# Without trailing slash
request = WSGIRequest({
'PATH_INFO': '/somepath/',
'SCRIPT_NAME': '/PREFIX',
'REQUEST_METHOD': 'get',
'wsgi.input': BytesIO(b''),
})
self.assertEqual(request.path, '/PREFIX/somepath/')
def test_wsgirequest_script_url_double_slashes(self):
"""
        WSGI squashes multiple successive slashes in PATH_INFO, so WSGIRequest
should take that into account when populating request.path and
request.META['SCRIPT_NAME'] (#17133).
"""
request = WSGIRequest({
'SCRIPT_URL': '/mst/milestones//accounts/login//help',
'PATH_INFO': '/milestones/accounts/login/help',
'REQUEST_METHOD': 'get',
'wsgi.input': BytesIO(b''),
})
self.assertEqual(request.path, '/mst/milestones/accounts/login/help')
self.assertEqual(request.META['SCRIPT_NAME'], '/mst')
def test_wsgirequest_with_force_script_name(self):
"""
The FORCE_SCRIPT_NAME setting takes precedence over the request's
SCRIPT_NAME environment parameter (#20169).
"""
with override_settings(FORCE_SCRIPT_NAME='/FORCED_PREFIX/'):
request = WSGIRequest({
'PATH_INFO': '/somepath/',
'SCRIPT_NAME': '/PREFIX/',
'REQUEST_METHOD': 'get',
'wsgi.input': BytesIO(b''),
})
self.assertEqual(request.path, '/FORCED_PREFIX/somepath/')
def test_wsgirequest_path_with_force_script_name_trailing_slash(self):
"""
The request's path is correctly assembled, regardless of whether or not
the FORCE_SCRIPT_NAME setting has a trailing slash (#20169).
"""
# With trailing slash
with override_settings(FORCE_SCRIPT_NAME='/FORCED_PREFIX/'):
request = WSGIRequest({'PATH_INFO': '/somepath/', 'REQUEST_METHOD': 'get', 'wsgi.input': BytesIO(b'')})
self.assertEqual(request.path, '/FORCED_PREFIX/somepath/')
# Without trailing slash
with override_settings(FORCE_SCRIPT_NAME='/FORCED_PREFIX'):
request = WSGIRequest({'PATH_INFO': '/somepath/', 'REQUEST_METHOD': 'get', 'wsgi.input': BytesIO(b'')})
self.assertEqual(request.path, '/FORCED_PREFIX/somepath/')
def test_wsgirequest_repr(self):
request = WSGIRequest({'REQUEST_METHOD': 'get', 'wsgi.input': BytesIO(b'')})
self.assertEqual(repr(request), "<WSGIRequest: GET '/'>")
request = WSGIRequest({'PATH_INFO': '/somepath/', 'REQUEST_METHOD': 'get', 'wsgi.input': BytesIO(b'')})
request.GET = {'get-key': 'get-value'}
request.POST = {'post-key': 'post-value'}
request.COOKIES = {'post-key': 'post-value'}
request.META = {'post-key': 'post-value'}
self.assertEqual(repr(request), "<WSGIRequest: GET '/somepath/'>")
def test_wsgirequest_path_info(self):
def wsgi_str(path_info, encoding='utf-8'):
path_info = path_info.encode(encoding) # Actual URL sent by the browser (bytestring)
path_info = path_info.decode('iso-8859-1') # Value in the WSGI environ dict (native string)
return path_info
# Regression for #19468
request = WSGIRequest({'PATH_INFO': wsgi_str("/سلام/"), 'REQUEST_METHOD': 'get', 'wsgi.input': BytesIO(b'')})
self.assertEqual(request.path, "/سلام/")
# The URL may be incorrectly encoded in a non-UTF-8 encoding (#26971)
request = WSGIRequest({
'PATH_INFO': wsgi_str("/café/", encoding='iso-8859-1'),
'REQUEST_METHOD': 'get',
'wsgi.input': BytesIO(b''),
})
# Since it's impossible to decide the (wrong) encoding of the URL, it's
# left percent-encoded in the path.
self.assertEqual(request.path, "/caf%E9/")
def test_limited_stream(self):
# Read all of a limited stream
stream = LimitedStream(BytesIO(b'test'), 2)
self.assertEqual(stream.read(), b'te')
# Reading again returns nothing.
self.assertEqual(stream.read(), b'')
# Read a number of characters greater than the stream has to offer
stream = LimitedStream(BytesIO(b'test'), 2)
self.assertEqual(stream.read(5), b'te')
# Reading again returns nothing.
self.assertEqual(stream.readline(5), b'')
# Read sequentially from a stream
stream = LimitedStream(BytesIO(b'12345678'), 8)
self.assertEqual(stream.read(5), b'12345')
self.assertEqual(stream.read(5), b'678')
# Reading again returns nothing.
self.assertEqual(stream.readline(5), b'')
# Read lines from a stream
stream = LimitedStream(BytesIO(b'1234\n5678\nabcd\nefgh\nijkl'), 24)
# Read a full line, unconditionally
self.assertEqual(stream.readline(), b'1234\n')
# Read a number of characters less than a line
self.assertEqual(stream.readline(2), b'56')
# Read the rest of the partial line
self.assertEqual(stream.readline(), b'78\n')
# Read a full line, with a character limit greater than the line length
self.assertEqual(stream.readline(6), b'abcd\n')
# Read the next line, deliberately terminated at the line end
self.assertEqual(stream.readline(4), b'efgh')
# Read the next line... just the line end
self.assertEqual(stream.readline(), b'\n')
# Read everything else.
self.assertEqual(stream.readline(), b'ijkl')
        # Regression for #15018
        # If the stream contains a newline but its length limit is smaller
        # than the amount of data provided, the newline doesn't reset the
        # count of characters still available.
stream = LimitedStream(BytesIO(b'1234\nabcdef'), 9)
self.assertEqual(stream.readline(10), b'1234\n')
self.assertEqual(stream.readline(3), b'abc')
# Now expire the available characters
self.assertEqual(stream.readline(3), b'd')
# Reading again returns nothing.
self.assertEqual(stream.readline(2), b'')
# Same test, but with read, not readline.
stream = LimitedStream(BytesIO(b'1234\nabcdef'), 9)
self.assertEqual(stream.read(6), b'1234\na')
self.assertEqual(stream.read(2), b'bc')
self.assertEqual(stream.read(2), b'd')
self.assertEqual(stream.read(2), b'')
self.assertEqual(stream.read(), b'')
def test_stream(self):
payload = FakePayload('name=value')
request = WSGIRequest({
'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'application/x-www-form-urlencoded',
'CONTENT_LENGTH': len(payload),
'wsgi.input': payload},
)
self.assertEqual(request.read(), b'name=value')
def test_read_after_value(self):
"""
Reading from request is allowed after accessing request contents as
POST or body.
"""
payload = FakePayload('name=value')
request = WSGIRequest({
'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'application/x-www-form-urlencoded',
'CONTENT_LENGTH': len(payload),
'wsgi.input': payload,
})
self.assertEqual(request.POST, {'name': ['value']})
self.assertEqual(request.body, b'name=value')
self.assertEqual(request.read(), b'name=value')
def test_value_after_read(self):
"""
Construction of POST or body is not allowed after reading
from request.
"""
payload = FakePayload('name=value')
request = WSGIRequest({
'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'application/x-www-form-urlencoded',
'CONTENT_LENGTH': len(payload),
'wsgi.input': payload,
})
self.assertEqual(request.read(2), b'na')
with self.assertRaises(RawPostDataException):
request.body
self.assertEqual(request.POST, {})
def test_non_ascii_POST(self):
payload = FakePayload(urlencode({'key': 'España'}))
request = WSGIRequest({
'REQUEST_METHOD': 'POST',
'CONTENT_LENGTH': len(payload),
'CONTENT_TYPE': 'application/x-www-form-urlencoded',
'wsgi.input': payload,
})
self.assertEqual(request.POST, {'key': ['España']})
def test_alternate_charset_POST(self):
"""
Test a POST with non-utf-8 payload encoding.
"""
payload = FakePayload(urlencode({'key': 'España'.encode('latin-1')}))
request = WSGIRequest({
'REQUEST_METHOD': 'POST',
'CONTENT_LENGTH': len(payload),
'CONTENT_TYPE': 'application/x-www-form-urlencoded; charset=iso-8859-1',
'wsgi.input': payload,
})
self.assertEqual(request.POST, {'key': ['España']})
def test_body_after_POST_multipart_form_data(self):
"""
Reading body after parsing multipart/form-data is not allowed
"""
        # Because multipart is used for large amounts of data, e.g. file uploads,
# we don't want the data held in memory twice, and we don't want to
# silence the error by setting body = '' either.
payload = FakePayload("\r\n".join([
'--boundary',
'Content-Disposition: form-data; name="name"',
'',
'value',
'--boundary--'
'']))
request = WSGIRequest({
'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'multipart/form-data; boundary=boundary',
'CONTENT_LENGTH': len(payload),
'wsgi.input': payload,
})
self.assertEqual(request.POST, {'name': ['value']})
with self.assertRaises(RawPostDataException):
request.body
def test_body_after_POST_multipart_related(self):
"""
Reading body after parsing multipart that isn't form-data is allowed
"""
# Ticket #9054
# There are cases in which the multipart data is related instead of
# being a binary upload, in which case it should still be accessible
# via body.
payload_data = b"\r\n".join([
b'--boundary',
b'Content-ID: id; name="name"',
b'',
b'value',
b'--boundary--'
b''])
payload = FakePayload(payload_data)
request = WSGIRequest({
'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'multipart/related; boundary=boundary',
'CONTENT_LENGTH': len(payload),
'wsgi.input': payload,
})
self.assertEqual(request.POST, {})
self.assertEqual(request.body, payload_data)
def test_POST_multipart_with_content_length_zero(self):
"""
Multipart POST requests with Content-Length >= 0 are valid and need to be handled.
"""
# According to:
# https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.13
# Every request.POST with Content-Length >= 0 is a valid request,
# this test ensures that we handle Content-Length == 0.
payload = FakePayload("\r\n".join([
'--boundary',
'Content-Disposition: form-data; name="name"',
'',
'value',
'--boundary--'
'']))
request = WSGIRequest({
'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'multipart/form-data; boundary=boundary',
'CONTENT_LENGTH': 0,
'wsgi.input': payload,
})
self.assertEqual(request.POST, {})
def test_POST_binary_only(self):
payload = b'\r\n\x01\x00\x00\x00ab\x00\x00\xcd\xcc,@'
environ = {
'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'application/octet-stream',
'CONTENT_LENGTH': len(payload),
'wsgi.input': BytesIO(payload),
}
request = WSGIRequest(environ)
self.assertEqual(request.POST, {})
self.assertEqual(request.FILES, {})
self.assertEqual(request.body, payload)
# Same test without specifying content-type
environ.update({'CONTENT_TYPE': '', 'wsgi.input': BytesIO(payload)})
request = WSGIRequest(environ)
self.assertEqual(request.POST, {})
self.assertEqual(request.FILES, {})
self.assertEqual(request.body, payload)
def test_read_by_lines(self):
payload = FakePayload('name=value')
request = WSGIRequest({
'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'application/x-www-form-urlencoded',
'CONTENT_LENGTH': len(payload),
'wsgi.input': payload,
})
self.assertEqual(list(request), [b'name=value'])
def test_POST_after_body_read(self):
"""
POST should be populated even if body is read first
"""
payload = FakePayload('name=value')
request = WSGIRequest({
'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'application/x-www-form-urlencoded',
'CONTENT_LENGTH': len(payload),
'wsgi.input': payload,
})
request.body # evaluate
self.assertEqual(request.POST, {'name': ['value']})
def test_POST_after_body_read_and_stream_read(self):
"""
POST should be populated even if body is read first, and then
the stream is read second.
"""
payload = FakePayload('name=value')
request = WSGIRequest({
'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'application/x-www-form-urlencoded',
'CONTENT_LENGTH': len(payload),
'wsgi.input': payload,
})
request.body # evaluate
self.assertEqual(request.read(1), b'n')
self.assertEqual(request.POST, {'name': ['value']})
def test_POST_after_body_read_and_stream_read_multipart(self):
"""
POST should be populated even if body is read first, and then
the stream is read second. Using multipart/form-data instead of urlencoded.
"""
payload = FakePayload("\r\n".join([
'--boundary',
'Content-Disposition: form-data; name="name"',
'',
'value',
'--boundary--'
'']))
request = WSGIRequest({
'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'multipart/form-data; boundary=boundary',
'CONTENT_LENGTH': len(payload),
'wsgi.input': payload,
})
request.body # evaluate
# Consume enough data to mess up the parsing:
self.assertEqual(request.read(13), b'--boundary\r\nC')
self.assertEqual(request.POST, {'name': ['value']})
    def test_POST_immutable_for_multipart(self):
"""
MultiPartParser.parse() leaves request.POST immutable.
"""
payload = FakePayload("\r\n".join([
'--boundary',
'Content-Disposition: form-data; name="name"',
'',
'value',
'--boundary--',
]))
request = WSGIRequest({
'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'multipart/form-data; boundary=boundary',
'CONTENT_LENGTH': len(payload),
'wsgi.input': payload,
})
self.assertFalse(request.POST._mutable)
def test_POST_connection_error(self):
"""
If wsgi.input.read() raises an exception while trying to read() the
POST, the exception should be identifiable (not a generic IOError).
"""
class ExplodingBytesIO(BytesIO):
def read(self, len=0):
raise IOError("kaboom!")
payload = b'name=value'
request = WSGIRequest({
'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'application/x-www-form-urlencoded',
'CONTENT_LENGTH': len(payload),
'wsgi.input': ExplodingBytesIO(payload),
})
with self.assertRaises(UnreadablePostError):
request.body
def test_set_encoding_clears_POST(self):
payload = FakePayload('name=Hello Günter')
request = WSGIRequest({
'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'application/x-www-form-urlencoded',
'CONTENT_LENGTH': len(payload),
'wsgi.input': payload,
})
self.assertEqual(request.POST, {'name': ['Hello Günter']})
request.encoding = 'iso-8859-16'
self.assertEqual(request.POST, {'name': ['Hello GĂŒnter']})
def test_set_encoding_clears_GET(self):
request = WSGIRequest({
'REQUEST_METHOD': 'GET',
'wsgi.input': '',
'QUERY_STRING': 'name=Hello%20G%C3%BCnter',
})
self.assertEqual(request.GET, {'name': ['Hello Günter']})
request.encoding = 'iso-8859-16'
self.assertEqual(request.GET, {'name': ['Hello G\u0102\u0152nter']})
def test_FILES_connection_error(self):
"""
If wsgi.input.read() raises an exception while trying to read() the
FILES, the exception should be identifiable (not a generic IOError).
"""
class ExplodingBytesIO(BytesIO):
def read(self, len=0):
raise IOError("kaboom!")
payload = b'x'
request = WSGIRequest({
'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'multipart/form-data; boundary=foo_',
'CONTENT_LENGTH': len(payload),
'wsgi.input': ExplodingBytesIO(payload),
})
with self.assertRaises(UnreadablePostError):
request.FILES
@override_settings(ALLOWED_HOSTS=['example.com'])
def test_get_raw_uri(self):
factory = RequestFactory(HTTP_HOST='evil.com')
request = factory.get('////absolute-uri')
self.assertEqual(request.get_raw_uri(), 'http://evil.com//absolute-uri')
request = factory.get('/?foo=bar')
self.assertEqual(request.get_raw_uri(), 'http://evil.com/?foo=bar')
request = factory.get('/path/with:colons')
self.assertEqual(request.get_raw_uri(), 'http://evil.com/path/with:colons')
class HostValidationTests(SimpleTestCase):
poisoned_hosts = [
'[email protected]',
'example.com:[email protected]',
'example.com:[email protected]:80',
'example.com:80/badpath',
'example.com: recovermypassword.com',
]
@override_settings(
USE_X_FORWARDED_HOST=False,
ALLOWED_HOSTS=[
'forward.com', 'example.com', 'internal.com', '12.34.56.78',
'[2001:19f0:feee::dead:beef:cafe]', 'xn--4ca9at.com',
'.multitenant.com', 'INSENSITIVE.com', '[::ffff:169.254.169.254]',
])
def test_http_get_host(self):
# Check if X_FORWARDED_HOST is provided.
request = HttpRequest()
request.META = {
'HTTP_X_FORWARDED_HOST': 'forward.com',
'HTTP_HOST': 'example.com',
'SERVER_NAME': 'internal.com',
'SERVER_PORT': 80,
}
# X_FORWARDED_HOST is ignored.
self.assertEqual(request.get_host(), 'example.com')
# Check if X_FORWARDED_HOST isn't provided.
request = HttpRequest()
request.META = {
'HTTP_HOST': 'example.com',
'SERVER_NAME': 'internal.com',
'SERVER_PORT': 80,
}
self.assertEqual(request.get_host(), 'example.com')
# Check if HTTP_HOST isn't provided.
request = HttpRequest()
request.META = {
'SERVER_NAME': 'internal.com',
'SERVER_PORT': 80,
}
self.assertEqual(request.get_host(), 'internal.com')
# Check if HTTP_HOST isn't provided, and we're on a nonstandard port
request = HttpRequest()
request.META = {
'SERVER_NAME': 'internal.com',
'SERVER_PORT': 8042,
}
self.assertEqual(request.get_host(), 'internal.com:8042')
legit_hosts = [
'example.com',
'example.com:80',
'12.34.56.78',
'12.34.56.78:443',
'[2001:19f0:feee::dead:beef:cafe]',
'[2001:19f0:feee::dead:beef:cafe]:8080',
'xn--4ca9at.com', # Punycode for öäü.com
'anything.multitenant.com',
'multitenant.com',
'insensitive.com',
'example.com.',
'example.com.:80',
'[::ffff:169.254.169.254]',
]
for host in legit_hosts:
request = HttpRequest()
request.META = {
'HTTP_HOST': host,
}
request.get_host()
# Poisoned host headers are rejected as suspicious
for host in chain(self.poisoned_hosts, ['other.com', 'example.com..']):
with self.assertRaises(DisallowedHost):
request = HttpRequest()
request.META = {
'HTTP_HOST': host,
}
request.get_host()
@override_settings(USE_X_FORWARDED_HOST=True, ALLOWED_HOSTS=['*'])
def test_http_get_host_with_x_forwarded_host(self):
# Check if X_FORWARDED_HOST is provided.
request = HttpRequest()
request.META = {
'HTTP_X_FORWARDED_HOST': 'forward.com',
'HTTP_HOST': 'example.com',
'SERVER_NAME': 'internal.com',
'SERVER_PORT': 80,
}
# X_FORWARDED_HOST is obeyed.
self.assertEqual(request.get_host(), 'forward.com')
# Check if X_FORWARDED_HOST isn't provided.
request = HttpRequest()
request.META = {
'HTTP_HOST': 'example.com',
'SERVER_NAME': 'internal.com',
'SERVER_PORT': 80,
}
self.assertEqual(request.get_host(), 'example.com')
# Check if HTTP_HOST isn't provided.
request = HttpRequest()
request.META = {
'SERVER_NAME': 'internal.com',
'SERVER_PORT': 80,
}
self.assertEqual(request.get_host(), 'internal.com')
# Check if HTTP_HOST isn't provided, and we're on a nonstandard port
request = HttpRequest()
request.META = {
'SERVER_NAME': 'internal.com',
'SERVER_PORT': 8042,
}
self.assertEqual(request.get_host(), 'internal.com:8042')
# Poisoned host headers are rejected as suspicious
legit_hosts = [
'example.com',
'example.com:80',
'12.34.56.78',
'12.34.56.78:443',
'[2001:19f0:feee::dead:beef:cafe]',
'[2001:19f0:feee::dead:beef:cafe]:8080',
'xn--4ca9at.com', # Punycode for öäü.com
]
for host in legit_hosts:
request = HttpRequest()
request.META = {
'HTTP_HOST': host,
}
request.get_host()
for host in self.poisoned_hosts:
with self.assertRaises(DisallowedHost):
request = HttpRequest()
request.META = {
'HTTP_HOST': host,
}
request.get_host()
@override_settings(USE_X_FORWARDED_PORT=False)
def test_get_port(self):
request = HttpRequest()
request.META = {
'SERVER_PORT': '8080',
'HTTP_X_FORWARDED_PORT': '80',
}
# Shouldn't use the X-Forwarded-Port header
self.assertEqual(request.get_port(), '8080')
request = HttpRequest()
request.META = {
'SERVER_PORT': '8080',
}
self.assertEqual(request.get_port(), '8080')
@override_settings(USE_X_FORWARDED_PORT=True)
def test_get_port_with_x_forwarded_port(self):
request = HttpRequest()
request.META = {
'SERVER_PORT': '8080',
'HTTP_X_FORWARDED_PORT': '80',
}
# Should use the X-Forwarded-Port header
self.assertEqual(request.get_port(), '80')
request = HttpRequest()
request.META = {
'SERVER_PORT': '8080',
}
self.assertEqual(request.get_port(), '8080')
@override_settings(DEBUG=True, ALLOWED_HOSTS=[])
def test_host_validation_in_debug_mode(self):
"""
If ALLOWED_HOSTS is empty and DEBUG is True, variants of localhost are
allowed.
"""
valid_hosts = ['localhost', '127.0.0.1', '[::1]']
for host in valid_hosts:
request = HttpRequest()
request.META = {'HTTP_HOST': host}
self.assertEqual(request.get_host(), host)
# Other hostnames raise a DisallowedHost.
with self.assertRaises(DisallowedHost):
request = HttpRequest()
request.META = {'HTTP_HOST': 'example.com'}
request.get_host()
@override_settings(ALLOWED_HOSTS=[])
def test_get_host_suggestion_of_allowed_host(self):
"""get_host() makes helpful suggestions if a valid-looking host is not in ALLOWED_HOSTS."""
msg_invalid_host = "Invalid HTTP_HOST header: %r."
msg_suggestion = msg_invalid_host + " You may need to add %r to ALLOWED_HOSTS."
msg_suggestion2 = msg_invalid_host + " The domain name provided is not valid according to RFC 1034/1035"
for host in [ # Valid-looking hosts
'example.com',
'12.34.56.78',
'[2001:19f0:feee::dead:beef:cafe]',
'xn--4ca9at.com', # Punycode for öäü.com
]:
request = HttpRequest()
request.META = {'HTTP_HOST': host}
with self.assertRaisesMessage(DisallowedHost, msg_suggestion % (host, host)):
request.get_host()
for domain, port in [ # Valid-looking hosts with a port number
('example.com', 80),
('12.34.56.78', 443),
('[2001:19f0:feee::dead:beef:cafe]', 8080),
]:
host = '%s:%s' % (domain, port)
request = HttpRequest()
request.META = {'HTTP_HOST': host}
with self.assertRaisesMessage(DisallowedHost, msg_suggestion % (host, domain)):
request.get_host()
for host in self.poisoned_hosts:
request = HttpRequest()
request.META = {'HTTP_HOST': host}
with self.assertRaisesMessage(DisallowedHost, msg_invalid_host % host):
request.get_host()
request = HttpRequest()
request.META = {'HTTP_HOST': "invalid_hostname.com"}
with self.assertRaisesMessage(DisallowedHost, msg_suggestion2 % "invalid_hostname.com"):
request.get_host()
def test_split_domain_port_removes_trailing_dot(self):
domain, port = split_domain_port('example.com.:8080')
self.assertEqual(domain, 'example.com')
self.assertEqual(port, '8080')
class BuildAbsoluteURITests(SimpleTestCase):
factory = RequestFactory()
def test_absolute_url(self):
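# A location that is already an absolute URL is returned unchanged by build_absolute_uri().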
request = HttpRequest()
url = 'https://www.example.com/asdf'
self.assertEqual(request.build_absolute_uri(location=url), url)
def test_host_retrieval(self):
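# Colons in a relative location must not cause it to be parsed as an absolute URL with a scheme.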
request = HttpRequest()
request.get_host = lambda: 'www.example.com'
request.path = ''
self.assertEqual(
request.build_absolute_uri(location='/path/with:colons'),
'http://www.example.com/path/with:colons'
)
def test_request_path_begins_with_two_slashes(self):
# //// creates a request with a path beginning with //
request = self.factory.get('////absolute-uri')
tests = (
# location isn't provided
(None, 'http://testserver//absolute-uri'),
# An absolute URL
('http://example.com/?foo=bar', 'http://example.com/?foo=bar'),
# A scheme-relative URL
('//example.com/?foo=bar', 'http://example.com/?foo=bar'),
# Relative URLs
('/foo/bar/', 'http://testserver/foo/bar/'),
('/foo/./bar/', 'http://testserver/foo/bar/'),
('/foo/../bar/', 'http://testserver/bar/'),
('///foo/bar/', 'http://testserver/foo/bar/'),
)
for location, expected_url in tests:
with self.subTest(location=location):
self.assertEqual(request.build_absolute_uri(location=location), expected_url)
class RequestHeadersTests(SimpleTestCase):
ENVIRON = {
# Non-headers are ignored.
'PATH_INFO': '/somepath/',
'REQUEST_METHOD': 'get',
'wsgi.input': BytesIO(b''),
'SERVER_NAME': 'internal.com',
'SERVER_PORT': 80,
# These non-HTTP prefixed headers are included.
'CONTENT_TYPE': 'text/html',
'CONTENT_LENGTH': '100',
# All HTTP-prefixed headers are included.
'HTTP_ACCEPT': '*',
'HTTP_HOST': 'example.com',
'HTTP_USER_AGENT': 'python-requests/1.2.0',
}
def test_base_request_headers(self):
request = HttpRequest()
request.META = self.ENVIRON
self.assertEqual(dict(request.headers), {
'Content-Type': 'text/html',
'Content-Length': '100',
'Accept': '*',
'Host': 'example.com',
'User-Agent': 'python-requests/1.2.0',
})
def test_wsgi_request_headers(self):
request = WSGIRequest(self.ENVIRON)
self.assertEqual(dict(request.headers), {
'Content-Type': 'text/html',
'Content-Length': '100',
'Accept': '*',
'Host': 'example.com',
'User-Agent': 'python-requests/1.2.0',
})
def test_wsgi_request_headers_getitem(self):
request = WSGIRequest(self.ENVIRON)
self.assertEqual(request.headers['User-Agent'], 'python-requests/1.2.0')
self.assertEqual(request.headers['user-agent'], 'python-requests/1.2.0')
self.assertEqual(request.headers['Content-Type'], 'text/html')
self.assertEqual(request.headers['Content-Length'], '100')
def test_wsgi_request_headers_get(self):
request = WSGIRequest(self.ENVIRON)
self.assertEqual(request.headers.get('User-Agent'), 'python-requests/1.2.0')
self.assertEqual(request.headers.get('user-agent'), 'python-requests/1.2.0')
self.assertEqual(request.headers.get('Content-Type'), 'text/html')
self.assertEqual(request.headers.get('Content-Length'), '100')
class HttpHeadersTests(SimpleTestCase):
def test_basic(self):
environ = {
'CONTENT_TYPE': 'text/html',
'CONTENT_LENGTH': '100',
'HTTP_HOST': 'example.com',
}
headers = HttpHeaders(environ)
self.assertEqual(sorted(headers), ['Content-Length', 'Content-Type', 'Host'])
self.assertEqual(headers, {
'Content-Type': 'text/html',
'Content-Length': '100',
'Host': 'example.com',
})
def test_parse_header_name(self):
tests = (
('PATH_INFO', None),
('HTTP_ACCEPT', 'Accept'),
('HTTP_USER_AGENT', 'User-Agent'),
('HTTP_X_FORWARDED_PROTO', 'X-Forwarded-Proto'),
('CONTENT_TYPE', 'Content-Type'),
('CONTENT_LENGTH', 'Content-Length'),
)
for header, expected in tests:
with self.subTest(header=header):
self.assertEqual(HttpHeaders.parse_header_name(header), expected)
import datetime
import pickle
import unittest
from collections import OrderedDict
from operator import attrgetter
from django.core.exceptions import EmptyResultSet, FieldError
from django.db import DEFAULT_DB_ALIAS, connection
from django.db.models import Count, F, Q
from django.db.models.sql.constants import LOUTER
from django.db.models.sql.where import NothingNode, WhereNode
from django.test import SimpleTestCase, TestCase, skipUnlessDBFeature
from django.test.utils import CaptureQueriesContext
from .models import (
FK1, Annotation, Article, Author, BaseA, Book, CategoryItem,
CategoryRelationship, Celebrity, Channel, Chapter, Child, ChildObjectA,
Classroom, CommonMixedCaseForeignKeys, Company, Cover, CustomPk,
CustomPkTag, Detail, DumbCategory, Eaten, Employment, ExtraInfo, Fan, Food,
Identifier, Individual, Item, Job, JobResponsibilities, Join, LeafA, LeafB,
LoopX, LoopZ, ManagedModel, Member, MixedCaseDbColumnCategoryItem,
MixedCaseFieldCategoryItem, ModelA, ModelB, ModelC, ModelD, MyObject,
NamedCategory, Node, Note, NullableName, Number, ObjectA, ObjectB, ObjectC,
OneToOneCategory, Order, OrderItem, Page, Paragraph, Person, Plaything,
PointerA, Program, ProxyCategory, ProxyObjectA, ProxyObjectB, Ranking,
Related, RelatedIndividual, RelatedObject, Report, ReportComment,
ReservedName, Responsibility, School, SharedConnection, SimpleCategory,
SingleObject, SpecialCategory, Staff, StaffUser, Student, Tag, Task,
Teacher, Ticket21203Child, Ticket21203Parent, Ticket23605A, Ticket23605B,
Ticket23605C, TvChef, Valid, X,
)
class Queries1Tests(TestCase):
@classmethod
def setUpTestData(cls):
generic = NamedCategory.objects.create(name="Generic")
cls.t1 = Tag.objects.create(name='t1', category=generic)
cls.t2 = Tag.objects.create(name='t2', parent=cls.t1, category=generic)
cls.t3 = Tag.objects.create(name='t3', parent=cls.t1)
t4 = Tag.objects.create(name='t4', parent=cls.t3)
cls.t5 = Tag.objects.create(name='t5', parent=cls.t3)
cls.n1 = Note.objects.create(note='n1', misc='foo', id=1)
n2 = Note.objects.create(note='n2', misc='bar', id=2)
cls.n3 = Note.objects.create(note='n3', misc='foo', id=3)
ann1 = Annotation.objects.create(name='a1', tag=cls.t1)
ann1.notes.add(cls.n1)
ann2 = Annotation.objects.create(name='a2', tag=t4)
ann2.notes.add(n2, cls.n3)
# Create these out of order so that sorting by 'id' will be different to sorting
# by 'info'. Helps detect some problems later.
cls.e2 = ExtraInfo.objects.create(info='e2', note=n2, value=41)
e1 = ExtraInfo.objects.create(info='e1', note=cls.n1, value=42)
cls.a1 = Author.objects.create(name='a1', num=1001, extra=e1)
cls.a2 = Author.objects.create(name='a2', num=2002, extra=e1)
a3 = Author.objects.create(name='a3', num=3003, extra=cls.e2)
cls.a4 = Author.objects.create(name='a4', num=4004, extra=cls.e2)
cls.time1 = datetime.datetime(2007, 12, 19, 22, 25, 0)
cls.time2 = datetime.datetime(2007, 12, 19, 21, 0, 0)
time3 = datetime.datetime(2007, 12, 20, 22, 25, 0)
time4 = datetime.datetime(2007, 12, 20, 21, 0, 0)
cls.i1 = Item.objects.create(name='one', created=cls.time1, modified=cls.time1, creator=cls.a1, note=cls.n3)
cls.i1.tags.set([cls.t1, cls.t2])
cls.i2 = Item.objects.create(name='two', created=cls.time2, creator=cls.a2, note=n2)
cls.i2.tags.set([cls.t1, cls.t3])
cls.i3 = Item.objects.create(name='three', created=time3, creator=cls.a2, note=cls.n3)
i4 = Item.objects.create(name='four', created=time4, creator=cls.a4, note=cls.n3)
i4.tags.set([t4])
cls.r1 = Report.objects.create(name='r1', creator=cls.a1)
Report.objects.create(name='r2', creator=a3)
Report.objects.create(name='r3')
# Ordering by 'rank' gives us rank2, rank1, rank3. Ordering by the Meta.ordering
# will be rank3, rank2, rank1.
cls.rank1 = Ranking.objects.create(rank=2, author=cls.a2)
Cover.objects.create(title="first", item=i4)
Cover.objects.create(title="second", item=cls.i2)
def test_subquery_condition(self):
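# Nested subqueries receive the aliases T, U, and V; adding another subquery against qs1 reuses an existing alias rather than allocating W.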
qs1 = Tag.objects.filter(pk__lte=0)
qs2 = Tag.objects.filter(parent__in=qs1)
qs3 = Tag.objects.filter(parent__in=qs2)
self.assertEqual(qs3.query.subq_aliases, {'T', 'U', 'V'})
self.assertIn('v0', str(qs3.query).lower())
qs4 = qs3.filter(parent__in=qs1)
self.assertEqual(qs4.query.subq_aliases, {'T', 'U', 'V'})
# It is possible to reuse U for the second subquery, no need to use W.
self.assertNotIn('w0', str(qs4.query).lower())
# So, 'U0."id"' is referenced in SELECT and WHERE twice.
self.assertEqual(str(qs4.query).lower().count('u0.'), 4)
def test_ticket1050(self):
self.assertQuerysetEqual(
Item.objects.filter(tags__isnull=True),
['<Item: three>']
)
self.assertQuerysetEqual(
Item.objects.filter(tags__id__isnull=True),
['<Item: three>']
)
def test_ticket1801(self):
self.assertQuerysetEqual(
Author.objects.filter(item=self.i2),
['<Author: a2>']
)
self.assertQuerysetEqual(
Author.objects.filter(item=self.i3),
['<Author: a2>']
)
self.assertQuerysetEqual(
Author.objects.filter(item=self.i2) & Author.objects.filter(item=self.i3),
['<Author: a2>']
)
def test_ticket2306(self):
# Checking that no join types are "left outer" joins.
query = Item.objects.filter(tags=self.t2).query
self.assertNotIn(LOUTER, [x.join_type for x in query.alias_map.values()])
self.assertQuerysetEqual(
Item.objects.filter(Q(tags=self.t1)).order_by('name'),
['<Item: one>', '<Item: two>']
)
self.assertQuerysetEqual(
Item.objects.filter(Q(tags=self.t1)).filter(Q(tags=self.t2)),
['<Item: one>']
)
self.assertQuerysetEqual(
Item.objects.filter(Q(tags=self.t1)).filter(Q(creator__name='fred') | Q(tags=self.t2)),
['<Item: one>']
)
# Each filter call is processed "at once" against a single table, so this is
# different from the previous example as it tries to find tags that are two
# things at once (rather than two tags).
self.assertQuerysetEqual(
Item.objects.filter(Q(tags=self.t1) & Q(tags=self.t2)),
[]
)
self.assertQuerysetEqual(
Item.objects.filter(Q(tags=self.t1), Q(creator__name='fred') | Q(tags=self.t2)),
[]
)
qs = Author.objects.filter(ranking__rank=2, ranking__id=self.rank1.id)
self.assertQuerysetEqual(list(qs), ['<Author: a2>'])
self.assertEqual(qs.query.count_active_tables(), 2)
qs = Author.objects.filter(ranking__rank=2).filter(ranking__id=self.rank1.id)
self.assertEqual(qs.query.count_active_tables(), 3)
def test_ticket4464(self):
self.assertQuerysetEqual(
Item.objects.filter(tags=self.t1).filter(tags=self.t2),
['<Item: one>']
)
self.assertQuerysetEqual(
Item.objects.filter(tags__in=[self.t1, self.t2]).distinct().order_by('name'),
['<Item: one>', '<Item: two>']
)
self.assertQuerysetEqual(
Item.objects.filter(tags__in=[self.t1, self.t2]).filter(tags=self.t3),
['<Item: two>']
)
# Make sure .distinct() works with slicing (this was broken in Oracle).
self.assertQuerysetEqual(
Item.objects.filter(tags__in=[self.t1, self.t2]).order_by('name')[:3],
['<Item: one>', '<Item: one>', '<Item: two>']
)
self.assertQuerysetEqual(
Item.objects.filter(tags__in=[self.t1, self.t2]).distinct().order_by('name')[:3],
['<Item: one>', '<Item: two>']
)
def test_tickets_2080_3592(self):
self.assertQuerysetEqual(
Author.objects.filter(item__name='one') | Author.objects.filter(name='a3'),
['<Author: a1>', '<Author: a3>']
)
self.assertQuerysetEqual(
Author.objects.filter(Q(item__name='one') | Q(name='a3')),
['<Author: a1>', '<Author: a3>']
)
self.assertQuerysetEqual(
Author.objects.filter(Q(name='a3') | Q(item__name='one')),
['<Author: a1>', '<Author: a3>']
)
self.assertQuerysetEqual(
Author.objects.filter(Q(item__name='three') | Q(report__name='r3')),
['<Author: a2>']
)
def test_ticket6074(self):
# Merging two empty result sets shouldn't leave a queryset with no constraints
# (which would match everything).
self.assertQuerysetEqual(Author.objects.filter(Q(id__in=[])), [])
self.assertQuerysetEqual(
Author.objects.filter(Q(id__in=[]) | Q(id__in=[])),
[]
)
def test_tickets_1878_2939(self):
self.assertEqual(Item.objects.values('creator').distinct().count(), 3)
# Create something with a duplicate 'name' so that we can test multi-column
# cases (which require some tricky SQL transformations under the covers).
xx = Item(name='four', created=self.time1, creator=self.a2, note=self.n1)
xx.save()
self.assertEqual(
Item.objects.exclude(name='two').values('creator', 'name').distinct().count(),
4
)
self.assertEqual(
(
Item.objects
.exclude(name='two')
.extra(select={'foo': '%s'}, select_params=(1,))
.values('creator', 'name', 'foo')
.distinct()
.count()
),
4
)
self.assertEqual(
(
Item.objects
.exclude(name='two')
.extra(select={'foo': '%s'}, select_params=(1,))
.values('creator', 'name')
.distinct()
.count()
),
4
)
xx.delete()
def test_ticket7323(self):
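# Without distinct(), values() over several fields still counts one row per Item.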
self.assertEqual(Item.objects.values('creator', 'name').count(), 4)
def test_ticket2253(self):
q1 = Item.objects.order_by('name')
q2 = Item.objects.filter(id=self.i1.id)
self.assertQuerysetEqual(
q1,
['<Item: four>', '<Item: one>', '<Item: three>', '<Item: two>']
)
self.assertQuerysetEqual(q2, ['<Item: one>'])
self.assertQuerysetEqual(
(q1 | q2).order_by('name'),
['<Item: four>', '<Item: one>', '<Item: three>', '<Item: two>']
)
self.assertQuerysetEqual((q1 & q2).order_by('name'), ['<Item: one>'])
q1 = Item.objects.filter(tags=self.t1)
q2 = Item.objects.filter(note=self.n3, tags=self.t2)
q3 = Item.objects.filter(creator=self.a4)
self.assertQuerysetEqual(
((q1 & q2) | q3).order_by('name'),
['<Item: four>', '<Item: one>']
)
def test_order_by_tables(self):
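# Combining two querysets over the same model should leave only one table alias with a non-zero refcount in the combined query.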
q1 = Item.objects.order_by('name')
q2 = Item.objects.filter(id=self.i1.id)
list(q2)
combined_query = (q1 & q2).order_by('name').query
self.assertEqual(len([
t for t in combined_query.alias_map if combined_query.alias_refcount[t]
]), 1)
def test_order_by_join_unref(self):
"""
This test is related to the above one, testing that there aren't
old JOINs in the query.
"""
qs = Celebrity.objects.order_by('greatest_fan__fan_of')
self.assertIn('OUTER JOIN', str(qs.query))
qs = qs.order_by('id')
self.assertNotIn('OUTER JOIN', str(qs.query))
def test_get_clears_ordering(self):
"""
get() should clear ordering for optimization purposes.
"""
with CaptureQueriesContext(connection) as captured_queries:
Author.objects.order_by('name').get(pk=self.a1.pk)
self.assertNotIn('order by', captured_queries[0]['sql'].lower())
def test_tickets_4088_4306(self):
self.assertQuerysetEqual(
Report.objects.filter(creator=1001),
['<Report: r1>']
)
self.assertQuerysetEqual(
Report.objects.filter(creator__num=1001),
['<Report: r1>']
)
self.assertQuerysetEqual(Report.objects.filter(creator__id=1001), [])
self.assertQuerysetEqual(
Report.objects.filter(creator__id=self.a1.id),
['<Report: r1>']
)
self.assertQuerysetEqual(
Report.objects.filter(creator__name='a1'),
['<Report: r1>']
)
def test_ticket4510(self):
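# Filtering across the reverse foreign key from Author to Report works.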
self.assertQuerysetEqual(
Author.objects.filter(report__name='r1'),
['<Author: a1>']
)
def test_ticket7378(self):
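# The reverse related manager (report_set) returns the author's reports.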
self.assertQuerysetEqual(self.a1.report_set.all(), ['<Report: r1>'])
def test_tickets_5324_6704(self):
self.assertQuerysetEqual(
Item.objects.filter(tags__name='t4'),
['<Item: four>']
)
self.assertQuerysetEqual(
Item.objects.exclude(tags__name='t4').order_by('name').distinct(),
['<Item: one>', '<Item: three>', '<Item: two>']
)
self.assertQuerysetEqual(
Item.objects.exclude(tags__name='t4').order_by('name').distinct().reverse(),
['<Item: two>', '<Item: three>', '<Item: one>']
)
self.assertQuerysetEqual(
Author.objects.exclude(item__name='one').distinct().order_by('name'),
['<Author: a2>', '<Author: a3>', '<Author: a4>']
)
# Excluding across a m2m relation when there is more than one related
# object associated was problematic.
self.assertQuerysetEqual(
Item.objects.exclude(tags__name='t1').order_by('name'),
['<Item: four>', '<Item: three>']
)
self.assertQuerysetEqual(
Item.objects.exclude(tags__name='t1').exclude(tags__name='t4'),
['<Item: three>']
)
# Excluding from a relation that cannot be NULL should not use outer joins.
query = Item.objects.exclude(creator__in=[self.a1, self.a2]).query
self.assertNotIn(LOUTER, [x.join_type for x in query.alias_map.values()])
# Similarly, when one of the joins cannot possibly, ever, involve NULL
# values (Author -> ExtraInfo, in the following), it should never be
# promoted to a left outer join. So the following query should only
# involve one "left outer" join (Author -> Item is 0-to-many).
qs = Author.objects.filter(id=self.a1.id).filter(Q(extra__note=self.n1) | Q(item__note=self.n3))
self.assertEqual(
len([
x for x in qs.query.alias_map.values()
if x.join_type == LOUTER and qs.query.alias_refcount[x.table_alias]
]),
1
)
# The previous changes shouldn't affect nullable foreign key joins.
self.assertQuerysetEqual(
Tag.objects.filter(parent__isnull=True).order_by('name'),
['<Tag: t1>']
)
self.assertQuerysetEqual(
Tag.objects.exclude(parent__isnull=True).order_by('name'),
['<Tag: t2>', '<Tag: t3>', '<Tag: t4>', '<Tag: t5>']
)
self.assertQuerysetEqual(
Tag.objects.exclude(Q(parent__name='t1') | Q(parent__isnull=True)).order_by('name'),
['<Tag: t4>', '<Tag: t5>']
)
self.assertQuerysetEqual(
Tag.objects.exclude(Q(parent__isnull=True) | Q(parent__name='t1')).order_by('name'),
['<Tag: t4>', '<Tag: t5>']
)
self.assertQuerysetEqual(
Tag.objects.exclude(Q(parent__parent__isnull=True)).order_by('name'),
['<Tag: t4>', '<Tag: t5>']
)
self.assertQuerysetEqual(
Tag.objects.filter(~Q(parent__parent__isnull=True)).order_by('name'),
['<Tag: t4>', '<Tag: t5>']
)
def test_ticket2091(self):
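# Model instances can be passed directly in a list to the __in lookup.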
t = Tag.objects.get(name='t4')
self.assertQuerysetEqual(
Item.objects.filter(tags__in=[t]),
['<Item: four>']
)
def test_avoid_infinite_loop_on_too_many_subqueries(self):
x = Tag.objects.filter(pk=1)
local_recursion_limit = 127
msg = 'Maximum recursion depth exceeded: too many subqueries.'
with self.assertRaisesMessage(RuntimeError, msg):
for i in range(local_recursion_limit * 2):
x = Tag.objects.filter(pk__in=x)
def test_reasonable_number_of_subq_aliases(self):
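# Once the single-letter aliases are exhausted, deeply nested subqueries continue with two-letter aliases (AA, AB, ...).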
x = Tag.objects.filter(pk=1)
for _ in range(20):
x = Tag.objects.filter(pk__in=x)
self.assertEqual(
x.query.subq_aliases, {
'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'AA', 'AB', 'AC', 'AD',
'AE', 'AF', 'AG', 'AH', 'AI', 'AJ', 'AK', 'AL', 'AM', 'AN',
}
)
def test_heterogeneous_qs_combination(self):
# Combining querysets built on different models should behave in a well-defined
# fashion. We raise an error.
with self.assertRaisesMessage(AssertionError, 'Cannot combine queries on two different base models.'):
Author.objects.all() & Tag.objects.all()
with self.assertRaisesMessage(AssertionError, 'Cannot combine queries on two different base models.'):
Author.objects.all() | Tag.objects.all()
def test_ticket3141(self):
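# extra(select=...) columns, with or without select_params, don't change the result of count().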
self.assertEqual(Author.objects.extra(select={'foo': '1'}).count(), 4)
self.assertEqual(
Author.objects.extra(select={'foo': '%s'}, select_params=(1,)).count(),
4
)
def test_ticket2400(self):
self.assertQuerysetEqual(
Author.objects.filter(item__isnull=True),
['<Author: a3>']
)
self.assertQuerysetEqual(
Tag.objects.filter(item__isnull=True),
['<Tag: t5>']
)
def test_ticket2496(self):
self.assertQuerysetEqual(
Item.objects.extra(tables=['queries_author']).select_related().order_by('name')[:1],
['<Item: four>']
)
def test_error_raised_on_filter_with_dictionary(self):
with self.assertRaisesMessage(FieldError, 'Cannot parse keyword query as dict'):
Note.objects.filter({'note': 'n1', 'misc': 'foo'})
def test_tickets_2076_7256(self):
# Ordering on related tables should be possible, even if the table is
# not otherwise involved.
self.assertQuerysetEqual(
Item.objects.order_by('note__note', 'name'),
['<Item: two>', '<Item: four>', '<Item: one>', '<Item: three>']
)
# Ordering on a related field should use the remote model's default
# ordering as a final step.
self.assertQuerysetEqual(
Author.objects.order_by('extra', '-name'),
['<Author: a2>', '<Author: a1>', '<Author: a4>', '<Author: a3>']
)
# Using remote model default ordering can span multiple models (in this
# case, Cover is ordered by Item's default, which uses Note's default).
self.assertQuerysetEqual(
Cover.objects.all(),
['<Cover: first>', '<Cover: second>']
)
# If the remote model does not have a default ordering, we order by its 'id'
# field.
self.assertQuerysetEqual(
Item.objects.order_by('creator', 'name'),
['<Item: one>', '<Item: three>', '<Item: two>', '<Item: four>']
)
# Ordering by a many-valued attribute (e.g. a many-to-many or reverse
# ForeignKey) is legal, but the results might not make sense. That
# isn't Django's problem. Garbage in, garbage out.
self.assertQuerysetEqual(
Item.objects.filter(tags__isnull=False).order_by('tags', 'id'),
['<Item: one>', '<Item: two>', '<Item: one>', '<Item: two>', '<Item: four>']
)
# If we replace the default ordering, Django adjusts the required
# tables automatically. Item normally requires a join with Note to do
# the default ordering, but that isn't needed here.
qs = Item.objects.order_by('name')
self.assertQuerysetEqual(
qs,
['<Item: four>', '<Item: one>', '<Item: three>', '<Item: two>']
)
self.assertEqual(len(qs.query.alias_map), 1)
def test_tickets_2874_3002(self):
qs = Item.objects.select_related().order_by('note__note', 'name')
self.assertQuerysetEqual(
qs,
['<Item: two>', '<Item: four>', '<Item: one>', '<Item: three>']
)
# This is also a good select_related() test because there are multiple
# Note entries in the SQL. The two Note items should be different.
self.assertEqual(repr(qs[0].note), '<Note: n2>')
self.assertEqual(repr(qs[0].creator.extra.note), '<Note: n1>')
def test_ticket3037(self):
self.assertQuerysetEqual(
Item.objects.filter(Q(creator__name='a3', name='two') | Q(creator__name='a4', name='four')),
['<Item: four>']
)
def test_tickets_5321_7070(self):
# Ordering columns must be included in the output columns. Note that
# this means results that might otherwise be distinct are not (if there
# are multiple values in the ordering cols), as in this example. This
# isn't a bug; it's a warning to be careful with the selection of
# ordering columns.
self.assertSequenceEqual(
Note.objects.values('misc').distinct().order_by('note', '-misc'),
[{'misc': 'foo'}, {'misc': 'bar'}, {'misc': 'foo'}]
)
def test_ticket4358(self):
# If you don't pass any fields to values(), relation fields are
# returned as "foo_id" keys, not "foo". For consistency, you should be
# able to pass "foo_id" in the fields list and have it work, too. We
# actually allow both "foo" and "foo_id".
# The *_id version is returned by default.
self.assertIn('note_id', ExtraInfo.objects.values()[0])
# You can also pass it in explicitly.
self.assertSequenceEqual(ExtraInfo.objects.values('note_id'), [{'note_id': 1}, {'note_id': 2}])
# ...or use the field name.
self.assertSequenceEqual(ExtraInfo.objects.values('note'), [{'note': 1}, {'note': 2}])
def test_ticket2902(self):
# Parameters can be given to extra_select, *if* you use an OrderedDict.
# (First we need to know which order the keys fall in "naturally" on
# your system, so we can put things in the wrong way around from
# normal. A normal dict would thus fail.)
s = [('a', '%s'), ('b', '%s')]
params = ['one', 'two']
if list({'a': 1, 'b': 2}) == ['a', 'b']:
s.reverse()
params.reverse()
d = Item.objects.extra(select=OrderedDict(s), select_params=params).values('a', 'b')[0]
self.assertEqual(d, {'a': 'one', 'b': 'two'})
# Order by the number of tags attached to an item.
qs = (
Item.objects
.extra(select={
'count': 'select count(*) from queries_item_tags where queries_item_tags.item_id = queries_item.id'
})
.order_by('-count')
)
self.assertEqual([o.count for o in qs], [2, 2, 1, 0])
def test_ticket6154(self):
# Multiple filter statements are joined using "AND" all the time.
self.assertQuerysetEqual(
Author.objects.filter(id=self.a1.id).filter(Q(extra__note=self.n1) | Q(item__note=self.n3)),
['<Author: a1>']
)
self.assertQuerysetEqual(
Author.objects.filter(Q(extra__note=self.n1) | Q(item__note=self.n3)).filter(id=self.a1.id),
['<Author: a1>']
)
def test_ticket6981(self):
self.assertQuerysetEqual(
Tag.objects.select_related('parent').order_by('name'),
['<Tag: t1>', '<Tag: t2>', '<Tag: t3>', '<Tag: t4>', '<Tag: t5>']
)
def test_ticket9926(self):
self.assertQuerysetEqual(
Tag.objects.select_related("parent", "category").order_by('name'),
['<Tag: t1>', '<Tag: t2>', '<Tag: t3>', '<Tag: t4>', '<Tag: t5>']
)
self.assertQuerysetEqual(
Tag.objects.select_related('parent', "parent__category").order_by('name'),
['<Tag: t1>', '<Tag: t2>', '<Tag: t3>', '<Tag: t4>', '<Tag: t5>']
)
def test_tickets_6180_6203(self):
# Dates with limits and/or counts
self.assertEqual(Item.objects.count(), 4)
self.assertEqual(Item.objects.datetimes('created', 'month').count(), 1)
self.assertEqual(Item.objects.datetimes('created', 'day').count(), 2)
self.assertEqual(len(Item.objects.datetimes('created', 'day')), 2)
self.assertEqual(Item.objects.datetimes('created', 'day')[0], datetime.datetime(2007, 12, 19, 0, 0))
def test_tickets_7087_12242(self):
# Dates with extra select columns
self.assertQuerysetEqual(
Item.objects.datetimes('created', 'day').extra(select={'a': 1}),
['datetime.datetime(2007, 12, 19, 0, 0)', 'datetime.datetime(2007, 12, 20, 0, 0)']
)
self.assertQuerysetEqual(
Item.objects.extra(select={'a': 1}).datetimes('created', 'day'),
['datetime.datetime(2007, 12, 19, 0, 0)', 'datetime.datetime(2007, 12, 20, 0, 0)']
)
name = "one"
self.assertQuerysetEqual(
Item.objects.datetimes('created', 'day').extra(where=['name=%s'], params=[name]),
['datetime.datetime(2007, 12, 19, 0, 0)']
)
self.assertQuerysetEqual(
Item.objects.extra(where=['name=%s'], params=[name]).datetimes('created', 'day'),
['datetime.datetime(2007, 12, 19, 0, 0)']
)
def test_ticket7155(self):
# Nullable dates
self.assertQuerysetEqual(
Item.objects.datetimes('modified', 'day'),
['datetime.datetime(2007, 12, 19, 0, 0)']
)
def test_ticket7098(self):
# Make sure semi-deprecated ordering by related models syntax still
# works.
self.assertSequenceEqual(
Item.objects.values('note__note').order_by('queries_note.note', 'id'),
[{'note__note': 'n2'}, {'note__note': 'n3'}, {'note__note': 'n3'}, {'note__note': 'n3'}]
)
def test_ticket7096(self):
# Make sure exclude() with multiple conditions continues to work.
self.assertQuerysetEqual(
Tag.objects.filter(parent=self.t1, name='t3').order_by('name'),
['<Tag: t3>']
)
self.assertQuerysetEqual(
Tag.objects.exclude(parent=self.t1, name='t3').order_by('name'),
['<Tag: t1>', '<Tag: t2>', '<Tag: t4>', '<Tag: t5>']
)
self.assertQuerysetEqual(
Item.objects.exclude(tags__name='t1', name='one').order_by('name').distinct(),
['<Item: four>', '<Item: three>', '<Item: two>']
)
self.assertQuerysetEqual(
Item.objects.filter(name__in=['three', 'four']).exclude(tags__name='t1').order_by('name'),
['<Item: four>', '<Item: three>']
)
# More twisted cases, involving nested negations.
self.assertQuerysetEqual(
Item.objects.exclude(~Q(tags__name='t1', name='one')),
['<Item: one>']
)
self.assertQuerysetEqual(
Item.objects.filter(~Q(tags__name='t1', name='one'), name='two'),
['<Item: two>']
)
self.assertQuerysetEqual(
Item.objects.exclude(~Q(tags__name='t1', name='one'), name='two'),
['<Item: four>', '<Item: one>', '<Item: three>']
)
def test_tickets_7204_7506(self):
# Make sure querysets with related fields can be pickled. If this
# doesn't crash, it's a Good Thing.
pickle.dumps(Item.objects.all())
def test_ticket7813(self):
# We should also be able to pickle things that use select_related().
# The only tricky thing here is to ensure that we do the related
# selections properly after unpickling.
qs = Item.objects.select_related()
query = qs.query.get_compiler(qs.db).as_sql()[0]
query2 = pickle.loads(pickle.dumps(qs.query))
self.assertEqual(
query2.get_compiler(qs.db).as_sql()[0],
query
)
def test_deferred_load_qs_pickling(self):
# Check pickling of deferred-loading querysets
qs = Item.objects.defer('name', 'creator')
q2 = pickle.loads(pickle.dumps(qs))
self.assertEqual(list(qs), list(q2))
q3 = pickle.loads(pickle.dumps(qs, pickle.HIGHEST_PROTOCOL))
self.assertEqual(list(qs), list(q3))
def test_ticket7277(self):
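# ORed Q objects can traverse the tag hierarchy to different depths within a single filter() call.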
self.assertQuerysetEqual(
self.n1.annotation_set.filter(
Q(tag=self.t5) | Q(tag__children=self.t5) | Q(tag__children__children=self.t5)
),
['<Annotation: a1>']
)
def test_tickets_7448_7707(self):
# Complex objects should be converted to strings before being used in
# lookups.
self.assertQuerysetEqual(
Item.objects.filter(created__in=[self.time1, self.time2]),
['<Item: one>', '<Item: two>']
)
def test_ticket7235(self):
# An EmptyQuerySet should not raise exceptions if it is filtered.
Eaten.objects.create(meal='m')
q = Eaten.objects.none()
with self.assertNumQueries(0):
self.assertQuerysetEqual(q.all(), [])
self.assertQuerysetEqual(q.filter(meal='m'), [])
self.assertQuerysetEqual(q.exclude(meal='m'), [])
self.assertQuerysetEqual(q.complex_filter({'pk': 1}), [])
self.assertQuerysetEqual(q.select_related('food'), [])
self.assertQuerysetEqual(q.annotate(Count('food')), [])
self.assertQuerysetEqual(q.order_by('meal', 'food'), [])
self.assertQuerysetEqual(q.distinct(), [])
self.assertQuerysetEqual(
q.extra(select={'foo': "1"}),
[]
)
self.assertQuerysetEqual(q.reverse(), [])
q.query.low_mark = 1
with self.assertRaisesMessage(AssertionError, 'Cannot change a query once a slice has been taken'):
q.extra(select={'foo': "1"})
self.assertQuerysetEqual(q.defer('meal'), [])
self.assertQuerysetEqual(q.only('meal'), [])
def test_ticket7791(self):
# There were "issues" when ordering and distinct-ing on fields related
# via ForeignKeys.
self.assertEqual(
len(Note.objects.order_by('extrainfo__info').distinct()),
3
)
# Pickling of QuerySets using datetimes() should work.
qs = Item.objects.datetimes('created', 'month')
pickle.loads(pickle.dumps(qs))
def test_ticket9997(self):
# If a ValuesList or Values queryset is passed as an inner query, we
# make sure it's only requesting a single value and use that as the
# thing to select.
self.assertQuerysetEqual(
Tag.objects.filter(name__in=Tag.objects.filter(parent=self.t1).values('name')),
['<Tag: t2>', '<Tag: t3>']
)
# Multi-valued values() and values_list() querysets should raise errors.
with self.assertRaisesMessage(TypeError, 'Cannot use multi-field values as a filter value.'):
Tag.objects.filter(name__in=Tag.objects.filter(parent=self.t1).values('name', 'id'))
with self.assertRaisesMessage(TypeError, 'Cannot use multi-field values as a filter value.'):
Tag.objects.filter(name__in=Tag.objects.filter(parent=self.t1).values_list('name', 'id'))
def test_ticket9985(self):
# qs.values_list(...).values(...) combinations should work.
self.assertSequenceEqual(
Note.objects.values_list("note", flat=True).values("id").order_by("id"),
[{'id': 1}, {'id': 2}, {'id': 3}]
)
self.assertQuerysetEqual(
Annotation.objects.filter(notes__in=Note.objects.filter(note="n1").values_list('note').values('id')),
['<Annotation: a1>']
)
def test_ticket10205(self):
# When bailing out early because of an empty "__in" filter, we need
# to set things up correctly internally so that subqueries can continue properly.
self.assertEqual(Tag.objects.filter(name__in=()).update(name="foo"), 0)
def test_ticket10432(self):
# Testing an empty "__in" filter with a generator as the value.
def f():
return iter([])
n_obj = Note.objects.all()[0]
def g():
yield n_obj.pk
self.assertQuerysetEqual(Note.objects.filter(pk__in=f()), [])
self.assertEqual(list(Note.objects.filter(pk__in=g())), [n_obj])
def test_ticket10742(self):
# Queries used in an __in clause don't execute subqueries
subq = Author.objects.filter(num__lt=3000)
qs = Author.objects.filter(pk__in=subq)
self.assertQuerysetEqual(qs, ['<Author: a1>', '<Author: a2>'])
# The subquery result cache should not be populated
self.assertIsNone(subq._result_cache)
subq = Author.objects.filter(num__lt=3000)
qs = Author.objects.exclude(pk__in=subq)
self.assertQuerysetEqual(qs, ['<Author: a3>', '<Author: a4>'])
# The subquery result cache should not be populated
self.assertIsNone(subq._result_cache)
subq = Author.objects.filter(num__lt=3000)
self.assertQuerysetEqual(
Author.objects.filter(Q(pk__in=subq) & Q(name='a1')),
['<Author: a1>']
)
# The subquery result cache should not be populated
self.assertIsNone(subq._result_cache)
def test_ticket7076(self):
# Excluding shouldn't eliminate NULL entries.
self.assertQuerysetEqual(
Item.objects.exclude(modified=self.time1).order_by('name'),
['<Item: four>', '<Item: three>', '<Item: two>']
)
self.assertQuerysetEqual(
Tag.objects.exclude(parent__name=self.t1.name),
['<Tag: t1>', '<Tag: t4>', '<Tag: t5>']
)
def test_ticket7181(self):
# Ordering by related tables should accommodate nullable fields (this
# test is a little tricky, since NULL ordering is database dependent.
# Instead, we just count the number of results).
self.assertEqual(len(Tag.objects.order_by('parent__name')), 5)
# Empty querysets can be merged with others.
self.assertQuerysetEqual(
Note.objects.none() | Note.objects.all(),
['<Note: n1>', '<Note: n2>', '<Note: n3>']
)
self.assertQuerysetEqual(
Note.objects.all() | Note.objects.none(),
['<Note: n1>', '<Note: n2>', '<Note: n3>']
)
self.assertQuerysetEqual(Note.objects.none() & Note.objects.all(), [])
self.assertQuerysetEqual(Note.objects.all() & Note.objects.none(), [])
def test_ticket9411(self):
# Make sure bump_prefix() (an internal Query method) doesn't (re-)break. It's
# sufficient that this query runs without error.
qs = Tag.objects.values_list('id', flat=True).order_by('id')
qs.query.bump_prefix(qs.query)
first = qs[0]
self.assertEqual(list(qs), list(range(first, first + 5)))
def test_ticket8439(self):
# Complex combinations of conjunctions, disjunctions and nullable
# relations.
self.assertQuerysetEqual(
Author.objects.filter(Q(item__note__extrainfo=self.e2) | Q(report=self.r1, name='xyz')),
['<Author: a2>']
)
self.assertQuerysetEqual(
Author.objects.filter(Q(report=self.r1, name='xyz') | Q(item__note__extrainfo=self.e2)),
['<Author: a2>']
)
self.assertQuerysetEqual(
Annotation.objects.filter(Q(tag__parent=self.t1) | Q(notes__note='n1', name='a1')),
['<Annotation: a1>']
)
xx = ExtraInfo.objects.create(info='xx', note=self.n3)
self.assertQuerysetEqual(
Note.objects.filter(Q(extrainfo__author=self.a1) | Q(extrainfo=xx)),
['<Note: n1>', '<Note: n3>']
)
q = Note.objects.filter(Q(extrainfo__author=self.a1) | Q(extrainfo=xx)).query
self.assertEqual(
len([x for x in q.alias_map.values() if x.join_type == LOUTER and q.alias_refcount[x.table_alias]]),
1
)
def test_ticket17429(self):
"""
Meta.ordering=None works the same as Meta.ordering=[]
"""
original_ordering = Tag._meta.ordering
Tag._meta.ordering = None
try:
self.assertQuerysetEqual(
Tag.objects.all(),
['<Tag: t1>', '<Tag: t2>', '<Tag: t3>', '<Tag: t4>', '<Tag: t5>'],
ordered=False
)
finally:
Tag._meta.ordering = original_ordering
def test_exclude(self):
self.assertQuerysetEqual(
Item.objects.exclude(tags__name='t4'),
[repr(i) for i in Item.objects.filter(~Q(tags__name='t4'))])
self.assertQuerysetEqual(
Item.objects.exclude(Q(tags__name='t4') | Q(tags__name='t3')),
[repr(i) for i in Item.objects.filter(~(Q(tags__name='t4') | Q(tags__name='t3')))])
self.assertQuerysetEqual(
Item.objects.exclude(Q(tags__name='t4') | ~Q(tags__name='t3')),
[repr(i) for i in Item.objects.filter(~(Q(tags__name='t4') | ~Q(tags__name='t3')))])
def test_nested_exclude(self):
self.assertQuerysetEqual(
Item.objects.exclude(~Q(tags__name='t4')),
[repr(i) for i in Item.objects.filter(~~Q(tags__name='t4'))])
def test_double_exclude(self):
self.assertQuerysetEqual(
Item.objects.filter(Q(tags__name='t4')),
[repr(i) for i in Item.objects.filter(~~Q(tags__name='t4'))])
self.assertQuerysetEqual(
Item.objects.filter(Q(tags__name='t4')),
[repr(i) for i in Item.objects.filter(~Q(~Q(tags__name='t4')))])
def test_exclude_in(self):
self.assertQuerysetEqual(
Item.objects.exclude(Q(tags__name__in=['t4', 't3'])),
[repr(i) for i in Item.objects.filter(~Q(tags__name__in=['t4', 't3']))])
self.assertQuerysetEqual(
Item.objects.filter(Q(tags__name__in=['t4', 't3'])),
[repr(i) for i in Item.objects.filter(~~Q(tags__name__in=['t4', 't3']))])
def test_ticket_10790_1(self):
# Querying direct fields with isnull should trim the left outer join.
# It also should not create INNER JOIN.
q = Tag.objects.filter(parent__isnull=True)
self.assertQuerysetEqual(q, ['<Tag: t1>'])
self.assertNotIn('JOIN', str(q.query))
q = Tag.objects.filter(parent__isnull=False)
self.assertQuerysetEqual(
q,
['<Tag: t2>', '<Tag: t3>', '<Tag: t4>', '<Tag: t5>'],
)
self.assertNotIn('JOIN', str(q.query))
q = Tag.objects.exclude(parent__isnull=True)
self.assertQuerysetEqual(
q,
['<Tag: t2>', '<Tag: t3>', '<Tag: t4>', '<Tag: t5>'],
)
self.assertNotIn('JOIN', str(q.query))
q = Tag.objects.exclude(parent__isnull=False)
self.assertQuerysetEqual(q, ['<Tag: t1>'])
self.assertNotIn('JOIN', str(q.query))
q = Tag.objects.exclude(parent__parent__isnull=False)
self.assertQuerysetEqual(
q,
['<Tag: t1>', '<Tag: t2>', '<Tag: t3>'],
)
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 1)
self.assertNotIn('INNER JOIN', str(q.query))
def test_ticket_10790_2(self):
# Querying across several tables should strip only the last outer join,
# while preserving the preceding inner joins.
q = Tag.objects.filter(parent__parent__isnull=False)
self.assertQuerysetEqual(
q,
['<Tag: t4>', '<Tag: t5>'],
)
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 0)
self.assertEqual(str(q.query).count('INNER JOIN'), 1)
# Querying without isnull should not convert anything to left outer join.
q = Tag.objects.filter(parent__parent=self.t1)
self.assertQuerysetEqual(
q,
['<Tag: t4>', '<Tag: t5>'],
)
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 0)
self.assertEqual(str(q.query).count('INNER JOIN'), 1)
def test_ticket_10790_3(self):
# Querying via indirect fields should populate the left outer join
q = NamedCategory.objects.filter(tag__isnull=True)
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 1)
# join to dumbcategory ptr_id
self.assertEqual(str(q.query).count('INNER JOIN'), 1)
self.assertQuerysetEqual(q, [])
# Querying across several tables should strip only the last join, while
# preserving the preceding left outer joins.
q = NamedCategory.objects.filter(tag__parent__isnull=True)
self.assertEqual(str(q.query).count('INNER JOIN'), 1)
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 1)
self.assertQuerysetEqual(q, ['<NamedCategory: Generic>'])
def test_ticket_10790_4(self):
# Querying across m2m field should not strip the m2m table from join.
q = Author.objects.filter(item__tags__isnull=True)
self.assertQuerysetEqual(
q,
['<Author: a2>', '<Author: a3>'],
)
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 2)
self.assertNotIn('INNER JOIN', str(q.query))
q = Author.objects.filter(item__tags__parent__isnull=True)
self.assertQuerysetEqual(
q,
['<Author: a1>', '<Author: a2>', '<Author: a2>', '<Author: a3>'],
)
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 3)
self.assertNotIn('INNER JOIN', str(q.query))
def test_ticket_10790_5(self):
# Querying with isnull=False across m2m field should not create outer joins
q = Author.objects.filter(item__tags__isnull=False)
self.assertQuerysetEqual(
q,
['<Author: a1>', '<Author: a1>', '<Author: a2>', '<Author: a2>', '<Author: a4>']
)
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 0)
self.assertEqual(str(q.query).count('INNER JOIN'), 2)
q = Author.objects.filter(item__tags__parent__isnull=False)
self.assertQuerysetEqual(
q,
['<Author: a1>', '<Author: a2>', '<Author: a4>']
)
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 0)
self.assertEqual(str(q.query).count('INNER JOIN'), 3)
q = Author.objects.filter(item__tags__parent__parent__isnull=False)
self.assertQuerysetEqual(
q,
['<Author: a4>']
)
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 0)
self.assertEqual(str(q.query).count('INNER JOIN'), 4)
def test_ticket_10790_6(self):
# Querying with isnull=True across m2m field should not create inner joins
# and strip last outer join
q = Author.objects.filter(item__tags__parent__parent__isnull=True)
self.assertQuerysetEqual(
q,
['<Author: a1>', '<Author: a1>', '<Author: a2>', '<Author: a2>',
'<Author: a2>', '<Author: a3>']
)
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 4)
self.assertEqual(str(q.query).count('INNER JOIN'), 0)
q = Author.objects.filter(item__tags__parent__isnull=True)
self.assertQuerysetEqual(
q,
['<Author: a1>', '<Author: a2>', '<Author: a2>', '<Author: a3>']
)
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 3)
self.assertEqual(str(q.query).count('INNER JOIN'), 0)
def test_ticket_10790_7(self):
# Reverse querying with isnull should not strip the join
q = Author.objects.filter(item__isnull=True)
self.assertQuerysetEqual(
q,
['<Author: a3>']
)
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 1)
self.assertEqual(str(q.query).count('INNER JOIN'), 0)
q = Author.objects.filter(item__isnull=False)
self.assertQuerysetEqual(
q,
['<Author: a1>', '<Author: a2>', '<Author: a2>', '<Author: a4>']
)
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 0)
self.assertEqual(str(q.query).count('INNER JOIN'), 1)
def test_ticket_10790_8(self):
# Querying with combined q-objects should also strip the left outer join
q = Tag.objects.filter(Q(parent__isnull=True) | Q(parent=self.t1))
self.assertQuerysetEqual(
q,
['<Tag: t1>', '<Tag: t2>', '<Tag: t3>']
)
self.assertEqual(str(q.query).count('LEFT OUTER JOIN'), 0)
self.assertEqual(str(q.query).count('INNER JOIN'), 0)
def test_ticket_10790_combine(self):
# Combining queries should not re-populate the left outer join
q1 = Tag.objects.filter(parent__isnull=True)
q2 = Tag.objects.filter(parent__isnull=False)
q3 = q1 | q2
self.assertQuerysetEqual(
q3,
['<Tag: t1>', '<Tag: t2>', '<Tag: t3>', '<Tag: t4>', '<Tag: t5>'],
)
self.assertEqual(str(q3.query).count('LEFT OUTER JOIN'), 0)
self.assertEqual(str(q3.query).count('INNER JOIN'), 0)
q3 = q1 & q2
self.assertQuerysetEqual(q3, [])
self.assertEqual(str(q3.query).count('LEFT OUTER JOIN'), 0)
self.assertEqual(str(q3.query).count('INNER JOIN'), 0)
q2 = Tag.objects.filter(parent=self.t1)
q3 = q1 | q2
self.assertQuerysetEqual(
q3,
['<Tag: t1>', '<Tag: t2>', '<Tag: t3>']
)
self.assertEqual(str(q3.query).count('LEFT OUTER JOIN'), 0)
self.assertEqual(str(q3.query).count('INNER JOIN'), 0)
q3 = q2 | q1
self.assertQuerysetEqual(
q3,
['<Tag: t1>', '<Tag: t2>', '<Tag: t3>']
)
self.assertEqual(str(q3.query).count('LEFT OUTER JOIN'), 0)
self.assertEqual(str(q3.query).count('INNER JOIN'), 0)
q1 = Tag.objects.filter(parent__isnull=True)
q2 = Tag.objects.filter(parent__parent__isnull=True)
q3 = q1 | q2
self.assertQuerysetEqual(
q3,
['<Tag: t1>', '<Tag: t2>', '<Tag: t3>']
)
self.assertEqual(str(q3.query).count('LEFT OUTER JOIN'), 1)
self.assertEqual(str(q3.query).count('INNER JOIN'), 0)
q3 = q2 | q1
self.assertQuerysetEqual(
q3,
['<Tag: t1>', '<Tag: t2>', '<Tag: t3>']
)
self.assertEqual(str(q3.query).count('LEFT OUTER JOIN'), 1)
self.assertEqual(str(q3.query).count('INNER JOIN'), 0)
def test_ticket19672(self):
self.assertQuerysetEqual(
Report.objects.filter(Q(creator__isnull=False) & ~Q(creator__extra__value=41)),
['<Report: r1>']
)
def test_ticket_20250(self):
# A negated Q along with an annotated queryset failed in Django 1.4
qs = Author.objects.annotate(Count('item'))
qs = qs.filter(~Q(extra__value=0)).order_by('name')
self.assertIn('SELECT', str(qs.query))
self.assertQuerysetEqual(
qs,
['<Author: a1>', '<Author: a2>', '<Author: a3>', '<Author: a4>']
)
def test_lookup_constraint_fielderror(self):
msg = (
"Cannot resolve keyword 'unknown_field' into field. Choices are: "
"annotation, category, category_id, children, id, item, "
"managedmodel, name, note, parent, parent_id"
)
with self.assertRaisesMessage(FieldError, msg):
Tag.objects.filter(unknown_field__name='generic')
def test_common_mixed_case_foreign_keys(self):
"""
Valid query should be generated when fields fetched from joined tables
include FKs whose names only differ by case.
"""
c1 = SimpleCategory.objects.create(name='c1')
c2 = SimpleCategory.objects.create(name='c2')
c3 = SimpleCategory.objects.create(name='c3')
category = CategoryItem.objects.create(category=c1)
mixed_case_field_category = MixedCaseFieldCategoryItem.objects.create(CaTeGoRy=c2)
mixed_case_db_column_category = MixedCaseDbColumnCategoryItem.objects.create(category=c3)
CommonMixedCaseForeignKeys.objects.create(
category=category,
mixed_case_field_category=mixed_case_field_category,
mixed_case_db_column_category=mixed_case_db_column_category,
)
qs = CommonMixedCaseForeignKeys.objects.values(
'category',
'mixed_case_field_category',
'mixed_case_db_column_category',
'category__category',
'mixed_case_field_category__CaTeGoRy',
'mixed_case_db_column_category__category',
)
self.assertTrue(qs.first())
class Queries2Tests(TestCase):
@classmethod
def setUpTestData(cls):
Number.objects.create(num=4)
Number.objects.create(num=8)
Number.objects.create(num=12)
def test_ticket4289(self):
# A slight variation on restricting the filtering choices by the
# lookup constraints.
self.assertQuerysetEqual(Number.objects.filter(num__lt=4), [])
self.assertQuerysetEqual(Number.objects.filter(num__gt=8, num__lt=12), [])
self.assertQuerysetEqual(
Number.objects.filter(num__gt=8, num__lt=13),
['<Number: 12>']
)
self.assertQuerysetEqual(
Number.objects.filter(Q(num__lt=4) | Q(num__gt=8, num__lt=12)),
[]
)
self.assertQuerysetEqual(
Number.objects.filter(Q(num__gt=8, num__lt=12) | Q(num__lt=4)),
[]
)
self.assertQuerysetEqual(
Number.objects.filter(Q(num__gt=8) & Q(num__lt=12) | Q(num__lt=4)),
[]
)
self.assertQuerysetEqual(
Number.objects.filter(Q(num__gt=7) & Q(num__lt=12) | Q(num__lt=4)),
['<Number: 8>']
)
def test_ticket12239(self):
# Custom lookups are registered to round float values correctly on gte
# and lt IntegerField queries.
self.assertQuerysetEqual(
Number.objects.filter(num__gt=11.9),
['<Number: 12>']
)
self.assertQuerysetEqual(Number.objects.filter(num__gt=12), [])
self.assertQuerysetEqual(Number.objects.filter(num__gt=12.0), [])
self.assertQuerysetEqual(Number.objects.filter(num__gt=12.1), [])
self.assertQuerysetEqual(
Number.objects.filter(num__lt=12),
['<Number: 4>', '<Number: 8>'],
ordered=False
)
self.assertQuerysetEqual(
Number.objects.filter(num__lt=12.0),
['<Number: 4>', '<Number: 8>'],
ordered=False
)
self.assertQuerysetEqual(
Number.objects.filter(num__lt=12.1),
['<Number: 4>', '<Number: 8>', '<Number: 12>'],
ordered=False
)
self.assertQuerysetEqual(
Number.objects.filter(num__gte=11.9),
['<Number: 12>']
)
self.assertQuerysetEqual(
Number.objects.filter(num__gte=12),
['<Number: 12>']
)
self.assertQuerysetEqual(
Number.objects.filter(num__gte=12.0),
['<Number: 12>']
)
self.assertQuerysetEqual(Number.objects.filter(num__gte=12.1), [])
self.assertQuerysetEqual(Number.objects.filter(num__gte=12.9), [])
self.assertQuerysetEqual(
Number.objects.filter(num__lte=11.9),
['<Number: 4>', '<Number: 8>'],
ordered=False
)
self.assertQuerysetEqual(
Number.objects.filter(num__lte=12),
['<Number: 4>', '<Number: 8>', '<Number: 12>'],
ordered=False
)
self.assertQuerysetEqual(
Number.objects.filter(num__lte=12.0),
['<Number: 4>', '<Number: 8>', '<Number: 12>'],
ordered=False
)
self.assertQuerysetEqual(
Number.objects.filter(num__lte=12.1),
['<Number: 4>', '<Number: 8>', '<Number: 12>'],
ordered=False
)
self.assertQuerysetEqual(
Number.objects.filter(num__lte=12.9),
['<Number: 4>', '<Number: 8>', '<Number: 12>'],
ordered=False
)
def test_ticket7759(self):
# Count should work with a partially read result set.
count = Number.objects.count()
qs = Number.objects.all()
def run():
for obj in qs:
return qs.count() == count
self.assertTrue(run())
class Queries3Tests(TestCase):
def test_ticket7107(self):
# This shouldn't create an infinite loop.
self.assertQuerysetEqual(Valid.objects.all(), [])
def test_ticket8683(self):
# An error should be raised when QuerySet.datetimes() is passed the
# wrong type of field.
with self.assertRaisesMessage(AssertionError, "'name' isn't a DateField, TimeField, or DateTimeField."):
Item.objects.datetimes('name', 'month')
def test_ticket22023(self):
with self.assertRaisesMessage(TypeError, "Cannot call only() after .values() or .values_list()"):
Valid.objects.values().only()
with self.assertRaisesMessage(TypeError, "Cannot call defer() after .values() or .values_list()"):
Valid.objects.values().defer()
class Queries4Tests(TestCase):
@classmethod
def setUpTestData(cls):
generic = NamedCategory.objects.create(name="Generic")
cls.t1 = Tag.objects.create(name='t1', category=generic)
n1 = Note.objects.create(note='n1', misc='foo')
n2 = Note.objects.create(note='n2', misc='bar')
e1 = ExtraInfo.objects.create(info='e1', note=n1)
e2 = ExtraInfo.objects.create(info='e2', note=n2)
cls.a1 = Author.objects.create(name='a1', num=1001, extra=e1)
cls.a3 = Author.objects.create(name='a3', num=3003, extra=e2)
cls.r1 = Report.objects.create(name='r1', creator=cls.a1)
cls.r2 = Report.objects.create(name='r2', creator=cls.a3)
cls.r3 = Report.objects.create(name='r3')
Item.objects.create(name='i1', created=datetime.datetime.now(), note=n1, creator=cls.a1)
Item.objects.create(name='i2', created=datetime.datetime.now(), note=n1, creator=cls.a3)
def test_ticket24525(self):
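# Intersecting a related manager's queryset with an exclude() across the m2m notes relation keeps only the annotation that isn't linked to anth100.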
tag = Tag.objects.create()
anth100 = tag.note_set.create(note='ANTH', misc='100')
math101 = tag.note_set.create(note='MATH', misc='101')
s1 = tag.annotation_set.create(name='1')
s2 = tag.annotation_set.create(name='2')
s1.notes.set([math101, anth100])
s2.notes.set([math101])
result = math101.annotation_set.all() & tag.annotation_set.exclude(notes__in=[anth100])
self.assertEqual(list(result), [s2])
def test_ticket11811(self):
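# Unsaved model instances can't be used as values in update().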
unsaved_category = NamedCategory(name="Other")
msg = 'Unsaved model instance <NamedCategory: Other> cannot be used in an ORM query.'
with self.assertRaisesMessage(ValueError, msg):
Tag.objects.filter(pk=self.t1.pk).update(category=unsaved_category)
def test_ticket14876(self):
# Note: when combining the query we need to have information available
# about the join type of the trimmed "creator__isnull" join. If we
# don't have that information, then the join is created as INNER JOIN
# and results will be incorrect.
q1 = Report.objects.filter(Q(creator__isnull=True) | Q(creator__extra__info='e1'))
q2 = Report.objects.filter(Q(creator__isnull=True)) | Report.objects.filter(Q(creator__extra__info='e1'))
self.assertQuerysetEqual(q1, ["<Report: r1>", "<Report: r3>"], ordered=False)
self.assertEqual(str(q1.query), str(q2.query))
q1 = Report.objects.filter(Q(creator__extra__info='e1') | Q(creator__isnull=True))
q2 = Report.objects.filter(Q(creator__extra__info='e1')) | Report.objects.filter(Q(creator__isnull=True))
self.assertQuerysetEqual(q1, ["<Report: r1>", "<Report: r3>"], ordered=False)
self.assertEqual(str(q1.query), str(q2.query))
q1 = Item.objects.filter(Q(creator=self.a1) | Q(creator__report__name='r1')).order_by()
q2 = (
Item.objects
.filter(Q(creator=self.a1)).order_by() | Item.objects.filter(Q(creator__report__name='r1'))
.order_by()
)
self.assertQuerysetEqual(q1, ["<Item: i1>"])
self.assertEqual(str(q1.query), str(q2.query))
q1 = Item.objects.filter(Q(creator__report__name='e1') | Q(creator=self.a1)).order_by()
q2 = (
Item.objects.filter(Q(creator__report__name='e1')).order_by() |
Item.objects.filter(Q(creator=self.a1)).order_by()
)
self.assertQuerysetEqual(q1, ["<Item: i1>"])
self.assertEqual(str(q1.query), str(q2.query))
def test_combine_join_reuse(self):
# Joins having identical connections are correctly recreated in the
# rhs query, in case the query is ORed together (#18748).
Report.objects.create(name='r4', creator=self.a1)
q1 = Author.objects.filter(report__name='r5')
q2 = Author.objects.filter(report__name='r4').filter(report__name='r1')
combined = q1 | q2
self.assertEqual(str(combined.query).count('JOIN'), 2)
self.assertEqual(len(combined), 1)
self.assertEqual(combined[0].name, 'a1')
def test_join_reuse_order(self):
# Join aliases are reused in order. This shouldn't raise an AssertionError
# about change_map containing a circular reference (#26522).
s1 = School.objects.create()
s2 = School.objects.create()
s3 = School.objects.create()
t1 = Teacher.objects.create()
otherteachers = Teacher.objects.exclude(pk=t1.pk).exclude(friends=t1)
qs1 = otherteachers.filter(schools=s1).filter(schools=s2)
qs2 = otherteachers.filter(schools=s1).filter(schools=s3)
self.assertQuerysetEqual(qs1 | qs2, [])
def test_ticket7095(self):
# Updates that are filtered on the model being updated are somewhat
# tricky in MySQL.
ManagedModel.objects.create(data='mm1', tag=self.t1, public=True)
self.assertEqual(ManagedModel.objects.update(data='mm'), 1)
# A values() or values_list() query across joined models must use outer
# joins appropriately.
# Note: In Oracle, we expect a null CharField to return '' instead of
# None.
if connection.features.interprets_empty_strings_as_nulls:
expected_null_charfield_repr = ''
else:
expected_null_charfield_repr = None
self.assertSequenceEqual(
Report.objects.values_list("creator__extra__info", flat=True).order_by("name"),
['e1', 'e2', expected_null_charfield_repr],
)
# Similarly for select_related(), joins beyond an initial nullable join
# must use outer joins so that all results are included.
self.assertQuerysetEqual(
Report.objects.select_related("creator", "creator__extra").order_by("name"),
['<Report: r1>', '<Report: r2>', '<Report: r3>']
)
# When there are multiple paths to a table from another table, we have
# to be careful not to accidentally reuse an inappropriate join when
# using select_related(). We used to return the parent's Detail record
# here by mistake.
d1 = Detail.objects.create(data="d1")
d2 = Detail.objects.create(data="d2")
m1 = Member.objects.create(name="m1", details=d1)
m2 = Member.objects.create(name="m2", details=d2)
Child.objects.create(person=m2, parent=m1)
obj = m1.children.select_related("person__details")[0]
self.assertEqual(obj.person.details.data, 'd2')
def test_order_by_resetting(self):
# Calling order_by() with no parameters removes any existing ordering on the
# model. But it should still be possible to add new ordering after that.
qs = Author.objects.order_by().order_by('name')
self.assertIn('ORDER BY', qs.query.get_compiler(qs.db).as_sql()[0])
def test_order_by_reverse_fk(self):
# It is possible to order by the reverse of a foreign key, although that
# can lead to duplicate results.
c1 = SimpleCategory.objects.create(name="category1")
c2 = SimpleCategory.objects.create(name="category2")
CategoryItem.objects.create(category=c1)
CategoryItem.objects.create(category=c2)
CategoryItem.objects.create(category=c1)
self.assertSequenceEqual(SimpleCategory.objects.order_by('categoryitem', 'pk'), [c1, c2, c1])
def test_ticket10181(self):
# Avoid raising an EmptyResultSet if an inner query is probably
# empty (and hence, not executed).
self.assertQuerysetEqual(
Tag.objects.filter(id__in=Tag.objects.filter(id__in=[])),
[]
)
def test_ticket15316_filter_false(self):
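# isnull=False across the link to the SpecialCategory child model keeps only items whose category is a SpecialCategory.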
c1 = SimpleCategory.objects.create(name="category1")
c2 = SpecialCategory.objects.create(name="named category1", special_name="special1")
c3 = SpecialCategory.objects.create(name="named category2", special_name="special2")
CategoryItem.objects.create(category=c1)
ci2 = CategoryItem.objects.create(category=c2)
ci3 = CategoryItem.objects.create(category=c3)
qs = CategoryItem.objects.filter(category__specialcategory__isnull=False)
self.assertEqual(qs.count(), 2)
self.assertSequenceEqual(qs, [ci2, ci3])
def test_ticket15316_exclude_false(self):
c1 = SimpleCategory.objects.create(name="category1")
c2 = SpecialCategory.objects.create(name="named category1", special_name="special1")
c3 = SpecialCategory.objects.create(name="named category2", special_name="special2")
ci1 = CategoryItem.objects.create(category=c1)
CategoryItem.objects.create(category=c2)
CategoryItem.objects.create(category=c3)
qs = CategoryItem.objects.exclude(category__specialcategory__isnull=False)
self.assertEqual(qs.count(), 1)
self.assertSequenceEqual(qs, [ci1])
def test_ticket15316_filter_true(self):
c1 = SimpleCategory.objects.create(name="category1")
c2 = SpecialCategory.objects.create(name="named category1", special_name="special1")
c3 = SpecialCategory.objects.create(name="named category2", special_name="special2")
ci1 = CategoryItem.objects.create(category=c1)
CategoryItem.objects.create(category=c2)
CategoryItem.objects.create(category=c3)
qs = CategoryItem.objects.filter(category__specialcategory__isnull=True)
self.assertEqual(qs.count(), 1)
self.assertSequenceEqual(qs, [ci1])
def test_ticket15316_exclude_true(self):
c1 = SimpleCategory.objects.create(name="category1")
c2 = SpecialCategory.objects.create(name="named category1", special_name="special1")
c3 = SpecialCategory.objects.create(name="named category2", special_name="special2")
CategoryItem.objects.create(category=c1)
ci2 = CategoryItem.objects.create(category=c2)
ci3 = CategoryItem.objects.create(category=c3)
qs = CategoryItem.objects.exclude(category__specialcategory__isnull=True)
self.assertEqual(qs.count(), 2)
self.assertSequenceEqual(qs, [ci2, ci3])
def test_ticket15316_one2one_filter_false(self):
c = SimpleCategory.objects.create(name="cat")
c0 = SimpleCategory.objects.create(name="cat0")
c1 = SimpleCategory.objects.create(name="category1")
OneToOneCategory.objects.create(category=c1, new_name="new1")
OneToOneCategory.objects.create(category=c0, new_name="new2")
CategoryItem.objects.create(category=c)
ci2 = CategoryItem.objects.create(category=c0)
ci3 = CategoryItem.objects.create(category=c1)
qs = CategoryItem.objects.filter(category__onetoonecategory__isnull=False).order_by('pk')
self.assertEqual(qs.count(), 2)
self.assertSequenceEqual(qs, [ci2, ci3])
def test_ticket15316_one2one_exclude_false(self):
c = SimpleCategory.objects.create(name="cat")
c0 = SimpleCategory.objects.create(name="cat0")
c1 = SimpleCategory.objects.create(name="category1")
OneToOneCategory.objects.create(category=c1, new_name="new1")
OneToOneCategory.objects.create(category=c0, new_name="new2")
ci1 = CategoryItem.objects.create(category=c)
CategoryItem.objects.create(category=c0)
CategoryItem.objects.create(category=c1)
qs = CategoryItem.objects.exclude(category__onetoonecategory__isnull=False)
self.assertEqual(qs.count(), 1)
self.assertSequenceEqual(qs, [ci1])
def test_ticket15316_one2one_filter_true(self):
c = SimpleCategory.objects.create(name="cat")
c0 = SimpleCategory.objects.create(name="cat0")
c1 = SimpleCategory.objects.create(name="category1")
OneToOneCategory.objects.create(category=c1, new_name="new1")
OneToOneCategory.objects.create(category=c0, new_name="new2")
ci1 = CategoryItem.objects.create(category=c)
CategoryItem.objects.create(category=c0)
CategoryItem.objects.create(category=c1)
qs = CategoryItem.objects.filter(category__onetoonecategory__isnull=True)
self.assertEqual(qs.count(), 1)
self.assertSequenceEqual(qs, [ci1])
def test_ticket15316_one2one_exclude_true(self):
c = SimpleCategory.objects.create(name="cat")
c0 = SimpleCategory.objects.create(name="cat0")
c1 = SimpleCategory.objects.create(name="category1")
OneToOneCategory.objects.create(category=c1, new_name="new1")
OneToOneCategory.objects.create(category=c0, new_name="new2")
CategoryItem.objects.create(category=c)
ci2 = CategoryItem.objects.create(category=c0)
ci3 = CategoryItem.objects.create(category=c1)
qs = CategoryItem.objects.exclude(category__onetoonecategory__isnull=True).order_by('pk')
self.assertEqual(qs.count(), 2)
self.assertSequenceEqual(qs, [ci2, ci3])
class Queries5Tests(TestCase):
@classmethod
def setUpTestData(cls):
# Ordering by 'rank' gives us rank2, rank1, rank3. Ordering by the
# Meta.ordering will be rank3, rank2, rank1.
n1 = Note.objects.create(note='n1', misc='foo', id=1)
n2 = Note.objects.create(note='n2', misc='bar', id=2)
e1 = ExtraInfo.objects.create(info='e1', note=n1)
e2 = ExtraInfo.objects.create(info='e2', note=n2)
a1 = Author.objects.create(name='a1', num=1001, extra=e1)
a2 = Author.objects.create(name='a2', num=2002, extra=e1)
a3 = Author.objects.create(name='a3', num=3003, extra=e2)
cls.rank1 = Ranking.objects.create(rank=2, author=a2)
Ranking.objects.create(rank=1, author=a3)
Ranking.objects.create(rank=3, author=a1)
def test_ordering(self):
# Cross model ordering is possible in Meta, too.
self.assertQuerysetEqual(
Ranking.objects.all(),
['<Ranking: 3: a1>', '<Ranking: 2: a2>', '<Ranking: 1: a3>']
)
self.assertQuerysetEqual(
Ranking.objects.all().order_by('rank'),
['<Ranking: 1: a3>', '<Ranking: 2: a2>', '<Ranking: 3: a1>']
)
# Ordering by extra() pieces is possible too, and you can mix extra
# fields and model fields in the ordering.
self.assertQuerysetEqual(
Ranking.objects.extra(tables=['django_site'], order_by=['-django_site.id', 'rank']),
['<Ranking: 1: a3>', '<Ranking: 2: a2>', '<Ranking: 3: a1>']
)
sql = 'case when %s > 2 then 1 else 0 end' % connection.ops.quote_name('rank')
qs = Ranking.objects.extra(select={'good': sql})
self.assertEqual(
[o.good for o in qs.extra(order_by=('-good',))],
[True, False, False]
)
self.assertQuerysetEqual(
qs.extra(order_by=('-good', 'id')),
['<Ranking: 3: a1>', '<Ranking: 2: a2>', '<Ranking: 1: a3>']
)
# Despite having some extra aliases in the query, we can still omit
# them in a values() query.
dicts = qs.values('id', 'rank').order_by('id')
self.assertEqual(
[d['rank'] for d in dicts],
[2, 1, 3]
)
def test_ticket7256(self):
# An empty values() call includes all aliases, including those added by
# extra().
sql = 'case when %s > 2 then 1 else 0 end' % connection.ops.quote_name('rank')
qs = Ranking.objects.extra(select={'good': sql})
dicts = qs.values().order_by('id')
for d in dicts:
del d['id']
del d['author_id']
self.assertEqual(
[sorted(d.items()) for d in dicts],
[[('good', 0), ('rank', 2)], [('good', 0), ('rank', 1)], [('good', 1), ('rank', 3)]]
)
def test_ticket7045(self):
# Extra tables used to crash SQL construction on the second use.
qs = Ranking.objects.extra(tables=['django_site'])
qs.query.get_compiler(qs.db).as_sql()
# The test passes if this doesn't raise an exception.
qs.query.get_compiler(qs.db).as_sql()
def test_ticket9848(self):
# Make sure that updates which only filter on sub-tables don't
# inadvertently update the wrong records (bug #9848).
author_start = Author.objects.get(name='a1')
ranking_start = Ranking.objects.get(author__name='a1')
# Make sure that the IDs from different tables don't happen to match.
self.assertQuerysetEqual(
Ranking.objects.filter(author__name='a1'),
['<Ranking: 3: a1>']
)
self.assertEqual(
Ranking.objects.filter(author__name='a1').update(rank=4636),
1
)
r = Ranking.objects.get(author__name='a1')
self.assertEqual(r.id, ranking_start.id)
self.assertEqual(r.author.id, author_start.id)
self.assertEqual(r.rank, 4636)
r.rank = 3
r.save()
self.assertQuerysetEqual(
Ranking.objects.all(),
['<Ranking: 3: a1>', '<Ranking: 2: a2>', '<Ranking: 1: a3>']
)
def test_ticket5261(self):
# Test different empty excludes.
self.assertQuerysetEqual(
Note.objects.exclude(Q()),
['<Note: n1>', '<Note: n2>']
)
self.assertQuerysetEqual(
Note.objects.filter(~Q()),
['<Note: n1>', '<Note: n2>']
)
self.assertQuerysetEqual(
Note.objects.filter(~Q() | ~Q()),
['<Note: n1>', '<Note: n2>']
)
self.assertQuerysetEqual(
Note.objects.exclude(~Q() & ~Q()),
['<Note: n1>', '<Note: n2>']
)
def test_extra_select_literal_percent_s(self):
# Allow %%s to escape select clauses
self.assertEqual(
Note.objects.extra(select={'foo': "'%%s'"})[0].foo,
'%s'
)
self.assertEqual(
Note.objects.extra(select={'foo': "'%%s bar %%s'"})[0].foo,
'%s bar %s'
)
self.assertEqual(
Note.objects.extra(select={'foo': "'bar %%s'"})[0].foo,
'bar %s'
)
class SelectRelatedTests(TestCase):
def test_tickets_3045_3288(self):
# Once upon a time, select_related() with circular relations would loop
# infinitely if you forgot to specify "depth". Now we set an arbitrary
# default upper bound.
self.assertQuerysetEqual(X.objects.all(), [])
self.assertQuerysetEqual(X.objects.select_related(), [])
class SubclassFKTests(TestCase):
def test_ticket7778(self):
# Model subclasses could not be deleted if a nullable foreign key
# relates to a model that relates back.
num_celebs = Celebrity.objects.count()
tvc = TvChef.objects.create(name="Huey")
self.assertEqual(Celebrity.objects.count(), num_celebs + 1)
Fan.objects.create(fan_of=tvc)
Fan.objects.create(fan_of=tvc)
tvc.delete()
# The parent object should have been deleted as well.
self.assertEqual(Celebrity.objects.count(), num_celebs)
class CustomPkTests(TestCase):
def test_ticket7371(self):
self.assertQuerysetEqual(Related.objects.order_by('custom'), [])
class NullableRelOrderingTests(TestCase):
def test_ticket10028(self):
# Ordering by model related to nullable relations(!) should use outer
# joins, so that all results are included.
Plaything.objects.create(name="p1")
self.assertQuerysetEqual(
Plaything.objects.all(),
['<Plaything: p1>']
)
def test_join_already_in_query(self):
# Ordering by model related to nullable relations should not change
# the join type of already existing joins.
Plaything.objects.create(name="p1")
s = SingleObject.objects.create(name='s')
r = RelatedObject.objects.create(single=s, f=1)
Plaything.objects.create(name="p2", others=r)
qs = Plaything.objects.all().filter(others__isnull=False).order_by('pk')
self.assertNotIn('JOIN', str(qs.query))
qs = Plaything.objects.all().filter(others__f__isnull=False).order_by('pk')
self.assertIn('INNER', str(qs.query))
qs = qs.order_by('others__single__name')
# The ordering by others__single__name will add one new join (to single)
# and that join must be a LEFT join. The already existing join to related
# objects must be kept INNER. So, we have both an INNER and a LEFT join
# in the query.
self.assertEqual(str(qs.query).count('LEFT'), 1)
self.assertEqual(str(qs.query).count('INNER'), 1)
self.assertQuerysetEqual(
qs,
['<Plaything: p2>']
)
class DisjunctiveFilterTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.n1 = Note.objects.create(note='n1', misc='foo', id=1)
ExtraInfo.objects.create(info='e1', note=cls.n1)
def test_ticket7872(self):
# Another variation on the disjunctive filtering theme.
# For the purposes of this regression test, it's important that there is no
# Join object related to the LeafA we create.
LeafA.objects.create(data='first')
self.assertQuerysetEqual(LeafA.objects.all(), ['<LeafA: first>'])
self.assertQuerysetEqual(
LeafA.objects.filter(Q(data='first') | Q(join__b__data='second')),
['<LeafA: first>']
)
def test_ticket8283(self):
# Checking that applying filters after a disjunction works correctly.
self.assertQuerysetEqual(
(ExtraInfo.objects.filter(note=self.n1) | ExtraInfo.objects.filter(info='e2')).filter(note=self.n1),
['<ExtraInfo: e1>']
)
self.assertQuerysetEqual(
(ExtraInfo.objects.filter(info='e2') | ExtraInfo.objects.filter(note=self.n1)).filter(note=self.n1),
['<ExtraInfo: e1>']
)
class Queries6Tests(TestCase):
@classmethod
def setUpTestData(cls):
generic = NamedCategory.objects.create(name="Generic")
cls.t1 = Tag.objects.create(name='t1', category=generic)
cls.t2 = Tag.objects.create(name='t2', parent=cls.t1, category=generic)
cls.t3 = Tag.objects.create(name='t3', parent=cls.t1)
cls.t4 = Tag.objects.create(name='t4', parent=cls.t3)
cls.t5 = Tag.objects.create(name='t5', parent=cls.t3)
n1 = Note.objects.create(note='n1', misc='foo', id=1)
ann1 = Annotation.objects.create(name='a1', tag=cls.t1)
ann1.notes.add(n1)
Annotation.objects.create(name='a2', tag=cls.t4)
def test_parallel_iterators(self):
# Parallel iterators work.
qs = Tag.objects.all()
i1, i2 = iter(qs), iter(qs)
self.assertEqual(repr(next(i1)), '<Tag: t1>')
self.assertEqual(repr(next(i1)), '<Tag: t2>')
self.assertEqual(repr(next(i2)), '<Tag: t1>')
self.assertEqual(repr(next(i2)), '<Tag: t2>')
self.assertEqual(repr(next(i2)), '<Tag: t3>')
self.assertEqual(repr(next(i1)), '<Tag: t3>')
qs = X.objects.all()
self.assertFalse(qs)
self.assertFalse(qs)
def test_nested_queries_sql(self):
# Nested queries should not evaluate the inner query as part of constructing the
# SQL (so we should see a nested query here, indicated by two "SELECT" calls).
qs = Annotation.objects.filter(notes__in=Note.objects.filter(note="xyzzy"))
self.assertEqual(
qs.query.get_compiler(qs.db).as_sql()[0].count('SELECT'),
2
)
def test_tickets_8921_9188(self):
# Incorrect SQL was being generated for certain types of exclude()
# queries that crossed multi-valued relations (#8921, #9188 and some
# preemptively discovered cases).
self.assertQuerysetEqual(
PointerA.objects.filter(connection__pointerb__id=1),
[]
)
self.assertQuerysetEqual(
PointerA.objects.exclude(connection__pointerb__id=1),
[]
)
self.assertQuerysetEqual(
Tag.objects.exclude(children=None),
['<Tag: t1>', '<Tag: t3>']
)
# This example is tricky because the parent could be NULL, so only checking
# parents with annotations omits some results (tag t1, in this case).
self.assertQuerysetEqual(
Tag.objects.exclude(parent__annotation__name="a1"),
['<Tag: t1>', '<Tag: t4>', '<Tag: t5>']
)
# The annotation->tag link is single-valued and the tag->children link is
# multi-valued. So we have to split the exclude filter in the middle
# and then optimize the inner query without losing results.
self.assertQuerysetEqual(
Annotation.objects.exclude(tag__children__name="t2"),
['<Annotation: a2>']
)
# Nested queries are possible (although they should be used with care,
# since they have performance problems on backends like MySQL).
self.assertQuerysetEqual(
Annotation.objects.filter(notes__in=Note.objects.filter(note="n1")),
['<Annotation: a1>']
)
def test_ticket3739(self):
# The all() method on querysets returns a copy of the queryset.
q1 = Tag.objects.order_by('name')
self.assertIsNot(q1, q1.all())
def test_ticket_11320(self):
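# Both lookups traverse the same nullable FK, so the query should need only
# a single INNER JOIN to the category table.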
qs = Tag.objects.exclude(category=None).exclude(category__name='foo')
self.assertEqual(str(qs.query).count(' INNER JOIN '), 1)
def test_distinct_ordered_sliced_subquery_aggregation(self):
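# count() on a distinct, ordered, sliced queryset must respect the slice,
# so at most three rows are counted.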
self.assertEqual(Tag.objects.distinct().order_by('category__name')[:3].count(), 3)
def test_multiple_columns_with_the_same_name_slice(self):
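# Slicing still works when the selected columns include two 'name' columns
# (Tag.name plus the related category/parent name).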
self.assertEqual(
list(Tag.objects.order_by('name').values_list('name', 'category__name')[:2]),
[('t1', 'Generic'), ('t2', 'Generic')],
)
self.assertSequenceEqual(
Tag.objects.order_by('name').select_related('category')[:2],
[self.t1, self.t2],
)
self.assertEqual(
list(Tag.objects.order_by('-name').values_list('name', 'parent__name')[:2]),
[('t5', 't3'), ('t4', 't3')],
)
self.assertSequenceEqual(
Tag.objects.order_by('-name').select_related('parent')[:2],
[self.t5, self.t4],
)
class RawQueriesTests(TestCase):
def setUp(self):
Note.objects.create(note='n1', misc='foo', id=1)
def test_ticket14729(self):
# Test the representation of a raw query with one or more parameters passed as a list.
query = "SELECT * FROM queries_note WHERE note = %s"
params = ['n1']
qs = Note.objects.raw(query, params=params)
self.assertEqual(repr(qs), "<RawQuerySet: SELECT * FROM queries_note WHERE note = n1>")
query = "SELECT * FROM queries_note WHERE note = %s and misc = %s"
params = ['n1', 'foo']
qs = Note.objects.raw(query, params=params)
self.assertEqual(repr(qs), "<RawQuerySet: SELECT * FROM queries_note WHERE note = n1 and misc = foo>")
class GeneratorExpressionTests(SimpleTestCase):
def test_ticket10432(self):
# Using an empty generator expression as the rvalue for an "__in"
# lookup is legal.
self.assertCountEqual(Note.objects.filter(pk__in=(x for x in ())), [])
class ComparisonTests(TestCase):
def setUp(self):
self.n1 = Note.objects.create(note='n1', misc='foo', id=1)
e1 = ExtraInfo.objects.create(info='e1', note=self.n1)
self.a2 = Author.objects.create(name='a2', num=2002, extra=e1)
def test_ticket8597(self):
# Regression tests for case-insensitive comparisons
Item.objects.create(name="a_b", created=datetime.datetime.now(), creator=self.a2, note=self.n1)
Item.objects.create(name="x%y", created=datetime.datetime.now(), creator=self.a2, note=self.n1)
self.assertQuerysetEqual(
Item.objects.filter(name__iexact="A_b"),
['<Item: a_b>']
)
self.assertQuerysetEqual(
Item.objects.filter(name__iexact="x%Y"),
['<Item: x%y>']
)
self.assertQuerysetEqual(
Item.objects.filter(name__istartswith="A_b"),
['<Item: a_b>']
)
self.assertQuerysetEqual(
Item.objects.filter(name__iendswith="A_b"),
['<Item: a_b>']
)
class ExistsSql(TestCase):
def test_exists(self):
with CaptureQueriesContext(connection) as captured_queries:
self.assertFalse(Tag.objects.exists())
# OK - so the exists query worked - but did it include too many columns?
self.assertEqual(len(captured_queries), 1)
qstr = captured_queries[0]['sql']
id, name = connection.ops.quote_name('id'), connection.ops.quote_name('name')
self.assertNotIn(id, qstr)
self.assertNotIn(name, qstr)
def test_ticket_18414(self):
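# exists() keeps working on distinct() querysets and on slices of them.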
Article.objects.create(name='one', created=datetime.datetime.now())
Article.objects.create(name='one', created=datetime.datetime.now())
Article.objects.create(name='two', created=datetime.datetime.now())
self.assertTrue(Article.objects.exists())
self.assertTrue(Article.objects.distinct().exists())
self.assertTrue(Article.objects.distinct()[1:3].exists())
self.assertFalse(Article.objects.distinct()[1:1].exists())
@skipUnlessDBFeature('can_distinct_on_fields')
def test_ticket_18414_distinct_on(self):
Article.objects.create(name='one', created=datetime.datetime.now())
Article.objects.create(name='one', created=datetime.datetime.now())
Article.objects.create(name='two', created=datetime.datetime.now())
self.assertTrue(Article.objects.distinct('name').exists())
self.assertTrue(Article.objects.distinct('name')[1:2].exists())
self.assertFalse(Article.objects.distinct('name')[2:3].exists())
class QuerysetOrderedTests(unittest.TestCase):
"""
Tests for the QuerySet.ordered attribute.
"""
def test_no_default_or_explicit_ordering(self):
self.assertIs(Annotation.objects.all().ordered, False)
def test_cleared_default_ordering(self):
self.assertIs(Tag.objects.all().ordered, True)
self.assertIs(Tag.objects.all().order_by().ordered, False)
def test_explicit_ordering(self):
self.assertIs(Annotation.objects.all().order_by('id').ordered, True)
def test_empty_queryset(self):
self.assertIs(Annotation.objects.none().ordered, True)
def test_order_by_extra(self):
self.assertIs(Annotation.objects.all().extra(order_by=['id']).ordered, True)
def test_annotated_ordering(self):
qs = Annotation.objects.annotate(num_notes=Count('notes'))
self.assertIs(qs.ordered, False)
self.assertIs(qs.order_by('num_notes').ordered, True)
@skipUnlessDBFeature('allow_sliced_subqueries_with_in')
class SubqueryTests(TestCase):
@classmethod
def setUpTestData(cls):
NamedCategory.objects.create(id=1, name='first')
NamedCategory.objects.create(id=2, name='second')
NamedCategory.objects.create(id=3, name='third')
NamedCategory.objects.create(id=4, name='fourth')
def test_ordered_subselect(self):
"Subselects honor any manual ordering"
query = DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[0:2])
self.assertEqual(set(query.values_list('id', flat=True)), {3, 4})
query = DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[:2])
self.assertEqual(set(query.values_list('id', flat=True)), {3, 4})
query = DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[1:2])
self.assertEqual(set(query.values_list('id', flat=True)), {3})
query = DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[2:])
self.assertEqual(set(query.values_list('id', flat=True)), {1, 2})
def test_slice_subquery_and_query(self):
"""
Slice a query that has a sliced subquery
"""
query = DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[0:2])[0:2]
self.assertEqual({x.id for x in query}, {3, 4})
query = DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[1:3])[1:3]
self.assertEqual({x.id for x in query}, {3})
query = DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[2:])[1:]
self.assertEqual({x.id for x in query}, {2})
def test_related_sliced_subquery(self):
"""
Related objects constraints can safely contain sliced subqueries.
refs #22434
"""
generic = NamedCategory.objects.create(id=5, name="Generic")
t1 = Tag.objects.create(name='t1', category=generic)
t2 = Tag.objects.create(name='t2', category=generic)
ManagedModel.objects.create(data='mm1', tag=t1, public=True)
mm2 = ManagedModel.objects.create(data='mm2', tag=t2, public=True)
query = ManagedModel.normal_manager.filter(
tag__in=Tag.objects.order_by('-id')[:1]
)
self.assertEqual({x.id for x in query}, {mm2.id})
def test_sliced_delete(self):
"Delete queries can safely contain sliced subqueries"
DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[0:1]).delete()
self.assertEqual(set(DumbCategory.objects.values_list('id', flat=True)), {1, 2, 3})
DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[1:2]).delete()
self.assertEqual(set(DumbCategory.objects.values_list('id', flat=True)), {1, 3})
DumbCategory.objects.filter(id__in=DumbCategory.objects.order_by('-id')[1:]).delete()
self.assertEqual(set(DumbCategory.objects.values_list('id', flat=True)), {3})
def test_distinct_ordered_sliced_subquery(self):
# Implicit values('id').
self.assertSequenceEqual(
NamedCategory.objects.filter(
id__in=NamedCategory.objects.distinct().order_by('name')[0:2],
).order_by('name').values_list('name', flat=True), ['first', 'fourth']
)
# Explicit values('id').
self.assertSequenceEqual(
NamedCategory.objects.filter(
id__in=NamedCategory.objects.distinct().order_by('-name').values('id')[0:2],
).order_by('name').values_list('name', flat=True), ['second', 'third']
)
# Annotated value.
self.assertSequenceEqual(
DumbCategory.objects.filter(
id__in=DumbCategory.objects.annotate(
double_id=F('id') * 2
).order_by('id').distinct().values('double_id')[0:2],
).order_by('id').values_list('id', flat=True), [2, 4]
)
@skipUnlessDBFeature('allow_sliced_subqueries_with_in')
class QuerySetBitwiseOperationTests(TestCase):
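# Combining querysets with | must honor each operand's slice, which is why
# the sliced-subquery feature flag above is required.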
@classmethod
def setUpTestData(cls):
school = School.objects.create()
cls.room_1 = Classroom.objects.create(school=school, has_blackboard=False, name='Room 1')
cls.room_2 = Classroom.objects.create(school=school, has_blackboard=True, name='Room 2')
cls.room_3 = Classroom.objects.create(school=school, has_blackboard=True, name='Room 3')
cls.room_4 = Classroom.objects.create(school=school, has_blackboard=False, name='Room 4')
def test_or_with_rhs_slice(self):
qs1 = Classroom.objects.filter(has_blackboard=True)
qs2 = Classroom.objects.filter(has_blackboard=False)[:1]
self.assertCountEqual(qs1 | qs2, [self.room_1, self.room_2, self.room_3])
def test_or_with_lhs_slice(self):
qs1 = Classroom.objects.filter(has_blackboard=True)[:1]
qs2 = Classroom.objects.filter(has_blackboard=False)
self.assertCountEqual(qs1 | qs2, [self.room_1, self.room_2, self.room_4])
def test_or_with_both_slice(self):
qs1 = Classroom.objects.filter(has_blackboard=False)[:1]
qs2 = Classroom.objects.filter(has_blackboard=True)[:1]
self.assertCountEqual(qs1 | qs2, [self.room_1, self.room_2])
def test_or_with_both_slice_and_ordering(self):
qs1 = Classroom.objects.filter(has_blackboard=False).order_by('-pk')[:1]
qs2 = Classroom.objects.filter(has_blackboard=True).order_by('-name')[:1]
self.assertCountEqual(qs1 | qs2, [self.room_3, self.room_4])
class CloneTests(TestCase):
def test_evaluated_queryset_as_argument(self):
"#13227 -- If a queryset is already evaluated, it can still be used as a query arg"
n = Note(note='Test1', misc='misc')
n.save()
e = ExtraInfo(info='good', note=n)
e.save()
n_list = Note.objects.all()
# Evaluate the Note queryset, populating the query cache
list(n_list)
# Use the note queryset in a query, and evaluate
# that query in a way that involves cloning.
self.assertEqual(ExtraInfo.objects.filter(note__in=n_list)[0].info, 'good')
def test_no_model_options_cloning(self):
"""
Cloning a queryset does not get out of hand. While complete
testing is impossible, this is a sanity check against invalid use of
deepcopy. refs #16759.
"""
opts_class = type(Note._meta)
note_deepcopy = getattr(opts_class, "__deepcopy__", None)
opts_class.__deepcopy__ = lambda obj, memo: self.fail("Model options shouldn't be cloned.")
try:
Note.objects.filter(pk__lte=F('pk') + 1).all()
finally:
if note_deepcopy is None:
delattr(opts_class, "__deepcopy__")
else:
opts_class.__deepcopy__ = note_deepcopy
def test_no_fields_cloning(self):
"""
Cloning a queryset does not get out of hand. While complete
testing is impossible, this is a sanity check against invalid use of
deepcopy. refs #16759.
"""
opts_class = type(Note._meta.get_field("misc"))
note_deepcopy = getattr(opts_class, "__deepcopy__", None)
opts_class.__deepcopy__ = lambda obj, memo: self.fail("Model fields shouldn't be cloned")
try:
Note.objects.filter(note=F('misc')).all()
finally:
if note_deepcopy is None:
delattr(opts_class, "__deepcopy__")
else:
opts_class.__deepcopy__ = note_deepcopy
class EmptyQuerySetTests(SimpleTestCase):
def test_emptyqueryset_values(self):
# #14366 -- Calling .values() on an empty QuerySet and then cloning
# that should not cause an error
self.assertCountEqual(Number.objects.none().values('num').order_by('num'), [])
def test_values_subquery(self):
self.assertCountEqual(Number.objects.filter(pk__in=Number.objects.none().values('pk')), [])
self.assertCountEqual(Number.objects.filter(pk__in=Number.objects.none().values_list('pk')), [])
def test_ticket_19151(self):
# #19151 -- Calling .values() or .values_list() on an empty QuerySet
# should return an empty QuerySet and not cause an error.
q = Author.objects.none()
self.assertCountEqual(q.values(), [])
self.assertCountEqual(q.values_list(), [])
class ValuesQuerysetTests(TestCase):
@classmethod
def setUpTestData(cls):
Number.objects.create(num=72)
def test_flat_values_list(self):
qs = Number.objects.values_list("num")
qs = qs.values_list("num", flat=True)
self.assertSequenceEqual(qs, [72])
def test_extra_values(self):
# testing for ticket 14930 issues
qs = Number.objects.extra(select=OrderedDict([('value_plus_x', 'num+%s'),
('value_minus_x', 'num-%s')]),
select_params=(1, 2))
qs = qs.order_by('value_minus_x')
qs = qs.values('num')
self.assertSequenceEqual(qs, [{'num': 72}])
def test_extra_values_order_twice(self):
# testing for ticket 14930 issues
qs = Number.objects.extra(select={'value_plus_one': 'num+1', 'value_minus_one': 'num-1'})
qs = qs.order_by('value_minus_one').order_by('value_plus_one')
qs = qs.values('num')
self.assertSequenceEqual(qs, [{'num': 72}])
def test_extra_values_order_multiple(self):
# Postgres doesn't allow constants in order by, so check for that.
qs = Number.objects.extra(select={
'value_plus_one': 'num+1',
'value_minus_one': 'num-1',
'constant_value': '1'
})
qs = qs.order_by('value_plus_one', 'value_minus_one', 'constant_value')
qs = qs.values('num')
self.assertSequenceEqual(qs, [{'num': 72}])
def test_extra_values_order_in_extra(self):
# testing for ticket 14930 issues
qs = Number.objects.extra(
select={'value_plus_one': 'num+1', 'value_minus_one': 'num-1'},
order_by=['value_minus_one'],
)
qs = qs.values('num')
self.assertSequenceEqual(qs, [{'num': 72}])
def test_extra_select_params_values_order_in_extra(self):
# testing for 23259 issue
qs = Number.objects.extra(
select={'value_plus_x': 'num+%s'},
select_params=[1],
order_by=['value_plus_x'],
)
qs = qs.filter(num=72)
qs = qs.values('num')
self.assertSequenceEqual(qs, [{'num': 72}])
def test_extra_multiple_select_params_values_order_by(self):
# testing for 23259 issue
qs = Number.objects.extra(select=OrderedDict([('value_plus_x', 'num+%s'),
('value_minus_x', 'num-%s')]),
select_params=(72, 72))
qs = qs.order_by('value_minus_x')
qs = qs.filter(num=1)
qs = qs.values('num')
self.assertSequenceEqual(qs, [])
def test_extra_values_list(self):
# testing for ticket 14930 issues
qs = Number.objects.extra(select={'value_plus_one': 'num+1'})
qs = qs.order_by('value_plus_one')
qs = qs.values_list('num')
self.assertSequenceEqual(qs, [(72,)])
def test_flat_extra_values_list(self):
# testing for ticket 14930 issues
qs = Number.objects.extra(select={'value_plus_one': 'num+1'})
qs = qs.order_by('value_plus_one')
qs = qs.values_list('num', flat=True)
self.assertSequenceEqual(qs, [72])
def test_field_error_values_list(self):
# see #23443
msg = "Cannot resolve keyword %r into field. Join on 'name' not permitted." % 'foo'
with self.assertRaisesMessage(FieldError, msg):
Tag.objects.values_list('name__foo')
def test_named_values_list_flat(self):
msg = "'flat' and 'named' can't be used together."
with self.assertRaisesMessage(TypeError, msg):
Number.objects.values_list('num', flat=True, named=True)
def test_named_values_list_bad_field_name(self):
msg = "Type names and field names must be valid identifiers: '1'"
with self.assertRaisesMessage(ValueError, msg):
Number.objects.extra(select={'1': 'num+1'}).values_list('1', named=True).first()
def test_named_values_list_with_fields(self):
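# named=True returns each row as a named tuple ('Row') whose _fields match
# the requested columns.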
qs = Number.objects.extra(select={'num2': 'num+1'}).annotate(Count('id'))
values = qs.values_list('num', 'num2', named=True).first()
self.assertEqual(type(values).__name__, 'Row')
self.assertEqual(values._fields, ('num', 'num2'))
self.assertEqual(values.num, 72)
self.assertEqual(values.num2, 73)
def test_named_values_list_without_fields(self):
qs = Number.objects.extra(select={'num2': 'num+1'}).annotate(Count('id'))
values = qs.values_list(named=True).first()
self.assertEqual(type(values).__name__, 'Row')
self.assertEqual(values._fields, ('num2', 'id', 'num', 'id__count'))
self.assertEqual(values.num, 72)
self.assertEqual(values.num2, 73)
self.assertEqual(values.id__count, 1)
def test_named_values_list_expression_with_default_alias(self):
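# Passing the expression itself to values_list() gets an auto-generated
# alias (id__count2) that doesn't clash with the explicit id__count1
# annotation.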
expr = Count('id')
values = Number.objects.annotate(id__count1=expr).values_list(expr, 'id__count1', named=True).first()
self.assertEqual(values._fields, ('id__count2', 'id__count1'))
def test_named_values_list_expression(self):
expr = F('num') + 1
qs = Number.objects.annotate(combinedexpression1=expr).values_list(expr, 'combinedexpression1', named=True)
values = qs.first()
self.assertEqual(values._fields, ('combinedexpression2', 'combinedexpression1'))
class QuerySetSupportsPythonIdioms(TestCase):
@classmethod
def setUpTestData(cls):
some_date = datetime.datetime(2014, 5, 16, 12, 1)
for i in range(1, 8):
Article.objects.create(
name="Article {}".format(i), created=some_date)
def get_ordered_articles(self):
return Article.objects.all().order_by('name')
def test_can_get_items_using_index_and_slice_notation(self):
self.assertEqual(self.get_ordered_articles()[0].name, 'Article 1')
self.assertQuerysetEqual(
self.get_ordered_articles()[1:3],
["<Article: Article 2>", "<Article: Article 3>"]
)
def test_slicing_with_steps_can_be_used(self):
self.assertQuerysetEqual(
self.get_ordered_articles()[::2], [
"<Article: Article 1>",
"<Article: Article 3>",
"<Article: Article 5>",
"<Article: Article 7>"
]
)
def test_slicing_without_step_is_lazy(self):
with self.assertNumQueries(0):
self.get_ordered_articles()[0:5]
def test_slicing_with_tests_is_not_lazy(self):
with self.assertNumQueries(1):
self.get_ordered_articles()[0:5:3]
def test_slicing_can_slice_again_after_slicing(self):
self.assertQuerysetEqual(
self.get_ordered_articles()[0:5][0:2],
["<Article: Article 1>", "<Article: Article 2>"]
)
self.assertQuerysetEqual(self.get_ordered_articles()[0:5][4:], ["<Article: Article 5>"])
self.assertQuerysetEqual(self.get_ordered_articles()[0:5][5:], [])
# Some more tests!
self.assertQuerysetEqual(
self.get_ordered_articles()[2:][0:2],
["<Article: Article 3>", "<Article: Article 4>"]
)
self.assertQuerysetEqual(
self.get_ordered_articles()[2:][:2],
["<Article: Article 3>", "<Article: Article 4>"]
)
self.assertQuerysetEqual(self.get_ordered_articles()[2:][2:3], ["<Article: Article 5>"])
# Using an offset without a limit is also possible.
self.assertQuerysetEqual(
self.get_ordered_articles()[5:],
["<Article: Article 6>", "<Article: Article 7>"]
)
def test_slicing_cannot_filter_queryset_once_sliced(self):
with self.assertRaisesMessage(AssertionError, "Cannot filter a query once a slice has been taken."):
Article.objects.all()[0:5].filter(id=1)
def test_slicing_cannot_reorder_queryset_once_sliced(self):
with self.assertRaisesMessage(AssertionError, "Cannot reorder a query once a slice has been taken."):
Article.objects.all()[0:5].order_by('id')
def test_slicing_cannot_combine_queries_once_sliced(self):
with self.assertRaisesMessage(AssertionError, "Cannot combine queries once a slice has been taken."):
Article.objects.all()[0:1] & Article.objects.all()[4:5]
def test_slicing_negative_indexing_not_supported_for_single_element(self):
"""hint: inverting your ordering might do what you need"""
with self.assertRaisesMessage(AssertionError, "Negative indexing is not supported."):
Article.objects.all()[-1]
def test_slicing_negative_indexing_not_supported_for_range(self):
"""hint: inverting your ordering might do what you need"""
with self.assertRaisesMessage(AssertionError, "Negative indexing is not supported."):
Article.objects.all()[0:-5]
def test_can_get_number_of_items_in_queryset_using_standard_len(self):
self.assertEqual(len(Article.objects.filter(name__exact='Article 1')), 1)
def test_can_combine_queries_using_and_and_or_operators(self):
s1 = Article.objects.filter(name__exact='Article 1')
s2 = Article.objects.filter(name__exact='Article 2')
self.assertQuerysetEqual(
(s1 | s2).order_by('name'),
["<Article: Article 1>", "<Article: Article 2>"]
)
self.assertQuerysetEqual(s1 & s2, [])
class WeirdQuerysetSlicingTests(TestCase):
@classmethod
def setUpTestData(cls):
Number.objects.create(num=1)
Number.objects.create(num=2)
Article.objects.create(name='one', created=datetime.datetime.now())
Article.objects.create(name='two', created=datetime.datetime.now())
Article.objects.create(name='three', created=datetime.datetime.now())
Article.objects.create(name='four', created=datetime.datetime.now())
food = Food.objects.create(name='spam')
Eaten.objects.create(meal='spam with eggs', food=food)
def test_tickets_7698_10202(self):
# People like to slice with '0' as the high-water mark.
self.assertQuerysetEqual(Article.objects.all()[0:0], [])
self.assertQuerysetEqual(Article.objects.all()[0:0][:10], [])
self.assertEqual(Article.objects.all()[:0].count(), 0)
with self.assertRaisesMessage(TypeError, 'Cannot reverse a query once a slice has been taken.'):
Article.objects.all()[:0].latest('created')
def test_empty_resultset_sql(self):
# ticket #12192
self.assertNumQueries(0, lambda: list(Number.objects.all()[1:1]))
def test_empty_sliced_subquery(self):
self.assertEqual(Eaten.objects.filter(food__in=Food.objects.all()[0:0]).count(), 0)
def test_empty_sliced_subquery_exclude(self):
self.assertEqual(Eaten.objects.exclude(food__in=Food.objects.all()[0:0]).count(), 1)
def test_zero_length_values_slicing(self):
n = 42
with self.assertNumQueries(0):
self.assertQuerysetEqual(Article.objects.values()[n:n], [])
self.assertQuerysetEqual(Article.objects.values_list()[n:n], [])
class EscapingTests(TestCase):
def test_ticket_7302(self):
# Reserved names are appropriately escaped
ReservedName.objects.create(name='a', order=42)
ReservedName.objects.create(name='b', order=37)
self.assertQuerysetEqual(
ReservedName.objects.all().order_by('order'),
['<ReservedName: b>', '<ReservedName: a>']
)
self.assertQuerysetEqual(
ReservedName.objects.extra(select={'stuff': 'name'}, order_by=('order', 'stuff')),
['<ReservedName: b>', '<ReservedName: a>']
)
class ToFieldTests(TestCase):
def test_in_query(self):
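# __in accepts model instances; each one is resolved through the field the
# FK targets (its to_field).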
apple = Food.objects.create(name="apple")
pear = Food.objects.create(name="pear")
lunch = Eaten.objects.create(food=apple, meal="lunch")
dinner = Eaten.objects.create(food=pear, meal="dinner")
self.assertEqual(
set(Eaten.objects.filter(food__in=[apple, pear])),
{lunch, dinner},
)
def test_in_subquery(self):
apple = Food.objects.create(name="apple")
lunch = Eaten.objects.create(food=apple, meal="lunch")
self.assertEqual(
set(Eaten.objects.filter(food__in=Food.objects.filter(name='apple'))),
{lunch}
)
self.assertEqual(
set(Eaten.objects.filter(food__in=Food.objects.filter(name='apple').values('eaten__meal'))),
set()
)
self.assertEqual(
set(Food.objects.filter(eaten__in=Eaten.objects.filter(meal='lunch'))),
{apple}
)
def test_nested_in_subquery(self):
extra = ExtraInfo.objects.create()
author = Author.objects.create(num=42, extra=extra)
report = Report.objects.create(creator=author)
comment = ReportComment.objects.create(report=report)
comments = ReportComment.objects.filter(
report__in=Report.objects.filter(
creator__in=extra.author_set.all(),
),
)
self.assertSequenceEqual(comments, [comment])
def test_reverse_in(self):
apple = Food.objects.create(name="apple")
pear = Food.objects.create(name="pear")
lunch_apple = Eaten.objects.create(food=apple, meal="lunch")
lunch_pear = Eaten.objects.create(food=pear, meal="dinner")
self.assertEqual(
set(Food.objects.filter(eaten__in=[lunch_apple, lunch_pear])),
{apple, pear}
)
def test_single_object(self):
apple = Food.objects.create(name="apple")
lunch = Eaten.objects.create(food=apple, meal="lunch")
dinner = Eaten.objects.create(food=apple, meal="dinner")
self.assertEqual(
set(Eaten.objects.filter(food=apple)),
{lunch, dinner}
)
def test_single_object_reverse(self):
apple = Food.objects.create(name="apple")
lunch = Eaten.objects.create(food=apple, meal="lunch")
self.assertEqual(
set(Food.objects.filter(eaten=lunch)),
{apple}
)
def test_recursive_fk(self):
node1 = Node.objects.create(num=42)
node2 = Node.objects.create(num=1, parent=node1)
self.assertEqual(
list(Node.objects.filter(parent=node1)),
[node2]
)
def test_recursive_fk_reverse(self):
node1 = Node.objects.create(num=42)
node2 = Node.objects.create(num=1, parent=node1)
self.assertEqual(
list(Node.objects.filter(node=node2)),
[node1]
)
class IsNullTests(TestCase):
def test_primary_key(self):
custom = CustomPk.objects.create(name='pk')
null = Related.objects.create()
notnull = Related.objects.create(custom=custom)
self.assertSequenceEqual(Related.objects.filter(custom__isnull=False), [notnull])
self.assertSequenceEqual(Related.objects.filter(custom__isnull=True), [null])
def test_to_field(self):
apple = Food.objects.create(name="apple")
Eaten.objects.create(food=apple, meal="lunch")
Eaten.objects.create(meal="lunch")
self.assertQuerysetEqual(
Eaten.objects.filter(food__isnull=False),
['<Eaten: apple at lunch>']
)
self.assertQuerysetEqual(
Eaten.objects.filter(food__isnull=True),
['<Eaten: None at lunch>']
)
class ConditionalTests(TestCase):
"""Tests whose execution depend on different environment conditions like
Python version or DB backend features"""
@classmethod
def setUpTestData(cls):
generic = NamedCategory.objects.create(name="Generic")
t1 = Tag.objects.create(name='t1', category=generic)
Tag.objects.create(name='t2', parent=t1, category=generic)
t3 = Tag.objects.create(name='t3', parent=t1)
Tag.objects.create(name='t4', parent=t3)
Tag.objects.create(name='t5', parent=t3)
def test_infinite_loop(self):
# If you're not careful, it's possible to introduce infinite loops via
# default ordering on foreign keys in a cycle. We detect that.
with self.assertRaisesMessage(FieldError, 'Infinite loop caused by ordering.'):
list(LoopX.objects.all()) # Force queryset evaluation with list()
with self.assertRaisesMessage(FieldError, 'Infinite loop caused by ordering.'):
list(LoopZ.objects.all()) # Force queryset evaluation with list()
# Note that this doesn't cause an infinite loop, since the default
# ordering on the Tag model is empty (and thus defaults to using "id"
# for the related field).
self.assertEqual(len(Tag.objects.order_by('parent')), 5)
# ... but you can still order in a non-recursive fashion among linked
# fields (the previous test failed because the default ordering was
# recursive).
self.assertQuerysetEqual(
LoopX.objects.all().order_by('y__x__y__x__id'),
[]
)
# When grouping without specifying ordering, we add an explicit "ORDER BY NULL"
# portion in MySQL to prevent unnecessary sorting.
@skipUnlessDBFeature('requires_explicit_null_ordering_when_grouping')
def test_null_ordering_added(self):
query = Tag.objects.values_list('parent_id', flat=True).order_by().query
query.group_by = ['parent_id']
sql = query.get_compiler(DEFAULT_DB_ALIAS).as_sql()[0]
fragment = "ORDER BY "
pos = sql.find(fragment)
self.assertEqual(sql.find(fragment, pos + 1), -1)
self.assertEqual(sql.find("NULL", pos + len(fragment)), pos + len(fragment))
def test_in_list_limit(self):
# The "in" lookup works with lists of 1000 items or more.
# The number of items is picked to force three different IN batches
# for Oracle, yet still be less than the 2100 parameter limit for MSSQL.
numbers = list(range(2050))
max_query_params = connection.features.max_query_params
if max_query_params is None or max_query_params >= len(numbers):
Number.objects.bulk_create(Number(num=num) for num in numbers)
for number in [1000, 1001, 2000, len(numbers)]:
with self.subTest(number=number):
self.assertEqual(Number.objects.filter(num__in=numbers[:number]).count(), number)
class UnionTests(TestCase):
"""
Tests for the union of two querysets. Bug #12252.
"""
@classmethod
def setUpTestData(cls):
objectas = []
objectbs = []
objectcs = []
a_info = ['one', 'two', 'three']
for name in a_info:
o = ObjectA(name=name)
o.save()
objectas.append(o)
b_info = [('un', 1, objectas[0]), ('deux', 2, objectas[0]), ('trois', 3, objectas[2])]
for name, number, objecta in b_info:
o = ObjectB(name=name, num=number, objecta=objecta)
o.save()
objectbs.append(o)
c_info = [('ein', objectas[2], objectbs[2]), ('zwei', objectas[1], objectbs[1])]
for name, objecta, objectb in c_info:
o = ObjectC(name=name, objecta=objecta, objectb=objectb)
o.save()
objectcs.append(o)
def check_union(self, model, Q1, Q2):
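# The union of the two filtered querysets must equal filtering on Q1 | Q2,
# regardless of operand order.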
filter = model.objects.filter
self.assertEqual(set(filter(Q1) | filter(Q2)), set(filter(Q1 | Q2)))
self.assertEqual(set(filter(Q2) | filter(Q1)), set(filter(Q1 | Q2)))
def test_A_AB(self):
Q1 = Q(name='two')
Q2 = Q(objectb__name='deux')
self.check_union(ObjectA, Q1, Q2)
def test_A_AB2(self):
Q1 = Q(name='two')
Q2 = Q(objectb__name='deux', objectb__num=2)
self.check_union(ObjectA, Q1, Q2)
def test_AB_ACB(self):
Q1 = Q(objectb__name='deux')
Q2 = Q(objectc__objectb__name='deux')
self.check_union(ObjectA, Q1, Q2)
def test_BAB_BAC(self):
Q1 = Q(objecta__objectb__name='deux')
Q2 = Q(objecta__objectc__name='ein')
self.check_union(ObjectB, Q1, Q2)
def test_BAB_BACB(self):
Q1 = Q(objecta__objectb__name='deux')
Q2 = Q(objecta__objectc__objectb__name='trois')
self.check_union(ObjectB, Q1, Q2)
def test_BA_BCA__BAB_BAC_BCA(self):
Q1 = Q(objecta__name='one', objectc__objecta__name='two')
Q2 = Q(objecta__objectc__name='ein', objectc__objecta__name='three', objecta__objectb__name='trois')
self.check_union(ObjectB, Q1, Q2)
class DefaultValuesInsertTest(TestCase):
def test_no_extra_params(self):
"""
Can create an instance of a model with only the PK field (#17056).
"""
DumbCategory.objects.create()
class ExcludeTests(TestCase):
@classmethod
def setUpTestData(cls):
f1 = Food.objects.create(name='apples')
Food.objects.create(name='oranges')
Eaten.objects.create(food=f1, meal='dinner')
j1 = Job.objects.create(name='Manager')
r1 = Responsibility.objects.create(description='Playing golf')
j2 = Job.objects.create(name='Programmer')
r2 = Responsibility.objects.create(description='Programming')
JobResponsibilities.objects.create(job=j1, responsibility=r1)
JobResponsibilities.objects.create(job=j2, responsibility=r2)
def test_to_field(self):
self.assertQuerysetEqual(
Food.objects.exclude(eaten__meal='dinner'),
['<Food: oranges>'])
self.assertQuerysetEqual(
Job.objects.exclude(responsibilities__description='Playing golf'),
['<Job: Programmer>'])
self.assertQuerysetEqual(
Responsibility.objects.exclude(jobs__name='Manager'),
['<Responsibility: Programming>'])
def test_ticket14511(self):
alex = Person.objects.get_or_create(name='Alex')[0]
jane = Person.objects.get_or_create(name='Jane')[0]
oracle = Company.objects.get_or_create(name='Oracle')[0]
google = Company.objects.get_or_create(name='Google')[0]
microsoft = Company.objects.get_or_create(name='Microsoft')[0]
intel = Company.objects.get_or_create(name='Intel')[0]
def employ(employer, employee, title):
Employment.objects.get_or_create(employee=employee, employer=employer, title=title)
employ(oracle, alex, 'Engineer')
employ(oracle, alex, 'Developer')
employ(google, alex, 'Engineer')
employ(google, alex, 'Manager')
employ(microsoft, alex, 'Manager')
employ(intel, alex, 'Manager')
employ(microsoft, jane, 'Developer')
employ(intel, jane, 'Manager')
alex_tech_employers = alex.employers.filter(
employment__title__in=('Engineer', 'Developer')).distinct().order_by('name')
self.assertSequenceEqual(alex_tech_employers, [google, oracle])
alex_nontech_employers = alex.employers.exclude(
employment__title__in=('Engineer', 'Developer')).distinct().order_by('name')
self.assertSequenceEqual(alex_nontech_employers, [google, intel, microsoft])
class ExcludeTest17600(TestCase):
"""
Some regression tests for ticket #17600. Some of these likely duplicate
other existing tests.
"""
@classmethod
def setUpTestData(cls):
# Create a few Orders.
cls.o1 = Order.objects.create(pk=1)
cls.o2 = Order.objects.create(pk=2)
cls.o3 = Order.objects.create(pk=3)
# Create some OrderItems for the first order with homogeneous
# status_id values
cls.oi1 = OrderItem.objects.create(order=cls.o1, status=1)
cls.oi2 = OrderItem.objects.create(order=cls.o1, status=1)
cls.oi3 = OrderItem.objects.create(order=cls.o1, status=1)
# Create some OrderItems for the second order with heterogeneous
# status_id values
cls.oi4 = OrderItem.objects.create(order=cls.o2, status=1)
cls.oi5 = OrderItem.objects.create(order=cls.o2, status=2)
cls.oi6 = OrderItem.objects.create(order=cls.o2, status=3)
# Create some OrderItems for the third order with heterogeneous
# status_id values
cls.oi7 = OrderItem.objects.create(order=cls.o3, status=2)
cls.oi8 = OrderItem.objects.create(order=cls.o3, status=3)
cls.oi9 = OrderItem.objects.create(order=cls.o3, status=4)
def test_exclude_plain(self):
"""
This should exclude Orders which have some items with status 1
"""
self.assertQuerysetEqual(
Order.objects.exclude(items__status=1),
['<Order: 3>'])
def test_exclude_plain_distinct(self):
"""
This should exclude Orders which have some items with status 1
"""
self.assertQuerysetEqual(
Order.objects.exclude(items__status=1).distinct(),
['<Order: 3>'])
def test_exclude_with_q_object_distinct(self):
"""
This should exclude Orders which have some items with status 1
"""
self.assertQuerysetEqual(
Order.objects.exclude(Q(items__status=1)).distinct(),
['<Order: 3>'])
def test_exclude_with_q_object_no_distinct(self):
"""
This should exclude Orders which have some items with status 1
"""
self.assertQuerysetEqual(
Order.objects.exclude(Q(items__status=1)),
['<Order: 3>'])
def test_exclude_with_q_is_equal_to_plain_exclude(self):
"""
Using exclude(condition) and exclude(Q(condition)) should
yield the same QuerySet
"""
self.assertEqual(
list(Order.objects.exclude(items__status=1).distinct()),
list(Order.objects.exclude(Q(items__status=1)).distinct()))
def test_exclude_with_q_is_equal_to_plain_exclude_variation(self):
"""
Using exclude(condition) and exclude(Q(condition)) should
yield the same QuerySet
"""
self.assertEqual(
list(Order.objects.exclude(items__status=1)),
list(Order.objects.exclude(Q(items__status=1)).distinct()))
@unittest.expectedFailure
def test_only_orders_with_all_items_having_status_1(self):
"""
This should only return orders having ALL items set to status 1, or
those orders not having any items at all. The correct way to write
this query in SQL seems to be using two nested subqueries.
"""
self.assertQuerysetEqual(
Order.objects.exclude(~Q(items__status=1)).distinct(),
['<Order: 1>'])
class Exclude15786(TestCase):
"""Regression test for #15786"""
def test_ticket15786(self):
c1 = SimpleCategory.objects.create(name='c1')
c2 = SimpleCategory.objects.create(name='c2')
OneToOneCategory.objects.create(category=c1)
OneToOneCategory.objects.create(category=c2)
rel = CategoryRelationship.objects.create(first=c1, second=c2)
self.assertEqual(
CategoryRelationship.objects.exclude(
first__onetoonecategory=F('second__onetoonecategory')
).get(), rel
)
class NullInExcludeTest(TestCase):
@classmethod
def setUpTestData(cls):
NullableName.objects.create(name='i1')
NullableName.objects.create()
def test_null_in_exclude_qs(self):
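# exclude(name__in=...) must keep rows whose name is NULL (or '' on
# backends that treat empty strings as NULL).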
none_val = '' if connection.features.interprets_empty_strings_as_nulls else None
self.assertQuerysetEqual(
NullableName.objects.exclude(name__in=[]),
['i1', none_val], attrgetter('name'))
self.assertQuerysetEqual(
NullableName.objects.exclude(name__in=['i1']),
[none_val], attrgetter('name'))
self.assertQuerysetEqual(
NullableName.objects.exclude(name__in=['i3']),
['i1', none_val], attrgetter('name'))
inner_qs = NullableName.objects.filter(name='i1').values_list('name')
self.assertQuerysetEqual(
NullableName.objects.exclude(name__in=inner_qs),
[none_val], attrgetter('name'))
# The inner queryset wasn't executed - it should have been turned
# into a subquery above.
self.assertIs(inner_qs._result_cache, None)
@unittest.expectedFailure
def test_col_not_in_list_containing_null(self):
"""
The following case is not handled properly because
SQL's COL NOT IN (list containing null) handling is too weird to
abstract away.
"""
self.assertQuerysetEqual(
NullableName.objects.exclude(name__in=[None]),
['i1'], attrgetter('name'))
def test_double_exclude(self):
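# A double negation ~~Q(...) should compile to the plain condition, with no
# extra IS NOT NULL clause.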
self.assertEqual(
list(NullableName.objects.filter(~~Q(name='i1'))),
list(NullableName.objects.filter(Q(name='i1'))))
self.assertNotIn(
'IS NOT NULL',
str(NullableName.objects.filter(~~Q(name='i1')).query))
class EmptyStringsAsNullTest(TestCase):
"""
Filtering on non-null character fields works as expected.
The reason for these tests is that Oracle treats '' as NULL, and this
can cause problems in query construction. Refs #17957.
"""
@classmethod
def setUpTestData(cls):
cls.nc = NamedCategory.objects.create(name='')
def test_direct_exclude(self):
self.assertQuerysetEqual(
NamedCategory.objects.exclude(name__in=['nonexistent']),
[self.nc.pk], attrgetter('pk')
)
def test_joined_exclude(self):
self.assertQuerysetEqual(
DumbCategory.objects.exclude(namedcategory__name__in=['nonexistent']),
[self.nc.pk], attrgetter('pk')
)
def test_21001(self):
foo = NamedCategory.objects.create(name='foo')
self.assertQuerysetEqual(
NamedCategory.objects.exclude(name=''),
[foo.pk], attrgetter('pk')
)
class ProxyQueryCleanupTest(TestCase):
def test_evaluated_proxy_count(self):
"""
Generating the query string doesn't alter the query's state
in irreversible ways. Refs #18248.
"""
ProxyCategory.objects.create()
qs = ProxyCategory.objects.all()
self.assertEqual(qs.count(), 1)
str(qs.query)
self.assertEqual(qs.count(), 1)
class WhereNodeTest(SimpleTestCase):
class DummyNode:
def as_sql(self, compiler, connection):
return 'dummy', []
class MockCompiler:
def compile(self, node):
return node.as_sql(self, connection)
def __call__(self, name):
return connection.ops.quote_name(name)
def test_empty_full_handling_conjunction(self):
compiler = WhereNodeTest.MockCompiler()
w = WhereNode(children=[NothingNode()])
with self.assertRaises(EmptyResultSet):
w.as_sql(compiler, connection)
w.negate()
self.assertEqual(w.as_sql(compiler, connection), ('', []))
w = WhereNode(children=[self.DummyNode(), self.DummyNode()])
self.assertEqual(w.as_sql(compiler, connection), ('(dummy AND dummy)', []))
w.negate()
self.assertEqual(w.as_sql(compiler, connection), ('NOT (dummy AND dummy)', []))
w = WhereNode(children=[NothingNode(), self.DummyNode()])
with self.assertRaises(EmptyResultSet):
w.as_sql(compiler, connection)
w.negate()
self.assertEqual(w.as_sql(compiler, connection), ('', []))
def test_empty_full_handling_disjunction(self):
compiler = WhereNodeTest.MockCompiler()
w = WhereNode(children=[NothingNode()], connector='OR')
with self.assertRaises(EmptyResultSet):
w.as_sql(compiler, connection)
w.negate()
self.assertEqual(w.as_sql(compiler, connection), ('', []))
w = WhereNode(children=[self.DummyNode(), self.DummyNode()], connector='OR')
self.assertEqual(w.as_sql(compiler, connection), ('(dummy OR dummy)', []))
w.negate()
self.assertEqual(w.as_sql(compiler, connection), ('NOT (dummy OR dummy)', []))
w = WhereNode(children=[NothingNode(), self.DummyNode()], connector='OR')
self.assertEqual(w.as_sql(compiler, connection), ('dummy', []))
w.negate()
self.assertEqual(w.as_sql(compiler, connection), ('NOT (dummy)', []))
def test_empty_nodes(self):
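# An empty WhereNode places no constraint at all; combining it with
# NothingNode follows the normal AND/OR semantics.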
compiler = WhereNodeTest.MockCompiler()
empty_w = WhereNode()
w = WhereNode(children=[empty_w, empty_w])
self.assertEqual(w.as_sql(compiler, connection), ('', []))
w.negate()
with self.assertRaises(EmptyResultSet):
w.as_sql(compiler, connection)
w.connector = 'OR'
with self.assertRaises(EmptyResultSet):
w.as_sql(compiler, connection)
w.negate()
self.assertEqual(w.as_sql(compiler, connection), ('', []))
w = WhereNode(children=[empty_w, NothingNode()], connector='OR')
self.assertEqual(w.as_sql(compiler, connection), ('', []))
w = WhereNode(children=[empty_w, NothingNode()], connector='AND')
with self.assertRaises(EmptyResultSet):
w.as_sql(compiler, connection)
class QuerySetExceptionTests(SimpleTestCase):
def test_iter_exceptions(self):
qs = ExtraInfo.objects.only('author')
msg = "'ManyToOneRel' object has no attribute 'attname'"
with self.assertRaisesMessage(AttributeError, msg):
list(qs)
def test_invalid_qs_list(self):
# Test for #19895 - second iteration over invalid queryset
# raises errors.
qs = Article.objects.order_by('invalid_column')
msg = "Cannot resolve keyword 'invalid_column' into field."
with self.assertRaisesMessage(FieldError, msg):
list(qs)
with self.assertRaisesMessage(FieldError, msg):
list(qs)
def test_invalid_order_by(self):
msg = "Invalid order_by arguments: ['*']"
with self.assertRaisesMessage(FieldError, msg):
list(Article.objects.order_by('*'))
def test_invalid_queryset_model(self):
msg = 'Cannot use QuerySet for "Article": Use a QuerySet for "ExtraInfo".'
with self.assertRaisesMessage(ValueError, msg):
list(Author.objects.filter(extra=Article.objects.all()))
class NullJoinPromotionOrTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.d1 = ModelD.objects.create(name='foo')
d2 = ModelD.objects.create(name='bar')
cls.a1 = ModelA.objects.create(name='a1', d=cls.d1)
c = ModelC.objects.create(name='c')
b = ModelB.objects.create(name='b', c=c)
cls.a2 = ModelA.objects.create(name='a2', b=b, d=d2)
def test_ticket_17886(self):
# The first Q-object is generating the match, the rest of the filters
# should not remove the match even if they do not match anything. The
# problem here was that b__name generates a LOUTER JOIN, then
# b__c__name generates a join to c, which the ORM tried to promote but
# failed as that join isn't nullable.
q_obj = (
Q(d__name='foo') |
Q(b__name='foo') |
Q(b__c__name='foo')
)
qset = ModelA.objects.filter(q_obj)
self.assertEqual(list(qset), [self.a1])
# We generate one INNER JOIN to D. The join is direct and not nullable
# so we can use INNER JOIN for it. However, we can NOT use INNER JOIN
# for the b->c join, as a->b is nullable.
self.assertEqual(str(qset.query).count('INNER JOIN'), 1)
def test_isnull_filter_promotion(self):
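# b__name__isnull=True needs a LEFT OUTER JOIN, while the negated form (and
# __isnull=False) can use an INNER JOIN instead.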
qs = ModelA.objects.filter(Q(b__name__isnull=True))
self.assertEqual(str(qs.query).count('LEFT OUTER'), 1)
self.assertEqual(list(qs), [self.a1])
qs = ModelA.objects.filter(~Q(b__name__isnull=True))
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
self.assertEqual(list(qs), [self.a2])
qs = ModelA.objects.filter(~~Q(b__name__isnull=True))
self.assertEqual(str(qs.query).count('LEFT OUTER'), 1)
self.assertEqual(list(qs), [self.a1])
qs = ModelA.objects.filter(Q(b__name__isnull=False))
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
self.assertEqual(list(qs), [self.a2])
qs = ModelA.objects.filter(~Q(b__name__isnull=False))
self.assertEqual(str(qs.query).count('LEFT OUTER'), 1)
self.assertEqual(list(qs), [self.a1])
qs = ModelA.objects.filter(~~Q(b__name__isnull=False))
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
self.assertEqual(list(qs), [self.a2])
def test_null_join_demotion(self):
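# ANDed isnull=True/False conditions on the same relation allow the join to
# be demoted to INNER; ORed conditions keep the LEFT OUTER JOIN.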
qs = ModelA.objects.filter(Q(b__name__isnull=False) & Q(b__name__isnull=True))
self.assertIn(' INNER JOIN ', str(qs.query))
qs = ModelA.objects.filter(Q(b__name__isnull=True) & Q(b__name__isnull=False))
self.assertIn(' INNER JOIN ', str(qs.query))
qs = ModelA.objects.filter(Q(b__name__isnull=False) | Q(b__name__isnull=True))
self.assertIn(' LEFT OUTER JOIN ', str(qs.query))
qs = ModelA.objects.filter(Q(b__name__isnull=True) | Q(b__name__isnull=False))
self.assertIn(' LEFT OUTER JOIN ', str(qs.query))
def test_ticket_21366(self):
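# Both the creator and the ranking joins must stay LEFT OUTER so that
# reports without a creator or without a ranking still match the isnull
# condition.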
n = Note.objects.create(note='n', misc='m')
e = ExtraInfo.objects.create(info='info', note=n)
a = Author.objects.create(name='Author1', num=1, extra=e)
Ranking.objects.create(rank=1, author=a)
r1 = Report.objects.create(name='Foo', creator=a)
r2 = Report.objects.create(name='Bar')
Report.objects.create(name='Bar', creator=a)
qs = Report.objects.filter(
Q(creator__ranking__isnull=True) |
Q(creator__ranking__rank=1, name='Foo')
)
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 2)
self.assertEqual(str(qs.query).count(' JOIN '), 2)
self.assertSequenceEqual(qs.order_by('name'), [r2, r1])
def test_ticket_21748(self):
i1 = Identifier.objects.create(name='i1')
i2 = Identifier.objects.create(name='i2')
i3 = Identifier.objects.create(name='i3')
Program.objects.create(identifier=i1)
Channel.objects.create(identifier=i1)
Program.objects.create(identifier=i2)
self.assertSequenceEqual(Identifier.objects.filter(program=None, channel=None), [i3])
self.assertSequenceEqual(Identifier.objects.exclude(program=None, channel=None).order_by('name'), [i1, i2])
def test_ticket_21748_double_negated_and(self):
i1 = Identifier.objects.create(name='i1')
i2 = Identifier.objects.create(name='i2')
Identifier.objects.create(name='i3')
p1 = Program.objects.create(identifier=i1)
c1 = Channel.objects.create(identifier=i1)
Program.objects.create(identifier=i2)
# Check that ~~Q() (or equivalently .exclude(~Q())) works like Q() for
# join promotion.
qs1_doubleneg = Identifier.objects.exclude(~Q(program__id=p1.id, channel__id=c1.id)).order_by('pk')
qs1_filter = Identifier.objects.filter(program__id=p1.id, channel__id=c1.id).order_by('pk')
self.assertQuerysetEqual(qs1_doubleneg, qs1_filter, lambda x: x)
self.assertEqual(str(qs1_filter.query).count('JOIN'),
str(qs1_doubleneg.query).count('JOIN'))
self.assertEqual(2, str(qs1_doubleneg.query).count('INNER JOIN'))
self.assertEqual(str(qs1_filter.query).count('INNER JOIN'),
str(qs1_doubleneg.query).count('INNER JOIN'))
def test_ticket_21748_double_negated_or(self):
i1 = Identifier.objects.create(name='i1')
i2 = Identifier.objects.create(name='i2')
Identifier.objects.create(name='i3')
p1 = Program.objects.create(identifier=i1)
c1 = Channel.objects.create(identifier=i1)
p2 = Program.objects.create(identifier=i2)
# Test OR + double negation. The expected result is that channel is
# LEFT OUTER joined and program is INNER joined.
qs1_filter = Identifier.objects.filter(
Q(program__id=p2.id, channel__id=c1.id) | Q(program__id=p1.id)
).order_by('pk')
qs1_doubleneg = Identifier.objects.exclude(
~Q(Q(program__id=p2.id, channel__id=c1.id) | Q(program__id=p1.id))
).order_by('pk')
self.assertQuerysetEqual(qs1_doubleneg, qs1_filter, lambda x: x)
self.assertEqual(str(qs1_filter.query).count('JOIN'),
str(qs1_doubleneg.query).count('JOIN'))
self.assertEqual(1, str(qs1_doubleneg.query).count('INNER JOIN'))
self.assertEqual(str(qs1_filter.query).count('INNER JOIN'),
str(qs1_doubleneg.query).count('INNER JOIN'))
def test_ticket_21748_complex_filter(self):
i1 = Identifier.objects.create(name='i1')
i2 = Identifier.objects.create(name='i2')
Identifier.objects.create(name='i3')
p1 = Program.objects.create(identifier=i1)
c1 = Channel.objects.create(identifier=i1)
p2 = Program.objects.create(identifier=i2)
# Finally, a more complex case: one query where each NOT is pushed to the
# lowest level of the boolean tree, and another query where this isn't
# done.
qs1 = Identifier.objects.filter(
~Q(~Q(program__id=p2.id, channel__id=c1.id) & Q(program__id=p1.id))
).order_by('pk')
qs2 = Identifier.objects.filter(
Q(Q(program__id=p2.id, channel__id=c1.id) | ~Q(program__id=p1.id))
).order_by('pk')
self.assertQuerysetEqual(qs1, qs2, lambda x: x)
self.assertEqual(str(qs1.query).count('JOIN'),
str(qs2.query).count('JOIN'))
self.assertEqual(0, str(qs1.query).count('INNER JOIN'))
self.assertEqual(str(qs1.query).count('INNER JOIN'),
str(qs2.query).count('INNER JOIN'))
class ReverseJoinTrimmingTest(TestCase):
def test_reverse_trimming(self):
# We don't accidentally trim reverse joins - we can't know if there is
# anything on the other side of the join, so trimming reverse joins
# can't be done, ever.
t = Tag.objects.create()
qs = Tag.objects.filter(annotation__tag=t.pk)
self.assertIn('INNER JOIN', str(qs.query))
self.assertEqual(list(qs), [])
class JoinReuseTest(TestCase):
"""
The queries reuse joins sensibly (for example, direct joins
are always reused).
"""
def test_fk_reuse(self):
qs = Annotation.objects.filter(tag__name='foo').filter(tag__name='bar')
self.assertEqual(str(qs.query).count('JOIN'), 1)
def test_fk_reuse_select_related(self):
qs = Annotation.objects.filter(tag__name='foo').select_related('tag')
self.assertEqual(str(qs.query).count('JOIN'), 1)
def test_fk_reuse_annotation(self):
qs = Annotation.objects.filter(tag__name='foo').annotate(cnt=Count('tag__name'))
self.assertEqual(str(qs.query).count('JOIN'), 1)
def test_fk_reuse_disjunction(self):
qs = Annotation.objects.filter(Q(tag__name='foo') | Q(tag__name='bar'))
self.assertEqual(str(qs.query).count('JOIN'), 1)
def test_fk_reuse_order_by(self):
qs = Annotation.objects.filter(tag__name='foo').order_by('tag__name')
self.assertEqual(str(qs.query).count('JOIN'), 1)
def test_revo2o_reuse(self):
qs = Detail.objects.filter(member__name='foo').filter(member__name='foo')
self.assertEqual(str(qs.query).count('JOIN'), 1)
def test_revfk_noreuse(self):
qs = Author.objects.filter(report__name='r4').filter(report__name='r1')
self.assertEqual(str(qs.query).count('JOIN'), 2)
def test_inverted_q_across_relations(self):
"""
When a trimmable join is specified in the query (here school__), the
ORM detects it and removes unnecessary joins. The set of reusable joins
is updated after trimming the query so that other lookups don't
consider that the outer query's filters are in effect for the subquery
(#26551).
"""
springfield_elementary = School.objects.create()
hogward = School.objects.create()
Student.objects.create(school=springfield_elementary)
hp = Student.objects.create(school=hogward)
Classroom.objects.create(school=hogward, name='Potion')
Classroom.objects.create(school=springfield_elementary, name='Main')
qs = Student.objects.filter(
~(Q(school__classroom__name='Main') & Q(school__classroom__has_blackboard=None))
)
self.assertSequenceEqual(qs, [hp])
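# The tests below exercise join promotion and demotion for ORed filters: a join
# referenced only inside a disjunction must be a LEFT OUTER JOIN (a non-match in
# one branch must not drop the row), while an additional ANDed filter on the
# same relation demotes it back to an INNER JOIN.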
class DisjunctionPromotionTests(TestCase):
def test_disjunction_promotion_select_related(self):
fk1 = FK1.objects.create(f1='f1', f2='f2')
basea = BaseA.objects.create(a=fk1)
qs = BaseA.objects.filter(Q(a=fk1) | Q(b=2))
self.assertEqual(str(qs.query).count(' JOIN '), 0)
qs = qs.select_related('a', 'b')
self.assertEqual(str(qs.query).count(' INNER JOIN '), 0)
self.assertEqual(str(qs.query).count(' LEFT OUTER JOIN '), 2)
with self.assertNumQueries(1):
self.assertSequenceEqual(qs, [basea])
self.assertEqual(qs[0].a, fk1)
self.assertIs(qs[0].b, None)
def test_disjunction_promotion1(self):
# Pre-existing join, add two ORed filters to the same join,
# all joins can be INNER JOINS.
qs = BaseA.objects.filter(a__f1='foo')
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
qs = qs.filter(Q(b__f1='foo') | Q(b__f2='foo'))
self.assertEqual(str(qs.query).count('INNER JOIN'), 2)
# Reverse the order of AND and OR filters.
qs = BaseA.objects.filter(Q(b__f1='foo') | Q(b__f2='foo'))
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
qs = qs.filter(a__f1='foo')
self.assertEqual(str(qs.query).count('INNER JOIN'), 2)
def test_disjunction_promotion2(self):
qs = BaseA.objects.filter(a__f1='foo')
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
# Now we have two different joins in an ORed condition; these must be
# OUTER joins. The pre-existing join should remain INNER.
qs = qs.filter(Q(b__f1='foo') | Q(c__f2='foo'))
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 2)
# Reverse case.
qs = BaseA.objects.filter(Q(b__f1='foo') | Q(c__f2='foo'))
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 2)
qs = qs.filter(a__f1='foo')
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 2)
def test_disjunction_promotion3(self):
qs = BaseA.objects.filter(a__f2='bar')
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
# The ANDed a__f2 filter allows us to keep using INNER JOIN even inside
# the ORed case. If the join to a returns nothing, the ANDed filter for
# a__f2 can't be true.
qs = qs.filter(Q(a__f1='foo') | Q(b__f2='foo'))
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 1)
def test_disjunction_promotion3_demote(self):
# This one needs demotion logic: the first filter causes a to be
# outer joined, the second filter makes it inner join again.
qs = BaseA.objects.filter(
Q(a__f1='foo') | Q(b__f2='foo')).filter(a__f2='bar')
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 1)
def test_disjunction_promotion4_demote(self):
qs = BaseA.objects.filter(Q(a=1) | Q(a=2))
self.assertEqual(str(qs.query).count('JOIN'), 0)
# Demote needed for the "a" join. It is marked as outer join by
# above filter (even if it is trimmed away).
qs = qs.filter(a__f1='foo')
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
def test_disjunction_promotion4(self):
qs = BaseA.objects.filter(a__f1='foo')
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
qs = qs.filter(Q(a=1) | Q(a=2))
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
def test_disjunction_promotion5_demote(self):
qs = BaseA.objects.filter(Q(a=1) | Q(a=2))
# Note that the above filters on a force the join to an
# inner join even if it is trimmed.
self.assertEqual(str(qs.query).count('JOIN'), 0)
qs = qs.filter(Q(a__f1='foo') | Q(b__f1='foo'))
# So, now the a__f1 join doesn't need promotion.
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
# But b__f1 does.
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 1)
qs = BaseA.objects.filter(Q(a__f1='foo') | Q(b__f1='foo'))
# Now the join to a is created as LOUTER
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 2)
qs = qs.filter(Q(a=1) | Q(a=2))
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 1)
def test_disjunction_promotion6(self):
qs = BaseA.objects.filter(Q(a=1) | Q(a=2))
self.assertEqual(str(qs.query).count('JOIN'), 0)
qs = BaseA.objects.filter(Q(a__f1='foo') & Q(b__f1='foo'))
self.assertEqual(str(qs.query).count('INNER JOIN'), 2)
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 0)
qs = BaseA.objects.filter(Q(a__f1='foo') & Q(b__f1='foo'))
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 0)
self.assertEqual(str(qs.query).count('INNER JOIN'), 2)
qs = qs.filter(Q(a=1) | Q(a=2))
self.assertEqual(str(qs.query).count('INNER JOIN'), 2)
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 0)
def test_disjunction_promotion7(self):
qs = BaseA.objects.filter(Q(a=1) | Q(a=2))
self.assertEqual(str(qs.query).count('JOIN'), 0)
qs = BaseA.objects.filter(Q(a__f1='foo') | (Q(b__f1='foo') & Q(a__f1='bar')))
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 1)
qs = BaseA.objects.filter(
(Q(a__f1='foo') | Q(b__f1='foo')) & (Q(a__f1='bar') | Q(c__f1='foo'))
)
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 3)
self.assertEqual(str(qs.query).count('INNER JOIN'), 0)
qs = BaseA.objects.filter(
(Q(a__f1='foo') | (Q(a__f1='bar')) & (Q(b__f1='bar') | Q(c__f1='foo')))
)
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 2)
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
def test_disjunction_promotion_fexpression(self):
qs = BaseA.objects.filter(Q(a__f1=F('b__f1')) | Q(b__f1='foo'))
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 1)
self.assertEqual(str(qs.query).count('INNER JOIN'), 1)
qs = BaseA.objects.filter(Q(a__f1=F('c__f1')) | Q(b__f1='foo'))
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 3)
qs = BaseA.objects.filter(Q(a__f1=F('b__f1')) | Q(a__f2=F('b__f2')) | Q(c__f1='foo'))
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 3)
qs = BaseA.objects.filter(Q(a__f1=F('c__f1')) | (Q(pk=1) & Q(pk=2)))
self.assertEqual(str(qs.query).count('LEFT OUTER JOIN'), 2)
self.assertEqual(str(qs.query).count('INNER JOIN'), 0)
class ManyToManyExcludeTest(TestCase):
def test_exclude_many_to_many(self):
Identifier.objects.create(name='extra')
program = Program.objects.create(identifier=Identifier.objects.create(name='program'))
channel = Channel.objects.create(identifier=Identifier.objects.create(name='channel'))
channel.programs.add(program)
# channel contains 'program', so all Identifiers except that one
# should be returned.
self.assertQuerysetEqual(
Identifier.objects.exclude(program__channel=channel).order_by('name'),
['<Identifier: channel>', '<Identifier: extra>']
)
self.assertQuerysetEqual(
Identifier.objects.exclude(program__channel=None).order_by('name'),
['<Identifier: program>']
)
def test_ticket_12823(self):
pg3 = Page.objects.create(text='pg3')
pg2 = Page.objects.create(text='pg2')
pg1 = Page.objects.create(text='pg1')
pa1 = Paragraph.objects.create(text='pa1')
pa1.page.set([pg1, pg2])
pa2 = Paragraph.objects.create(text='pa2')
pa2.page.set([pg2, pg3])
pa3 = Paragraph.objects.create(text='pa3')
ch1 = Chapter.objects.create(title='ch1', paragraph=pa1)
ch2 = Chapter.objects.create(title='ch2', paragraph=pa2)
ch3 = Chapter.objects.create(title='ch3', paragraph=pa3)
b1 = Book.objects.create(title='b1', chapter=ch1)
b2 = Book.objects.create(title='b2', chapter=ch2)
b3 = Book.objects.create(title='b3', chapter=ch3)
q = Book.objects.exclude(chapter__paragraph__page__text='pg1')
self.assertNotIn('IS NOT NULL', str(q.query))
self.assertEqual(len(q), 2)
self.assertNotIn(b1, q)
self.assertIn(b2, q)
self.assertIn(b3, q)
class RelabelCloneTest(TestCase):
def test_ticket_19964(self):
my1 = MyObject.objects.create(data='foo')
my1.parent = my1
my1.save()
my2 = MyObject.objects.create(data='bar', parent=my1)
parents = MyObject.objects.filter(parent=F('id'))
children = MyObject.objects.filter(parent__in=parents).exclude(parent=F('id'))
self.assertEqual(list(parents), [my1])
# Evaluating the children query (which has parents as part of it) does
# not change results for the parents query.
self.assertEqual(list(children), [my2])
self.assertEqual(list(parents), [my1])
class Ticket20101Tests(TestCase):
def test_ticket_20101(self):
"""
Combining QuerySets with | (OR) works when one of them involves an
exclude() subquery.
"""
t = Tag.objects.create(name='foo')
a1 = Annotation.objects.create(tag=t, name='a1')
a2 = Annotation.objects.create(tag=t, name='a2')
a3 = Annotation.objects.create(tag=t, name='a3')
n = Note.objects.create(note='foo', misc='bar')
qs1 = Note.objects.exclude(annotation__in=[a1, a2])
qs2 = Note.objects.filter(annotation__in=[a3])
self.assertIn(n, qs1)
self.assertNotIn(n, qs2)
self.assertIn(n, (qs1 | qs2))
class EmptyStringPromotionTests(SimpleTestCase):
def test_empty_string_promotion(self):
qs = RelatedObject.objects.filter(single__name='')
if connection.features.interprets_empty_strings_as_nulls:
self.assertIn('LEFT OUTER JOIN', str(qs.query))
else:
self.assertNotIn('LEFT OUTER JOIN', str(qs.query))
class ValuesSubqueryTests(TestCase):
def test_values_in_subquery(self):
# If a values() queryset is used, then the given values
# will be used instead of forcing use of the relation's field.
o1 = Order.objects.create(id=-2)
o2 = Order.objects.create(id=-1)
oi1 = OrderItem.objects.create(order=o1, status=0)
oi1.status = oi1.pk
oi1.save()
OrderItem.objects.create(order=o2, status=0)
# The query below should match o1 as it has related order_item
# with id == status.
self.assertSequenceEqual(Order.objects.filter(items__in=OrderItem.objects.values_list('status')), [o1])
class DoubleInSubqueryTests(TestCase):
def test_double_subquery_in(self):
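# Two chained values_list() subqueries: LeafA pks feed the Join filter, and
# the resulting b ids feed the LeafB filter.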
lfa1 = LeafA.objects.create(data='foo')
lfa2 = LeafA.objects.create(data='bar')
lfb1 = LeafB.objects.create(data='lfb1')
lfb2 = LeafB.objects.create(data='lfb2')
Join.objects.create(a=lfa1, b=lfb1)
Join.objects.create(a=lfa2, b=lfb2)
leaf_as = LeafA.objects.filter(data='foo').values_list('pk', flat=True)
joins = Join.objects.filter(a__in=leaf_as).values_list('b__id', flat=True)
qs = LeafB.objects.filter(pk__in=joins)
self.assertSequenceEqual(qs, [lfb1])
class Ticket18785Tests(SimpleTestCase):
def test_ticket_18785(self):
# Test join trimming from ticket #18785.
qs = Item.objects.exclude(
note__isnull=False
).filter(
name='something', creator__extra__isnull=True
).order_by()
self.assertEqual(1, str(qs.query).count('INNER JOIN'))
self.assertEqual(0, str(qs.query).count('OUTER JOIN'))
class Ticket20788Tests(TestCase):
def test_ticket_20788(self):
Paragraph.objects.create()
paragraph = Paragraph.objects.create()
page = paragraph.page.create()
chapter = Chapter.objects.create(paragraph=paragraph)
Book.objects.create(chapter=chapter)
paragraph2 = Paragraph.objects.create()
Page.objects.create()
chapter2 = Chapter.objects.create(paragraph=paragraph2)
book2 = Book.objects.create(chapter=chapter2)
sentences_not_in_pub = Book.objects.exclude(chapter__paragraph__page=page)
self.assertSequenceEqual(sentences_not_in_pub, [book2])
class Ticket12807Tests(TestCase):
def test_ticket_12807(self):
p1 = Paragraph.objects.create()
p2 = Paragraph.objects.create()
# The ORed condition below should have no effect on the query - the
# ~Q(pk__in=[]) will always be True.
qs = Paragraph.objects.filter((Q(pk=p2.pk) | ~Q(pk__in=[])) & Q(pk=p1.pk))
self.assertSequenceEqual(qs, [p1])
class RelatedLookupTypeTests(TestCase):
error = 'Cannot query "%s": Must be "%s" instance.'
@classmethod
def setUpTestData(cls):
cls.oa = ObjectA.objects.create(name="oa")
cls.poa = ProxyObjectA.objects.get(name="oa")
cls.coa = ChildObjectA.objects.create(name="coa")
cls.wrong_type = Order.objects.create(id=cls.oa.pk)
cls.ob = ObjectB.objects.create(name="ob", objecta=cls.oa, num=1)
ProxyObjectB.objects.create(name="pob", objecta=cls.oa, num=2)
cls.pob = ProxyObjectB.objects.all()
ObjectC.objects.create(childobjecta=cls.coa)
def test_wrong_type_lookup(self):
"""
A ValueError is raised when the incorrect object type is passed to a
query lookup.
"""
# Passing incorrect object type
with self.assertRaisesMessage(ValueError, self.error % (self.wrong_type, ObjectA._meta.object_name)):
ObjectB.objects.get(objecta=self.wrong_type)
with self.assertRaisesMessage(ValueError, self.error % (self.wrong_type, ObjectA._meta.object_name)):
ObjectB.objects.filter(objecta__in=[self.wrong_type])
with self.assertRaisesMessage(ValueError, self.error % (self.wrong_type, ObjectA._meta.object_name)):
ObjectB.objects.filter(objecta=self.wrong_type)
with self.assertRaisesMessage(ValueError, self.error % (self.wrong_type, ObjectB._meta.object_name)):
ObjectA.objects.filter(objectb__in=[self.wrong_type, self.ob])
# Passing an object of the class on which query is done.
with self.assertRaisesMessage(ValueError, self.error % (self.ob, ObjectA._meta.object_name)):
ObjectB.objects.filter(objecta__in=[self.poa, self.ob])
with self.assertRaisesMessage(ValueError, self.error % (self.ob, ChildObjectA._meta.object_name)):
ObjectC.objects.exclude(childobjecta__in=[self.coa, self.ob])
def test_wrong_backward_lookup(self):
"""
A ValueError is raised when the incorrect object type is passed to a
query lookup for backward relations.
"""
with self.assertRaisesMessage(ValueError, self.error % (self.oa, ObjectB._meta.object_name)):
ObjectA.objects.filter(objectb__in=[self.oa, self.ob])
with self.assertRaisesMessage(ValueError, self.error % (self.oa, ObjectB._meta.object_name)):
ObjectA.objects.exclude(objectb=self.oa)
with self.assertRaisesMessage(ValueError, self.error % (self.wrong_type, ObjectB._meta.object_name)):
ObjectA.objects.get(objectb=self.wrong_type)
def test_correct_lookup(self):
"""
When passing proxy model objects, child objects, or parent objects,
lookups work fine.
"""
out_a = ['<ObjectA: oa>']
out_b = ['<ObjectB: ob>', '<ObjectB: pob>']
out_c = ['<ObjectC: >']
# proxy model objects
self.assertQuerysetEqual(ObjectB.objects.filter(objecta=self.poa).order_by('name'), out_b)
self.assertQuerysetEqual(ObjectA.objects.filter(objectb__in=self.pob).order_by('pk'), out_a * 2)
# child objects
self.assertQuerysetEqual(ObjectB.objects.filter(objecta__in=[self.coa]), [])
self.assertQuerysetEqual(ObjectB.objects.filter(objecta__in=[self.poa, self.coa]).order_by('name'), out_b)
self.assertQuerysetEqual(
ObjectB.objects.filter(objecta__in=iter([self.poa, self.coa])).order_by('name'),
out_b
)
# parent objects
self.assertQuerysetEqual(ObjectC.objects.exclude(childobjecta=self.oa), out_c)
# QuerySet related object type checking shouldn't issue queries
# (the querysets aren't evaluated here, hence zero queries) (#23266).
with self.assertNumQueries(0):
ObjectB.objects.filter(objecta__in=ObjectA.objects.all())
def test_values_queryset_lookup(self):
"""
#23396 -- ValuesQuerySets are not checked for compatibility with the lookup field
"""
# Make sure the num and objecta field values match.
ob = ObjectB.objects.get(name='ob')
ob.num = ob.objecta.pk
ob.save()
pob = ObjectB.objects.get(name='pob')
pob.num = pob.objecta.pk
pob.save()
self.assertQuerysetEqual(ObjectB.objects.filter(
objecta__in=ObjectB.objects.all().values_list('num')
).order_by('pk'), ['<ObjectB: ob>', '<ObjectB: pob>'])
class Ticket14056Tests(TestCase):
def test_ticket_14056(self):
s1 = SharedConnection.objects.create(data='s1')
s2 = SharedConnection.objects.create(data='s2')
s3 = SharedConnection.objects.create(data='s3')
PointerA.objects.create(connection=s2)
expected_ordering = (
[s1, s3, s2] if connection.features.nulls_order_largest
else [s2, s1, s3]
)
self.assertSequenceEqual(SharedConnection.objects.order_by('-pointera__connection', 'pk'), expected_ordering)
class Ticket20955Tests(TestCase):
def test_ticket_20955(self):
jack = Staff.objects.create(name='jackstaff')
jackstaff = StaffUser.objects.create(staff=jack)
jill = Staff.objects.create(name='jillstaff')
jillstaff = StaffUser.objects.create(staff=jill)
task = Task.objects.create(creator=jackstaff, owner=jillstaff, title="task")
task_get = Task.objects.get(pk=task.pk)
# Load data so that assertNumQueries doesn't complain about the get
# version's queries.
task_get.creator.staffuser.staff
task_get.owner.staffuser.staff
qs = Task.objects.select_related(
'creator__staffuser__staff', 'owner__staffuser__staff')
self.assertEqual(str(qs.query).count(' JOIN '), 6)
task_select_related = qs.get(pk=task.pk)
with self.assertNumQueries(0):
self.assertEqual(task_select_related.creator.staffuser.staff,
task_get.creator.staffuser.staff)
self.assertEqual(task_select_related.owner.staffuser.staff,
task_get.owner.staffuser.staff)
class Ticket21203Tests(TestCase):
def test_ticket_21203(self):
p = Ticket21203Parent.objects.create(parent_bool=True)
c = Ticket21203Child.objects.create(parent=p)
qs = Ticket21203Child.objects.select_related('parent').defer('parent__created')
self.assertSequenceEqual(qs, [c])
self.assertIs(qs[0].parent.parent_bool, True)
class ValuesJoinPromotionTests(TestCase):
def test_values_no_promotion_for_existing(self):
qs = Node.objects.filter(parent__parent__isnull=False)
self.assertIn(' INNER JOIN ', str(qs.query))
qs = qs.values('parent__parent__id')
self.assertIn(' INNER JOIN ', str(qs.query))
# Make sure there is a left outer join without the filter.
qs = Node.objects.values('parent__parent__id')
self.assertIn(' LEFT OUTER JOIN ', str(qs.query))
def test_non_nullable_fk_not_promoted(self):
qs = ObjectB.objects.values('objecta__name')
self.assertIn(' INNER JOIN ', str(qs.query))
def test_ticket_21376(self):
a = ObjectA.objects.create()
ObjectC.objects.create(objecta=a)
qs = ObjectC.objects.filter(
Q(objecta=a) | Q(objectb__objecta=a),
)
qs = qs.filter(
Q(objectb=1) | Q(objecta=a),
)
self.assertEqual(qs.count(), 1)
tblname = connection.ops.quote_name(ObjectB._meta.db_table)
self.assertIn(' LEFT OUTER JOIN %s' % tblname, str(qs.query))
class ForeignKeyToBaseExcludeTests(TestCase):
def test_ticket_21787(self):
sc1 = SpecialCategory.objects.create(special_name='sc1', name='sc1')
sc2 = SpecialCategory.objects.create(special_name='sc2', name='sc2')
sc3 = SpecialCategory.objects.create(special_name='sc3', name='sc3')
c1 = CategoryItem.objects.create(category=sc1)
CategoryItem.objects.create(category=sc2)
self.assertSequenceEqual(SpecialCategory.objects.exclude(categoryitem__id=c1.pk).order_by('name'), [sc2, sc3])
self.assertSequenceEqual(SpecialCategory.objects.filter(categoryitem__id=c1.pk), [sc1])
class ReverseM2MCustomPkTests(TestCase):
def test_ticket_21879(self):
cpt1 = CustomPkTag.objects.create(id='cpt1', tag='cpt1')
cp1 = CustomPk.objects.create(name='cp1', extra='extra')
cp1.custompktag_set.add(cpt1)
self.assertSequenceEqual(CustomPk.objects.filter(custompktag=cpt1), [cp1])
self.assertSequenceEqual(CustomPkTag.objects.filter(custom_pk=cp1), [cpt1])
class Ticket22429Tests(TestCase):
def test_ticket_22429(self):
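# Negating the F() comparison leaves only students without a classroom in
# their own school (here st2, who isn't in any classroom).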
sc1 = School.objects.create()
st1 = Student.objects.create(school=sc1)
sc2 = School.objects.create()
st2 = Student.objects.create(school=sc2)
cr = Classroom.objects.create(school=sc1)
cr.students.add(st1)
queryset = Student.objects.filter(~Q(classroom__school=F('school')))
self.assertSequenceEqual(queryset, [st2])
class Ticket23605Tests(TestCase):
def test_ticket_23605(self):
# Test filtering on a complicated q-object from ticket's report.
# The query structure is such that we have multiple nested subqueries.
# The original problem was that the inner queries weren't relabeled
# correctly.
# See also #24090.
a1 = Ticket23605A.objects.create()
a2 = Ticket23605A.objects.create()
c1 = Ticket23605C.objects.create(field_c0=10000.0)
Ticket23605B.objects.create(
field_b0=10000.0, field_b1=True,
modelc_fk=c1, modela_fk=a1)
complex_q = Q(pk__in=Ticket23605A.objects.filter(
Q(
# True for a1 as field_b0 = 10000, field_c0=10000
# False for a2 as no ticket23605b found
ticket23605b__field_b0__gte=1000000 /
F("ticket23605b__modelc_fk__field_c0")
) &
# True for a1 (field_b1=True)
Q(ticket23605b__field_b1=True) & ~Q(ticket23605b__pk__in=Ticket23605B.objects.filter(
~(
# Same filters as the commented filters above, but double-negated
# (once by the ~Q() above, once by the ~() here). So, again, a1
# matches and a2 doesn't.
Q(field_b1=True) &
Q(field_b0__gte=1000000 / F("modelc_fk__field_c0"))
)
))).filter(ticket23605b__field_b1=True))
qs1 = Ticket23605A.objects.filter(complex_q)
self.assertSequenceEqual(qs1, [a1])
qs2 = Ticket23605A.objects.exclude(complex_q)
self.assertSequenceEqual(qs2, [a2])
class TestTicket24279(TestCase):
def test_ticket_24278(self):
School.objects.create()
qs = School.objects.filter(Q(pk__in=()) | Q())
self.assertQuerysetEqual(qs, [])
class TestInvalidValuesRelation(SimpleTestCase):
def test_invalid_values(self):
msg = "invalid literal for int() with base 10: 'abc'"
with self.assertRaisesMessage(ValueError, msg):
Annotation.objects.filter(tag='abc')
with self.assertRaisesMessage(ValueError, msg):
Annotation.objects.filter(tag__in=[123, 'abc'])
class TestTicket24605(TestCase):
def test_ticket_24605(self):
"""
Subquery table names should be quoted.
"""
i1 = Individual.objects.create(alive=True)
RelatedIndividual.objects.create(related=i1)
i2 = Individual.objects.create(alive=False)
RelatedIndividual.objects.create(related=i2)
i3 = Individual.objects.create(alive=True)
i4 = Individual.objects.create(alive=False)
self.assertSequenceEqual(Individual.objects.filter(Q(alive=False), Q(related_individual__isnull=True)), [i4])
self.assertSequenceEqual(
Individual.objects.exclude(Q(alive=False), Q(related_individual__isnull=True)).order_by('pk'),
[i1, i2, i3]
)
class Ticket23622Tests(TestCase):
@skipUnlessDBFeature('can_distinct_on_fields')
def test_ticket_23622(self):
"""
Make sure __pk__in and __in work the same for related fields when
using a DISTINCT ON subquery.
"""
a1 = Ticket23605A.objects.create()
a2 = Ticket23605A.objects.create()
c1 = Ticket23605C.objects.create(field_c0=0.0)
Ticket23605B.objects.create(
modela_fk=a1, field_b0=123,
field_b1=True,
modelc_fk=c1,
)
Ticket23605B.objects.create(
modela_fk=a1, field_b0=23,
field_b1=True,
modelc_fk=c1,
)
Ticket23605B.objects.create(
modela_fk=a1, field_b0=234,
field_b1=True,
modelc_fk=c1,
)
Ticket23605B.objects.create(
modela_fk=a1, field_b0=12,
field_b1=True,
modelc_fk=c1,
)
Ticket23605B.objects.create(
modela_fk=a2, field_b0=567,
field_b1=True,
modelc_fk=c1,
)
Ticket23605B.objects.create(
modela_fk=a2, field_b0=76,
field_b1=True,
modelc_fk=c1,
)
Ticket23605B.objects.create(
modela_fk=a2, field_b0=7,
field_b1=True,
modelc_fk=c1,
)
Ticket23605B.objects.create(
modela_fk=a2, field_b0=56,
field_b1=True,
modelc_fk=c1,
)
qx = (
Q(ticket23605b__pk__in=Ticket23605B.objects.order_by('modela_fk', '-field_b1').distinct('modela_fk')) &
Q(ticket23605b__field_b0__gte=300)
)
qy = (
Q(ticket23605b__in=Ticket23605B.objects.order_by('modela_fk', '-field_b1').distinct('modela_fk')) &
Q(ticket23605b__field_b0__gte=300)
)
self.assertEqual(
set(Ticket23605A.objects.filter(qx).values_list('pk', flat=True)),
set(Ticket23605A.objects.filter(qy).values_list('pk', flat=True))
)
self.assertSequenceEqual(Ticket23605A.objects.filter(qx), [a2])
|
22ab2859ee1343e764ac6dd248d7ada5bab7d4ba8839eb55e10b7de69bca4efb | from django.db.models import Exists, F, IntegerField, OuterRef, Value
from django.db.utils import DatabaseError, NotSupportedError
from django.test import TestCase, skipIfDBFeature, skipUnlessDBFeature
from .models import Number, ReservedName
@skipUnlessDBFeature('supports_select_union')
class QuerySetSetOperationTests(TestCase):
@classmethod
def setUpTestData(cls):
Number.objects.bulk_create(Number(num=i) for i in range(10))
def number_transform(self, value):
return value.num
def assertNumbersEqual(self, queryset, expected_numbers, ordered=True):
self.assertQuerysetEqual(queryset, expected_numbers, self.number_transform, ordered)
def test_simple_union(self):
qs1 = Number.objects.filter(num__lte=1)
qs2 = Number.objects.filter(num__gte=8)
qs3 = Number.objects.filter(num=5)
self.assertNumbersEqual(qs1.union(qs2, qs3), [0, 1, 5, 8, 9], ordered=False)
@skipUnlessDBFeature('supports_select_intersection')
def test_simple_intersection(self):
qs1 = Number.objects.filter(num__lte=5)
qs2 = Number.objects.filter(num__gte=5)
qs3 = Number.objects.filter(num__gte=4, num__lte=6)
self.assertNumbersEqual(qs1.intersection(qs2, qs3), [5], ordered=False)
@skipUnlessDBFeature('supports_select_intersection')
def test_intersection_with_values(self):
ReservedName.objects.create(name='a', order=2)
qs1 = ReservedName.objects.all()
reserved_name = qs1.intersection(qs1).values('name', 'order', 'id').get()
self.assertEqual(reserved_name['name'], 'a')
self.assertEqual(reserved_name['order'], 2)
reserved_name = qs1.intersection(qs1).values_list('name', 'order', 'id').get()
self.assertEqual(reserved_name[:2], ('a', 2))
@skipUnlessDBFeature('supports_select_difference')
def test_simple_difference(self):
qs1 = Number.objects.filter(num__lte=5)
qs2 = Number.objects.filter(num__lte=4)
self.assertNumbersEqual(qs1.difference(qs2), [5], ordered=False)
def test_union_distinct(self):
qs1 = Number.objects.all()
qs2 = Number.objects.all()
self.assertEqual(len(list(qs1.union(qs2, all=True))), 20)
self.assertEqual(len(list(qs1.union(qs2))), 10)
@skipUnlessDBFeature('supports_select_intersection')
def test_intersection_with_empty_qs(self):
qs1 = Number.objects.all()
qs2 = Number.objects.none()
qs3 = Number.objects.filter(pk__in=[])
self.assertEqual(len(qs1.intersection(qs2)), 0)
self.assertEqual(len(qs1.intersection(qs3)), 0)
self.assertEqual(len(qs2.intersection(qs1)), 0)
self.assertEqual(len(qs3.intersection(qs1)), 0)
self.assertEqual(len(qs2.intersection(qs2)), 0)
self.assertEqual(len(qs3.intersection(qs3)), 0)
@skipUnlessDBFeature('supports_select_difference')
def test_difference_with_empty_qs(self):
qs1 = Number.objects.all()
qs2 = Number.objects.none()
qs3 = Number.objects.filter(pk__in=[])
self.assertEqual(len(qs1.difference(qs2)), 10)
self.assertEqual(len(qs1.difference(qs3)), 10)
self.assertEqual(len(qs2.difference(qs1)), 0)
self.assertEqual(len(qs3.difference(qs1)), 0)
self.assertEqual(len(qs2.difference(qs2)), 0)
self.assertEqual(len(qs3.difference(qs3)), 0)
@skipUnlessDBFeature('supports_select_difference')
def test_difference_with_values(self):
ReservedName.objects.create(name='a', order=2)
qs1 = ReservedName.objects.all()
qs2 = ReservedName.objects.none()
reserved_name = qs1.difference(qs2).values('name', 'order', 'id').get()
self.assertEqual(reserved_name['name'], 'a')
self.assertEqual(reserved_name['order'], 2)
reserved_name = qs1.difference(qs2).values_list('name', 'order', 'id').get()
self.assertEqual(reserved_name[:2], ('a', 2))
def test_union_with_empty_qs(self):
qs1 = Number.objects.all()
qs2 = Number.objects.none()
qs3 = Number.objects.filter(pk__in=[])
self.assertEqual(len(qs1.union(qs2)), 10)
self.assertEqual(len(qs2.union(qs1)), 10)
self.assertEqual(len(qs1.union(qs3)), 10)
self.assertEqual(len(qs3.union(qs1)), 10)
self.assertEqual(len(qs2.union(qs1, qs1, qs1)), 10)
self.assertEqual(len(qs2.union(qs1, qs1, all=True)), 20)
self.assertEqual(len(qs2.union(qs2)), 0)
self.assertEqual(len(qs3.union(qs3)), 0)
def test_limits(self):
qs1 = Number.objects.all()
qs2 = Number.objects.all()
self.assertEqual(len(list(qs1.union(qs2)[:2])), 2)
def test_ordering(self):
qs1 = Number.objects.filter(num__lte=1)
qs2 = Number.objects.filter(num__gte=2, num__lte=3)
self.assertNumbersEqual(qs1.union(qs2).order_by('-num'), [3, 2, 1, 0])
def test_ordering_by_f_expression(self):
qs1 = Number.objects.filter(num__lte=1)
qs2 = Number.objects.filter(num__gte=2, num__lte=3)
self.assertNumbersEqual(qs1.union(qs2).order_by(F('num').desc()), [3, 2, 1, 0])
def test_union_with_values(self):
ReservedName.objects.create(name='a', order=2)
qs1 = ReservedName.objects.all()
reserved_name = qs1.union(qs1).values('name', 'order', 'id').get()
self.assertEqual(reserved_name['name'], 'a')
self.assertEqual(reserved_name['order'], 2)
reserved_name = qs1.union(qs1).values_list('name', 'order', 'id').get()
self.assertEqual(reserved_name[:2], ('a', 2))
def test_union_with_two_annotated_values_list(self):
qs1 = Number.objects.filter(num=1).annotate(
count=Value(0, IntegerField()),
).values_list('num', 'count')
qs2 = Number.objects.filter(num=2).values('pk').annotate(
count=F('num'),
).annotate(
num=Value(1, IntegerField()),
).values_list('num', 'count')
self.assertCountEqual(qs1.union(qs2), [(1, 0), (2, 1)])
def test_union_with_extra_and_values_list(self):
qs1 = Number.objects.filter(num=1).extra(
select={'count': 0},
).values_list('num', 'count')
qs2 = Number.objects.filter(num=2).extra(select={'count': 1})
self.assertCountEqual(qs1.union(qs2), [(1, 0), (2, 1)])
def test_union_with_values_list_on_annotated_and_unannotated(self):
ReservedName.objects.create(name='rn1', order=1)
qs1 = Number.objects.annotate(
has_reserved_name=Exists(ReservedName.objects.filter(order=OuterRef('num')))
).filter(has_reserved_name=True)
qs2 = Number.objects.filter(num=9)
self.assertCountEqual(qs1.union(qs2).values_list('num', flat=True), [1, 9])
def test_count_union(self):
qs1 = Number.objects.filter(num__lte=1).values('num')
qs2 = Number.objects.filter(num__gte=2, num__lte=3).values('num')
self.assertEqual(qs1.union(qs2).count(), 4)
def test_count_union_empty_result(self):
qs = Number.objects.filter(pk__in=[])
self.assertEqual(qs.union(qs).count(), 0)
@skipUnlessDBFeature('supports_select_difference')
def test_count_difference(self):
qs1 = Number.objects.filter(num__lt=10)
qs2 = Number.objects.filter(num__lt=9)
self.assertEqual(qs1.difference(qs2).count(), 1)
@skipUnlessDBFeature('supports_select_intersection')
def test_count_intersection(self):
qs1 = Number.objects.filter(num__gte=5)
qs2 = Number.objects.filter(num__lte=5)
self.assertEqual(qs1.intersection(qs2).count(), 1)
@skipUnlessDBFeature('supports_slicing_ordering_in_compound')
def test_ordering_subqueries(self):
qs1 = Number.objects.order_by('num')[:2]
qs2 = Number.objects.order_by('-num')[:2]
self.assertNumbersEqual(qs1.union(qs2).order_by('-num')[:4], [9, 8, 1, 0])
@skipIfDBFeature('supports_slicing_ordering_in_compound')
def test_unsupported_ordering_slicing_raises_db_error(self):
qs1 = Number.objects.all()
qs2 = Number.objects.all()
msg = 'LIMIT/OFFSET not allowed in subqueries of compound statements'
with self.assertRaisesMessage(DatabaseError, msg):
list(qs1.union(qs2[:10]))
msg = 'ORDER BY not allowed in subqueries of compound statements'
with self.assertRaisesMessage(DatabaseError, msg):
list(qs1.order_by('id').union(qs2))
@skipIfDBFeature('supports_select_intersection')
def test_unsupported_intersection_raises_db_error(self):
qs1 = Number.objects.all()
qs2 = Number.objects.all()
msg = 'intersection is not supported on this database backend'
with self.assertRaisesMessage(NotSupportedError, msg):
list(qs1.intersection(qs2))
def test_combining_multiple_models(self):
ReservedName.objects.create(name='99 little bugs', order=99)
qs1 = Number.objects.filter(num=1).values_list('num', flat=True)
qs2 = ReservedName.objects.values_list('order')
self.assertEqual(list(qs1.union(qs2).order_by('num')), [1, 99])
def test_order_raises_on_non_selected_column(self):
qs1 = Number.objects.filter().annotate(
annotation=Value(1, IntegerField()),
).values('annotation', num2=F('num'))
qs2 = Number.objects.filter().values('id', 'num')
# Should not raise
list(qs1.union(qs2).order_by('annotation'))
list(qs1.union(qs2).order_by('num2'))
msg = 'ORDER BY term does not match any column in the result set'
# 'id' is not part of the select
with self.assertRaisesMessage(DatabaseError, msg):
list(qs1.union(qs2).order_by('id'))
# 'num' got re-aliased to 'num2'
with self.assertRaisesMessage(DatabaseError, msg):
list(qs1.union(qs2).order_by('num'))
# With the order switched, 'num' is part of the result set again:
list(qs2.union(qs1).order_by('num'))
@skipUnlessDBFeature('supports_select_difference', 'supports_select_intersection')
def test_qs_with_subcompound_qs(self):
qs1 = Number.objects.all()
qs2 = Number.objects.intersection(Number.objects.filter(num__gt=1))
self.assertEqual(qs1.difference(qs2).count(), 2)
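# Illustrative sketch, not part of the original test suite: the set operations
# exercised above compose as chained queryset methods. This helper assumes the
# Number model imported at the top of this module; it is only defined here and
# never called at import time.
def _example_compound_query():
    low = Number.objects.filter(num__lte=3)
    high = Number.objects.filter(num__gte=7)
    # union() returns distinct rows by default; pass all=True to keep duplicates.
    return low.union(high).order_by('num')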
|
f6f7875e8eedca288532d34a0f0ffe2f01161799fab6fe32f1dc4bbb2f0dcf4e | from datetime import datetime
from django.core.exceptions import FieldError
from django.db.models import CharField, F, Q
from django.db.models.expressions import SimpleCol
from django.db.models.fields.related_lookups import RelatedIsNull
from django.db.models.functions import Lower
from django.db.models.lookups import Exact, GreaterThan, IsNull, LessThan
from django.db.models.sql.query import Query
from django.db.models.sql.where import OR
from django.test import SimpleTestCase
from django.test.utils import register_lookup
from .models import Author, Item, ObjectC, Ranking
class TestQuery(SimpleTestCase):
def test_simple_query(self):
query = Query(Author)
where = query.build_where(Q(num__gt=2))
lookup = where.children[0]
self.assertIsInstance(lookup, GreaterThan)
self.assertEqual(lookup.rhs, 2)
self.assertEqual(lookup.lhs.target, Author._meta.get_field('num'))
def test_complex_query(self):
query = Query(Author)
where = query.build_where(Q(num__gt=2) | Q(num__lt=0))
self.assertEqual(where.connector, OR)
lookup = where.children[0]
self.assertIsInstance(lookup, GreaterThan)
self.assertEqual(lookup.rhs, 2)
self.assertEqual(lookup.lhs.target, Author._meta.get_field('num'))
lookup = where.children[1]
self.assertIsInstance(lookup, LessThan)
self.assertEqual(lookup.rhs, 0)
self.assertEqual(lookup.lhs.target, Author._meta.get_field('num'))
def test_multiple_fields(self):
query = Query(Item)
where = query.build_where(Q(modified__gt=F('created')))
lookup = where.children[0]
self.assertIsInstance(lookup, GreaterThan)
self.assertIsInstance(lookup.rhs, SimpleCol)
self.assertIsInstance(lookup.lhs, SimpleCol)
self.assertEqual(lookup.rhs.target, Item._meta.get_field('created'))
self.assertEqual(lookup.lhs.target, Item._meta.get_field('modified'))
def test_transform(self):
query = Query(Author)
with register_lookup(CharField, Lower):
where = query.build_where(~Q(name__lower='foo'))
lookup = where.children[0]
self.assertIsInstance(lookup, Exact)
self.assertIsInstance(lookup.lhs, Lower)
self.assertIsInstance(lookup.lhs.lhs, SimpleCol)
self.assertEqual(lookup.lhs.lhs.target, Author._meta.get_field('name'))
def test_negated_nullable(self):
query = Query(Item)
where = query.build_where(~Q(modified__lt=datetime(2017, 1, 1)))
self.assertTrue(where.negated)
lookup = where.children[0]
self.assertIsInstance(lookup, LessThan)
self.assertEqual(lookup.lhs.target, Item._meta.get_field('modified'))
lookup = where.children[1]
self.assertIsInstance(lookup, IsNull)
self.assertEqual(lookup.lhs.target, Item._meta.get_field('modified'))
def test_foreign_key(self):
query = Query(Item)
msg = 'Joined field references are not permitted in this query'
with self.assertRaisesMessage(FieldError, msg):
query.build_where(Q(creator__num__gt=2))
def test_foreign_key_f(self):
query = Query(Ranking)
with self.assertRaises(FieldError):
query.build_where(Q(rank__gt=F('author__num')))
def test_foreign_key_exclusive(self):
query = Query(ObjectC)
where = query.build_where(Q(objecta=None) | Q(objectb=None))
a_isnull = where.children[0]
self.assertIsInstance(a_isnull, RelatedIsNull)
self.assertIsInstance(a_isnull.lhs, SimpleCol)
self.assertEqual(a_isnull.lhs.target, ObjectC._meta.get_field('objecta'))
b_isnull = where.children[1]
self.assertIsInstance(b_isnull, RelatedIsNull)
self.assertIsInstance(b_isnull.lhs, SimpleCol)
self.assertEqual(b_isnull.lhs.target, ObjectC._meta.get_field('objectb'))
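# Illustrative sketch, not part of the original test suite: build_where() returns
# a WhereNode whose children are lookup instances, which is what the assertions
# above inspect. Assumes the Query, Q, and Author imports at the top of this
# module; defined only, never called at import time.
def _describe_where_tree_example():
    query = Query(Author)
    where = query.build_where(Q(num__gt=2) | Q(num__lt=0))
    # Each child is a lookup such as GreaterThan or LessThan.
    return [(type(child).__name__, child.rhs) for child in where.children]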
|
277ad80b4f7be075deb7f01675ed7e2cef84f22abb93e4b16d083f3bb9daeeb7 | from django.http import HttpRequest
from django.template import (
Context, Engine, RequestContext, Template, Variable, VariableDoesNotExist,
)
from django.template.context import RenderContext
from django.test import RequestFactory, SimpleTestCase
class ContextTests(SimpleTestCase):
def test_context(self):
c = Context({"a": 1, "b": "xyzzy"})
self.assertEqual(c["a"], 1)
self.assertEqual(c.push(), {})
c["a"] = 2
self.assertEqual(c["a"], 2)
self.assertEqual(c.get("a"), 2)
self.assertEqual(c.pop(), {"a": 2})
self.assertEqual(c["a"], 1)
self.assertEqual(c.get("foo", 42), 42)
def test_push_context_manager(self):
c = Context({"a": 1})
with c.push():
c['a'] = 2
self.assertEqual(c['a'], 2)
self.assertEqual(c['a'], 1)
with c.push(a=3):
self.assertEqual(c['a'], 3)
self.assertEqual(c['a'], 1)
def test_update_context_manager(self):
c = Context({"a": 1})
with c.update({}):
c['a'] = 2
self.assertEqual(c['a'], 2)
self.assertEqual(c['a'], 1)
with c.update({'a': 3}):
self.assertEqual(c['a'], 3)
self.assertEqual(c['a'], 1)
def test_push_context_manager_with_context_object(self):
c = Context({'a': 1})
with c.push(Context({'a': 3})):
self.assertEqual(c['a'], 3)
self.assertEqual(c['a'], 1)
def test_update_context_manager_with_context_object(self):
c = Context({'a': 1})
with c.update(Context({'a': 3})):
self.assertEqual(c['a'], 3)
self.assertEqual(c['a'], 1)
def test_push_proper_layering(self):
c = Context({'a': 1})
c.push(Context({'b': 2}))
c.push(Context({'c': 3, 'd': {'z': '26'}}))
self.assertEqual(
c.dicts,
[
{'False': False, 'None': None, 'True': True},
{'a': 1},
{'b': 2},
{'c': 3, 'd': {'z': '26'}},
]
)
def test_update_proper_layering(self):
c = Context({'a': 1})
c.update(Context({'b': 2}))
c.update(Context({'c': 3, 'd': {'z': '26'}}))
self.assertEqual(
c.dicts,
[
{'False': False, 'None': None, 'True': True},
{'a': 1},
{'b': 2},
{'c': 3, 'd': {'z': '26'}},
]
)
def test_setdefault(self):
c = Context()
x = c.setdefault('x', 42)
self.assertEqual(x, 42)
self.assertEqual(c['x'], 42)
x = c.setdefault('x', 100)
self.assertEqual(x, 42)
self.assertEqual(c['x'], 42)
def test_resolve_on_context_method(self):
"""
#17778 -- Variable shouldn't resolve RequestContext methods
"""
empty_context = Context()
with self.assertRaises(VariableDoesNotExist):
Variable('no_such_variable').resolve(empty_context)
with self.assertRaises(VariableDoesNotExist):
Variable('new').resolve(empty_context)
self.assertEqual(
Variable('new').resolve(Context({'new': 'foo'})),
'foo',
)
def test_render_context(self):
test_context = RenderContext({'fruit': 'papaya'})
# push() limits access to the topmost dict
test_context.push()
test_context['vegetable'] = 'artichoke'
self.assertEqual(list(test_context), ['vegetable'])
self.assertNotIn('fruit', test_context)
with self.assertRaises(KeyError):
test_context['fruit']
self.assertIsNone(test_context.get('fruit'))
def test_flatten_context(self):
a = Context()
a.update({'a': 2})
a.update({'b': 4})
a.update({'c': 8})
self.assertEqual(a.flatten(), {
'False': False, 'None': None, 'True': True,
'a': 2, 'b': 4, 'c': 8
})
def test_flatten_context_with_context(self):
"""
Context.push() with a Context argument should work.
"""
a = Context({'a': 2})
a.push(Context({'z': '8'}))
self.assertEqual(a.flatten(), {
'False': False,
'None': None,
'True': True,
'a': 2,
'z': '8',
})
def test_context_comparable(self):
"""
#21765 -- equality comparison should work
"""
test_data = {'x': 'y', 'v': 'z', 'd': {'o': object, 'a': 'b'}}
self.assertEqual(Context(test_data), Context(test_data))
a = Context()
b = Context()
self.assertEqual(a, b)
# update only a
a.update({'a': 1})
self.assertNotEqual(a, b)
# update both to check regression
a.update({'c': 3})
b.update({'c': 3})
self.assertNotEqual(a, b)
# make contexts equal again
b.update({'a': 1})
self.assertEqual(a, b)
def test_copy_request_context_twice(self):
"""
#24273 -- Copy twice shouldn't raise an exception
"""
RequestContext(HttpRequest()).new().new()
def test_set_upward(self):
c = Context({'a': 1})
c.set_upward('a', 2)
self.assertEqual(c.get('a'), 2)
def test_set_upward_empty_context(self):
empty_context = Context()
empty_context.set_upward('a', 1)
self.assertEqual(empty_context.get('a'), 1)
def test_set_upward_with_push(self):
"""
The highest context which has the given key is used.
"""
c = Context({'a': 1})
c.push({'a': 2})
c.set_upward('a', 3)
self.assertEqual(c.get('a'), 3)
c.pop()
self.assertEqual(c.get('a'), 1)
def test_set_upward_with_push_no_match(self):
"""
The highest context is used if the given key isn't found.
"""
c = Context({'b': 1})
c.push({'b': 2})
c.set_upward('a', 2)
self.assertEqual(len(c.dicts), 3)
self.assertEqual(c.dicts[-1]['a'], 2)
class RequestContextTests(SimpleTestCase):
request_factory = RequestFactory()
def test_include_only(self):
"""
#15721 -- ``{% include %}`` and ``RequestContext`` should work
together.
"""
engine = Engine(loaders=[
('django.template.loaders.locmem.Loader', {
'child': '{{ var|default:"none" }}',
}),
])
request = self.request_factory.get('/')
ctx = RequestContext(request, {'var': 'parent'})
self.assertEqual(engine.from_string('{% include "child" %}').render(ctx), 'parent')
self.assertEqual(engine.from_string('{% include "child" only %}').render(ctx), 'none')
def test_stack_size(self):
"""
#7116 -- Optimize RequestContext construction
"""
request = self.request_factory.get('/')
ctx = RequestContext(request, {})
# The stack should now contain 4 items:
# [builtins, supplied context, context processors, empty dict]
self.assertEqual(len(ctx.dicts), 4)
def test_context_comparable(self):
# Create an engine without any context processors.
test_data = {'x': 'y', 'v': 'z', 'd': {'o': object, 'a': 'b'}}
# test comparing RequestContext to prevent problems if somebody
# adds __eq__ in the future
request = self.request_factory.get('/')
self.assertEqual(
RequestContext(request, dict_=test_data),
RequestContext(request, dict_=test_data),
)
def test_modify_context_and_render(self):
template = Template('{{ foo }}')
request = self.request_factory.get('/')
context = RequestContext(request, {})
context['foo'] = 'foo'
self.assertEqual(template.render(context), 'foo')
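# Illustrative sketch, not part of the original test suite: Context behaves like
# a stack of dicts, with lookups searching from the newest layer down. Uses only
# the Context import above; defined as a helper and never called at import time.
def _example_context_layering():
    c = Context({'a': 1})
    with c.push({'a': 2}):
        shadowed = c['a']  # 2: the pushed layer shadows the original value
    return shadowed, c['a']  # (2, 1): the layer is removed when the block exits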
|
2958fe45c288a13d3b348111b5e0c9a6c689fb8d08424aca9e90d0d3e2334321 | from django.template import Library
from django.template.base import Node
from django.test import SimpleTestCase
class FilterRegistrationTests(SimpleTestCase):
def setUp(self):
self.library = Library()
def test_filter(self):
@self.library.filter
def func():
return ''
self.assertEqual(self.library.filters['func'], func)
def test_filter_parens(self):
@self.library.filter()
def func():
return ''
self.assertEqual(self.library.filters['func'], func)
def test_filter_name_arg(self):
@self.library.filter('name')
def func():
return ''
self.assertEqual(self.library.filters['name'], func)
def test_filter_name_kwarg(self):
@self.library.filter(name='name')
def func():
return ''
self.assertEqual(self.library.filters['name'], func)
def test_filter_call(self):
def func():
return ''
self.library.filter('name', func)
self.assertEqual(self.library.filters['name'], func)
def test_filter_invalid(self):
msg = "Unsupported arguments to Library.filter: (None, '')"
with self.assertRaisesMessage(ValueError, msg):
self.library.filter(None, '')
class InclusionTagRegistrationTests(SimpleTestCase):
def setUp(self):
self.library = Library()
def test_inclusion_tag(self):
@self.library.inclusion_tag('template.html')
def func():
return ''
self.assertIn('func', self.library.tags)
def test_inclusion_tag_name(self):
@self.library.inclusion_tag('template.html', name='name')
def func():
return ''
self.assertIn('name', self.library.tags)
class SimpleTagRegistrationTests(SimpleTestCase):
def setUp(self):
self.library = Library()
def test_simple_tag(self):
@self.library.simple_tag
def func():
return ''
self.assertIn('func', self.library.tags)
def test_simple_tag_parens(self):
@self.library.simple_tag()
def func():
return ''
self.assertIn('func', self.library.tags)
def test_simple_tag_name_kwarg(self):
@self.library.simple_tag(name='name')
def func():
return ''
self.assertIn('name', self.library.tags)
def test_simple_tag_invalid(self):
msg = "Invalid arguments provided to simple_tag"
with self.assertRaisesMessage(ValueError, msg):
self.library.simple_tag('invalid')
class TagRegistrationTests(SimpleTestCase):
def setUp(self):
self.library = Library()
def test_tag(self):
@self.library.tag
def func(parser, token):
return Node()
self.assertEqual(self.library.tags['func'], func)
def test_tag_parens(self):
@self.library.tag()
def func(parser, token):
return Node()
self.assertEqual(self.library.tags['func'], func)
def test_tag_name_arg(self):
@self.library.tag('name')
def func(parser, token):
return Node()
self.assertEqual(self.library.tags['name'], func)
def test_tag_name_kwarg(self):
@self.library.tag(name='name')
def func(parser, token):
return Node()
self.assertEqual(self.library.tags['name'], func)
def test_tag_call(self):
def func(parser, token):
return Node()
self.library.tag('name', func)
self.assertEqual(self.library.tags['name'], func)
def test_tag_invalid(self):
msg = "Unsupported arguments to Library.tag: (None, '')"
with self.assertRaisesMessage(ValueError, msg):
self.library.tag(None, '')
|
a2974d8583b19cff5bb47940c976cb2c4f29ea588437403d50a5d04b766ef222 | import os.path
import sys
import tempfile
import unittest
from contextlib import contextmanager
from django.template import TemplateDoesNotExist
from django.template.engine import Engine
from django.test import SimpleTestCase, override_settings
from django.utils.functional import lazystr
from .utils import TEMPLATE_DIR
class CachedLoaderTests(SimpleTestCase):
def setUp(self):
self.engine = Engine(
dirs=[TEMPLATE_DIR],
loaders=[
('django.template.loaders.cached.Loader', [
'django.template.loaders.filesystem.Loader',
]),
],
)
def test_get_template(self):
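# The cached loader wraps the filesystem loader and memoizes compiled
# templates keyed by template name; the second lookup below is served from
# that cache.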
template = self.engine.get_template('index.html')
self.assertEqual(template.origin.name, os.path.join(TEMPLATE_DIR, 'index.html'))
self.assertEqual(template.origin.template_name, 'index.html')
self.assertEqual(template.origin.loader, self.engine.template_loaders[0].loaders[0])
cache = self.engine.template_loaders[0].get_template_cache
self.assertEqual(cache['index.html'], template)
# Run a second time from cache
template = self.engine.get_template('index.html')
self.assertEqual(template.origin.name, os.path.join(TEMPLATE_DIR, 'index.html'))
self.assertEqual(template.origin.template_name, 'index.html')
self.assertEqual(template.origin.loader, self.engine.template_loaders[0].loaders[0])
def test_get_template_missing_debug_off(self):
"""
With template debugging disabled, the raw TemplateDoesNotExist class
should be cached when a template is missing. See ticket #26306 and
docstrings in the cached loader for details.
"""
self.engine.debug = False
with self.assertRaises(TemplateDoesNotExist):
self.engine.get_template('prod-template-missing.html')
e = self.engine.template_loaders[0].get_template_cache['prod-template-missing.html']
self.assertEqual(e, TemplateDoesNotExist)
def test_get_template_missing_debug_on(self):
"""
With template debugging enabled, a TemplateDoesNotExist instance
should be cached when a template is missing.
"""
self.engine.debug = True
with self.assertRaises(TemplateDoesNotExist):
self.engine.get_template('debug-template-missing.html')
e = self.engine.template_loaders[0].get_template_cache['debug-template-missing.html']
self.assertIsInstance(e, TemplateDoesNotExist)
self.assertEqual(e.args[0], 'debug-template-missing.html')
def test_cached_exception_no_traceback(self):
"""
When a TemplateDoesNotExist instance is cached, the cached instance
should not contain the __traceback__, __context__, or __cause__
attributes that Python sets when raising exceptions.
"""
self.engine.debug = True
with self.assertRaises(TemplateDoesNotExist):
self.engine.get_template('no-traceback-in-cache.html')
e = self.engine.template_loaders[0].get_template_cache['no-traceback-in-cache.html']
error_msg = "Cached TemplateDoesNotExist must not have been thrown."
self.assertIsNone(e.__traceback__, error_msg)
self.assertIsNone(e.__context__, error_msg)
self.assertIsNone(e.__cause__, error_msg)
def test_template_name_leading_dash_caching(self):
"""
#26536 -- A leading dash in a template name shouldn't be stripped
from its cache key.
"""
self.assertEqual(self.engine.template_loaders[0].cache_key('-template.html', []), '-template.html')
def test_template_name_lazy_string(self):
"""
#26603 -- A template name specified as a lazy string should be forced
to text before computing its cache key.
"""
self.assertEqual(self.engine.template_loaders[0].cache_key(lazystr('template.html'), []), 'template.html')
class FileSystemLoaderTests(SimpleTestCase):
@classmethod
def setUpClass(cls):
cls.engine = Engine(dirs=[TEMPLATE_DIR], loaders=['django.template.loaders.filesystem.Loader'])
super().setUpClass()
@contextmanager
def set_dirs(self, dirs):
original_dirs = self.engine.dirs
self.engine.dirs = dirs
try:
yield
finally:
self.engine.dirs = original_dirs
@contextmanager
def source_checker(self, dirs):
loader = self.engine.template_loaders[0]
def check_sources(path, expected_sources):
expected_sources = [os.path.abspath(s) for s in expected_sources]
self.assertEqual(
[origin.name for origin in loader.get_template_sources(path)],
expected_sources,
)
with self.set_dirs(dirs):
yield check_sources
def test_get_template(self):
template = self.engine.get_template('index.html')
self.assertEqual(template.origin.name, os.path.join(TEMPLATE_DIR, 'index.html'))
self.assertEqual(template.origin.template_name, 'index.html')
self.assertEqual(template.origin.loader, self.engine.template_loaders[0])
self.assertEqual(template.origin.loader_name, 'django.template.loaders.filesystem.Loader')
def test_loaders_dirs(self):
engine = Engine(loaders=[('django.template.loaders.filesystem.Loader', [TEMPLATE_DIR])])
template = engine.get_template('index.html')
self.assertEqual(template.origin.name, os.path.join(TEMPLATE_DIR, 'index.html'))
def test_loaders_dirs_empty(self):
"""An empty dirs list in loaders overrides top level dirs."""
engine = Engine(dirs=[TEMPLATE_DIR], loaders=[('django.template.loaders.filesystem.Loader', [])])
with self.assertRaises(TemplateDoesNotExist):
engine.get_template('index.html')
def test_directory_security(self):
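# Template paths that resolve outside the configured directories must yield
# no sources.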
with self.source_checker(['/dir1', '/dir2']) as check_sources:
check_sources('index.html', ['/dir1/index.html', '/dir2/index.html'])
check_sources('/etc/passwd', [])
check_sources('etc/passwd', ['/dir1/etc/passwd', '/dir2/etc/passwd'])
check_sources('../etc/passwd', [])
check_sources('../../../etc/passwd', [])
check_sources('/dir1/index.html', ['/dir1/index.html'])
check_sources('../dir2/index.html', ['/dir2/index.html'])
check_sources('/dir1blah', [])
check_sources('../dir1blah', [])
def test_unicode_template_name(self):
with self.source_checker(['/dir1', '/dir2']) as check_sources:
check_sources('Ångström', ['/dir1/Ångström', '/dir2/Ångström'])
def test_bytestring(self):
loader = self.engine.template_loaders[0]
msg = "Can't mix strings and bytes in path components"
with self.assertRaisesMessage(TypeError, msg):
list(loader.get_template_sources(b'\xc3\x85ngstr\xc3\xb6m'))
def test_unicode_dir_name(self):
with self.source_checker(['/Straße']) as check_sources:
check_sources('Ångström', ['/Straße/Ångström'])
@unittest.skipUnless(
os.path.normcase('/TEST') == os.path.normpath('/test'),
"This test only runs on case-sensitive file systems.",
)
def test_case_sensitivity(self):
with self.source_checker(['/dir1', '/DIR2']) as check_sources:
check_sources('index.html', ['/dir1/index.html', '/DIR2/index.html'])
check_sources('/DIR1/index.HTML', ['/DIR1/index.HTML'])
def test_file_does_not_exist(self):
with self.assertRaises(TemplateDoesNotExist):
self.engine.get_template('doesnotexist.html')
@unittest.skipIf(
sys.platform == 'win32',
"Python on Windows doesn't have working os.chmod().",
)
def test_permissions_error(self):
with tempfile.NamedTemporaryFile() as tmpfile:
tmpdir = os.path.dirname(tmpfile.name)
tmppath = os.path.join(tmpdir, tmpfile.name)
os.chmod(tmppath, 0o0222)
with self.set_dirs([tmpdir]):
with self.assertRaisesMessage(IOError, 'Permission denied'):
self.engine.get_template(tmpfile.name)
def test_notafile_error(self):
with self.assertRaises(IOError):
self.engine.get_template('first')
class AppDirectoriesLoaderTests(SimpleTestCase):
@classmethod
def setUpClass(cls):
cls.engine = Engine(
loaders=['django.template.loaders.app_directories.Loader'],
)
super().setUpClass()
@override_settings(INSTALLED_APPS=['template_tests'])
def test_get_template(self):
template = self.engine.get_template('index.html')
self.assertEqual(template.origin.name, os.path.join(TEMPLATE_DIR, 'index.html'))
self.assertEqual(template.origin.template_name, 'index.html')
self.assertEqual(template.origin.loader, self.engine.template_loaders[0])
@override_settings(INSTALLED_APPS=[])
def test_not_installed(self):
with self.assertRaises(TemplateDoesNotExist):
self.engine.get_template('index.html')
class LocmemLoaderTests(SimpleTestCase):
@classmethod
def setUpClass(cls):
cls.engine = Engine(
loaders=[('django.template.loaders.locmem.Loader', {
'index.html': 'index',
})],
)
super().setUpClass()
def test_get_template(self):
template = self.engine.get_template('index.html')
self.assertEqual(template.origin.name, 'index.html')
self.assertEqual(template.origin.template_name, 'index.html')
self.assertEqual(template.origin.loader, self.engine.template_loaders[0])
|
35e57ad711ff0c563a0c568218aa5c89d18f18942471eece956d2021988588f5 | import pickle
import time
from datetime import datetime
from django.template import engines
from django.template.response import (
ContentNotRenderedError, SimpleTemplateResponse, TemplateResponse,
)
from django.test import (
RequestFactory, SimpleTestCase, modify_settings, override_settings,
)
from django.test.utils import require_jinja2
from django.utils.deprecation import MiddlewareMixin
from .utils import TEMPLATE_DIR
def test_processor(request):
return {'processors': 'yes'}
test_processor_name = 'template_tests.test_response.test_processor'
# A test middleware that installs a temporary URLConf
class CustomURLConfMiddleware(MiddlewareMixin):
def process_request(self, request):
request.urlconf = 'template_tests.alternate_urls'
class SimpleTemplateResponseTest(SimpleTestCase):
def _response(self, template='foo', *args, **kwargs):
template = engines['django'].from_string(template)
return SimpleTemplateResponse(template, *args, **kwargs)
def test_template_resolving(self):
response = SimpleTemplateResponse('first/test.html')
response.render()
self.assertEqual(response.content, b'First template\n')
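        # A list of template names falls back to the first one that exists:
        # 'foo.html' is missing, so 'second/test.html' is used.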
templates = ['foo.html', 'second/test.html', 'first/test.html']
response = SimpleTemplateResponse(templates)
response.render()
self.assertEqual(response.content, b'Second template\n')
response = self._response()
response.render()
self.assertEqual(response.content, b'foo')
def test_explicit_baking(self):
# explicit baking
response = self._response()
self.assertFalse(response.is_rendered)
response.render()
self.assertTrue(response.is_rendered)
def test_render(self):
# response is not re-rendered without the render call
response = self._response().render()
self.assertEqual(response.content, b'foo')
# rebaking doesn't change the rendered content
template = engines['django'].from_string('bar{{ baz }}')
response.template_name = template
response.render()
self.assertEqual(response.content, b'foo')
# but rendered content can be overridden by manually
# setting content
response.content = 'bar'
self.assertEqual(response.content, b'bar')
def test_iteration_unrendered(self):
# unrendered response raises an exception on iteration
response = self._response()
self.assertFalse(response.is_rendered)
def iteration():
for x in response:
pass
with self.assertRaises(ContentNotRenderedError):
iteration()
self.assertFalse(response.is_rendered)
def test_iteration_rendered(self):
# iteration works for rendered responses
response = self._response().render()
res = [x for x in response]
self.assertEqual(res, [b'foo'])
def test_content_access_unrendered(self):
# unrendered response raises an exception when content is accessed
response = self._response()
self.assertFalse(response.is_rendered)
with self.assertRaises(ContentNotRenderedError):
response.content
self.assertFalse(response.is_rendered)
def test_content_access_rendered(self):
# rendered response content can be accessed
response = self._response().render()
self.assertEqual(response.content, b'foo')
def test_set_content(self):
# content can be overridden
response = self._response()
self.assertFalse(response.is_rendered)
response.content = 'spam'
self.assertTrue(response.is_rendered)
self.assertEqual(response.content, b'spam')
response.content = 'baz'
self.assertEqual(response.content, b'baz')
def test_dict_context(self):
response = self._response('{{ foo }}{{ processors }}',
{'foo': 'bar'})
self.assertEqual(response.context_data, {'foo': 'bar'})
response.render()
self.assertEqual(response.content, b'bar')
def test_kwargs(self):
response = self._response(content_type='application/json', status=504, charset='ascii')
self.assertEqual(response['content-type'], 'application/json')
self.assertEqual(response.status_code, 504)
self.assertEqual(response.charset, 'ascii')
def test_args(self):
response = SimpleTemplateResponse('', {}, 'application/json', 504)
self.assertEqual(response['content-type'], 'application/json')
self.assertEqual(response.status_code, 504)
@require_jinja2
def test_using(self):
response = SimpleTemplateResponse('template_tests/using.html').render()
self.assertEqual(response.content, b'DTL\n')
response = SimpleTemplateResponse('template_tests/using.html', using='django').render()
self.assertEqual(response.content, b'DTL\n')
response = SimpleTemplateResponse('template_tests/using.html', using='jinja2').render()
self.assertEqual(response.content, b'Jinja2\n')
def test_post_callbacks(self):
"Rendering a template response triggers the post-render callbacks"
post = []
def post1(obj):
post.append('post1')
def post2(obj):
post.append('post2')
response = SimpleTemplateResponse('first/test.html', {})
response.add_post_render_callback(post1)
response.add_post_render_callback(post2)
# When the content is rendered, all the callbacks are invoked, too.
response.render()
self.assertEqual(response.content, b'First template\n')
self.assertEqual(post, ['post1', 'post2'])
def test_pickling(self):
# Create a template response. The context is
# known to be unpicklable (e.g., a function).
response = SimpleTemplateResponse('first/test.html', {
'value': 123,
'fn': datetime.now,
})
with self.assertRaises(ContentNotRenderedError):
pickle.dumps(response)
# But if we render the response, we can pickle it.
response.render()
pickled_response = pickle.dumps(response)
unpickled_response = pickle.loads(pickled_response)
self.assertEqual(unpickled_response.content, response.content)
self.assertEqual(unpickled_response['content-type'], response['content-type'])
self.assertEqual(unpickled_response.status_code, response.status_code)
# ...and the unpickled response doesn't have the
# template-related attributes, so it can't be re-rendered
template_attrs = ('template_name', 'context_data', '_post_render_callbacks')
for attr in template_attrs:
self.assertFalse(hasattr(unpickled_response, attr))
# ...and requesting any of those attributes raises an exception
for attr in template_attrs:
with self.assertRaises(AttributeError):
getattr(unpickled_response, attr)
def test_repickling(self):
response = SimpleTemplateResponse('first/test.html', {
'value': 123,
'fn': datetime.now,
})
with self.assertRaises(ContentNotRenderedError):
pickle.dumps(response)
response.render()
pickled_response = pickle.dumps(response)
unpickled_response = pickle.loads(pickled_response)
pickle.dumps(unpickled_response)
def test_pickling_cookie(self):
response = SimpleTemplateResponse('first/test.html', {
'value': 123,
'fn': datetime.now,
})
response.cookies['key'] = 'value'
response.render()
pickled_response = pickle.dumps(response, pickle.HIGHEST_PROTOCOL)
unpickled_response = pickle.loads(pickled_response)
self.assertEqual(unpickled_response.cookies['key'].value, 'value')
@override_settings(TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [TEMPLATE_DIR],
'OPTIONS': {
'context_processors': [test_processor_name],
},
}])
class TemplateResponseTest(SimpleTestCase):
factory = RequestFactory()
def _response(self, template='foo', *args, **kwargs):
self._request = self.factory.get('/')
template = engines['django'].from_string(template)
return TemplateResponse(self._request, template, *args, **kwargs)
def test_render(self):
response = self._response('{{ foo }}{{ processors }}').render()
self.assertEqual(response.content, b'yes')
def test_render_with_requestcontext(self):
response = self._response('{{ foo }}{{ processors }}',
{'foo': 'bar'}).render()
self.assertEqual(response.content, b'baryes')
def test_context_processor_priority(self):
# context processors should be overridden by passed-in context
response = self._response('{{ foo }}{{ processors }}',
{'processors': 'no'}).render()
self.assertEqual(response.content, b'no')
def test_kwargs(self):
response = self._response(content_type='application/json', status=504)
self.assertEqual(response['content-type'], 'application/json')
self.assertEqual(response.status_code, 504)
def test_args(self):
response = TemplateResponse(self.factory.get('/'), '', {},
'application/json', 504)
self.assertEqual(response['content-type'], 'application/json')
self.assertEqual(response.status_code, 504)
@require_jinja2
def test_using(self):
request = self.factory.get('/')
response = TemplateResponse(request, 'template_tests/using.html').render()
self.assertEqual(response.content, b'DTL\n')
response = TemplateResponse(request, 'template_tests/using.html', using='django').render()
self.assertEqual(response.content, b'DTL\n')
response = TemplateResponse(request, 'template_tests/using.html', using='jinja2').render()
self.assertEqual(response.content, b'Jinja2\n')
def test_pickling(self):
# Create a template response. The context is
# known to be unpicklable (e.g., a function).
response = TemplateResponse(
self.factory.get('/'),
'first/test.html', {
'value': 123,
'fn': datetime.now,
}
)
with self.assertRaises(ContentNotRenderedError):
pickle.dumps(response)
# But if we render the response, we can pickle it.
response.render()
pickled_response = pickle.dumps(response)
unpickled_response = pickle.loads(pickled_response)
self.assertEqual(unpickled_response.content, response.content)
self.assertEqual(unpickled_response['content-type'], response['content-type'])
self.assertEqual(unpickled_response.status_code, response.status_code)
# ...and the unpickled response doesn't have the
# template-related attributes, so it can't be re-rendered
template_attrs = (
'template_name',
'context_data',
'_post_render_callbacks',
'_request',
)
for attr in template_attrs:
self.assertFalse(hasattr(unpickled_response, attr))
# ...and requesting any of those attributes raises an exception
for attr in template_attrs:
with self.assertRaises(AttributeError):
getattr(unpickled_response, attr)
def test_repickling(self):
response = SimpleTemplateResponse('first/test.html', {
'value': 123,
'fn': datetime.now,
})
with self.assertRaises(ContentNotRenderedError):
pickle.dumps(response)
response.render()
pickled_response = pickle.dumps(response)
unpickled_response = pickle.loads(pickled_response)
pickle.dumps(unpickled_response)
@modify_settings(MIDDLEWARE={'append': ['template_tests.test_response.CustomURLConfMiddleware']})
@override_settings(ROOT_URLCONF='template_tests.urls')
class CustomURLConfTest(SimpleTestCase):
def test_custom_urlconf(self):
response = self.client.get('/template_response_view/')
self.assertContains(response, 'This is where you can find the snark: /snark/')
@modify_settings(
MIDDLEWARE={
'append': [
'django.middleware.cache.FetchFromCacheMiddleware',
'django.middleware.cache.UpdateCacheMiddleware',
],
},
)
@override_settings(CACHE_MIDDLEWARE_SECONDS=2.0, ROOT_URLCONF='template_tests.alternate_urls')
class CacheMiddlewareTest(SimpleTestCase):
def test_middleware_caching(self):
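        # CACHE_MIDDLEWARE_SECONDS is 2, so a repeat request within the timeout
        # is served from the cache (identical content), while a request after
        # the timeout re-renders the view and produces different content.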
response = self.client.get('/template_response_view/')
self.assertEqual(response.status_code, 200)
time.sleep(1.0)
response2 = self.client.get('/template_response_view/')
self.assertEqual(response2.status_code, 200)
self.assertEqual(response.content, response2.content)
time.sleep(2.0)
# Let the cache expire and test again
response2 = self.client.get('/template_response_view/')
self.assertEqual(response2.status_code, 200)
self.assertNotEqual(response.content, response2.content)
|
331135f2f418e2667b0162630c4984fec65a3f9d3637221dead780fbfede1014 | import datetime
from unittest import skipIf, skipUnless
from django.db import connection
from django.db.models import Index
from django.db.models.deletion import CASCADE
from django.db.models.fields.related import ForeignKey
from django.db.models.query_utils import Q
from django.test import (
TestCase, TransactionTestCase, skipIfDBFeature, skipUnlessDBFeature,
)
from django.test.utils import override_settings
from django.utils import timezone
from .models import (
Article, ArticleTranslation, IndexedArticle2, IndexTogetherSingleList,
)
class SchemaIndexesTests(TestCase):
"""
Test index handling by the db.backends.schema infrastructure.
"""
def test_index_name_hash(self):
"""
Index names should be deterministic.
"""
editor = connection.schema_editor()
index_name = editor._create_index_name(
table_name=Article._meta.db_table,
column_names=("c1",),
suffix="123",
)
self.assertEqual(index_name, "indexes_article_c1_a52bd80b123")
def test_index_name(self):
"""
Index names on the built-in database backends::
* Are truncated as needed.
* Include all the column names.
* Include a deterministic hash.
"""
long_name = 'l%sng' % ('o' * 100)
editor = connection.schema_editor()
index_name = editor._create_index_name(
table_name=Article._meta.db_table,
column_names=('c1', 'c2', long_name),
suffix='ix',
)
expected = {
'mysql': 'indexes_article_c1_c2_looooooooooooooooooo_255179b2ix',
'oracle': 'indexes_a_c1_c2_loo_255179b2ix',
'postgresql': 'indexes_article_c1_c2_loooooooooooooooooo_255179b2ix',
'sqlite': 'indexes_article_c1_c2_l%sng_255179b2ix' % ('o' * 100),
}
if connection.vendor not in expected:
self.skipTest('This test is only supported on the built-in database backends.')
self.assertEqual(index_name, expected[connection.vendor])
def test_index_together(self):
editor = connection.schema_editor()
index_sql = [str(statement) for statement in editor._model_indexes_sql(Article)]
self.assertEqual(len(index_sql), 1)
# Ensure the index name is properly quoted
self.assertIn(
connection.ops.quote_name(
editor._create_index_name(Article._meta.db_table, ['headline', 'pub_date'], suffix='_idx')
),
index_sql[0]
)
def test_index_together_single_list(self):
# Test for using index_together with a single list (#22172)
index_sql = connection.schema_editor()._model_indexes_sql(IndexTogetherSingleList)
self.assertEqual(len(index_sql), 1)
@skipIf(connection.vendor == 'postgresql', 'opclasses are PostgreSQL only')
class SchemaIndexesNotPostgreSQLTests(TransactionTestCase):
available_apps = ['indexes']
def test_create_index_ignores_opclasses(self):
index = Index(
name='test_ops_class',
fields=['headline'],
opclasses=['varchar_pattern_ops'],
)
with connection.schema_editor() as editor:
# This would error if opclasses weren't ignored.
editor.add_index(IndexedArticle2, index)
# The `condition` parameter is ignored by databases that don't support partial
# indexes.
@skipIfDBFeature('supports_partial_indexes')
class PartialIndexConditionIgnoredTests(TransactionTestCase):
available_apps = ['indexes']
def test_condition_ignored(self):
index = Index(
name='test_condition_ignored',
fields=['published'],
condition=Q(published=True),
)
with connection.schema_editor() as editor:
# This would error if condition weren't ignored.
editor.add_index(Article, index)
self.assertNotIn(
'WHERE %s.%s' % (editor.quote_name(Article._meta.db_table), 'published'),
str(index.create_sql(Article, editor))
)
@skipUnless(connection.vendor == 'postgresql', 'PostgreSQL tests')
class SchemaIndexesPostgreSQLTests(TransactionTestCase):
available_apps = ['indexes']
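    # Maps an index name to the operator classes it was created with by joining
    # pg_opclass, pg_index, and pg_class; '%s' is interpolated with the index
    # name in the tests below.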
get_opclass_query = '''
SELECT opcname, c.relname FROM pg_opclass AS oc
JOIN pg_index as i on oc.oid = ANY(i.indclass)
JOIN pg_class as c on c.oid = i.indexrelid
WHERE c.relname = '%s'
'''
def test_text_indexes(self):
"""Test creation of PostgreSQL-specific text indexes (#12234)"""
from .models import IndexedArticle
index_sql = [str(statement) for statement in connection.schema_editor()._model_indexes_sql(IndexedArticle)]
self.assertEqual(len(index_sql), 5)
self.assertIn('("headline" varchar_pattern_ops)', index_sql[1])
self.assertIn('("body" text_pattern_ops)', index_sql[3])
# unique=True and db_index=True should only create the varchar-specific
# index (#19441).
self.assertIn('("slug" varchar_pattern_ops)', index_sql[4])
def test_virtual_relation_indexes(self):
"""Test indexes are not created for related objects"""
index_sql = connection.schema_editor()._model_indexes_sql(Article)
self.assertEqual(len(index_sql), 1)
def test_ops_class(self):
index = Index(
name='test_ops_class',
fields=['headline'],
opclasses=['varchar_pattern_ops'],
)
with connection.schema_editor() as editor:
editor.add_index(IndexedArticle2, index)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query % 'test_ops_class')
self.assertEqual(cursor.fetchall(), [('varchar_pattern_ops', 'test_ops_class')])
def test_ops_class_multiple_columns(self):
index = Index(
name='test_ops_class_multiple',
fields=['headline', 'body'],
opclasses=['varchar_pattern_ops', 'text_pattern_ops'],
)
with connection.schema_editor() as editor:
editor.add_index(IndexedArticle2, index)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query % 'test_ops_class_multiple')
expected_ops_classes = (
('varchar_pattern_ops', 'test_ops_class_multiple'),
('text_pattern_ops', 'test_ops_class_multiple'),
)
self.assertCountEqual(cursor.fetchall(), expected_ops_classes)
def test_ops_class_partial(self):
index = Index(
name='test_ops_class_partial',
fields=['body'],
opclasses=['text_pattern_ops'],
condition=Q(headline__contains='China'),
)
with connection.schema_editor() as editor:
editor.add_index(IndexedArticle2, index)
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query % 'test_ops_class_partial')
self.assertCountEqual(cursor.fetchall(), [('text_pattern_ops', 'test_ops_class_partial')])
def test_ops_class_partial_tablespace(self):
indexname = 'test_ops_class_tblspace'
index = Index(
name=indexname,
fields=['body'],
opclasses=['text_pattern_ops'],
condition=Q(headline__contains='China'),
db_tablespace='pg_default',
)
with connection.schema_editor() as editor:
editor.add_index(IndexedArticle2, index)
self.assertIn('TABLESPACE "pg_default" ', str(index.create_sql(IndexedArticle2, editor)))
with editor.connection.cursor() as cursor:
cursor.execute(self.get_opclass_query % indexname)
self.assertCountEqual(cursor.fetchall(), [('text_pattern_ops', indexname)])
@skipUnless(connection.vendor == 'mysql', 'MySQL tests')
class SchemaIndexesMySQLTests(TransactionTestCase):
available_apps = ['indexes']
def test_no_index_for_foreignkey(self):
"""
        MySQL on InnoDB already creates indexes automatically for foreign keys
        (#14180). An index should be created if db_constraint=False (#26171).
"""
storage = connection.introspection.get_storage_engine(
connection.cursor(), ArticleTranslation._meta.db_table
)
if storage != "InnoDB":
self.skip("This test only applies to the InnoDB storage engine")
index_sql = [str(statement) for statement in connection.schema_editor()._model_indexes_sql(ArticleTranslation)]
self.assertEqual(index_sql, [
'CREATE INDEX `indexes_articletranslation_article_no_constraint_id_d6c0806b` '
'ON `indexes_articletranslation` (`article_no_constraint_id`)'
])
# The index also shouldn't be created if the ForeignKey is added after
# the model was created.
field_created = False
try:
with connection.schema_editor() as editor:
new_field = ForeignKey(Article, CASCADE)
new_field.set_attributes_from_name('new_foreign_key')
editor.add_field(ArticleTranslation, new_field)
field_created = True
self.assertEqual([str(statement) for statement in editor.deferred_sql], [
'ALTER TABLE `indexes_articletranslation` '
'ADD CONSTRAINT `indexes_articletrans_new_foreign_key_id_d27a9146_fk_indexes_a` '
'FOREIGN KEY (`new_foreign_key_id`) REFERENCES `indexes_article` (`id`)'
])
finally:
if field_created:
with connection.schema_editor() as editor:
editor.remove_field(ArticleTranslation, new_field)
@skipUnlessDBFeature('supports_partial_indexes')
# SQLite doesn't support timezone-aware datetimes when USE_TZ is False.
@override_settings(USE_TZ=True)
class PartialIndexTests(TransactionTestCase):
# Schema editor is used to create the index to test that it works.
available_apps = ['indexes']
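    # Each test builds an Index with a condition, checks that the generated SQL
    # contains the expected WHERE clause, creates the index, verifies it via
    # introspection, and finally drops it again.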
def test_partial_index(self):
with connection.schema_editor() as editor:
index = Index(
name='recent_article_idx',
fields=['pub_date'],
condition=Q(
pub_date__gt=datetime.datetime(
year=2015, month=1, day=1,
# PostgreSQL would otherwise complain about the lookup
# being converted to a mutable function (by removing
# the timezone in the cast) which is forbidden.
tzinfo=timezone.get_current_timezone(),
),
)
)
self.assertIn(
'WHERE %s.%s' % (editor.quote_name(Article._meta.db_table), editor.quote_name("pub_date")),
str(index.create_sql(Article, schema_editor=editor))
)
editor.add_index(index=index, model=Article)
self.assertIn(index.name, connection.introspection.get_constraints(
cursor=connection.cursor(), table_name=Article._meta.db_table,
))
editor.remove_index(index=index, model=Article)
def test_integer_restriction_partial(self):
with connection.schema_editor() as editor:
index = Index(
name='recent_article_idx',
fields=['id'],
condition=Q(pk__gt=1),
)
self.assertIn(
'WHERE %s.%s' % (editor.quote_name(Article._meta.db_table), editor.quote_name('id')),
str(index.create_sql(Article, schema_editor=editor))
)
editor.add_index(index=index, model=Article)
self.assertIn(index.name, connection.introspection.get_constraints(
cursor=connection.cursor(), table_name=Article._meta.db_table,
))
editor.remove_index(index=index, model=Article)
def test_boolean_restriction_partial(self):
with connection.schema_editor() as editor:
index = Index(
name='published_index',
fields=['published'],
condition=Q(published=True),
)
self.assertIn(
'WHERE %s.%s' % (editor.quote_name(Article._meta.db_table), editor.quote_name('published')),
str(index.create_sql(Article, schema_editor=editor))
)
editor.add_index(index=index, model=Article)
self.assertIn(index.name, connection.introspection.get_constraints(
cursor=connection.cursor(), table_name=Article._meta.db_table,
))
editor.remove_index(index=index, model=Article)
@skipUnlessDBFeature('supports_functions_in_partial_indexes')
def test_multiple_conditions(self):
with connection.schema_editor() as editor:
index = Index(
name='recent_article_idx',
fields=['pub_date', 'headline'],
condition=(
Q(pub_date__gt=datetime.datetime(
year=2015,
month=1,
day=1,
tzinfo=timezone.get_current_timezone(),
)) & Q(headline__contains='China')
),
)
sql = str(index.create_sql(Article, schema_editor=editor))
where = sql.find('WHERE')
self.assertIn(
'WHERE (%s.%s' % (editor.quote_name(Article._meta.db_table), editor.quote_name("pub_date")),
sql
)
# Because each backend has different syntax for the operators,
# check ONLY the occurrence of headline in the SQL.
self.assertGreater(sql.rfind('headline'), where)
editor.add_index(index=index, model=Article)
self.assertIn(index.name, connection.introspection.get_constraints(
cursor=connection.cursor(), table_name=Article._meta.db_table,
))
editor.remove_index(index=index, model=Article)
def test_is_null_condition(self):
with connection.schema_editor() as editor:
index = Index(
name='recent_article_idx',
fields=['pub_date'],
condition=Q(pub_date__isnull=False),
)
self.assertIn(
'WHERE %s.%s IS NOT NULL' % (editor.quote_name(Article._meta.db_table), editor.quote_name("pub_date")),
str(index.create_sql(Article, schema_editor=editor))
)
editor.add_index(index=index, model=Article)
self.assertIn(index.name, connection.introspection.get_constraints(
cursor=connection.cursor(), table_name=Article._meta.db_table,
))
editor.remove_index(index=index, model=Article)
|
b2c47e237323785ec79b170e46e752004e5e8067673dc4a8f1ddcc1173d84bd7 | import datetime
from copy import deepcopy
from django.core.exceptions import FieldError, MultipleObjectsReturned
from django.db import models, transaction
from django.db.utils import IntegrityError
from django.test import TestCase
from django.utils.translation import gettext_lazy
from .models import (
Article, Category, Child, City, District, First, Parent, Record, Relation,
Reporter, School, Student, Third, ToFieldChild,
)
class ManyToOneTests(TestCase):
def setUp(self):
# Create a few Reporters.
self.r = Reporter(first_name='John', last_name='Smith', email='[email protected]')
self.r.save()
self.r2 = Reporter(first_name='Paul', last_name='Jones', email='[email protected]')
self.r2.save()
# Create an Article.
self.a = Article(headline="This is a test", pub_date=datetime.date(2005, 7, 27), reporter=self.r)
self.a.save()
def test_get(self):
# Article objects have access to their related Reporter objects.
r = self.a.reporter
self.assertEqual(r.id, self.r.id)
        self.assertEqual((r.first_name, r.last_name), ('John', 'Smith'))
def test_create(self):
# You can also instantiate an Article by passing the Reporter's ID
# instead of a Reporter object.
a3 = Article(headline="Third article", pub_date=datetime.date(2005, 7, 27), reporter_id=self.r.id)
a3.save()
self.assertEqual(a3.reporter.id, self.r.id)
# Similarly, the reporter ID can be a string.
a4 = Article(headline="Fourth article", pub_date=datetime.date(2005, 7, 27), reporter_id=str(self.r.id))
a4.save()
self.assertEqual(repr(a4.reporter), "<Reporter: John Smith>")
def test_add(self):
# Create an Article via the Reporter object.
new_article = self.r.article_set.create(headline="John's second story", pub_date=datetime.date(2005, 7, 29))
self.assertEqual(repr(new_article), "<Article: John's second story>")
self.assertEqual(new_article.reporter.id, self.r.id)
# Create a new article, and add it to the article set.
new_article2 = Article(headline="Paul's story", pub_date=datetime.date(2006, 1, 17))
msg = "<Article: Paul's story> instance isn't saved. Use bulk=False or save the object first."
with self.assertRaisesMessage(ValueError, msg):
self.r.article_set.add(new_article2)
self.r.article_set.add(new_article2, bulk=False)
self.assertEqual(new_article2.reporter.id, self.r.id)
self.assertQuerysetEqual(
self.r.article_set.all(),
["<Article: John's second story>", "<Article: Paul's story>", "<Article: This is a test>"]
)
# Add the same article to a different article set - check that it moves.
self.r2.article_set.add(new_article2)
self.assertEqual(new_article2.reporter.id, self.r2.id)
self.assertQuerysetEqual(self.r2.article_set.all(), ["<Article: Paul's story>"])
# Adding an object of the wrong type raises TypeError.
with transaction.atomic():
with self.assertRaisesMessage(TypeError, "'Article' instance expected, got <Reporter:"):
self.r.article_set.add(self.r2)
self.assertQuerysetEqual(
self.r.article_set.all(),
["<Article: John's second story>", "<Article: This is a test>"]
)
def test_set(self):
new_article = self.r.article_set.create(headline="John's second story", pub_date=datetime.date(2005, 7, 29))
new_article2 = self.r2.article_set.create(headline="Paul's story", pub_date=datetime.date(2006, 1, 17))
# Assign the article to the reporter.
new_article2.reporter = self.r
new_article2.save()
self.assertEqual(repr(new_article2.reporter), "<Reporter: John Smith>")
self.assertEqual(new_article2.reporter.id, self.r.id)
self.assertQuerysetEqual(self.r.article_set.all(), [
"<Article: John's second story>",
"<Article: Paul's story>",
"<Article: This is a test>",
])
self.assertQuerysetEqual(self.r2.article_set.all(), [])
# Set the article back again.
self.r2.article_set.set([new_article, new_article2])
self.assertQuerysetEqual(self.r.article_set.all(), ["<Article: This is a test>"])
self.assertQuerysetEqual(
self.r2.article_set.all(),
["<Article: John's second story>", "<Article: Paul's story>"]
)
# Funny case - because the ForeignKey cannot be null,
# existing members of the set must remain.
self.r.article_set.set([new_article])
self.assertQuerysetEqual(
self.r.article_set.all(),
["<Article: John's second story>", "<Article: This is a test>"]
)
self.assertQuerysetEqual(self.r2.article_set.all(), ["<Article: Paul's story>"])
def test_reverse_assignment_deprecation(self):
msg = (
"Direct assignment to the reverse side of a related set is "
"prohibited. Use article_set.set() instead."
)
with self.assertRaisesMessage(TypeError, msg):
self.r2.article_set = []
def test_assign(self):
new_article = self.r.article_set.create(headline="John's second story", pub_date=datetime.date(2005, 7, 29))
new_article2 = self.r2.article_set.create(headline="Paul's story", pub_date=datetime.date(2006, 1, 17))
# Assign the article to the reporter directly using the descriptor.
new_article2.reporter = self.r
new_article2.save()
self.assertEqual(repr(new_article2.reporter), "<Reporter: John Smith>")
self.assertEqual(new_article2.reporter.id, self.r.id)
self.assertQuerysetEqual(self.r.article_set.all(), [
"<Article: John's second story>",
"<Article: Paul's story>",
"<Article: This is a test>",
])
self.assertQuerysetEqual(self.r2.article_set.all(), [])
# Set the article back again using set() method.
self.r2.article_set.set([new_article, new_article2])
self.assertQuerysetEqual(self.r.article_set.all(), ["<Article: This is a test>"])
self.assertQuerysetEqual(
self.r2.article_set.all(),
["<Article: John's second story>", "<Article: Paul's story>"]
)
# Because the ForeignKey cannot be null, existing members of the set
# must remain.
self.r.article_set.set([new_article])
self.assertQuerysetEqual(
self.r.article_set.all(),
["<Article: John's second story>", "<Article: This is a test>"]
)
self.assertQuerysetEqual(self.r2.article_set.all(), ["<Article: Paul's story>"])
# Reporter cannot be null - there should not be a clear or remove method
self.assertFalse(hasattr(self.r2.article_set, 'remove'))
self.assertFalse(hasattr(self.r2.article_set, 'clear'))
def test_selects(self):
self.r.article_set.create(headline="John's second story", pub_date=datetime.date(2005, 7, 29))
self.r2.article_set.create(headline="Paul's story", pub_date=datetime.date(2006, 1, 17))
# Reporter objects have access to their related Article objects.
self.assertQuerysetEqual(self.r.article_set.all(), [
"<Article: John's second story>",
"<Article: This is a test>",
])
self.assertQuerysetEqual(self.r.article_set.filter(headline__startswith='This'), ["<Article: This is a test>"])
self.assertEqual(self.r.article_set.count(), 2)
self.assertEqual(self.r2.article_set.count(), 1)
# Get articles by id
self.assertQuerysetEqual(Article.objects.filter(id__exact=self.a.id), ["<Article: This is a test>"])
self.assertQuerysetEqual(Article.objects.filter(pk=self.a.id), ["<Article: This is a test>"])
# Query on an article property
self.assertQuerysetEqual(Article.objects.filter(headline__startswith='This'), ["<Article: This is a test>"])
# The API automatically follows relationships as far as you need.
# Use double underscores to separate relationships.
# This works as many levels deep as you want. There's no limit.
# Find all Articles for any Reporter whose first name is "John".
self.assertQuerysetEqual(
Article.objects.filter(reporter__first_name__exact='John'),
["<Article: John's second story>", "<Article: This is a test>"]
)
# Implied __exact also works
self.assertQuerysetEqual(
Article.objects.filter(reporter__first_name='John'),
["<Article: John's second story>", "<Article: This is a test>"]
)
# Query twice over the related field.
self.assertQuerysetEqual(
Article.objects.filter(reporter__first_name__exact='John', reporter__last_name__exact='Smith'),
["<Article: John's second story>", "<Article: This is a test>"]
)
# The underlying query only makes one join when a related table is referenced twice.
queryset = Article.objects.filter(reporter__first_name__exact='John', reporter__last_name__exact='Smith')
self.assertNumQueries(1, list, queryset)
self.assertEqual(queryset.query.get_compiler(queryset.db).as_sql()[0].count('INNER JOIN'), 1)
# The automatically joined table has a predictable name.
self.assertQuerysetEqual(
Article.objects.filter(reporter__first_name__exact='John').extra(
where=["many_to_one_reporter.last_name='Smith'"]),
["<Article: John's second story>", "<Article: This is a test>"]
)
# ... and should work fine with the string that comes out of forms.Form.cleaned_data
self.assertQuerysetEqual(
(Article.objects
.filter(reporter__first_name__exact='John')
.extra(where=["many_to_one_reporter.last_name='%s'" % 'Smith'])),
["<Article: John's second story>", "<Article: This is a test>"]
)
# Find all Articles for a Reporter.
# Use direct ID check, pk check, and object comparison
self.assertQuerysetEqual(
Article.objects.filter(reporter__id__exact=self.r.id),
[
"<Article: John's second story>",
"<Article: This is a test>",
])
self.assertQuerysetEqual(
Article.objects.filter(reporter__pk=self.r.id),
[
"<Article: John's second story>",
"<Article: This is a test>",
])
self.assertQuerysetEqual(
Article.objects.filter(reporter=self.r.id),
[
"<Article: John's second story>",
"<Article: This is a test>",
])
self.assertQuerysetEqual(
Article.objects.filter(reporter=self.r),
[
"<Article: John's second story>",
"<Article: This is a test>",
])
self.assertQuerysetEqual(
Article.objects.filter(reporter__in=[self.r.id, self.r2.id]).distinct(),
[
"<Article: John's second story>",
"<Article: Paul's story>",
"<Article: This is a test>",
])
self.assertQuerysetEqual(
Article.objects.filter(reporter__in=[self.r, self.r2]).distinct(),
[
"<Article: John's second story>",
"<Article: Paul's story>",
"<Article: This is a test>",
])
# You can also use a queryset instead of a literal list of instances.
# The queryset must be reduced to a list of values using values(),
# then converted into a query
self.assertQuerysetEqual(
Article.objects.filter(
reporter__in=Reporter.objects.filter(first_name='John').values('pk').query
).distinct(),
[
"<Article: John's second story>",
"<Article: This is a test>",
])
def test_reverse_selects(self):
a3 = Article.objects.create(
headline="Third article",
pub_date=datetime.date(2005, 7, 27),
reporter_id=self.r.id,
)
Article.objects.create(
headline="Fourth article",
pub_date=datetime.date(2005, 7, 27),
reporter_id=self.r.id,
)
john_smith = ["<Reporter: John Smith>"]
# Reporters can be queried
self.assertQuerysetEqual(Reporter.objects.filter(id__exact=self.r.id), john_smith)
self.assertQuerysetEqual(Reporter.objects.filter(pk=self.r.id), john_smith)
self.assertQuerysetEqual(Reporter.objects.filter(first_name__startswith='John'), john_smith)
# Reporters can query in opposite direction of ForeignKey definition
self.assertQuerysetEqual(Reporter.objects.filter(article__id__exact=self.a.id), john_smith)
self.assertQuerysetEqual(Reporter.objects.filter(article__pk=self.a.id), john_smith)
self.assertQuerysetEqual(Reporter.objects.filter(article=self.a.id), john_smith)
self.assertQuerysetEqual(Reporter.objects.filter(article=self.a), john_smith)
self.assertQuerysetEqual(Reporter.objects.filter(article__in=[self.a.id, a3.id]).distinct(), john_smith)
self.assertQuerysetEqual(Reporter.objects.filter(article__in=[self.a.id, a3]).distinct(), john_smith)
self.assertQuerysetEqual(Reporter.objects.filter(article__in=[self.a, a3]).distinct(), john_smith)
self.assertQuerysetEqual(
Reporter.objects.filter(article__headline__startswith='T'),
["<Reporter: John Smith>", "<Reporter: John Smith>"],
ordered=False
)
self.assertQuerysetEqual(Reporter.objects.filter(article__headline__startswith='T').distinct(), john_smith)
# Counting in the opposite direction works in conjunction with distinct()
self.assertEqual(Reporter.objects.filter(article__headline__startswith='T').count(), 2)
self.assertEqual(Reporter.objects.filter(article__headline__startswith='T').distinct().count(), 1)
# Queries can go round in circles.
self.assertQuerysetEqual(
Reporter.objects.filter(article__reporter__first_name__startswith='John'),
[
"<Reporter: John Smith>",
"<Reporter: John Smith>",
"<Reporter: John Smith>",
],
ordered=False
)
self.assertQuerysetEqual(
Reporter.objects.filter(article__reporter__first_name__startswith='John').distinct(),
john_smith
)
self.assertQuerysetEqual(Reporter.objects.filter(article__reporter__exact=self.r).distinct(), john_smith)
# Implied __exact also works.
self.assertQuerysetEqual(Reporter.objects.filter(article__reporter=self.r).distinct(), john_smith)
# It's possible to use values() calls across many-to-one relations.
# (Note, too, that we clear the ordering here so as not to drag the
# 'headline' field into the columns being used to determine uniqueness)
d = {'reporter__first_name': 'John', 'reporter__last_name': 'Smith'}
qs = Article.objects.filter(
reporter=self.r,
).distinct().order_by().values('reporter__first_name', 'reporter__last_name')
self.assertEqual([d], list(qs))
def test_select_related(self):
# Article.objects.select_related().dates() works properly when there
# are multiple Articles with the same date but different foreign-key
# objects (Reporters).
r1 = Reporter.objects.create(first_name='Mike', last_name='Royko', email='[email protected]')
r2 = Reporter.objects.create(first_name='John', last_name='Kass', email='[email protected]')
Article.objects.create(headline='First', pub_date=datetime.date(1980, 4, 23), reporter=r1)
Article.objects.create(headline='Second', pub_date=datetime.date(1980, 4, 23), reporter=r2)
self.assertEqual(
list(Article.objects.select_related().dates('pub_date', 'day')),
[datetime.date(1980, 4, 23), datetime.date(2005, 7, 27)]
)
self.assertEqual(
list(Article.objects.select_related().dates('pub_date', 'month')),
[datetime.date(1980, 4, 1), datetime.date(2005, 7, 1)]
)
self.assertEqual(
list(Article.objects.select_related().dates('pub_date', 'year')),
[datetime.date(1980, 1, 1), datetime.date(2005, 1, 1)]
)
def test_delete(self):
self.r.article_set.create(headline="John's second story", pub_date=datetime.date(2005, 7, 29))
self.r2.article_set.create(headline="Paul's story", pub_date=datetime.date(2006, 1, 17))
Article.objects.create(headline="Third article", pub_date=datetime.date(2005, 7, 27), reporter_id=self.r.id)
Article.objects.create(
headline="Fourth article",
pub_date=datetime.date(2005, 7, 27),
reporter_id=str(self.r.id),
)
# If you delete a reporter, his articles will be deleted.
self.assertQuerysetEqual(
Article.objects.all(),
[
"<Article: Fourth article>",
"<Article: John's second story>",
"<Article: Paul's story>",
"<Article: Third article>",
"<Article: This is a test>",
]
)
self.assertQuerysetEqual(
Reporter.objects.order_by('first_name'),
["<Reporter: John Smith>", "<Reporter: Paul Jones>"]
)
self.r2.delete()
self.assertQuerysetEqual(
Article.objects.all(),
[
"<Article: Fourth article>",
"<Article: John's second story>",
"<Article: Third article>",
"<Article: This is a test>",
]
)
self.assertQuerysetEqual(Reporter.objects.order_by('first_name'), ["<Reporter: John Smith>"])
# You can delete using a JOIN in the query.
Reporter.objects.filter(article__headline__startswith='This').delete()
self.assertQuerysetEqual(Reporter.objects.all(), [])
self.assertQuerysetEqual(Article.objects.all(), [])
def test_explicit_fk(self):
# Create a new Article with get_or_create using an explicit value
# for a ForeignKey.
a2, created = Article.objects.get_or_create(
headline="John's second test",
pub_date=datetime.date(2011, 5, 7),
reporter_id=self.r.id,
)
self.assertTrue(created)
self.assertEqual(a2.reporter.id, self.r.id)
# You can specify filters containing the explicit FK value.
self.assertQuerysetEqual(
Article.objects.filter(reporter_id__exact=self.r.id),
["<Article: John's second test>", "<Article: This is a test>"]
)
# Create an Article by Paul for the same date.
a3 = Article.objects.create(
headline="Paul's commentary",
pub_date=datetime.date(2011, 5, 7),
reporter_id=self.r2.id,
)
self.assertEqual(a3.reporter.id, self.r2.id)
# Get should respect explicit foreign keys as well.
msg = 'get() returned more than one Article -- it returned 2!'
with self.assertRaisesMessage(MultipleObjectsReturned, msg):
Article.objects.get(reporter_id=self.r.id)
self.assertEqual(
repr(a3),
repr(Article.objects.get(reporter_id=self.r2.id, pub_date=datetime.date(2011, 5, 7)))
)
def test_deepcopy_and_circular_references(self):
        # Regression for #12876 -- Model methods that include queries that
        # recurse don't cause recursion depth problems under deepcopy.
self.r.cached_query = Article.objects.filter(reporter=self.r)
self.assertEqual(repr(deepcopy(self.r)), "<Reporter: John Smith>")
def test_manager_class_caching(self):
r1 = Reporter.objects.create(first_name='Mike')
r2 = Reporter.objects.create(first_name='John')
# Same twice
self.assertIs(r1.article_set.__class__, r1.article_set.__class__)
# Same as each other
self.assertIs(r1.article_set.__class__, r2.article_set.__class__)
def test_create_relation_with_gettext_lazy(self):
reporter = Reporter.objects.create(first_name='John', last_name='Smith', email='[email protected]')
lazy = gettext_lazy('test')
reporter.article_set.create(headline=lazy, pub_date=datetime.date(2011, 6, 10))
notlazy = str(lazy)
article = reporter.article_set.get()
self.assertEqual(article.headline, notlazy)
def test_values_list_exception(self):
expected_message = "Cannot resolve keyword 'notafield' into field. Choices are: %s"
reporter_fields = ', '.join(sorted(f.name for f in Reporter._meta.get_fields()))
with self.assertRaisesMessage(FieldError, expected_message % reporter_fields):
Article.objects.values_list('reporter__notafield')
article_fields = ', '.join(['EXTRA'] + sorted(f.name for f in Article._meta.get_fields()))
with self.assertRaisesMessage(FieldError, expected_message % article_fields):
Article.objects.extra(select={'EXTRA': 'EXTRA_SELECT'}).values_list('notafield')
def test_fk_assignment_and_related_object_cache(self):
# Tests of ForeignKey assignment and the related-object cache (see #6886).
p = Parent.objects.create(name="Parent")
c = Child.objects.create(name="Child", parent=p)
# Look up the object again so that we get a "fresh" object.
c = Child.objects.get(name="Child")
p = c.parent
        # Accessing the related object again returns exactly the same object.
self.assertIs(c.parent, p)
# But if we kill the cache, we get a new object.
del c._state.fields_cache['parent']
self.assertIsNot(c.parent, p)
# Assigning a new object results in that object getting cached immediately.
p2 = Parent.objects.create(name="Parent 2")
c.parent = p2
self.assertIs(c.parent, p2)
# Assigning None succeeds if field is null=True.
p.bestchild = None
self.assertIsNone(p.bestchild)
# bestchild should still be None after saving.
p.save()
self.assertIsNone(p.bestchild)
# bestchild should still be None after fetching the object again.
p = Parent.objects.get(name="Parent")
self.assertIsNone(p.bestchild)
        # Assigning None doesn't fail immediately, even though Child.parent is
        # null=False; the error only surfaces when saving to the database.
setattr(c, "parent", None)
# You also can't assign an object of the wrong type here
msg = (
'Cannot assign "<First: First object (1)>": "Child.parent" must '
'be a "Parent" instance.'
)
with self.assertRaisesMessage(ValueError, msg):
setattr(c, "parent", First(id=1, second=1))
# You can assign None to Child.parent during object creation.
Child(name='xyzzy', parent=None)
# But when trying to save a Child with parent=None, the database will
# raise IntegrityError.
with self.assertRaises(IntegrityError), transaction.atomic():
Child.objects.create(name='xyzzy', parent=None)
# Creation using keyword argument should cache the related object.
p = Parent.objects.get(name="Parent")
c = Child(parent=p)
self.assertIs(c.parent, p)
# Creation using keyword argument and unsaved related instance (#8070).
p = Parent()
msg = "save() prohibited to prevent data loss due to unsaved related object 'parent'."
with self.assertRaisesMessage(ValueError, msg):
Child.objects.create(parent=p)
msg = "save() prohibited to prevent data loss due to unsaved related object 'parent'."
with self.assertRaisesMessage(ValueError, msg):
ToFieldChild.objects.create(parent=p)
# Creation using attname keyword argument and an id will cause the
# related object to be fetched.
p = Parent.objects.get(name="Parent")
c = Child(parent_id=p.id)
self.assertIsNot(c.parent, p)
self.assertEqual(c.parent, p)
def test_fk_to_bigautofield(self):
ch = City.objects.create(name='Chicago')
District.objects.create(city=ch, name='Far South')
District.objects.create(city=ch, name='North')
ny = City.objects.create(name='New York', id=2 ** 33)
District.objects.create(city=ny, name='Brooklyn')
District.objects.create(city=ny, name='Manhattan')
def test_multiple_foreignkeys(self):
# Test of multiple ForeignKeys to the same model (bug #7125).
c1 = Category.objects.create(name='First')
c2 = Category.objects.create(name='Second')
c3 = Category.objects.create(name='Third')
r1 = Record.objects.create(category=c1)
r2 = Record.objects.create(category=c1)
r3 = Record.objects.create(category=c2)
r4 = Record.objects.create(category=c2)
r5 = Record.objects.create(category=c3)
Relation.objects.create(left=r1, right=r2)
Relation.objects.create(left=r3, right=r4)
Relation.objects.create(left=r1, right=r3)
Relation.objects.create(left=r5, right=r2)
Relation.objects.create(left=r3, right=r2)
q1 = Relation.objects.filter(left__category__name__in=['First'], right__category__name__in=['Second'])
self.assertQuerysetEqual(q1, ["<Relation: First - Second>"])
q2 = Category.objects.filter(record__left_set__right__category__name='Second').order_by('name')
self.assertQuerysetEqual(q2, ["<Category: First>", "<Category: Second>"])
p = Parent.objects.create(name="Parent")
c = Child.objects.create(name="Child", parent=p)
msg = 'Cannot assign "%r": "Child.parent" must be a "Parent" instance.' % c
with self.assertRaisesMessage(ValueError, msg):
Child.objects.create(name="Grandchild", parent=c)
def test_fk_instantiation_outside_model(self):
# Regression for #12190 -- Should be able to instantiate a FK outside
# of a model, and interrogate its related field.
cat = models.ForeignKey(Category, models.CASCADE)
self.assertEqual('id', cat.remote_field.get_related_field().name)
def test_relation_unsaved(self):
# The <field>_set manager does not join on Null value fields (#17541)
Third.objects.create(name='Third 1')
Third.objects.create(name='Third 2')
th = Third(name="testing")
# The object isn't saved and thus the relation field is null - we won't even
# execute a query in this case.
with self.assertNumQueries(0):
self.assertEqual(th.child_set.count(), 0)
th.save()
# Now the model is saved, so we will need to execute a query.
with self.assertNumQueries(1):
self.assertEqual(th.child_set.count(), 0)
def test_related_object(self):
public_school = School.objects.create(is_public=True)
public_student = Student.objects.create(school=public_school)
private_school = School.objects.create(is_public=False)
private_student = Student.objects.create(school=private_school)
# Only one school is available via all() due to the custom default manager.
self.assertSequenceEqual(School.objects.all(), [public_school])
self.assertEqual(public_student.school, public_school)
# Make sure the base manager is used so that a student can still access
# its related school even if the default manager doesn't normally
# allow it.
self.assertEqual(private_student.school, private_school)
School._meta.base_manager_name = 'objects'
School._meta._expire_cache()
try:
private_student = Student.objects.get(pk=private_student.pk)
with self.assertRaises(School.DoesNotExist):
private_student.school
finally:
School._meta.base_manager_name = None
School._meta._expire_cache()
def test_hasattr_related_object(self):
        # The exception raised on attribute access when a related object
        # doesn't exist should be an instance of a subclass of
        # `AttributeError` (refs #21563).
self.assertFalse(hasattr(Article(), 'reporter'))
def test_clear_after_prefetch(self):
c = City.objects.create(name='Musical City')
District.objects.create(name='Ladida', city=c)
city = City.objects.prefetch_related('districts').get(id=c.id)
self.assertQuerysetEqual(city.districts.all(), ['<District: Ladida>'])
city.districts.clear()
self.assertQuerysetEqual(city.districts.all(), [])
def test_remove_after_prefetch(self):
c = City.objects.create(name='Musical City')
d = District.objects.create(name='Ladida', city=c)
city = City.objects.prefetch_related('districts').get(id=c.id)
self.assertQuerysetEqual(city.districts.all(), ['<District: Ladida>'])
city.districts.remove(d)
self.assertQuerysetEqual(city.districts.all(), [])
def test_add_after_prefetch(self):
c = City.objects.create(name='Musical City')
District.objects.create(name='Ladida', city=c)
d2 = District.objects.create(name='Ladidu')
city = City.objects.prefetch_related('districts').get(id=c.id)
self.assertEqual(city.districts.count(), 1)
city.districts.add(d2)
self.assertEqual(city.districts.count(), 2)
def test_set_after_prefetch(self):
c = City.objects.create(name='Musical City')
District.objects.create(name='Ladida', city=c)
d2 = District.objects.create(name='Ladidu')
city = City.objects.prefetch_related('districts').get(id=c.id)
self.assertEqual(city.districts.count(), 1)
city.districts.set([d2])
self.assertQuerysetEqual(city.districts.all(), ['<District: Ladidu>'])
def test_add_then_remove_after_prefetch(self):
c = City.objects.create(name='Musical City')
District.objects.create(name='Ladida', city=c)
d2 = District.objects.create(name='Ladidu')
city = City.objects.prefetch_related('districts').get(id=c.id)
self.assertEqual(city.districts.count(), 1)
city.districts.add(d2)
self.assertEqual(city.districts.count(), 2)
city.districts.remove(d2)
self.assertEqual(city.districts.count(), 1)
def test_cached_relation_invalidated_on_save(self):
"""
Model.save() invalidates stale ForeignKey relations after a primary key
assignment.
"""
self.assertEqual(self.a.reporter, self.r) # caches a.reporter
self.a.reporter_id = self.r2.pk
self.a.save()
self.assertEqual(self.a.reporter, self.r2)
def test_cached_foreign_key_with_to_field_not_cleared_by_save(self):
parent = Parent.objects.create(name='a')
child = ToFieldChild.objects.create(parent=parent)
with self.assertNumQueries(0):
self.assertIs(child.parent, parent)
def test_reverse_foreign_key_instance_to_field_caching(self):
parent = Parent.objects.create(name='a')
ToFieldChild.objects.create(parent=parent)
child = parent.to_field_children.get()
with self.assertNumQueries(0):
self.assertIs(child.parent, parent)
|
bb5bba3216555e41e09036c98caaad8cd929889ff0701bd1d8a5310a5ee5843a | """
Many-to-one relationships
To define a many-to-one relationship, use ``ForeignKey()``.
"""
from django.db import models
class Reporter(models.Model):
first_name = models.CharField(max_length=30)
last_name = models.CharField(max_length=30)
email = models.EmailField()
def __str__(self):
return "%s %s" % (self.first_name, self.last_name)
class Article(models.Model):
headline = models.CharField(max_length=100)
pub_date = models.DateField()
reporter = models.ForeignKey(Reporter, models.CASCADE)
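    # Each Article points at exactly one Reporter; the reverse relation is
    # available on Reporter through the default `article_set` manager.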
class Meta:
ordering = ('headline',)
def __str__(self):
return self.headline
class City(models.Model):
id = models.BigAutoField(primary_key=True)
name = models.CharField(max_length=50)
def __str__(self):
return self.name
class District(models.Model):
city = models.ForeignKey(City, models.CASCADE, related_name='districts', null=True)
name = models.CharField(max_length=50)
def __str__(self):
return self.name
# If ticket #1578 ever slips back in, these models will not be able to be
# created (the field names being lowercased versions of their opposite classes
# is important here).
class First(models.Model):
second = models.IntegerField()
class Second(models.Model):
first = models.ForeignKey(First, models.CASCADE, related_name='the_first')
# Protect against repetition of #1839, #2415 and #2536.
class Third(models.Model):
name = models.CharField(max_length=20)
third = models.ForeignKey('self', models.SET_NULL, null=True, related_name='child_set')
class Parent(models.Model):
name = models.CharField(max_length=20, unique=True)
bestchild = models.ForeignKey('Child', models.SET_NULL, null=True, related_name='favored_by')
class Child(models.Model):
name = models.CharField(max_length=20)
parent = models.ForeignKey(Parent, models.CASCADE)
class ToFieldChild(models.Model):
parent = models.ForeignKey(Parent, models.CASCADE, to_field='name', related_name='to_field_children')
# Multiple paths to the same model (#7110, #7125)
class Category(models.Model):
name = models.CharField(max_length=20)
def __str__(self):
return self.name
class Record(models.Model):
category = models.ForeignKey(Category, models.CASCADE)
class Relation(models.Model):
left = models.ForeignKey(Record, models.CASCADE, related_name='left_set')
right = models.ForeignKey(Record, models.CASCADE, related_name='right_set')
def __str__(self):
return "%s - %s" % (self.left.category.name, self.right.category.name)
# Test related objects visibility.
class SchoolManager(models.Manager):
def get_queryset(self):
return super().get_queryset().filter(is_public=True)
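# School.objects hides non-public schools by default; the tests use this to
# verify that related-object access from Student goes through the base manager.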
class School(models.Model):
is_public = models.BooleanField(default=False)
objects = SchoolManager()
class Student(models.Model):
school = models.ForeignKey(School, models.CASCADE)
|
fc8eb6836c75141f823e5dda42685e94a509f25d4b79855da17cb4d7fe903990 | import base64
import os
import shutil
import string
import tempfile
import unittest
from datetime import timedelta
from http import cookies
from django.conf import settings
from django.contrib.sessions.backends.base import UpdateError
from django.contrib.sessions.backends.cache import SessionStore as CacheSession
from django.contrib.sessions.backends.cached_db import (
SessionStore as CacheDBSession,
)
from django.contrib.sessions.backends.db import SessionStore as DatabaseSession
from django.contrib.sessions.backends.file import SessionStore as FileSession
from django.contrib.sessions.backends.signed_cookies import (
SessionStore as CookieSession,
)
from django.contrib.sessions.exceptions import InvalidSessionKey
from django.contrib.sessions.middleware import SessionMiddleware
from django.contrib.sessions.models import Session
from django.contrib.sessions.serializers import (
JSONSerializer, PickleSerializer,
)
from django.core import management
from django.core.cache import caches
from django.core.cache.backends.base import InvalidCacheBackendError
from django.core.exceptions import ImproperlyConfigured, SuspiciousOperation
from django.http import HttpResponse
from django.test import (
RequestFactory, TestCase, ignore_warnings, override_settings,
)
from django.utils import timezone
from .models import SessionStore as CustomDatabaseSession
class SessionTestsMixin:
# This does not inherit from TestCase to avoid any tests being run with this
# class, which wouldn't work, and to allow different TestCase subclasses to
# be used.
backend = None # subclasses must specify
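    # Many tests below assert on the `accessed` and `modified` flags:
    # `accessed` determines whether the middleware adds `Vary: Cookie`, and
    # `modified` determines whether the session is saved.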
def setUp(self):
self.session = self.backend()
def tearDown(self):
# NB: be careful to delete any sessions created; stale sessions fill up
# the /tmp (with some backends) and eventually overwhelm it after lots
# of runs (think buildbots)
self.session.delete()
def test_new_session(self):
self.assertIs(self.session.modified, False)
self.assertIs(self.session.accessed, False)
def test_get_empty(self):
self.assertIsNone(self.session.get('cat'))
def test_store(self):
self.session['cat'] = "dog"
self.assertIs(self.session.modified, True)
self.assertEqual(self.session.pop('cat'), 'dog')
def test_pop(self):
self.session['some key'] = 'exists'
# Need to reset these to pretend we haven't accessed it:
        self.session.accessed = False
        self.session.modified = False
self.assertEqual(self.session.pop('some key'), 'exists')
self.assertIs(self.session.accessed, True)
self.assertIs(self.session.modified, True)
self.assertIsNone(self.session.get('some key'))
def test_pop_default(self):
self.assertEqual(self.session.pop('some key', 'does not exist'),
'does not exist')
self.assertIs(self.session.accessed, True)
self.assertIs(self.session.modified, False)
def test_pop_default_named_argument(self):
self.assertEqual(self.session.pop('some key', default='does not exist'), 'does not exist')
self.assertIs(self.session.accessed, True)
self.assertIs(self.session.modified, False)
def test_pop_no_default_keyerror_raised(self):
with self.assertRaises(KeyError):
self.session.pop('some key')
def test_setdefault(self):
self.assertEqual(self.session.setdefault('foo', 'bar'), 'bar')
self.assertEqual(self.session.setdefault('foo', 'baz'), 'bar')
self.assertIs(self.session.accessed, True)
self.assertIs(self.session.modified, True)
def test_update(self):
self.session.update({'update key': 1})
self.assertIs(self.session.accessed, True)
self.assertIs(self.session.modified, True)
self.assertEqual(self.session.get('update key', None), 1)
def test_has_key(self):
self.session['some key'] = 1
self.session.modified = False
self.session.accessed = False
self.assertIn('some key', self.session)
self.assertIs(self.session.accessed, True)
self.assertIs(self.session.modified, False)
def test_values(self):
self.assertEqual(list(self.session.values()), [])
self.assertIs(self.session.accessed, True)
self.session['some key'] = 1
self.session.modified = False
self.session.accessed = False
self.assertEqual(list(self.session.values()), [1])
self.assertIs(self.session.accessed, True)
self.assertIs(self.session.modified, False)
def test_keys(self):
self.session['x'] = 1
self.session.modified = False
self.session.accessed = False
self.assertEqual(list(self.session.keys()), ['x'])
self.assertIs(self.session.accessed, True)
self.assertIs(self.session.modified, False)
def test_items(self):
self.session['x'] = 1
self.session.modified = False
self.session.accessed = False
self.assertEqual(list(self.session.items()), [('x', 1)])
self.assertIs(self.session.accessed, True)
self.assertIs(self.session.modified, False)
def test_clear(self):
self.session['x'] = 1
self.session.modified = False
self.session.accessed = False
self.assertEqual(list(self.session.items()), [('x', 1)])
self.session.clear()
self.assertEqual(list(self.session.items()), [])
self.assertIs(self.session.accessed, True)
self.assertIs(self.session.modified, True)
def test_save(self):
self.session.save()
self.assertIs(self.session.exists(self.session.session_key), True)
def test_delete(self):
self.session.save()
self.session.delete(self.session.session_key)
self.assertIs(self.session.exists(self.session.session_key), False)
def test_flush(self):
self.session['foo'] = 'bar'
self.session.save()
prev_key = self.session.session_key
self.session.flush()
self.assertIs(self.session.exists(prev_key), False)
self.assertNotEqual(self.session.session_key, prev_key)
self.assertIsNone(self.session.session_key)
self.assertIs(self.session.modified, True)
self.assertIs(self.session.accessed, True)
def test_cycle(self):
self.session['a'], self.session['b'] = 'c', 'd'
self.session.save()
prev_key = self.session.session_key
prev_data = list(self.session.items())
self.session.cycle_key()
self.assertIs(self.session.exists(prev_key), False)
self.assertNotEqual(self.session.session_key, prev_key)
self.assertEqual(list(self.session.items()), prev_data)
def test_cycle_with_no_session_cache(self):
self.session['a'], self.session['b'] = 'c', 'd'
self.session.save()
prev_data = self.session.items()
self.session = self.backend(self.session.session_key)
self.assertIs(hasattr(self.session, '_session_cache'), False)
self.session.cycle_key()
self.assertCountEqual(self.session.items(), prev_data)
def test_save_doesnt_clear_data(self):
self.session['a'] = 'b'
self.session.save()
self.assertEqual(self.session['a'], 'b')
def test_invalid_key(self):
# Submitting an invalid session key (either by guessing, or if the db has
# removed the key) results in a new key being generated.
try:
session = self.backend('1')
session.save()
self.assertNotEqual(session.session_key, '1')
self.assertIsNone(session.get('cat'))
session.delete()
finally:
# Some backends leave a stale cache entry for the invalid
# session key; make sure that entry is manually deleted
session.delete('1')
def test_session_key_empty_string_invalid(self):
"""Falsey values (Such as an empty string) are rejected."""
self.session._session_key = ''
self.assertIsNone(self.session.session_key)
def test_session_key_too_short_invalid(self):
"""Strings shorter than 8 characters are rejected."""
self.session._session_key = '1234567'
self.assertIsNone(self.session.session_key)
def test_session_key_valid_string_saved(self):
"""Strings of length 8 and up are accepted and stored."""
self.session._session_key = '12345678'
self.assertEqual(self.session.session_key, '12345678')
def test_session_key_is_read_only(self):
def set_session_key(session):
session.session_key = session._get_new_session_key()
with self.assertRaises(AttributeError):
set_session_key(self.session)
# Custom session expiry
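# Rough summary of the set_expiry() semantics exercised below (a reading of
# these tests, not authoritative documentation): an int is a number of
# seconds, a timedelta/datetime fixes the expiry relative to now/absolutely,
# 0 means "expire at browser close", and None falls back to
# SESSION_COOKIE_AGE. For example (sketch):
#
#   session.set_expiry(300)                 # five minutes
#   session.set_expiry(timedelta(hours=1))  # one hour from now
#   session.set_expiry(0)                   # browser-length session
#   session.set_expiry(None)                # back to the global default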
def test_default_expiry(self):
# A normal session has a max age equal to settings
self.assertEqual(self.session.get_expiry_age(), settings.SESSION_COOKIE_AGE)
# So does a custom session with an idle expiration time of 0 (but it'll
# expire at browser close)
self.session.set_expiry(0)
self.assertEqual(self.session.get_expiry_age(), settings.SESSION_COOKIE_AGE)
def test_custom_expiry_seconds(self):
modification = timezone.now()
self.session.set_expiry(10)
date = self.session.get_expiry_date(modification=modification)
self.assertEqual(date, modification + timedelta(seconds=10))
age = self.session.get_expiry_age(modification=modification)
self.assertEqual(age, 10)
def test_custom_expiry_timedelta(self):
modification = timezone.now()
# Mock timezone.now, because set_expiry calls it on this code path.
original_now = timezone.now
try:
timezone.now = lambda: modification
self.session.set_expiry(timedelta(seconds=10))
finally:
timezone.now = original_now
date = self.session.get_expiry_date(modification=modification)
self.assertEqual(date, modification + timedelta(seconds=10))
age = self.session.get_expiry_age(modification=modification)
self.assertEqual(age, 10)
def test_custom_expiry_datetime(self):
modification = timezone.now()
self.session.set_expiry(modification + timedelta(seconds=10))
date = self.session.get_expiry_date(modification=modification)
self.assertEqual(date, modification + timedelta(seconds=10))
age = self.session.get_expiry_age(modification=modification)
self.assertEqual(age, 10)
def test_custom_expiry_reset(self):
self.session.set_expiry(None)
self.session.set_expiry(10)
self.session.set_expiry(None)
self.assertEqual(self.session.get_expiry_age(), settings.SESSION_COOKIE_AGE)
def test_get_expire_at_browser_close(self):
# Tests get_expire_at_browser_close with different settings and different
# set_expiry calls
with override_settings(SESSION_EXPIRE_AT_BROWSER_CLOSE=False):
self.session.set_expiry(10)
self.assertIs(self.session.get_expire_at_browser_close(), False)
self.session.set_expiry(0)
self.assertIs(self.session.get_expire_at_browser_close(), True)
self.session.set_expiry(None)
self.assertIs(self.session.get_expire_at_browser_close(), False)
with override_settings(SESSION_EXPIRE_AT_BROWSER_CLOSE=True):
self.session.set_expiry(10)
self.assertIs(self.session.get_expire_at_browser_close(), False)
self.session.set_expiry(0)
self.assertIs(self.session.get_expire_at_browser_close(), True)
self.session.set_expiry(None)
self.assertIs(self.session.get_expire_at_browser_close(), True)
def test_decode(self):
# Ensure we can decode what we encode
data = {'a test key': 'a test value'}
encoded = self.session.encode(data)
self.assertEqual(self.session.decode(encoded), data)
def test_decode_failure_logged_to_security(self):
bad_encode = base64.b64encode(b'flaskdj:alkdjf').decode('ascii')
with self.assertLogs('django.security.SuspiciousSession', 'WARNING') as cm:
self.assertEqual({}, self.session.decode(bad_encode))
# The failed decode is logged.
self.assertIn('corrupted', cm.output[0])
def test_actual_expiry(self):
# This doesn't work with the JSONSerializer (it can't serialize a timedelta).
with override_settings(SESSION_SERIALIZER='django.contrib.sessions.serializers.PickleSerializer'):
self.session = self.backend() # reinitialize after overriding settings
# Regression test for #19200
old_session_key = None
new_session_key = None
try:
self.session['foo'] = 'bar'
self.session.set_expiry(-timedelta(seconds=10))
self.session.save()
old_session_key = self.session.session_key
# With an expiry date in the past, the session expires instantly.
new_session = self.backend(self.session.session_key)
new_session_key = new_session.session_key
self.assertNotIn('foo', new_session)
finally:
self.session.delete(old_session_key)
self.session.delete(new_session_key)
def test_session_load_does_not_create_record(self):
"""
Loading an unknown session key does not create a session record.
Creating session records on load would be a denial-of-service (DoS) vulnerability.
"""
session = self.backend('someunknownkey')
session.load()
self.assertIsNone(session.session_key)
self.assertIs(session.exists(session.session_key), False)
# The provided unknown key was cycled, not reused.
self.assertNotEqual(session.session_key, 'someunknownkey')
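# Illustrative sketch (an assumption about the lazy-create behaviour the test
# above relies on, not new test code): loading an unknown key leaves
# session_key as None until save() allocates a fresh one.
#
#   session = self.backend('someunknownkey')
#   session.load()   # no record created, the bogus key is discarded
#   session.save()   # only now is a new key generated and stored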
def test_session_save_does_not_resurrect_session_logged_out_in_other_context(self):
"""
Sessions shouldn't be resurrected by a concurrent request.
"""
# Create new session.
s1 = self.backend()
s1['test_data'] = 'value1'
s1.save(must_create=True)
# Logout in another context.
s2 = self.backend(s1.session_key)
s2.delete()
# Modify session in first context.
s1['test_data'] = 'value2'
with self.assertRaises(UpdateError):
# This should throw an exception as the session is deleted, not
# resurrect the session.
s1.save()
self.assertEqual(s1.load(), {})
class DatabaseSessionTests(SessionTestsMixin, TestCase):
backend = DatabaseSession
session_engine = 'django.contrib.sessions.backends.db'
@property
def model(self):
return self.backend.get_model_class()
def test_session_str(self):
"Session repr should be the session key."
self.session['x'] = 1
self.session.save()
session_key = self.session.session_key
s = self.model.objects.get(session_key=session_key)
self.assertEqual(str(s), session_key)
def test_session_get_decoded(self):
"""
Session.get_decoded() can be used to retrieve data stored
in the normal way.
"""
self.session['x'] = 1
self.session.save()
s = self.model.objects.get(session_key=self.session.session_key)
self.assertEqual(s.get_decoded(), {'x': 1})
def test_sessionmanager_save(self):
"""
SessionManager.save() updates the data of an existing session.
"""
# Create a session
self.session['y'] = 1
self.session.save()
s = self.model.objects.get(session_key=self.session.session_key)
# Change it
self.model.objects.save(s.session_key, {'y': 2}, s.expire_date)
# Clear cache, so that it will be retrieved from DB
del self.session._session_cache
self.assertEqual(self.session['y'], 2)
def test_clearsessions_command(self):
"""
Test clearsessions command for clearing expired sessions.
"""
self.assertEqual(0, self.model.objects.count())
# One object in the future
self.session['foo'] = 'bar'
self.session.set_expiry(3600)
self.session.save()
# One object in the past
other_session = self.backend()
other_session['foo'] = 'bar'
other_session.set_expiry(-3600)
other_session.save()
# Two sessions are in the database before clearsessions...
self.assertEqual(2, self.model.objects.count())
with override_settings(SESSION_ENGINE=self.session_engine):
management.call_command('clearsessions')
# ... and one is deleted.
self.assertEqual(1, self.model.objects.count())
@override_settings(USE_TZ=True)
class DatabaseSessionWithTimeZoneTests(DatabaseSessionTests):
pass
class CustomDatabaseSessionTests(DatabaseSessionTests):
backend = CustomDatabaseSession
session_engine = 'sessions_tests.models'
def test_extra_session_field(self):
# Set the account ID to be picked up by a custom session storage
# and saved to a custom session model database column.
self.session['_auth_user_id'] = 42
self.session.save()
# Make sure that the customized create_model_instance() was called.
s = self.model.objects.get(session_key=self.session.session_key)
self.assertEqual(s.account_id, 42)
# Make the session "anonymous".
self.session.pop('_auth_user_id')
self.session.save()
# Make sure that save() on an existing session did the right job.
s = self.model.objects.get(session_key=self.session.session_key)
self.assertIsNone(s.account_id)
class CacheDBSessionTests(SessionTestsMixin, TestCase):
backend = CacheDBSession
def test_exists_searches_cache_first(self):
self.session.save()
with self.assertNumQueries(0):
self.assertIs(self.session.exists(self.session.session_key), True)
# Some backends might issue a warning
@ignore_warnings(module="django.core.cache.backends.base")
def test_load_overlong_key(self):
self.session._session_key = (string.ascii_letters + string.digits) * 20
self.assertEqual(self.session.load(), {})
@override_settings(SESSION_CACHE_ALIAS='sessions')
def test_non_default_cache(self):
# Refs #21000 -- the cached_db backend should respect SESSION_CACHE_ALIAS.
with self.assertRaises(InvalidCacheBackendError):
self.backend()
@override_settings(USE_TZ=True)
class CacheDBSessionWithTimeZoneTests(CacheDBSessionTests):
pass
# Don't need DB flushing for these tests, so can use unittest.TestCase as base class
class FileSessionTests(SessionTestsMixin, unittest.TestCase):
backend = FileSession
def setUp(self):
# Do file session tests in an isolated directory, and kill it after we're done.
self.original_session_file_path = settings.SESSION_FILE_PATH
self.temp_session_store = settings.SESSION_FILE_PATH = tempfile.mkdtemp()
# Reset the file session backend's internal caches
if hasattr(self.backend, '_storage_path'):
del self.backend._storage_path
super().setUp()
def tearDown(self):
super().tearDown()
settings.SESSION_FILE_PATH = self.original_session_file_path
shutil.rmtree(self.temp_session_store)
@override_settings(
SESSION_FILE_PATH='/if/this/directory/exists/you/have/a/weird/computer',
)
def test_configuration_check(self):
del self.backend._storage_path
# Make sure the file backend checks for a good storage dir
with self.assertRaises(ImproperlyConfigured):
self.backend()
def test_invalid_key_backslash(self):
# Ensure we don't allow directory-traversal.
# This is tested directly on _key_to_file, as load() will swallow
# a SuspiciousOperation in the same way as an IOError - by creating
# a new session, making it unclear whether the slashes were detected.
with self.assertRaises(InvalidSessionKey):
self.backend()._key_to_file("a\\b\\c")
def test_invalid_key_forwardslash(self):
# Ensure we don't allow directory-traversal
with self.assertRaises(InvalidSessionKey):
self.backend()._key_to_file("a/b/c")
@override_settings(
SESSION_ENGINE="django.contrib.sessions.backends.file",
SESSION_COOKIE_AGE=0,
)
def test_clearsessions_command(self):
"""
Test clearsessions command for clearing expired sessions.
"""
storage_path = self.backend._get_storage_path()
file_prefix = settings.SESSION_COOKIE_NAME
def count_sessions():
return len([
session_file for session_file in os.listdir(storage_path)
if session_file.startswith(file_prefix)
])
self.assertEqual(0, count_sessions())
# One object in the future
self.session['foo'] = 'bar'
self.session.set_expiry(3600)
self.session.save()
# One object in the past
other_session = self.backend()
other_session['foo'] = 'bar'
other_session.set_expiry(-3600)
other_session.save()
# One object in the present without an expiry (should be deleted since
# its modification time + SESSION_COOKIE_AGE will be in the past when
# clearsessions runs).
other_session2 = self.backend()
other_session2['foo'] = 'bar'
other_session2.save()
# Three sessions are in the filesystem before clearsessions...
self.assertEqual(3, count_sessions())
management.call_command('clearsessions')
# ... and two are deleted.
self.assertEqual(1, count_sessions())
class CacheSessionTests(SessionTestsMixin, unittest.TestCase):
backend = CacheSession
# Some backends might issue a warning
@ignore_warnings(module="django.core.cache.backends.base")
def test_load_overlong_key(self):
self.session._session_key = (string.ascii_letters + string.digits) * 20
self.assertEqual(self.session.load(), {})
def test_default_cache(self):
self.session.save()
self.assertIsNotNone(caches['default'].get(self.session.cache_key))
@override_settings(CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
},
'sessions': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'session',
},
}, SESSION_CACHE_ALIAS='sessions')
def test_non_default_cache(self):
# Re-initialize the session backend to make use of overridden settings.
self.session = self.backend()
self.session.save()
self.assertIsNone(caches['default'].get(self.session.cache_key))
self.assertIsNotNone(caches['sessions'].get(self.session.cache_key))
def test_create_and_save(self):
self.session = self.backend()
self.session.create()
self.session.save()
self.assertIsNotNone(caches['default'].get(self.session.cache_key))
class SessionMiddlewareTests(TestCase):
request_factory = RequestFactory()
@override_settings(SESSION_COOKIE_SECURE=True)
def test_secure_session_cookie(self):
request = self.request_factory.get('/')
response = HttpResponse('Session test')
middleware = SessionMiddleware()
# Simulate a request that modifies the session
middleware.process_request(request)
request.session['hello'] = 'world'
# Handle the response through the middleware
response = middleware.process_response(request, response)
self.assertIs(response.cookies[settings.SESSION_COOKIE_NAME]['secure'], True)
@override_settings(SESSION_COOKIE_HTTPONLY=True)
def test_httponly_session_cookie(self):
request = self.request_factory.get('/')
response = HttpResponse('Session test')
middleware = SessionMiddleware()
# Simulate a request that modifies the session
middleware.process_request(request)
request.session['hello'] = 'world'
# Handle the response through the middleware
response = middleware.process_response(request, response)
self.assertIs(response.cookies[settings.SESSION_COOKIE_NAME]['httponly'], True)
self.assertIn(
cookies.Morsel._reserved['httponly'],
str(response.cookies[settings.SESSION_COOKIE_NAME])
)
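# Background for the assertion above (general HTTP behaviour, not specific to
# this suite): an HttpOnly cookie is withheld from document.cookie in the
# browser, so marking the session cookie HttpOnly limits the impact of an XSS
# hole; the flag is emitted literally in the Set-Cookie header, which is what
# the Morsel._reserved lookup checks for.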
@override_settings(SESSION_COOKIE_SAMESITE='Strict')
def test_samesite_session_cookie(self):
request = self.request_factory.get('/')
response = HttpResponse()
middleware = SessionMiddleware()
middleware.process_request(request)
request.session['hello'] = 'world'
response = middleware.process_response(request, response)
self.assertEqual(response.cookies[settings.SESSION_COOKIE_NAME]['samesite'], 'Strict')
@override_settings(SESSION_COOKIE_HTTPONLY=False)
def test_no_httponly_session_cookie(self):
request = self.request_factory.get('/')
response = HttpResponse('Session test')
middleware = SessionMiddleware()
# Simulate a request that modifies the session
middleware.process_request(request)
request.session['hello'] = 'world'
# Handle the response through the middleware
response = middleware.process_response(request, response)
self.assertEqual(response.cookies[settings.SESSION_COOKIE_NAME]['httponly'], '')
self.assertNotIn(
cookies.Morsel._reserved['httponly'],
str(response.cookies[settings.SESSION_COOKIE_NAME])
)
def test_session_save_on_500(self):
request = self.request_factory.get('/')
response = HttpResponse('Horrible error')
response.status_code = 500
middleware = SessionMiddleware()
# Simulate a request that modifies the session
middleware.process_request(request)
request.session['hello'] = 'world'
# Handle the response through the middleware
response = middleware.process_response(request, response)
# The value wasn't saved above.
self.assertNotIn('hello', request.session.load())
def test_session_update_error_redirect(self):
path = '/foo/'
request = self.request_factory.get(path)
response = HttpResponse()
middleware = SessionMiddleware()
request.session = DatabaseSession()
request.session.save(must_create=True)
request.session.delete()
msg = (
"The request's session was deleted before the request completed. "
"The user may have logged out in a concurrent request, for example."
)
with self.assertRaisesMessage(SuspiciousOperation, msg):
# Handle the response through the middleware. It will try to save
# the deleted session which will cause an UpdateError that's caught
# and raised as a SuspiciousOperation.
middleware.process_response(request, response)
def test_session_delete_on_end(self):
request = self.request_factory.get('/')
response = HttpResponse('Session test')
middleware = SessionMiddleware()
# Before deleting, there has to be an existing cookie
request.COOKIES[settings.SESSION_COOKIE_NAME] = 'abc'
# Simulate a request that ends the session
middleware.process_request(request)
request.session.flush()
# Handle the response through the middleware
response = middleware.process_response(request, response)
# The cookie was deleted, not recreated.
# A deleted cookie header looks like:
# Set-Cookie: sessionid=; expires=Thu, 01 Jan 1970 00:00:00 GMT; Max-Age=0; Path=/
self.assertEqual(
'Set-Cookie: {}=""; expires=Thu, 01 Jan 1970 00:00:00 GMT; '
'Max-Age=0; Path=/'.format(
settings.SESSION_COOKIE_NAME,
),
str(response.cookies[settings.SESSION_COOKIE_NAME])
)
@override_settings(SESSION_COOKIE_DOMAIN='.example.local', SESSION_COOKIE_PATH='/example/')
def test_session_delete_on_end_with_custom_domain_and_path(self):
request = self.request_factory.get('/')
response = HttpResponse('Session test')
middleware = SessionMiddleware()
# Before deleting, there has to be an existing cookie
request.COOKIES[settings.SESSION_COOKIE_NAME] = 'abc'
# Simulate a request that ends the session
middleware.process_request(request)
request.session.flush()
# Handle the response through the middleware
response = middleware.process_response(request, response)
# The cookie was deleted, not recreated.
# A deleted cookie header with a custom domain and path looks like:
# Set-Cookie: sessionid=; Domain=.example.local;
# expires=Thu, 01 Jan 1970 00:00:00 GMT; Max-Age=0;
# Path=/example/
self.assertEqual(
'Set-Cookie: {}=""; Domain=.example.local; expires=Thu, '
'01 Jan 1970 00:00:00 GMT; Max-Age=0; Path=/example/'.format(
settings.SESSION_COOKIE_NAME,
),
str(response.cookies[settings.SESSION_COOKIE_NAME])
)
def test_flush_empty_without_session_cookie_doesnt_set_cookie(self):
request = self.request_factory.get('/')
response = HttpResponse('Session test')
middleware = SessionMiddleware()
# Simulate a request that ends the session
middleware.process_request(request)
request.session.flush()
# Handle the response through the middleware
response = middleware.process_response(request, response)
# A cookie should not be set.
self.assertEqual(response.cookies, {})
# The session is accessed so "Vary: Cookie" should be set.
self.assertEqual(response['Vary'], 'Cookie')
def test_empty_session_saved(self):
"""
If a session is emptied of data but still has a key, it should still
be updated.
"""
request = self.request_factory.get('/')
response = HttpResponse('Session test')
middleware = SessionMiddleware()
# Set a session key and some data.
middleware.process_request(request)
request.session['foo'] = 'bar'
# Handle the response through the middleware.
response = middleware.process_response(request, response)
self.assertEqual(tuple(request.session.items()), (('foo', 'bar'),))
# A cookie should be set, along with Vary: Cookie.
self.assertIn(
'Set-Cookie: sessionid=%s' % request.session.session_key,
str(response.cookies)
)
self.assertEqual(response['Vary'], 'Cookie')
# Empty the session data.
del request.session['foo']
# Handle the response through the middleware.
response = HttpResponse('Session test')
response = middleware.process_response(request, response)
self.assertEqual(dict(request.session.values()), {})
session = Session.objects.get(session_key=request.session.session_key)
self.assertEqual(session.get_decoded(), {})
# While the session is empty, it hasn't been flushed so a cookie should
# still be set, along with Vary: Cookie.
self.assertGreater(len(request.session.session_key), 8)
self.assertIn(
'Set-Cookie: sessionid=%s' % request.session.session_key,
str(response.cookies)
)
self.assertEqual(response['Vary'], 'Cookie')
# Don't need DB flushing for these tests, so can use unittest.TestCase as base class
class CookieSessionTests(SessionTestsMixin, unittest.TestCase):
backend = CookieSession
def test_save(self):
"""
This test tested exists() in the other session backends, but that
doesn't make sense for us.
"""
pass
def test_cycle(self):
"""
This test tested cycle_key() which would create a new session
key for the same session data. But we can't invalidate previously
signed cookies (other than letting them expire naturally) so
testing for this behavior is meaningless.
"""
pass
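# Sketch of why cycling is a no-op here (reasoning only, no new behaviour):
# the signed_cookies backend keeps the whole payload client-side, signed with
# SECRET_KEY, so there is no server-side record for cycle_key() to delete, and
# a previously issued signed cookie stays verifiable until it expires on its
# own.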
@unittest.expectedFailure
def test_actual_expiry(self):
# The cookie backend doesn't handle non-default expiry dates, see #19201
super().test_actual_expiry()
def test_unpickling_exception(self):
# signed_cookies backend should handle unpickle exceptions gracefully
# by creating a new session
self.assertEqual(self.session.serializer, JSONSerializer)
self.session.save()
self.session.serializer = PickleSerializer
self.session.load()
@unittest.skip("Cookie backend doesn't have an external store to create records in.")
def test_session_load_does_not_create_record(self):
pass
@unittest.skip("CookieSession is stored in the client and there is no way to query it.")
def test_session_save_does_not_resurrect_session_logged_out_in_other_context(self):
pass
|
85e66661365fcd0c85b5fa89eab864c981d01151e749373670c329b7264268b6 | """
Testing using the Test Client
The test client is a class that can act like a simple
browser for testing purposes.
It allows the user to compose GET and POST requests, and
obtain the response that the server gave to those requests.
The server Response objects are annotated with the details
of the contexts and templates that were rendered during the
process of serving the request.
``Client`` objects are stateful - they will retain cookie (and
thus session) details for the lifetime of the ``Client`` instance.
This is not intended as a replacement for Twill, Selenium, or
other browser automation frameworks - it is here to allow
testing against the contexts and templates produced by a view,
rather than the HTML rendered to the end-user.
"""
import itertools
import tempfile
from unittest import mock
from django.contrib.auth.models import User
from django.core import mail
from django.http import HttpResponse
from django.test import (
Client, RequestFactory, SimpleTestCase, TestCase, override_settings,
)
from django.urls import reverse_lazy
from .views import TwoArgException, get_view, post_view, trace_view
@override_settings(ROOT_URLCONF='test_client.urls')
class ClientTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.u1 = User.objects.create_user(username='testclient', password='password')
cls.u2 = User.objects.create_user(username='inactive', password='password', is_active=False)
def test_get_view(self):
"GET a view"
# The data is ignored, but let's check it doesn't crash the system
# anyway.
data = {'var': '\xf2'}
response = self.client.get('/get_view/', data)
# Check some response details
self.assertContains(response, 'This is a test')
self.assertEqual(response.context['var'], '\xf2')
self.assertEqual(response.templates[0].name, 'GET Template')
def test_query_string_encoding(self):
# WSGI requires latin-1 encoded strings.
response = self.client.get('/get_view/?var=1\ufffd')
self.assertEqual(response.context['var'], '1\ufffd')
def test_get_data_none(self):
msg = (
'Cannot encode None in a query string. Did you mean to pass an '
'empty string or omit the value?'
)
with self.assertRaisesMessage(TypeError, msg):
self.client.get('/get_view/', {'value': None})
def test_get_post_view(self):
"GET a view that normally expects POSTs"
response = self.client.get('/post_view/', {})
# Check some response details
self.assertEqual(response.status_code, 200)
self.assertEqual(response.templates[0].name, 'Empty GET Template')
self.assertTemplateUsed(response, 'Empty GET Template')
self.assertTemplateNotUsed(response, 'Empty POST Template')
def test_empty_post(self):
"POST an empty dictionary to a view"
response = self.client.post('/post_view/', {})
# Check some response details
self.assertEqual(response.status_code, 200)
self.assertEqual(response.templates[0].name, 'Empty POST Template')
self.assertTemplateNotUsed(response, 'Empty GET Template')
self.assertTemplateUsed(response, 'Empty POST Template')
def test_post(self):
"POST some data to a view"
post_data = {
'value': 37
}
response = self.client.post('/post_view/', post_data)
# Check some response details
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['data'], '37')
self.assertEqual(response.templates[0].name, 'POST Template')
self.assertContains(response, 'Data received')
def test_post_data_none(self):
msg = (
'Cannot encode None as POST data. Did you mean to pass an empty '
'string or omit the value?'
)
with self.assertRaisesMessage(TypeError, msg):
self.client.post('/post_view/', {'value': None})
def test_json_serialization(self):
"""The test client serializes JSON data."""
methods = ('post', 'put', 'patch', 'delete')
tests = (
({'value': 37}, {'value': 37}),
([37, True], [37, True]),
((37, False), [37, False]),
)
for method in methods:
with self.subTest(method=method):
for data, expected in tests:
with self.subTest(data):
client_method = getattr(self.client, method)
method_name = method.upper()
response = client_method('/json_view/', data, content_type='application/json')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['data'], expected)
self.assertContains(response, 'Viewing %s page.' % method_name)
def test_json_encoder_argument(self):
"""The test Client accepts a json_encoder."""
mock_encoder = mock.MagicMock()
mock_encoding = mock.MagicMock()
mock_encoder.return_value = mock_encoding
mock_encoding.encode.return_value = '{"value": 37}'
client = self.client_class(json_encoder=mock_encoder)
# Vendor-tree JSON content types (e.g. application/vnd.api+json) are accepted.
client.post('/json_view/', {'value': 37}, content_type='application/vnd.api+json')
self.assertTrue(mock_encoder.called)
self.assertTrue(mock_encoding.encode.called)
def test_put(self):
response = self.client.put('/put_view/', {'foo': 'bar'})
self.assertEqual(response.status_code, 200)
self.assertEqual(response.templates[0].name, 'PUT Template')
self.assertEqual(response.context['data'], "{'foo': 'bar'}")
self.assertEqual(response.context['Content-Length'], '14')
def test_trace(self):
"""TRACE a view"""
response = self.client.trace('/trace_view/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['method'], 'TRACE')
self.assertEqual(response.templates[0].name, 'TRACE Template')
def test_response_headers(self):
"Check the value of HTTP headers returned in a response"
response = self.client.get("/header_view/")
self.assertEqual(response['X-DJANGO-TEST'], 'Slartibartfast')
def test_response_attached_request(self):
"""
The returned response has a ``request`` attribute with the originating
environ dict and a ``wsgi_request`` with the originating WSGIRequest.
"""
response = self.client.get("/header_view/")
self.assertTrue(hasattr(response, 'request'))
self.assertTrue(hasattr(response, 'wsgi_request'))
for key, value in response.request.items():
self.assertIn(key, response.wsgi_request.environ)
self.assertEqual(response.wsgi_request.environ[key], value)
def test_response_resolver_match(self):
"""
The response contains a ResolverMatch instance.
"""
response = self.client.get('/header_view/')
self.assertTrue(hasattr(response, 'resolver_match'))
def test_response_resolver_match_redirect_follow(self):
"""
The response ResolverMatch instance contains the correct
information when following redirects.
"""
response = self.client.get('/redirect_view/', follow=True)
self.assertEqual(response.resolver_match.url_name, 'get_view')
def test_response_resolver_match_regular_view(self):
"""
The response ResolverMatch instance contains the correct
information when accessing a regular view.
"""
response = self.client.get('/get_view/')
self.assertEqual(response.resolver_match.url_name, 'get_view')
def test_raw_post(self):
"POST raw data (with a content type) to a view"
test_doc = """<?xml version="1.0" encoding="utf-8"?>
<library><book><title>Blink</title><author>Malcolm Gladwell</author></book></library>
"""
response = self.client.post('/raw_post_view/', test_doc, content_type='text/xml')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.templates[0].name, "Book template")
self.assertEqual(response.content, b"Blink - Malcolm Gladwell")
def test_insecure(self):
"GET a URL through http"
response = self.client.get('/secure_view/', secure=False)
self.assertFalse(response.test_was_secure_request)
self.assertEqual(response.test_server_port, '80')
def test_secure(self):
"GET a URL through https"
response = self.client.get('/secure_view/', secure=True)
self.assertTrue(response.test_was_secure_request)
self.assertEqual(response.test_server_port, '443')
def test_redirect(self):
"GET a URL that redirects elsewhere"
response = self.client.get('/redirect_view/')
self.assertRedirects(response, '/get_view/')
def test_redirect_with_query(self):
"GET a URL that redirects with given GET parameters"
response = self.client.get('/redirect_view/', {'var': 'value'})
self.assertRedirects(response, '/get_view/?var=value')
def test_redirect_with_query_ordering(self):
"""assertRedirects() ignores the order of query string parameters."""
response = self.client.get('/redirect_view/', {'var': 'value', 'foo': 'bar'})
self.assertRedirects(response, '/get_view/?var=value&foo=bar')
self.assertRedirects(response, '/get_view/?foo=bar&var=value')
def test_permanent_redirect(self):
"GET a URL that redirects permanently elsewhere"
response = self.client.get('/permanent_redirect_view/')
self.assertRedirects(response, '/get_view/', status_code=301)
def test_temporary_redirect(self):
"GET a URL that does a non-permanent redirect"
response = self.client.get('/temporary_redirect_view/')
self.assertRedirects(response, '/get_view/', status_code=302)
def test_redirect_to_strange_location(self):
"GET a URL that redirects to a non-200 page"
response = self.client.get('/double_redirect_view/')
# The response was a 302, and the attempt to fetch the redirect
# target returned a 301.
self.assertRedirects(response, '/permanent_redirect_view/', target_status_code=301)
def test_follow_redirect(self):
"A URL that redirects can be followed to termination."
response = self.client.get('/double_redirect_view/', follow=True)
self.assertRedirects(response, '/get_view/', status_code=302, target_status_code=200)
self.assertEqual(len(response.redirect_chain), 2)
def test_follow_relative_redirect(self):
"A URL with a relative redirect can be followed."
response = self.client.get('/accounts/', follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.request['PATH_INFO'], '/accounts/login/')
def test_follow_relative_redirect_no_trailing_slash(self):
"A URL with a relative redirect with no trailing slash can be followed."
response = self.client.get('/accounts/no_trailing_slash', follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.request['PATH_INFO'], '/accounts/login/')
def test_follow_307_and_308_redirect(self):
"""
A 307 or 308 redirect preserves the request method after the redirect.
"""
methods = ('get', 'post', 'head', 'options', 'put', 'patch', 'delete', 'trace')
codes = (307, 308)
for method, code in itertools.product(methods, codes):
with self.subTest(method=method, code=code):
req_method = getattr(self.client, method)
response = req_method('/redirect_view_%s/' % code, data={'value': 'test'}, follow=True)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.request['PATH_INFO'], '/post_view/')
self.assertEqual(response.request['REQUEST_METHOD'], method.upper())
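# Context for the method check above (general HTTP semantics, not
# Django-specific): clients historically rewrite 301/302 responses to GET,
# whereas 307 (RFC 7231) and 308 (RFC 7538) require the original method and
# body to be replayed, which is what the three preservation tests that follow
# rely on.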
def test_follow_307_and_308_preserves_post_data(self):
for code in (307, 308):
with self.subTest(code=code):
response = self.client.post('/redirect_view_%s/' % code, data={'value': 'test'}, follow=True)
self.assertContains(response, 'test is the value')
def test_follow_307_and_308_preserves_put_body(self):
for code in (307, 308):
with self.subTest(code=code):
response = self.client.put('/redirect_view_%s/?to=/put_view/' % code, data='a=b', follow=True)
self.assertContains(response, 'a=b is the body')
def test_follow_307_and_308_preserves_get_params(self):
data = {'var': 30, 'to': '/get_view/'}
for code in (307, 308):
with self.subTest(code=code):
response = self.client.get('/redirect_view_%s/' % code, data=data, follow=True)
self.assertContains(response, '30 is the value')
def test_redirect_http(self):
"GET a URL that redirects to an http URI"
response = self.client.get('/http_redirect_view/', follow=True)
self.assertFalse(response.test_was_secure_request)
def test_redirect_https(self):
"GET a URL that redirects to an https URI"
response = self.client.get('/https_redirect_view/', follow=True)
self.assertTrue(response.test_was_secure_request)
def test_notfound_response(self):
"GET a URL that responds as '404:Not Found'"
response = self.client.get('/bad_view/')
self.assertContains(response, 'MAGIC', status_code=404)
def test_valid_form(self):
"POST valid data to a form"
post_data = {
'text': 'Hello World',
'email': '[email protected]',
'value': 37,
'single': 'b',
'multi': ('b', 'c', 'e')
}
response = self.client.post('/form_view/', post_data)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "Valid POST Template")
def test_valid_form_with_hints(self):
"GET a form, providing hints in the GET data"
hints = {
'text': 'Hello World',
'multi': ('b', 'c', 'e')
}
response = self.client.get('/form_view/', data=hints)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "Form GET Template")
# The multi-value hints were rendered without any 'invalid choice' errors.
self.assertContains(response, 'Select a valid choice.', 0)
def test_incomplete_data_form(self):
"POST incomplete data to a form"
post_data = {
'text': 'Hello World',
'value': 37
}
response = self.client.post('/form_view/', post_data)
self.assertContains(response, 'This field is required.', 3)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "Invalid POST Template")
self.assertFormError(response, 'form', 'email', 'This field is required.')
self.assertFormError(response, 'form', 'single', 'This field is required.')
self.assertFormError(response, 'form', 'multi', 'This field is required.')
def test_form_error(self):
"POST erroneous data to a form"
post_data = {
'text': 'Hello World',
'email': 'not an email address',
'value': 37,
'single': 'b',
'multi': ('b', 'c', 'e')
}
response = self.client.post('/form_view/', post_data)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "Invalid POST Template")
self.assertFormError(response, 'form', 'email', 'Enter a valid email address.')
def test_valid_form_with_template(self):
"POST valid data to a form using multiple templates"
post_data = {
'text': 'Hello World',
'email': '[email protected]',
'value': 37,
'single': 'b',
'multi': ('b', 'c', 'e')
}
response = self.client.post('/form_view_with_template/', post_data)
self.assertContains(response, 'POST data OK')
self.assertTemplateUsed(response, "form_view.html")
self.assertTemplateUsed(response, 'base.html')
self.assertTemplateNotUsed(response, "Valid POST Template")
def test_incomplete_data_form_with_template(self):
"POST incomplete data to a form using multiple templates"
post_data = {
'text': 'Hello World',
'value': 37
}
response = self.client.post('/form_view_with_template/', post_data)
self.assertContains(response, 'POST data has errors')
self.assertTemplateUsed(response, 'form_view.html')
self.assertTemplateUsed(response, 'base.html')
self.assertTemplateNotUsed(response, "Invalid POST Template")
self.assertFormError(response, 'form', 'email', 'This field is required.')
self.assertFormError(response, 'form', 'single', 'This field is required.')
self.assertFormError(response, 'form', 'multi', 'This field is required.')
def test_form_error_with_template(self):
"POST erroneous data to a form using multiple templates"
post_data = {
'text': 'Hello World',
'email': 'not an email address',
'value': 37,
'single': 'b',
'multi': ('b', 'c', 'e')
}
response = self.client.post('/form_view_with_template/', post_data)
self.assertContains(response, 'POST data has errors')
self.assertTemplateUsed(response, "form_view.html")
self.assertTemplateUsed(response, 'base.html')
self.assertTemplateNotUsed(response, "Invalid POST Template")
self.assertFormError(response, 'form', 'email', 'Enter a valid email address.')
def test_unknown_page(self):
"GET an invalid URL"
response = self.client.get('/unknown_view/')
# The response was a 404
self.assertEqual(response.status_code, 404)
def test_url_parameters(self):
"Make sure that URL ;-parameters are not stripped."
response = self.client.get('/unknown_view/;some-parameter')
# The path in the response includes it (ignore that it's a 404)
self.assertEqual(response.request['PATH_INFO'], '/unknown_view/;some-parameter')
def test_view_with_login(self):
"Request a page that is protected with @login_required"
# Get the page without logging in. Should result in 302.
response = self.client.get('/login_protected_view/')
self.assertRedirects(response, '/accounts/login/?next=/login_protected_view/')
# Log in
login = self.client.login(username='testclient', password='password')
self.assertTrue(login, 'Could not log in')
# Request a page that requires a login
response = self.client.get('/login_protected_view/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['user'].username, 'testclient')
@override_settings(
INSTALLED_APPS=['django.contrib.auth'],
SESSION_ENGINE='django.contrib.sessions.backends.file',
)
def test_view_with_login_when_sessions_app_is_not_installed(self):
self.test_view_with_login()
def test_view_with_force_login(self):
"Request a page that is protected with @login_required"
# Get the page without logging in. Should result in 302.
response = self.client.get('/login_protected_view/')
self.assertRedirects(response, '/accounts/login/?next=/login_protected_view/')
# Log in
self.client.force_login(self.u1)
# Request a page that requires a login
response = self.client.get('/login_protected_view/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['user'].username, 'testclient')
def test_view_with_method_login(self):
"Request a page that is protected with a @login_required method"
# Get the page without logging in. Should result in 302.
response = self.client.get('/login_protected_method_view/')
self.assertRedirects(response, '/accounts/login/?next=/login_protected_method_view/')
# Log in
login = self.client.login(username='testclient', password='password')
self.assertTrue(login, 'Could not log in')
# Request a page that requires a login
response = self.client.get('/login_protected_method_view/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['user'].username, 'testclient')
def test_view_with_method_force_login(self):
"Request a page that is protected with a @login_required method"
# Get the page without logging in. Should result in 302.
response = self.client.get('/login_protected_method_view/')
self.assertRedirects(response, '/accounts/login/?next=/login_protected_method_view/')
# Log in
self.client.force_login(self.u1)
# Request a page that requires a login
response = self.client.get('/login_protected_method_view/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['user'].username, 'testclient')
def test_view_with_login_and_custom_redirect(self):
"Request a page that is protected with @login_required(redirect_field_name='redirect_to')"
# Get the page without logging in. Should result in 302.
response = self.client.get('/login_protected_view_custom_redirect/')
self.assertRedirects(response, '/accounts/login/?redirect_to=/login_protected_view_custom_redirect/')
# Log in
login = self.client.login(username='testclient', password='password')
self.assertTrue(login, 'Could not log in')
# Request a page that requires a login
response = self.client.get('/login_protected_view_custom_redirect/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['user'].username, 'testclient')
def test_view_with_force_login_and_custom_redirect(self):
"""
Request a page that is protected with
@login_required(redirect_field_name='redirect_to')
"""
# Get the page without logging in. Should result in 302.
response = self.client.get('/login_protected_view_custom_redirect/')
self.assertRedirects(response, '/accounts/login/?redirect_to=/login_protected_view_custom_redirect/')
# Log in
self.client.force_login(self.u1)
# Request a page that requires a login
response = self.client.get('/login_protected_view_custom_redirect/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['user'].username, 'testclient')
def test_view_with_bad_login(self):
"Request a page that is protected with @login, but use bad credentials"
login = self.client.login(username='otheruser', password='nopassword')
self.assertFalse(login)
def test_view_with_inactive_login(self):
"""
An inactive user may log in if the authentication backend allows it.
"""
credentials = {'username': 'inactive', 'password': 'password'}
self.assertFalse(self.client.login(**credentials))
with self.settings(AUTHENTICATION_BACKENDS=['django.contrib.auth.backends.AllowAllUsersModelBackend']):
self.assertTrue(self.client.login(**credentials))
@override_settings(
AUTHENTICATION_BACKENDS=[
'django.contrib.auth.backends.ModelBackend',
'django.contrib.auth.backends.AllowAllUsersModelBackend',
]
)
def test_view_with_inactive_force_login(self):
"Request a page that is protected with @login, but use an inactive login"
# Get the page without logging in. Should result in 302.
response = self.client.get('/login_protected_view/')
self.assertRedirects(response, '/accounts/login/?next=/login_protected_view/')
# Log in
self.client.force_login(self.u2, backend='django.contrib.auth.backends.AllowAllUsersModelBackend')
# Request a page that requires a login
response = self.client.get('/login_protected_view/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['user'].username, 'inactive')
def test_logout(self):
"Request a logout after logging in"
# Log in
self.client.login(username='testclient', password='password')
# Request a page that requires a login
response = self.client.get('/login_protected_view/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['user'].username, 'testclient')
# Log out
self.client.logout()
# Request a page that requires a login
response = self.client.get('/login_protected_view/')
self.assertRedirects(response, '/accounts/login/?next=/login_protected_view/')
def test_logout_with_force_login(self):
"Request a logout after logging in"
# Log in
self.client.force_login(self.u1)
# Request a page that requires a login
response = self.client.get('/login_protected_view/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['user'].username, 'testclient')
# Log out
self.client.logout()
# Request a page that requires a login
response = self.client.get('/login_protected_view/')
self.assertRedirects(response, '/accounts/login/?next=/login_protected_view/')
@override_settings(
AUTHENTICATION_BACKENDS=[
'django.contrib.auth.backends.ModelBackend',
'test_client.auth_backends.TestClientBackend',
],
)
def test_force_login_with_backend(self):
"""
Request a page that is protected with @login_required when using
force_login() and passing a backend.
"""
# Get the page without logging in. Should result in 302.
response = self.client.get('/login_protected_view/')
self.assertRedirects(response, '/accounts/login/?next=/login_protected_view/')
# Log in
self.client.force_login(self.u1, backend='test_client.auth_backends.TestClientBackend')
self.assertEqual(self.u1.backend, 'test_client.auth_backends.TestClientBackend')
# Request a page that requires a login
response = self.client.get('/login_protected_view/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['user'].username, 'testclient')
@override_settings(
AUTHENTICATION_BACKENDS=[
'django.contrib.auth.backends.ModelBackend',
'test_client.auth_backends.TestClientBackend',
],
)
def test_force_login_without_backend(self):
"""
force_login() without passing a backend and with multiple backends
configured should automatically use the first backend.
"""
self.client.force_login(self.u1)
response = self.client.get('/login_protected_view/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context['user'].username, 'testclient')
self.assertEqual(self.u1.backend, 'django.contrib.auth.backends.ModelBackend')
@override_settings(AUTHENTICATION_BACKENDS=[
'test_client.auth_backends.BackendWithoutGetUserMethod',
'django.contrib.auth.backends.ModelBackend',
])
def test_force_login_with_backend_missing_get_user(self):
"""
force_login() skips auth backends without a get_user() method.
"""
self.client.force_login(self.u1)
self.assertEqual(self.u1.backend, 'django.contrib.auth.backends.ModelBackend')
@override_settings(SESSION_ENGINE="django.contrib.sessions.backends.signed_cookies")
def test_logout_cookie_sessions(self):
self.test_logout()
def test_view_with_permissions(self):
"Request a page that is protected with @permission_required"
# Get the page without logging in. Should result in 302.
response = self.client.get('/permission_protected_view/')
self.assertRedirects(response, '/accounts/login/?next=/permission_protected_view/')
# Log in
login = self.client.login(username='testclient', password='password')
self.assertTrue(login, 'Could not log in')
# Log in with wrong permissions. Should result in 302.
response = self.client.get('/permission_protected_view/')
self.assertRedirects(response, '/accounts/login/?next=/permission_protected_view/')
# TODO: Log in with right permissions and request the page again
def test_view_with_permissions_exception(self):
"Request a page that is protected with @permission_required but raises an exception"
# Get the page without logging in. Should result in 403.
response = self.client.get('/permission_protected_view_exception/')
self.assertEqual(response.status_code, 403)
# Log in
login = self.client.login(username='testclient', password='password')
self.assertTrue(login, 'Could not log in')
# Log in with wrong permissions. Should result in 403.
response = self.client.get('/permission_protected_view_exception/')
self.assertEqual(response.status_code, 403)
def test_view_with_method_permissions(self):
"Request a page that is protected with a @permission_required method"
# Get the page without logging in. Should result in 302.
response = self.client.get('/permission_protected_method_view/')
self.assertRedirects(response, '/accounts/login/?next=/permission_protected_method_view/')
# Log in
login = self.client.login(username='testclient', password='password')
self.assertTrue(login, 'Could not log in')
# Log in with wrong permissions. Should result in 302.
response = self.client.get('/permission_protected_method_view/')
self.assertRedirects(response, '/accounts/login/?next=/permission_protected_method_view/')
# TODO: Log in with right permissions and request the page again
def test_external_redirect(self):
response = self.client.get('/django_project_redirect/')
self.assertRedirects(response, 'https://www.djangoproject.com/', fetch_redirect_response=False)
def test_external_redirect_with_fetch_error_msg(self):
"""
assertRedirects without fetch_redirect_response=False raises
a relevant ValueError rather than a nondescript AssertionError.
"""
response = self.client.get('/django_project_redirect/')
msg = (
"The test client is unable to fetch remote URLs (got "
"https://www.djangoproject.com/). If the host is served by Django, "
"add 'www.djangoproject.com' to ALLOWED_HOSTS. "
"Otherwise, use assertRedirects(..., fetch_redirect_response=False)."
)
with self.assertRaisesMessage(ValueError, msg):
self.assertRedirects(response, 'https://www.djangoproject.com/')
def test_session_modifying_view(self):
"Request a page that modifies the session"
# Session value isn't set initially
with self.assertRaises(KeyError):
self.client.session['tobacconist']
self.client.post('/session_view/')
# The session was modified
self.assertEqual(self.client.session['tobacconist'], 'hovercraft')
@override_settings(
INSTALLED_APPS=[],
SESSION_ENGINE='django.contrib.sessions.backends.file',
)
def test_sessions_app_is_not_installed(self):
self.test_session_modifying_view()
@override_settings(
INSTALLED_APPS=[],
SESSION_ENGINE='django.contrib.sessions.backends.nonexistent',
)
def test_session_engine_is_invalid(self):
with self.assertRaisesMessage(ImportError, 'nonexistent'):
self.test_session_modifying_view()
def test_view_with_exception(self):
"Request a page that is known to throw an error"
with self.assertRaises(KeyError):
self.client.get("/broken_view/")
def test_mail_sending(self):
"Mail is redirected to a dummy outbox during test setup"
response = self.client.get('/mail_sending_view/')
self.assertEqual(response.status_code, 200)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, 'Test message')
self.assertEqual(mail.outbox[0].body, 'This is a test email')
self.assertEqual(mail.outbox[0].from_email, '[email protected]')
self.assertEqual(mail.outbox[0].to[0], '[email protected]')
self.assertEqual(mail.outbox[0].to[1], '[email protected]')
def test_reverse_lazy_decodes(self):
"reverse_lazy() works in the test client"
data = {'var': 'data'}
response = self.client.get(reverse_lazy('get_view'), data)
# Check some response details
self.assertContains(response, 'This is a test')
def test_relative_redirect(self):
response = self.client.get('/accounts/')
self.assertRedirects(response, '/accounts/login/')
def test_relative_redirect_no_trailing_slash(self):
response = self.client.get('/accounts/no_trailing_slash')
self.assertRedirects(response, '/accounts/login/')
def test_mass_mail_sending(self):
"Mass mail is redirected to a dummy outbox during test setup"
response = self.client.get('/mass_mail_sending_view/')
self.assertEqual(response.status_code, 200)
self.assertEqual(len(mail.outbox), 2)
self.assertEqual(mail.outbox[0].subject, 'First Test message')
self.assertEqual(mail.outbox[0].body, 'This is the first test email')
self.assertEqual(mail.outbox[0].from_email, '[email protected]')
self.assertEqual(mail.outbox[0].to[0], '[email protected]')
self.assertEqual(mail.outbox[0].to[1], '[email protected]')
self.assertEqual(mail.outbox[1].subject, 'Second Test message')
self.assertEqual(mail.outbox[1].body, 'This is the second test email')
self.assertEqual(mail.outbox[1].from_email, '[email protected]')
self.assertEqual(mail.outbox[1].to[0], '[email protected]')
self.assertEqual(mail.outbox[1].to[1], '[email protected]')
def test_exception_following_nested_client_request(self):
"""
A nested test client request shouldn't clobber exception signals from
the outer client request.
"""
with self.assertRaisesMessage(Exception, 'exception message'):
self.client.get('/nesting_exception_view/')
def test_response_raises_multi_arg_exception(self):
"""A request may raise an exception with more than one required arg."""
with self.assertRaises(TwoArgException):
self.client.get('/two_arg_exception/')
def test_uploading_temp_file(self):
with tempfile.TemporaryFile() as test_file:
response = self.client.post('/upload_view/', data={'temp_file': test_file})
self.assertEqual(response.content, b'temp_file')
def test_uploading_named_temp_file(self):
test_file = tempfile.NamedTemporaryFile()
response = self.client.post('/upload_view/', data={'named_temp_file': test_file})
self.assertEqual(response.content, b'named_temp_file')
@override_settings(
MIDDLEWARE=['django.middleware.csrf.CsrfViewMiddleware'],
ROOT_URLCONF='test_client.urls',
)
class CSRFEnabledClientTests(SimpleTestCase):
def test_csrf_enabled_client(self):
"A client can be instantiated with CSRF checks enabled"
csrf_client = Client(enforce_csrf_checks=True)
# The normal client allows the post
response = self.client.post('/post_view/', {})
self.assertEqual(response.status_code, 200)
# The CSRF-enabled client rejects it
response = csrf_client.post('/post_view/', {})
self.assertEqual(response.status_code, 403)
class CustomTestClient(Client):
i_am_customized = "Yes"
class CustomTestClientTest(SimpleTestCase):
client_class = CustomTestClient
def test_custom_test_client(self):
"""A test case can specify a custom class for self.client."""
self.assertIs(hasattr(self.client, "i_am_customized"), True)
def _generic_view(request):
return HttpResponse(status=200)
@override_settings(ROOT_URLCONF='test_client.urls')
class RequestFactoryTest(SimpleTestCase):
"""Tests for the request factory."""
# A mapping between names of HTTP/1.1 methods and their test views.
http_methods_and_views = (
('get', get_view),
('post', post_view),
('put', _generic_view),
('patch', _generic_view),
('delete', _generic_view),
('head', _generic_view),
('options', _generic_view),
('trace', trace_view),
)
request_factory = RequestFactory()
def test_request_factory(self):
"""The request factory implements all the HTTP/1.1 methods."""
for method_name, view in self.http_methods_and_views:
method = getattr(self.request_factory, method_name)
request = method('/somewhere/')
response = view(request)
self.assertEqual(response.status_code, 200)
def test_get_request_from_factory(self):
"""
A view called with a factory-built GET request returns a templated response.
"""
request = self.request_factory.get('/somewhere/')
response = get_view(request)
self.assertContains(response, 'This is a test')
def test_trace_request_from_factory(self):
"""The request factory returns an echo response for a TRACE request."""
url_path = '/somewhere/'
request = self.request_factory.trace(url_path)
response = trace_view(request)
protocol = request.META["SERVER_PROTOCOL"]
echoed_request_line = "TRACE {} {}".format(url_path, protocol)
self.assertContains(response, echoed_request_line)
|
0899e6cc16888f9794691236eebb5612f3105b88ab70bf931340a01c8107cce0 | """Tests related to django.db.backends that haven't been organized."""
import datetime
import threading
import unittest
import warnings
from django.core.management.color import no_style
from django.db import (
DEFAULT_DB_ALIAS, DatabaseError, IntegrityError, connection, connections,
reset_queries, transaction,
)
from django.db.backends.base.base import BaseDatabaseWrapper
from django.db.backends.signals import connection_created
from django.db.backends.utils import CursorWrapper
from django.db.models.sql.constants import CURSOR
from django.test import (
TestCase, TransactionTestCase, override_settings, skipIfDBFeature,
skipUnlessDBFeature,
)
from .models import (
Article, Object, ObjectReference, Person, Post, RawData, Reporter,
ReporterProxy, SchoolClass, Square,
VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ,
)
class DateQuotingTest(TestCase):
def test_django_date_trunc(self):
"""
Test the custom ``django_date_trunc`` method, in particular against
fields whose names clash with the strings passed to it (e.g. 'year') (#12818).
"""
updated = datetime.datetime(2010, 2, 20)
SchoolClass.objects.create(year=2009, last_updated=updated)
years = SchoolClass.objects.dates('last_updated', 'year')
self.assertEqual(list(years), [datetime.date(2010, 1, 1)])
def test_django_date_extract(self):
"""
Test the custom ``django_date_extract`` method, in particular against
fields whose names clash with the strings passed to it (e.g. 'day') (#12818).
"""
updated = datetime.datetime(2010, 2, 20)
SchoolClass.objects.create(year=2009, last_updated=updated)
classes = SchoolClass.objects.filter(last_updated__day=20)
self.assertEqual(len(classes), 1)
@override_settings(DEBUG=True)
class LastExecutedQueryTest(TestCase):
def test_last_executed_query(self):
"""
last_executed_query should not raise an exception even if no previous
query has been run.
"""
with connection.cursor() as cursor:
connection.ops.last_executed_query(cursor, '', ())
def test_debug_sql(self):
list(Reporter.objects.filter(first_name="test"))
sql = connection.queries[-1]['sql'].lower()
self.assertIn("select", sql)
self.assertIn(Reporter._meta.db_table, sql)
def test_query_encoding(self):
"""last_executed_query() returns a string."""
data = RawData.objects.filter(raw_data=b'\x00\x46 \xFE').extra(select={'föö': 1})
sql, params = data.query.sql_with_params()
cursor = data.query.get_compiler('default').execute_sql(CURSOR)
last_sql = cursor.db.ops.last_executed_query(cursor, sql, params)
self.assertIsInstance(last_sql, str)
class ParameterHandlingTest(TestCase):
def test_bad_parameter_count(self):
"An executemany call with too many/not enough parameters will raise an exception (Refs #12612)"
with connection.cursor() as cursor:
query = ('INSERT INTO %s (%s, %s) VALUES (%%s, %%s)' % (
connection.introspection.identifier_converter('backends_square'),
connection.ops.quote_name('root'),
connection.ops.quote_name('square')
))
with self.assertRaises(Exception):
cursor.executemany(query, [(1, 2, 3)])
with self.assertRaises(Exception):
cursor.executemany(query, [(1,)])
class LongNameTest(TransactionTestCase):
"""Long primary keys and model names can result in a sequence name
that exceeds the database limits, which will result in truncation
on certain databases (e.g., Postgres). The backend needs to use
the correct sequence name in last_insert_id and other places, so
check that it does. Refs #8901.
"""
available_apps = ['backends']
def test_sequence_name_length_limits_create(self):
"""Test creation of model with long name and long pk name doesn't error. Ref #8901"""
VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ.objects.create()
def test_sequence_name_length_limits_m2m(self):
"""
An m2m save of a model with a long name and a long m2m field name
doesn't error (#8901).
"""
obj = VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ.objects.create()
rel_obj = Person.objects.create(first_name='Django', last_name='Reinhardt')
obj.m2m_also_quite_long_zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz.add(rel_obj)
def test_sequence_name_length_limits_flush(self):
"""
Sequence resetting as part of a flush with model with long name and
long pk name doesn't error (#8901).
"""
# A full flush is expensive for the whole test run, so we dig into the
# internals to generate the likely offending SQL and run it manually.
# Some convenience aliases
VLM = VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ
VLM_m2m = VLM.m2m_also_quite_long_zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz.through
tables = [
VLM._meta.db_table,
VLM_m2m._meta.db_table,
]
sequences = [
{
'column': VLM._meta.pk.column,
'table': VLM._meta.db_table
},
]
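# sql_flush() returns the raw statements a flush would run for just these
# tables and sequences, which lets the test exercise the (possibly
# truncated) sequence names without flushing every installed app.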
sql_list = connection.ops.sql_flush(no_style(), tables, sequences)
with connection.cursor() as cursor:
for statement in sql_list:
cursor.execute(statement)
class SequenceResetTest(TestCase):
def test_generic_relation(self):
"Sequence names are correct when resetting generic relations (Ref #13941)"
# Create an object with a manually specified PK
Post.objects.create(id=10, name='1st post', text='hello world')
# Reset the sequences for the database
commands = connections[DEFAULT_DB_ALIAS].ops.sequence_reset_sql(no_style(), [Post])
with connection.cursor() as cursor:
for sql in commands:
cursor.execute(sql)
# If we create a new object now, it should have a PK greater
# than the PK we specified manually.
obj = Post.objects.create(name='New post', text='goodbye world')
self.assertGreater(obj.pk, 10)
# This test needs to run outside of a transaction, otherwise closing the
# connection would implicitly rollback and cause problems during teardown.
class ConnectionCreatedSignalTest(TransactionTestCase):
available_apps = []
# Unfortunately with sqlite3 the in-memory test database cannot be closed,
# and so it cannot be re-opened during testing.
@skipUnlessDBFeature('test_db_allows_multiple_connections')
def test_signal(self):
data = {}
def receiver(sender, connection, **kwargs):
data["connection"] = connection
connection_created.connect(receiver)
connection.close()
connection.cursor()
self.assertIs(data["connection"].connection, connection.connection)
connection_created.disconnect(receiver)
data.clear()
connection.cursor()
self.assertEqual(data, {})
class EscapingChecks(TestCase):
"""
All tests in this test case are also run with settings.DEBUG=True in
EscapingChecksDebug test case, to also test CursorDebugWrapper.
"""
bare_select_suffix = connection.features.bare_select_suffix
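# Some backends cannot run a SELECT without a FROM clause (Oracle, for
# example, requires "FROM DUAL"); bare_select_suffix supplies whatever the
# current backend needs so the raw queries below stay portable.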
def test_paramless_no_escaping(self):
with connection.cursor() as cursor:
cursor.execute("SELECT '%s'" + self.bare_select_suffix)
self.assertEqual(cursor.fetchall()[0][0], '%s')
def test_parameter_escaping(self):
with connection.cursor() as cursor:
cursor.execute("SELECT '%%', %s" + self.bare_select_suffix, ('%d',))
self.assertEqual(cursor.fetchall()[0], ('%', '%d'))
@override_settings(DEBUG=True)
class EscapingChecksDebug(EscapingChecks):
pass
class BackendTestCase(TransactionTestCase):
available_apps = ['backends']
def create_squares_with_executemany(self, args):
self.create_squares(args, 'format', True)
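# create_squares() below builds a raw INSERT for the Square table using
# either DB-API 'format' placeholders (%s, positional) or 'pyformat'
# placeholders (%(name)s, named) as defined by PEP 249, and runs it with
# execute() or executemany() depending on `multiple`.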
def create_squares(self, args, paramstyle, multiple):
opts = Square._meta
tbl = connection.introspection.identifier_converter(opts.db_table)
f1 = connection.ops.quote_name(opts.get_field('root').column)
f2 = connection.ops.quote_name(opts.get_field('square').column)
if paramstyle == 'format':
query = 'INSERT INTO %s (%s, %s) VALUES (%%s, %%s)' % (tbl, f1, f2)
elif paramstyle == 'pyformat':
query = 'INSERT INTO %s (%s, %s) VALUES (%%(root)s, %%(square)s)' % (tbl, f1, f2)
else:
raise ValueError("unsupported paramstyle in test")
with connection.cursor() as cursor:
if multiple:
cursor.executemany(query, args)
else:
cursor.execute(query, args)
def test_cursor_executemany(self):
# Test cursor.executemany #4896
args = [(i, i ** 2) for i in range(-5, 6)]
self.create_squares_with_executemany(args)
self.assertEqual(Square.objects.count(), 11)
for i in range(-5, 6):
square = Square.objects.get(root=i)
self.assertEqual(square.square, i ** 2)
def test_cursor_executemany_with_empty_params_list(self):
# Test executemany with params=[] does nothing #4765
args = []
self.create_squares_with_executemany(args)
self.assertEqual(Square.objects.count(), 0)
def test_cursor_executemany_with_iterator(self):
# Test executemany accepts iterators #10320
args = iter((i, i ** 2) for i in range(-3, 2))
self.create_squares_with_executemany(args)
self.assertEqual(Square.objects.count(), 5)
args = iter((i, i ** 2) for i in range(3, 7))
with override_settings(DEBUG=True):
# same test for DebugCursorWrapper
self.create_squares_with_executemany(args)
self.assertEqual(Square.objects.count(), 9)
@skipUnlessDBFeature('supports_paramstyle_pyformat')
def test_cursor_execute_with_pyformat(self):
# Support pyformat style passing of parameters #10070
args = {'root': 3, 'square': 9}
self.create_squares(args, 'pyformat', multiple=False)
self.assertEqual(Square.objects.count(), 1)
@skipUnlessDBFeature('supports_paramstyle_pyformat')
def test_cursor_executemany_with_pyformat(self):
# Support pyformat style passing of parameters #10070
args = [{'root': i, 'square': i ** 2} for i in range(-5, 6)]
self.create_squares(args, 'pyformat', multiple=True)
self.assertEqual(Square.objects.count(), 11)
for i in range(-5, 6):
square = Square.objects.get(root=i)
self.assertEqual(square.square, i ** 2)
@skipUnlessDBFeature('supports_paramstyle_pyformat')
def test_cursor_executemany_with_pyformat_iterator(self):
args = iter({'root': i, 'square': i ** 2} for i in range(-3, 2))
self.create_squares(args, 'pyformat', multiple=True)
self.assertEqual(Square.objects.count(), 5)
args = iter({'root': i, 'square': i ** 2} for i in range(3, 7))
with override_settings(DEBUG=True):
# same test for DebugCursorWrapper
self.create_squares(args, 'pyformat', multiple=True)
self.assertEqual(Square.objects.count(), 9)
def test_unicode_fetches(self):
# fetchone, fetchmany, fetchall return strings as unicode objects #6254
qn = connection.ops.quote_name
Person(first_name="John", last_name="Doe").save()
Person(first_name="Jane", last_name="Doe").save()
Person(first_name="Mary", last_name="Agnelline").save()
Person(first_name="Peter", last_name="Parker").save()
Person(first_name="Clark", last_name="Kent").save()
opts2 = Person._meta
f3, f4 = opts2.get_field('first_name'), opts2.get_field('last_name')
with connection.cursor() as cursor:
cursor.execute(
'SELECT %s, %s FROM %s ORDER BY %s' % (
qn(f3.column),
qn(f4.column),
connection.introspection.identifier_converter(opts2.db_table),
qn(f3.column),
)
)
self.assertEqual(cursor.fetchone(), ('Clark', 'Kent'))
self.assertEqual(list(cursor.fetchmany(2)), [('Jane', 'Doe'), ('John', 'Doe')])
self.assertEqual(list(cursor.fetchall()), [('Mary', 'Agnelline'), ('Peter', 'Parker')])
def test_unicode_password(self):
old_password = connection.settings_dict['PASSWORD']
connection.settings_dict['PASSWORD'] = "françois"
try:
connection.cursor()
except DatabaseError:
# As password is probably wrong, a database exception is expected
pass
except Exception as e:
self.fail("Unexpected error raised with unicode password: %s" % e)
finally:
connection.settings_dict['PASSWORD'] = old_password
def test_database_operations_helper_class(self):
# Ticket #13630
self.assertTrue(hasattr(connection, 'ops'))
self.assertTrue(hasattr(connection.ops, 'connection'))
self.assertEqual(connection, connection.ops.connection)
def test_database_operations_init(self):
"""
DatabaseOperations initialization doesn't query the database.
See #17656.
"""
with self.assertNumQueries(0):
connection.ops.__class__(connection)
def test_cached_db_features(self):
self.assertIn(connection.features.supports_transactions, (True, False))
self.assertIn(connection.features.can_introspect_foreign_keys, (True, False))
def test_duplicate_table_error(self):
""" Creating an existing table returns a DatabaseError """
query = 'CREATE TABLE %s (id INTEGER);' % Article._meta.db_table
with connection.cursor() as cursor:
with self.assertRaises(DatabaseError):
cursor.execute(query)
def test_cursor_contextmanager(self):
"""
Cursors can be used as a context manager
"""
with connection.cursor() as cursor:
self.assertIsInstance(cursor, CursorWrapper)
# Both InterfaceError and ProgrammingError seem to be used when
# accessing a closed cursor (psycopg2 raises InterfaceError, the rest
# seem to use ProgrammingError).
with self.assertRaises(connection.features.closed_cursor_error_class):
# cursor should be closed, so no queries should be possible.
cursor.execute("SELECT 1" + connection.features.bare_select_suffix)
@unittest.skipUnless(connection.vendor == 'postgresql',
"Psycopg2 specific cursor.closed attribute needed")
def test_cursor_contextmanager_closing(self):
# There isn't a generic way to test that cursors are closed, but
# psycopg2 lets us check it via the cursor's `closed` attribute,
# so this test runs only on psycopg2.
with connection.cursor() as cursor:
self.assertIsInstance(cursor, CursorWrapper)
self.assertTrue(cursor.closed)
# Unfortunately with sqlite3 the in-memory test database cannot be closed.
@skipUnlessDBFeature('test_db_allows_multiple_connections')
def test_is_usable_after_database_disconnects(self):
"""
is_usable() doesn't crash when the database disconnects (#21553).
"""
# Open a connection to the database.
with connection.cursor():
pass
# Emulate a connection close by the database.
connection._close()
# Even then is_usable() should not raise an exception.
try:
self.assertFalse(connection.is_usable())
finally:
# Clean up the mess created by connection._close(). Since the
# connection is already closed, this crashes on some backends.
try:
connection.close()
except Exception:
pass
@override_settings(DEBUG=True)
def test_queries(self):
"""
Test the documented API of connection.queries.
"""
with connection.cursor() as cursor:
reset_queries()
cursor.execute("SELECT 1" + connection.features.bare_select_suffix)
self.assertEqual(1, len(connection.queries))
self.assertIsInstance(connection.queries, list)
self.assertIsInstance(connection.queries[0], dict)
self.assertCountEqual(connection.queries[0], ['sql', 'time'])
reset_queries()
self.assertEqual(0, len(connection.queries))
# Unfortunately with sqlite3 the in-memory test database cannot be closed.
@skipUnlessDBFeature('test_db_allows_multiple_connections')
@override_settings(DEBUG=True)
def test_queries_limit(self):
"""
The backend doesn't store an unlimited number of queries (#12581).
"""
old_queries_limit = BaseDatabaseWrapper.queries_limit
BaseDatabaseWrapper.queries_limit = 3
new_connection = connection.copy()
# Initialize the connection and clear initialization statements.
with new_connection.cursor():
pass
new_connection.queries_log.clear()
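# connection.queries is backed by queries_log, a bounded deque whose
# maxlen is queries_limit, so once more than `queries_limit` statements
# have run only the most recent ones are kept and accessing `queries`
# warns about the truncation.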
try:
with new_connection.cursor() as cursor:
cursor.execute("SELECT 1" + new_connection.features.bare_select_suffix)
cursor.execute("SELECT 2" + new_connection.features.bare_select_suffix)
with warnings.catch_warnings(record=True) as w:
self.assertEqual(2, len(new_connection.queries))
self.assertEqual(0, len(w))
with new_connection.cursor() as cursor:
cursor.execute("SELECT 3" + new_connection.features.bare_select_suffix)
cursor.execute("SELECT 4" + new_connection.features.bare_select_suffix)
msg = "Limit for query logging exceeded, only the last 3 queries will be returned."
with self.assertWarnsMessage(UserWarning, msg):
self.assertEqual(3, len(new_connection.queries))
finally:
BaseDatabaseWrapper.queries_limit = old_queries_limit
new_connection.close()
def test_timezone_none_use_tz_false(self):
connection.ensure_connection()
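# No assertion needed: the regression is simply that init_connection_state()
# must not blow up when TIME_ZONE is None and USE_TZ is False.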
with self.settings(TIME_ZONE=None, USE_TZ=False):
connection.init_connection_state()
# These tests aren't conditional because it would require differentiating
# between MySQL+InnoDB and MySQL+MYISAM (something we currently can't do).
class FkConstraintsTests(TransactionTestCase):
available_apps = ['backends']
def setUp(self):
# Create a Reporter.
self.r = Reporter.objects.create(first_name='John', last_name='Smith')
def test_integrity_checks_on_creation(self):
"""
Try to create a model instance that violates a FK constraint. If it
fails it should fail with IntegrityError.
"""
a1 = Article(headline="This is a test", pub_date=datetime.datetime(2005, 7, 27), reporter_id=30)
try:
a1.save()
except IntegrityError:
pass
else:
self.skipTest("This backend does not support integrity checks.")
# Now that we know this backend supports integrity checks, make sure
# constraints are also enforced for proxy models. Refs #17519.
a2 = Article(
headline='This is another test', reporter=self.r,
pub_date=datetime.datetime(2012, 8, 3),
reporter_proxy_id=30,
)
with self.assertRaises(IntegrityError):
a2.save()
def test_integrity_checks_on_update(self):
"""
Try to update a model instance introducing a FK constraint violation.
If it fails it should fail with IntegrityError.
"""
# Create an Article.
Article.objects.create(headline="Test article", pub_date=datetime.datetime(2010, 9, 4), reporter=self.r)
# Retrieve it from the DB
a1 = Article.objects.get(headline="Test article")
a1.reporter_id = 30
try:
a1.save()
except IntegrityError:
pass
else:
self.skipTest("This backend does not support integrity checks.")
# Now that we know this backend supports integrity checks, make sure
# constraints are also enforced for proxy models. Refs #17519.
# Create another article
r_proxy = ReporterProxy.objects.get(pk=self.r.pk)
Article.objects.create(
headline='Another article',
pub_date=datetime.datetime(1988, 5, 15),
reporter=self.r, reporter_proxy=r_proxy,
)
# Retrieve the second article from the DB
a2 = Article.objects.get(headline='Another article')
a2.reporter_proxy_id = 30
with self.assertRaises(IntegrityError):
a2.save()
def test_disable_constraint_checks_manually(self):
"""
When constraint checks are disabled, it should be possible to write bad
data without IntegrityErrors.
"""
with transaction.atomic():
# Create an Article.
Article.objects.create(
headline="Test article",
pub_date=datetime.datetime(2010, 9, 4),
reporter=self.r,
)
# Retrieve it from the DB
a = Article.objects.get(headline="Test article")
a.reporter_id = 30
try:
connection.disable_constraint_checking()
a.save()
connection.enable_constraint_checking()
except IntegrityError:
self.fail("IntegrityError should not have occurred.")
transaction.set_rollback(True)
def test_disable_constraint_checks_context_manager(self):
"""
When constraint checks are disabled (via the context manager), it should
be possible to write bad data without IntegrityErrors.
"""
with transaction.atomic():
# Create an Article.
Article.objects.create(
headline="Test article",
pub_date=datetime.datetime(2010, 9, 4),
reporter=self.r,
)
# Retrieve it from the DB
a = Article.objects.get(headline="Test article")
a.reporter_id = 30
try:
with connection.constraint_checks_disabled():
a.save()
except IntegrityError:
self.fail("IntegrityError should not have occurred.")
transaction.set_rollback(True)
def test_check_constraints(self):
"""
Constraint checks should raise an IntegrityError when bad data is in the DB.
"""
with transaction.atomic():
# Create an Article.
Article.objects.create(
headline="Test article",
pub_date=datetime.datetime(2010, 9, 4),
reporter=self.r,
)
# Retrieve it from the DB
a = Article.objects.get(headline="Test article")
a.reporter_id = 30
with connection.constraint_checks_disabled():
a.save()
with self.assertRaises(IntegrityError):
connection.check_constraints()
transaction.set_rollback(True)
class ThreadTests(TransactionTestCase):
available_apps = ['backends']
def test_default_connection_thread_local(self):
"""
The default connection (i.e. django.db.connection) is different for
each thread (#17258).
"""
# Map connections by id because connections with identical aliases
# have the same hash.
connections_dict = {}
connection.cursor()
connections_dict[id(connection)] = connection
def runner():
# Passing django.db.connection between threads doesn't work while
# connections[DEFAULT_DB_ALIAS] does.
from django.db import connections
connection = connections[DEFAULT_DB_ALIAS]
# Allow thread sharing so the connection can be closed by the
# main thread.
connection.allow_thread_sharing = True
connection.cursor()
connections_dict[id(connection)] = connection
for x in range(2):
t = threading.Thread(target=runner)
t.start()
t.join()
# Each thread got its own inner connection: one from the main thread plus
# one per runner thread, giving three distinct connections in total.
self.assertEqual(len({conn.connection for conn in connections_dict.values()}), 3)
# Finish by closing the connections opened by the other threads (the
# connection opened in the main thread will automatically be closed on
# teardown).
for conn in connections_dict.values():
if conn is not connection:
conn.close()
def test_connections_thread_local(self):
"""
The connections are different for each thread (#17258).
"""
# Map connections by id because connections with identical aliases
# have the same hash.
connections_dict = {}
for conn in connections.all():
connections_dict[id(conn)] = conn
def runner():
from django.db import connections
for conn in connections.all():
# Allow thread sharing so the connection can be closed by the
# main thread.
conn.allow_thread_sharing = True
connections_dict[id(conn)] = conn
for x in range(2):
t = threading.Thread(target=runner)
t.start()
t.join()
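# One connection object is recorded per alias per thread (main thread plus
# the two runners); the expected total of six implies two configured
# database aliases in the test settings.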
self.assertEqual(len(connections_dict), 6)
# Finish by closing the connections opened by the other threads (the
# connection opened in the main thread will automatically be closed on
# teardown).
for conn in connections_dict.values():
if conn is not connection:
conn.close()
def test_pass_connection_between_threads(self):
"""
A connection can be passed from one thread to the other (#17258).
"""
Person.objects.create(first_name="John", last_name="Doe")
def do_thread():
def runner(main_thread_connection):
from django.db import connections
connections['default'] = main_thread_connection
try:
Person.objects.get(first_name="John", last_name="Doe")
except Exception as e:
exceptions.append(e)
t = threading.Thread(target=runner, args=[connections['default']])
t.start()
t.join()
# Without touching allow_thread_sharing, which should be False by default.
exceptions = []
do_thread()
# Forbidden!
self.assertIsInstance(exceptions[0], DatabaseError)
# If explicitly setting allow_thread_sharing to False
connections['default'].allow_thread_sharing = False
exceptions = []
do_thread()
# Forbidden!
self.assertIsInstance(exceptions[0], DatabaseError)
# If explicitly setting allow_thread_sharing to True
connections['default'].allow_thread_sharing = True
exceptions = []
do_thread()
# All good
self.assertEqual(exceptions, [])
def test_closing_non_shared_connections(self):
"""
A connection that is not explicitly shareable cannot be closed by
another thread (#17258).
"""
# First, without explicitly enabling the connection for sharing.
exceptions = set()
def runner1():
def runner2(other_thread_connection):
try:
other_thread_connection.close()
except DatabaseError as e:
exceptions.add(e)
t2 = threading.Thread(target=runner2, args=[connections['default']])
t2.start()
t2.join()
t1 = threading.Thread(target=runner1)
t1.start()
t1.join()
# The exception was raised
self.assertEqual(len(exceptions), 1)
# Then, with explicitly enabling the connection for sharing.
exceptions = set()
def runner1():
def runner2(other_thread_connection):
try:
other_thread_connection.close()
except DatabaseError as e:
exceptions.add(e)
# Enable thread sharing
connections['default'].allow_thread_sharing = True
t2 = threading.Thread(target=runner2, args=[connections['default']])
t2.start()
t2.join()
t1 = threading.Thread(target=runner1)
t1.start()
t1.join()
# No exception was raised
self.assertEqual(len(exceptions), 0)
class MySQLPKZeroTests(TestCase):
"""
Zero as the id for an AutoField should raise an exception in MySQL,
because MySQL does not allow zero as an autoincrement primary key.
"""
@skipIfDBFeature('allows_auto_pk_0')
def test_zero_as_autoval(self):
with self.assertRaises(ValueError):
Square.objects.create(id=0, root=0, square=1)
class DBConstraintTestCase(TestCase):
def test_can_reference_existent(self):
obj = Object.objects.create()
ref = ObjectReference.objects.create(obj=obj)
self.assertEqual(ref.obj, obj)
ref = ObjectReference.objects.get(obj=obj)
self.assertEqual(ref.obj, obj)
def test_can_reference_non_existent(self):
self.assertFalse(Object.objects.filter(id=12345).exists())
ref = ObjectReference.objects.create(obj_id=12345)
ref_new = ObjectReference.objects.get(obj_id=12345)
self.assertEqual(ref, ref_new)
with self.assertRaises(Object.DoesNotExist):
ref.obj
def test_many_to_many(self):
obj = Object.objects.create()
obj.related_objects.create()
self.assertEqual(Object.objects.count(), 2)
self.assertEqual(obj.related_objects.count(), 1)
intermediary_model = Object._meta.get_field("related_objects").remote_field.through
intermediary_model.objects.create(from_object_id=obj.id, to_object_id=12345)
self.assertEqual(obj.related_objects.count(), 1)
self.assertEqual(intermediary_model.objects.count(), 2)
|
45efe2023eae9abbf9900454117cd8d0e454e2ca755be13603a8d98d4aeed27b | import datetime
import itertools
import unittest
from copy import copy
from unittest import mock
from django.db import (
DatabaseError, IntegrityError, OperationalError, connection,
)
from django.db.models import Model
from django.db.models.deletion import CASCADE, PROTECT
from django.db.models.fields import (
AutoField, BigAutoField, BigIntegerField, BinaryField, BooleanField,
CharField, DateField, DateTimeField, IntegerField, PositiveIntegerField,
SlugField, TextField, TimeField, UUIDField,
)
from django.db.models.fields.related import (
ForeignKey, ForeignObject, ManyToManyField, OneToOneField,
)
from django.db.models.indexes import Index
from django.db.transaction import TransactionManagementError, atomic
from django.test import (
TransactionTestCase, skipIfDBFeature, skipUnlessDBFeature,
)
from django.test.utils import CaptureQueriesContext, isolate_apps
from django.utils import timezone
from .fields import (
CustomManyToManyField, InheritedManyToManyField, MediumBlobField,
)
from .models import (
Author, AuthorCharFieldWithIndex, AuthorTextFieldWithIndex,
AuthorWithDefaultHeight, AuthorWithEvenLongerName, AuthorWithIndexedName,
Book, BookForeignObj, BookWeak, BookWithLongName, BookWithO2O,
BookWithoutAuthor, BookWithSlug, IntegerPK, Node, Note, NoteRename, Tag,
TagIndexed, TagM2MTest, TagUniqueRename, Thing, UniqueTest, new_apps,
)
class SchemaTests(TransactionTestCase):
"""
Tests for the schema-alteration code.
Be aware that these tests are more prone than most to spurious results,
as the code that checks whether a test has worked is sometimes almost as
complex as the code it is testing.
"""
available_apps = []
models = [
Author, AuthorCharFieldWithIndex, AuthorTextFieldWithIndex,
AuthorWithDefaultHeight, AuthorWithEvenLongerName, Book, BookWeak,
BookWithLongName, BookWithO2O, BookWithSlug, IntegerPK, Node, Note,
Tag, TagIndexed, TagM2MTest, TagUniqueRename, Thing, UniqueTest,
]
# Utility functions
def setUp(self):
# local_models should contain test-dependent model classes that will be
# automatically removed from the app cache on test teardown.
self.local_models = []
# isolated_local_models contains models that are in test methods
# decorated with @isolate_apps.
self.isolated_local_models = []
def tearDown(self):
# Delete any tables made for our models
self.delete_tables()
new_apps.clear_cache()
for model in new_apps.get_models():
model._meta._expire_cache()
if 'schema' in new_apps.all_models:
for model in self.local_models:
for many_to_many in model._meta.many_to_many:
through = many_to_many.remote_field.through
if through and through._meta.auto_created:
del new_apps.all_models['schema'][through._meta.model_name]
del new_apps.all_models['schema'][model._meta.model_name]
if self.isolated_local_models:
with connection.schema_editor() as editor:
for model in self.isolated_local_models:
editor.delete_model(model)
def delete_tables(self):
"Deletes all model tables for our models for a clean test environment"
converter = connection.introspection.identifier_converter
with connection.schema_editor() as editor:
connection.disable_constraint_checking()
table_names = connection.introspection.table_names()
for model in itertools.chain(SchemaTests.models, self.local_models):
tbl = converter(model._meta.db_table)
if tbl in table_names:
editor.delete_model(model)
table_names.remove(tbl)
connection.enable_constraint_checking()
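# column_classes() maps each column name to a (guessed field type,
# description row) pair. The description row follows the PEP 249
# cursor.description layout, so index 6 is the null_ok flag that many
# tests below read as columns[name][1][6].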
def column_classes(self, model):
with connection.cursor() as cursor:
columns = {
d[0]: (connection.introspection.get_field_type(d[1], d), d)
for d in connection.introspection.get_table_description(
cursor,
model._meta.db_table,
)
}
# SQLite has a different format for field_type
for name, (type, desc) in columns.items():
if isinstance(type, tuple):
columns[name] = (type[0], desc)
# SQLite also doesn't error properly
if not columns:
raise DatabaseError("Table does not exist (empty pragma)")
return columns
def get_primary_key(self, table):
with connection.cursor() as cursor:
return connection.introspection.get_primary_key_column(cursor, table)
def get_indexes(self, table):
"""
Get the indexes on the table using a new cursor.
"""
with connection.cursor() as cursor:
return [
c['columns'][0]
for c in connection.introspection.get_constraints(cursor, table).values()
if c['index'] and len(c['columns']) == 1
]
def get_constraints(self, table):
"""
Get the constraints on a table using a new cursor.
"""
with connection.cursor() as cursor:
return connection.introspection.get_constraints(cursor, table)
def get_constraints_for_column(self, model, column_name):
constraints = self.get_constraints(model._meta.db_table)
constraints_for_column = []
for name, details in constraints.items():
if details['columns'] == [column_name]:
constraints_for_column.append(name)
return sorted(constraints_for_column)
def check_added_field_default(self, schema_editor, model, field, field_name, expected_default,
cast_function=None):
with connection.cursor() as cursor:
schema_editor.add_field(model, field)
cursor.execute("SELECT {} FROM {};".format(field_name, model._meta.db_table))
database_default = cursor.fetchall()[0][0]
if cast_function and not type(database_default) == type(expected_default):
database_default = cast_function(database_default)
self.assertEqual(database_default, expected_default)
def get_constraints_count(self, table, column, fk_to):
"""
Return a dict with keys 'fks', 'uniques', and 'indexes' indicating the
number of foreign keys, unique constraints, and indexes on
`table`.`column`. The `fk_to` argument is a 2-tuple specifying the
expected foreign key relationship's (table, column).
"""
with connection.cursor() as cursor:
constraints = connection.introspection.get_constraints(cursor, table)
counts = {'fks': 0, 'uniques': 0, 'indexes': 0}
for c in constraints.values():
if c['columns'] == [column]:
if c['foreign_key'] == fk_to:
counts['fks'] += 1
if c['unique']:
counts['uniques'] += 1
elif c['index']:
counts['indexes'] += 1
return counts
def assertIndexOrder(self, table, index, order):
constraints = self.get_constraints(table)
self.assertIn(index, constraints)
index_orders = constraints[index]['orders']
self.assertTrue(all(val == expected for val, expected in zip(index_orders, order)))
def assertForeignKeyExists(self, model, column, expected_fk_table, field='id'):
"""
Fail if the FK constraint on `model.Meta.db_table`.`column` to
`expected_fk_table`.id doesn't exist.
"""
constraints = self.get_constraints(model._meta.db_table)
constraint_fk = None
for details in constraints.values():
if details['columns'] == [column] and details['foreign_key']:
constraint_fk = details['foreign_key']
break
self.assertEqual(constraint_fk, (expected_fk_table, field))
def assertForeignKeyNotExists(self, model, column, expected_fk_table):
with self.assertRaises(AssertionError):
self.assertForeignKeyExists(model, column, expected_fk_table)
# Tests
def test_creation_deletion(self):
"""
Tries creating a model's table, and then deleting it.
"""
with connection.schema_editor() as editor:
# Create the table
editor.create_model(Author)
# The table is there
list(Author.objects.all())
# Clean up that table
editor.delete_model(Author)
# No deferred SQL should be left over.
self.assertEqual(editor.deferred_sql, [])
# The table is gone
with self.assertRaises(DatabaseError):
list(Author.objects.all())
@skipUnlessDBFeature('supports_foreign_keys')
def test_fk(self):
"Creating tables out of FK order, then repointing, works"
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Book)
editor.create_model(Author)
editor.create_model(Tag)
# Initial tables are there
list(Author.objects.all())
list(Book.objects.all())
# Make sure the FK constraint is present
with self.assertRaises(IntegrityError):
Book.objects.create(
author_id=1,
title="Much Ado About Foreign Keys",
pub_date=datetime.datetime.now(),
)
# Repoint the FK constraint
old_field = Book._meta.get_field("author")
new_field = ForeignKey(Tag, CASCADE)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(Book, old_field, new_field, strict=True)
self.assertForeignKeyExists(Book, 'author_id', 'schema_tag')
@skipUnlessDBFeature('supports_foreign_keys')
def test_char_field_with_db_index_to_fk(self):
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(AuthorCharFieldWithIndex)
# Change CharField to FK
old_field = AuthorCharFieldWithIndex._meta.get_field('char_field')
new_field = ForeignKey(Author, CASCADE, blank=True)
new_field.set_attributes_from_name('char_field')
with connection.schema_editor() as editor:
editor.alter_field(AuthorCharFieldWithIndex, old_field, new_field, strict=True)
self.assertForeignKeyExists(AuthorCharFieldWithIndex, 'char_field_id', 'schema_author')
@skipUnlessDBFeature('supports_foreign_keys')
@skipUnlessDBFeature('supports_index_on_text_field')
def test_text_field_with_db_index_to_fk(self):
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(AuthorTextFieldWithIndex)
# Change TextField to FK
old_field = AuthorTextFieldWithIndex._meta.get_field('text_field')
new_field = ForeignKey(Author, CASCADE, blank=True)
new_field.set_attributes_from_name('text_field')
with connection.schema_editor() as editor:
editor.alter_field(AuthorTextFieldWithIndex, old_field, new_field, strict=True)
self.assertForeignKeyExists(AuthorTextFieldWithIndex, 'text_field_id', 'schema_author')
@skipUnlessDBFeature('supports_foreign_keys')
def test_fk_to_proxy(self):
"Creating a FK to a proxy model creates database constraints."
class AuthorProxy(Author):
class Meta:
app_label = 'schema'
apps = new_apps
proxy = True
class AuthorRef(Model):
author = ForeignKey(AuthorProxy, on_delete=CASCADE)
class Meta:
app_label = 'schema'
apps = new_apps
self.local_models = [AuthorProxy, AuthorRef]
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(AuthorRef)
self.assertForeignKeyExists(AuthorRef, 'author_id', 'schema_author')
@skipUnlessDBFeature('supports_foreign_keys')
def test_fk_db_constraint(self):
"The db_constraint parameter is respected"
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Tag)
editor.create_model(Author)
editor.create_model(BookWeak)
# Initial tables are there
list(Author.objects.all())
list(Tag.objects.all())
list(BookWeak.objects.all())
self.assertForeignKeyNotExists(BookWeak, 'author_id', 'schema_author')
# Make a db_constraint=False FK
new_field = ForeignKey(Tag, CASCADE, db_constraint=False)
new_field.set_attributes_from_name("tag")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
self.assertForeignKeyNotExists(Author, 'tag_id', 'schema_tag')
# Alter to one with a constraint
new_field2 = ForeignKey(Tag, CASCADE)
new_field2.set_attributes_from_name("tag")
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, new_field2, strict=True)
self.assertForeignKeyExists(Author, 'tag_id', 'schema_tag')
# Alter to one without a constraint again
new_field2 = ForeignKey(Tag, CASCADE)
new_field2.set_attributes_from_name("tag")
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field2, new_field, strict=True)
self.assertForeignKeyNotExists(Author, 'tag_id', 'schema_tag')
@isolate_apps('schema')
def test_no_db_constraint_added_during_primary_key_change(self):
"""
When a primary key that's pointed to by a ForeignKey with
db_constraint=False is altered, a foreign key constraint isn't added.
"""
class Author(Model):
class Meta:
app_label = 'schema'
class BookWeak(Model):
author = ForeignKey(Author, CASCADE, db_constraint=False)
class Meta:
app_label = 'schema'
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWeak)
self.assertForeignKeyNotExists(BookWeak, 'author_id', 'schema_author')
old_field = Author._meta.get_field('id')
new_field = BigAutoField(primary_key=True)
new_field.model = Author
new_field.set_attributes_from_name('id')
# @isolate_apps() and inner models are needed to have the model
# relations populated, otherwise this doesn't act as a regression test.
self.assertEqual(len(new_field.model._meta.related_objects), 1)
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
self.assertForeignKeyNotExists(BookWeak, 'author_id', 'schema_author')
def _test_m2m_db_constraint(self, M2MFieldClass):
class LocalAuthorWithM2M(Model):
name = CharField(max_length=255)
class Meta:
app_label = 'schema'
apps = new_apps
self.local_models = [LocalAuthorWithM2M]
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Tag)
editor.create_model(LocalAuthorWithM2M)
# Initial tables are there
list(LocalAuthorWithM2M.objects.all())
list(Tag.objects.all())
# Make a db_constraint=False FK
new_field = M2MFieldClass(Tag, related_name="authors", db_constraint=False)
new_field.contribute_to_class(LocalAuthorWithM2M, "tags")
# Add the field
with connection.schema_editor() as editor:
editor.add_field(LocalAuthorWithM2M, new_field)
self.assertForeignKeyNotExists(new_field.remote_field.through, 'tag_id', 'schema_tag')
@skipUnlessDBFeature('supports_foreign_keys')
def test_m2m_db_constraint(self):
self._test_m2m_db_constraint(ManyToManyField)
@skipUnlessDBFeature('supports_foreign_keys')
def test_m2m_db_constraint_custom(self):
self._test_m2m_db_constraint(CustomManyToManyField)
@skipUnlessDBFeature('supports_foreign_keys')
def test_m2m_db_constraint_inherited(self):
self._test_m2m_db_constraint(InheritedManyToManyField)
def test_add_field(self):
"""
Tests adding fields to models
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure there's no age field
columns = self.column_classes(Author)
self.assertNotIn("age", columns)
# Add the new field
new_field = IntegerField(null=True)
new_field.set_attributes_from_name("age")
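# Adding a nullable field needs no temporary database default, so the
# captured SQL should never contain the backend's "no default" alteration
# (sql_alter_column_no_default) checked for below.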
with CaptureQueriesContext(connection) as ctx, connection.schema_editor() as editor:
editor.add_field(Author, new_field)
drop_default_sql = editor.sql_alter_column_no_default % {
'column': editor.quote_name(new_field.name),
}
self.assertFalse(any(drop_default_sql in query['sql'] for query in ctx.captured_queries))
# Ensure the field is right afterwards
columns = self.column_classes(Author)
self.assertEqual(columns['age'][0], "IntegerField")
self.assertEqual(columns['age'][1][6], True)
def test_add_field_remove_field(self):
"""
Adding a field and removing it removes all deferred sql referring to it.
"""
with connection.schema_editor() as editor:
# Create a table with a unique constraint on the slug field.
editor.create_model(Tag)
# Remove the slug column.
editor.remove_field(Tag, Tag._meta.get_field('slug'))
self.assertEqual(editor.deferred_sql, [])
def test_add_field_temp_default(self):
"""
Tests adding fields to models with a temporary default
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure there's no age field
columns = self.column_classes(Author)
self.assertNotIn("age", columns)
# Add some rows of data
Author.objects.create(name="Andrew", height=30)
Author.objects.create(name="Andrea")
# Add a not-null field
new_field = CharField(max_length=30, default="Godwin")
new_field.set_attributes_from_name("surname")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
# Ensure the field is right afterwards
columns = self.column_classes(Author)
self.assertEqual(columns['surname'][0], "CharField")
self.assertEqual(columns['surname'][1][6],
connection.features.interprets_empty_strings_as_nulls)
def test_add_field_temp_default_boolean(self):
"""
Tests adding fields to models with a temporary default where
the default is False. (#21783)
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure there's no age field
columns = self.column_classes(Author)
self.assertNotIn("age", columns)
# Add some rows of data
Author.objects.create(name="Andrew", height=30)
Author.objects.create(name="Andrea")
# Add a not-null field
new_field = BooleanField(default=False)
new_field.set_attributes_from_name("awesome")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
# Ensure the field is right afterwards
columns = self.column_classes(Author)
# BooleanField are stored as TINYINT(1) on MySQL.
field_type = columns['awesome'][0]
self.assertEqual(field_type, connection.features.introspected_boolean_field_type)
def test_add_field_default_transform(self):
"""
Tests adding fields to models with a default that is not directly
valid in the database (#22581)
"""
class TestTransformField(IntegerField):
# Weird field that saves the count of items in its value
def get_default(self):
return self.default
def get_prep_value(self, value):
if value is None:
return 0
return len(value)
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Add some rows of data
Author.objects.create(name="Andrew", height=30)
Author.objects.create(name="Andrea")
# Add the field with a default it needs to cast (to string in this case)
new_field = TestTransformField(default={1: 2})
new_field.set_attributes_from_name("thing")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
# Ensure the field is there
columns = self.column_classes(Author)
field_type, field_info = columns['thing']
self.assertEqual(field_type, 'IntegerField')
# Make sure the values were transformed correctly
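# The default {1: 2} has length 1, so get_prep_value() stores 1 for the
# two pre-existing rows; both should therefore match "thing = 1".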
self.assertEqual(Author.objects.extra(where=["thing = 1"]).count(), 2)
def test_add_field_binary(self):
"""
Tests binary fields get a sane default (#22851)
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Add the new field
new_field = BinaryField(blank=True)
new_field.set_attributes_from_name("bits")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
# Ensure the field is right afterwards
columns = self.column_classes(Author)
# MySQL annoyingly reports BLOB and TEXT columns with the same type code,
# so the introspected field can come back as either of these two types.
self.assertIn(columns['bits'][0], ("BinaryField", "TextField"))
@unittest.skipUnless(connection.vendor == 'mysql', "MySQL specific")
def test_add_binaryfield_mediumblob(self):
"""
Test adding a custom-sized binary field on MySQL (#24846).
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Add the new field with default
new_field = MediumBlobField(blank=True, default=b'123')
new_field.set_attributes_from_name('bits')
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
columns = self.column_classes(Author)
# Introspection treats BLOBs as TextFields
self.assertEqual(columns['bits'][0], "TextField")
def test_alter(self):
"""
Tests simple altering of fields
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure the field is right to begin with
columns = self.column_classes(Author)
self.assertEqual(columns['name'][0], "CharField")
self.assertEqual(bool(columns['name'][1][6]), bool(connection.features.interprets_empty_strings_as_nulls))
# Alter the name field to a TextField
old_field = Author._meta.get_field("name")
new_field = TextField(null=True)
new_field.set_attributes_from_name("name")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
# Ensure the field is right afterwards
columns = self.column_classes(Author)
self.assertEqual(columns['name'][0], "TextField")
self.assertEqual(columns['name'][1][6], True)
# Change nullability again
new_field2 = TextField(null=False)
new_field2.set_attributes_from_name("name")
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, new_field2, strict=True)
# Ensure the field is right afterwards
columns = self.column_classes(Author)
self.assertEqual(columns['name'][0], "TextField")
self.assertEqual(bool(columns['name'][1][6]), bool(connection.features.interprets_empty_strings_as_nulls))
def test_alter_auto_field_to_integer_field(self):
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Change AutoField to IntegerField
old_field = Author._meta.get_field('id')
new_field = IntegerField(primary_key=True)
new_field.set_attributes_from_name('id')
new_field.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
def test_alter_auto_field_to_char_field(self):
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Change AutoField to CharField
old_field = Author._meta.get_field('id')
new_field = CharField(primary_key=True, max_length=50)
new_field.set_attributes_from_name('id')
new_field.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
def test_alter_not_unique_field_to_primary_key(self):
# Create the table.
with connection.schema_editor() as editor:
editor.create_model(Author)
# Change UUIDField to primary key.
old_field = Author._meta.get_field('uuid')
new_field = UUIDField(primary_key=True)
new_field.set_attributes_from_name('uuid')
new_field.model = Author
with connection.schema_editor() as editor:
editor.remove_field(Author, Author._meta.get_field('id'))
editor.alter_field(Author, old_field, new_field, strict=True)
def test_alter_text_field(self):
# Regression for "BLOB/TEXT column 'info' can't have a default value")
# on MySQL.
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Note)
old_field = Note._meta.get_field("info")
new_field = TextField(blank=True)
new_field.set_attributes_from_name("info")
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
@skipUnlessDBFeature('can_defer_constraint_checks', 'can_rollback_ddl')
def test_alter_fk_checks_deferred_constraints(self):
"""
#25492 - Altering a foreign key's structure and data in the same
transaction.
"""
with connection.schema_editor() as editor:
editor.create_model(Node)
old_field = Node._meta.get_field('parent')
new_field = ForeignKey(Node, CASCADE)
new_field.set_attributes_from_name('parent')
parent = Node.objects.create()
with connection.schema_editor() as editor:
# Update the parent FK to create a deferred constraint check.
Node.objects.update(parent=parent)
editor.alter_field(Node, old_field, new_field, strict=True)
def test_alter_text_field_to_date_field(self):
"""
#25002 - Test conversion of text field to date field.
"""
with connection.schema_editor() as editor:
editor.create_model(Note)
Note.objects.create(info='1988-05-05')
old_field = Note._meta.get_field('info')
new_field = DateField(blank=True)
new_field.set_attributes_from_name('info')
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
# Make sure the field isn't nullable
columns = self.column_classes(Note)
self.assertFalse(columns['info'][1][6])
def test_alter_text_field_to_datetime_field(self):
"""
#25002 - Test conversion of text field to datetime field.
"""
with connection.schema_editor() as editor:
editor.create_model(Note)
Note.objects.create(info='1988-05-05 3:16:17.4567')
old_field = Note._meta.get_field('info')
new_field = DateTimeField(blank=True)
new_field.set_attributes_from_name('info')
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
# Make sure the field isn't nullable
columns = self.column_classes(Note)
self.assertFalse(columns['info'][1][6])
def test_alter_text_field_to_time_field(self):
"""
#25002 - Test conversion of text field to time field.
"""
with connection.schema_editor() as editor:
editor.create_model(Note)
Note.objects.create(info='3:16:17.4567')
old_field = Note._meta.get_field('info')
new_field = TimeField(blank=True)
new_field.set_attributes_from_name('info')
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
# Make sure the field isn't nullable
columns = self.column_classes(Note)
self.assertFalse(columns['info'][1][6])
@skipIfDBFeature('interprets_empty_strings_as_nulls')
def test_alter_textual_field_keep_null_status(self):
"""
Changing a field type shouldn't affect the not null status.
"""
with connection.schema_editor() as editor:
editor.create_model(Note)
with self.assertRaises(IntegrityError):
Note.objects.create(info=None)
old_field = Note._meta.get_field("info")
new_field = CharField(max_length=50)
new_field.set_attributes_from_name("info")
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
with self.assertRaises(IntegrityError):
Note.objects.create(info=None)
def test_alter_numeric_field_keep_null_status(self):
"""
Changing a field type shouldn't affect the not null status.
"""
with connection.schema_editor() as editor:
editor.create_model(UniqueTest)
with self.assertRaises(IntegrityError):
UniqueTest.objects.create(year=None, slug='aaa')
old_field = UniqueTest._meta.get_field("year")
new_field = BigIntegerField()
new_field.set_attributes_from_name("year")
with connection.schema_editor() as editor:
editor.alter_field(UniqueTest, old_field, new_field, strict=True)
with self.assertRaises(IntegrityError):
UniqueTest.objects.create(year=None, slug='bbb')
def test_alter_null_to_not_null(self):
"""
#23609 - Tests handling of default values when altering from NULL to NOT NULL.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure the field is right to begin with
columns = self.column_classes(Author)
self.assertTrue(columns['height'][1][6])
# Create some test data
Author.objects.create(name='Not null author', height=12)
Author.objects.create(name='Null author')
# Verify null value
self.assertEqual(Author.objects.get(name='Not null author').height, 12)
self.assertIsNone(Author.objects.get(name='Null author').height)
# Alter the height field to NOT NULL with default
old_field = Author._meta.get_field("height")
new_field = PositiveIntegerField(default=42)
new_field.set_attributes_from_name("height")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
# Ensure the field is right afterwards
columns = self.column_classes(Author)
self.assertFalse(columns['height'][1][6])
# Verify default value
self.assertEqual(Author.objects.get(name='Not null author').height, 12)
self.assertEqual(Author.objects.get(name='Null author').height, 42)
def test_alter_charfield_to_null(self):
"""
#24307 - Should skip an alter statement on databases with
interprets_empty_strings_as_nulls when changing a CharField to null.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Change the CharField to null
old_field = Author._meta.get_field('name')
new_field = copy(old_field)
new_field.null = True
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
def test_alter_textfield_to_null(self):
"""
#24307 - Should skip an alter statement on databases with
interprets_empty_strings_as_nulls when changing a TextField to null.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Note)
# Change the TextField to null
old_field = Note._meta.get_field('info')
new_field = copy(old_field)
new_field.null = True
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
@skipUnlessDBFeature('supports_combined_alters')
def test_alter_null_to_not_null_keeping_default(self):
"""
#23738 - Can change a nullable field with default to non-nullable
with the same default.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(AuthorWithDefaultHeight)
# Ensure the field is right to begin with
columns = self.column_classes(AuthorWithDefaultHeight)
self.assertTrue(columns['height'][1][6])
# Alter the height field to NOT NULL keeping the previous default
old_field = AuthorWithDefaultHeight._meta.get_field("height")
new_field = PositiveIntegerField(default=42)
new_field.set_attributes_from_name("height")
with connection.schema_editor() as editor:
editor.alter_field(AuthorWithDefaultHeight, old_field, new_field, strict=True)
# Ensure the field is right afterwards
columns = self.column_classes(AuthorWithDefaultHeight)
self.assertFalse(columns['height'][1][6])
@skipUnlessDBFeature('supports_foreign_keys')
def test_alter_fk(self):
"""
Tests altering of FKs
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
# Ensure the field is right to begin with
columns = self.column_classes(Book)
self.assertEqual(columns['author_id'][0], "IntegerField")
self.assertForeignKeyExists(Book, 'author_id', 'schema_author')
# Alter the FK
old_field = Book._meta.get_field("author")
new_field = ForeignKey(Author, CASCADE, editable=False)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(Book, old_field, new_field, strict=True)
# Ensure the field is right afterwards
columns = self.column_classes(Book)
self.assertEqual(columns['author_id'][0], "IntegerField")
self.assertForeignKeyExists(Book, 'author_id', 'schema_author')
@skipUnlessDBFeature('supports_foreign_keys')
def test_alter_to_fk(self):
"""
#24447 - Tests adding a FK constraint for an existing column
"""
class LocalBook(Model):
author = IntegerField()
title = CharField(max_length=100, db_index=True)
pub_date = DateTimeField()
class Meta:
app_label = 'schema'
apps = new_apps
self.local_models = [LocalBook]
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(LocalBook)
# Ensure no FK constraint exists
constraints = self.get_constraints(LocalBook._meta.db_table)
for details in constraints.values():
if details['foreign_key']:
self.fail('Found an unexpected FK constraint to %s' % details['columns'])
old_field = LocalBook._meta.get_field("author")
new_field = ForeignKey(Author, CASCADE)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(LocalBook, old_field, new_field, strict=True)
self.assertForeignKeyExists(LocalBook, 'author_id', 'schema_author')
@skipUnlessDBFeature('supports_foreign_keys')
def test_alter_o2o_to_fk(self):
"""
#24163 - Tests altering of OneToOneField to ForeignKey
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWithO2O)
# Ensure the field is right to begin with
columns = self.column_classes(BookWithO2O)
self.assertEqual(columns['author_id'][0], "IntegerField")
# Ensure the field is unique
author = Author.objects.create(name="Joe")
BookWithO2O.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now())
with self.assertRaises(IntegrityError):
BookWithO2O.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now())
BookWithO2O.objects.all().delete()
self.assertForeignKeyExists(BookWithO2O, 'author_id', 'schema_author')
# Alter the OneToOneField to ForeignKey
old_field = BookWithO2O._meta.get_field("author")
new_field = ForeignKey(Author, CASCADE)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(BookWithO2O, old_field, new_field, strict=True)
# Ensure the field is right afterwards
columns = self.column_classes(Book)
self.assertEqual(columns['author_id'][0], "IntegerField")
# Ensure the field is not unique anymore
Book.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now())
Book.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now())
self.assertForeignKeyExists(Book, 'author_id', 'schema_author')
@skipUnlessDBFeature('supports_foreign_keys')
def test_alter_fk_to_o2o(self):
"""
#24163 - Tests altering of ForeignKey to OneToOneField
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
# Ensure the field is right to begin with
columns = self.column_classes(Book)
self.assertEqual(columns['author_id'][0], "IntegerField")
# Ensure the field is not unique
author = Author.objects.create(name="Joe")
Book.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now())
Book.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now())
Book.objects.all().delete()
self.assertForeignKeyExists(Book, 'author_id', 'schema_author')
# Alter the ForeignKey to OneToOneField
old_field = Book._meta.get_field("author")
new_field = OneToOneField(Author, CASCADE)
new_field.set_attributes_from_name("author")
with connection.schema_editor() as editor:
editor.alter_field(Book, old_field, new_field, strict=True)
# Ensure the field is right afterwards
columns = self.column_classes(BookWithO2O)
self.assertEqual(columns['author_id'][0], "IntegerField")
# Ensure the field is unique now
BookWithO2O.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now())
with self.assertRaises(IntegrityError):
BookWithO2O.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now())
self.assertForeignKeyExists(BookWithO2O, 'author_id', 'schema_author')
def test_alter_field_fk_to_o2o(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
expected_fks = 1 if connection.features.supports_foreign_keys else 0
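# Backends without foreign key support create no FK constraints, so
# none are expected there.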
# Check the index is right to begin with.
counts = self.get_constraints_count(
Book._meta.db_table,
Book._meta.get_field('author').column,
(Author._meta.db_table, Author._meta.pk.column),
)
self.assertEqual(counts, {'fks': expected_fks, 'uniques': 0, 'indexes': 1})
old_field = Book._meta.get_field('author')
new_field = OneToOneField(Author, CASCADE)
new_field.set_attributes_from_name('author')
with connection.schema_editor() as editor:
editor.alter_field(Book, old_field, new_field, strict=True)
counts = self.get_constraints_count(
Book._meta.db_table,
Book._meta.get_field('author').column,
(Author._meta.db_table, Author._meta.pk.column),
)
# The index on ForeignKey is replaced with a unique constraint for OneToOneField.
self.assertEqual(counts, {'fks': expected_fks, 'uniques': 1, 'indexes': 0})
def test_alter_field_fk_keeps_index(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
expected_fks = 1 if connection.features.supports_foreign_keys else 0
# Check the index is right to begin with.
counts = self.get_constraints_count(
Book._meta.db_table,
Book._meta.get_field('author').column,
(Author._meta.db_table, Author._meta.pk.column),
)
self.assertEqual(counts, {'fks': expected_fks, 'uniques': 0, 'indexes': 1})
old_field = Book._meta.get_field('author')
# on_delete changed from CASCADE.
new_field = ForeignKey(Author, PROTECT)
new_field.set_attributes_from_name('author')
with connection.schema_editor() as editor:
editor.alter_field(Book, old_field, new_field, strict=True)
counts = self.get_constraints_count(
Book._meta.db_table,
Book._meta.get_field('author').column,
(Author._meta.db_table, Author._meta.pk.column),
)
# The index remains.
self.assertEqual(counts, {'fks': expected_fks, 'uniques': 0, 'indexes': 1})
def test_alter_field_o2o_to_fk(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWithO2O)
expected_fks = 1 if connection.features.supports_foreign_keys else 0
# Check the unique constraint is right to begin with.
counts = self.get_constraints_count(
BookWithO2O._meta.db_table,
BookWithO2O._meta.get_field('author').column,
(Author._meta.db_table, Author._meta.pk.column),
)
self.assertEqual(counts, {'fks': expected_fks, 'uniques': 1, 'indexes': 0})
old_field = BookWithO2O._meta.get_field('author')
new_field = ForeignKey(Author, CASCADE)
new_field.set_attributes_from_name('author')
with connection.schema_editor() as editor:
editor.alter_field(BookWithO2O, old_field, new_field, strict=True)
counts = self.get_constraints_count(
BookWithO2O._meta.db_table,
BookWithO2O._meta.get_field('author').column,
(Author._meta.db_table, Author._meta.pk.column),
)
# The unique constraint on OneToOneField is replaced with an index for ForeignKey.
self.assertEqual(counts, {'fks': expected_fks, 'uniques': 0, 'indexes': 1})
def test_alter_field_o2o_keeps_unique(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWithO2O)
expected_fks = 1 if connection.features.supports_foreign_keys else 0
# Check the unique constraint is right to begin with.
counts = self.get_constraints_count(
BookWithO2O._meta.db_table,
BookWithO2O._meta.get_field('author').column,
(Author._meta.db_table, Author._meta.pk.column),
)
self.assertEqual(counts, {'fks': expected_fks, 'uniques': 1, 'indexes': 0})
old_field = BookWithO2O._meta.get_field('author')
# on_delete changed from CASCADE.
new_field = OneToOneField(Author, PROTECT)
new_field.set_attributes_from_name('author')
with connection.schema_editor() as editor:
editor.alter_field(BookWithO2O, old_field, new_field, strict=True)
counts = self.get_constraints_count(
BookWithO2O._meta.db_table,
BookWithO2O._meta.get_field('author').column,
(Author._meta.db_table, Author._meta.pk.column),
)
# The unique constraint remains.
self.assertEqual(counts, {'fks': expected_fks, 'uniques': 1, 'indexes': 0})
def test_alter_db_table_case(self):
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Alter the case of the table
old_table_name = Author._meta.db_table
with connection.schema_editor() as editor:
editor.alter_db_table(Author, old_table_name, old_table_name.upper())
def test_alter_implicit_id_to_explicit(self):
"""
Should be able to convert an implicit "id" field to an explicit "id"
primary key field.
"""
with connection.schema_editor() as editor:
editor.create_model(Author)
old_field = Author._meta.get_field("id")
new_field = AutoField(primary_key=True)
new_field.set_attributes_from_name("id")
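# The field isn't contributed to the class, so set .model manually
# (contribute_to_class() would normally do this).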
new_field.model = Author
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
# This will fail if DROP DEFAULT is inadvertently executed on this
# field which drops the id sequence, at least on PostgreSQL.
Author.objects.create(name='Foo')
Author.objects.create(name='Bar')
def test_alter_int_pk_to_autofield_pk(self):
"""
Should be able to alter an IntegerField(primary_key=True) to
AutoField(primary_key=True).
"""
with connection.schema_editor() as editor:
editor.create_model(IntegerPK)
old_field = IntegerPK._meta.get_field('i')
new_field = AutoField(primary_key=True)
new_field.model = IntegerPK
new_field.set_attributes_from_name('i')
with connection.schema_editor() as editor:
editor.alter_field(IntegerPK, old_field, new_field, strict=True)
def test_alter_int_pk_to_bigautofield_pk(self):
"""
Should be able to alter an IntegerField(primary_key=True) to
BigAutoField(primary_key=True).
"""
with connection.schema_editor() as editor:
editor.create_model(IntegerPK)
old_field = IntegerPK._meta.get_field('i')
new_field = BigAutoField(primary_key=True)
new_field.model = IntegerPK
new_field.set_attributes_from_name('i')
with connection.schema_editor() as editor:
editor.alter_field(IntegerPK, old_field, new_field, strict=True)
def test_alter_int_pk_to_int_unique(self):
"""
Should be able to alter an IntegerField(primary_key=True) to
IntegerField(unique=True).
"""
with connection.schema_editor() as editor:
editor.create_model(IntegerPK)
# Delete the old PK
old_field = IntegerPK._meta.get_field('i')
new_field = IntegerField(unique=True)
new_field.model = IntegerPK
new_field.set_attributes_from_name('i')
with connection.schema_editor() as editor:
editor.alter_field(IntegerPK, old_field, new_field, strict=True)
# The primary key constraint is gone. Result depends on database:
# 'id' for SQLite, None for others (must not be 'i').
self.assertIn(self.get_primary_key(IntegerPK._meta.db_table), ('id', None))
# Set up a model class as it currently stands. The original IntegerPK
# class is now out of date and some backends make use of the whole
# model class when modifying a field (such as sqlite3 when remaking a
# table) so an outdated model class leads to incorrect results.
class Transitional(Model):
i = IntegerField(unique=True)
j = IntegerField(unique=True)
class Meta:
app_label = 'schema'
apps = new_apps
db_table = 'INTEGERPK'
# model requires a new PK
old_field = Transitional._meta.get_field('j')
new_field = IntegerField(primary_key=True)
new_field.model = Transitional
new_field.set_attributes_from_name('j')
with connection.schema_editor() as editor:
editor.alter_field(Transitional, old_field, new_field, strict=True)
# Create a model class representing the updated model.
class IntegerUnique(Model):
i = IntegerField(unique=True)
j = IntegerField(primary_key=True)
class Meta:
app_label = 'schema'
apps = new_apps
db_table = 'INTEGERPK'
# Ensure unique constraint works.
IntegerUnique.objects.create(i=1, j=1)
with self.assertRaises(IntegrityError):
IntegerUnique.objects.create(i=1, j=2)
def test_rename(self):
"""
Tests simple renaming of fields
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure the field is right to begin with
columns = self.column_classes(Author)
self.assertEqual(columns['name'][0], "CharField")
self.assertNotIn("display_name", columns)
# Alter the name field's name
old_field = Author._meta.get_field("name")
new_field = CharField(max_length=254)
new_field.set_attributes_from_name("display_name")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
# Ensure the field is right afterwards
columns = self.column_classes(Author)
self.assertEqual(columns['display_name'][0], "CharField")
self.assertNotIn("name", columns)
@isolate_apps('schema')
def test_rename_referenced_field(self):
class Author(Model):
name = CharField(max_length=255, unique=True)
class Meta:
app_label = 'schema'
class Book(Model):
author = ForeignKey(Author, CASCADE, to_field='name')
class Meta:
app_label = 'schema'
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
new_field = CharField(max_length=255, unique=True)
new_field.set_attributes_from_name('renamed')
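# Renaming a column that another table references can't be done
# atomically on every backend (e.g. SQLite), hence the feature flag.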
with connection.schema_editor(atomic=connection.features.supports_atomic_references_rename) as editor:
editor.alter_field(Author, Author._meta.get_field('name'), new_field)
# Ensure the foreign key reference was updated.
self.assertForeignKeyExists(Book, 'author_id', 'schema_author', 'renamed')
@skipIfDBFeature('interprets_empty_strings_as_nulls')
def test_rename_keep_null_status(self):
"""
Renaming a field shouldn't affect its NOT NULL status.
"""
with connection.schema_editor() as editor:
editor.create_model(Note)
with self.assertRaises(IntegrityError):
Note.objects.create(info=None)
old_field = Note._meta.get_field("info")
new_field = TextField()
new_field.set_attributes_from_name("detail_info")
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
columns = self.column_classes(Note)
self.assertEqual(columns['detail_info'][0], "TextField")
self.assertNotIn("info", columns)
with self.assertRaises(IntegrityError):
NoteRename.objects.create(detail_info=None)
def _test_m2m_create(self, M2MFieldClass):
"""
Tests M2M fields on models during creation
"""
class LocalBookWithM2M(Model):
author = ForeignKey(Author, CASCADE)
title = CharField(max_length=100, db_index=True)
pub_date = DateTimeField()
tags = M2MFieldClass("TagM2MTest", related_name="books")
class Meta:
app_label = 'schema'
apps = new_apps
self.local_models = [LocalBookWithM2M]
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(TagM2MTest)
editor.create_model(LocalBookWithM2M)
# Ensure there is now an m2m table there
columns = self.column_classes(LocalBookWithM2M._meta.get_field("tags").remote_field.through)
self.assertEqual(columns['tagm2mtest_id'][0], "IntegerField")
def test_m2m_create(self):
self._test_m2m_create(ManyToManyField)
def test_m2m_create_custom(self):
self._test_m2m_create(CustomManyToManyField)
def test_m2m_create_inherited(self):
self._test_m2m_create(InheritedManyToManyField)
def _test_m2m_create_through(self, M2MFieldClass):
"""
Tests M2M fields on models during creation with through models
"""
class LocalTagThrough(Model):
book = ForeignKey("schema.LocalBookWithM2MThrough", CASCADE)
tag = ForeignKey("schema.TagM2MTest", CASCADE)
class Meta:
app_label = 'schema'
apps = new_apps
class LocalBookWithM2MThrough(Model):
tags = M2MFieldClass("TagM2MTest", related_name="books", through=LocalTagThrough)
class Meta:
app_label = 'schema'
apps = new_apps
self.local_models = [LocalTagThrough, LocalBookWithM2MThrough]
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(LocalTagThrough)
editor.create_model(TagM2MTest)
editor.create_model(LocalBookWithM2MThrough)
# Ensure there is now an m2m table there
columns = self.column_classes(LocalTagThrough)
self.assertEqual(columns['book_id'][0], "IntegerField")
self.assertEqual(columns['tag_id'][0], "IntegerField")
def test_m2m_create_through(self):
self._test_m2m_create_through(ManyToManyField)
def test_m2m_create_through_custom(self):
self._test_m2m_create_through(CustomManyToManyField)
def test_m2m_create_through_inherited(self):
self._test_m2m_create_through(InheritedManyToManyField)
def _test_m2m(self, M2MFieldClass):
"""
Tests adding/removing M2M fields on models
"""
class LocalAuthorWithM2M(Model):
name = CharField(max_length=255)
class Meta:
app_label = 'schema'
apps = new_apps
self.local_models = [LocalAuthorWithM2M]
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(LocalAuthorWithM2M)
editor.create_model(TagM2MTest)
# Create an M2M field
new_field = M2MFieldClass("schema.TagM2MTest", related_name="authors")
new_field.contribute_to_class(LocalAuthorWithM2M, "tags")
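# contribute_to_class() registers the M2M on the model and builds the
# implicit through model, but creates no tables yet.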
# Ensure there's no m2m table there
with self.assertRaises(DatabaseError):
self.column_classes(new_field.remote_field.through)
# Add the field
with connection.schema_editor() as editor:
editor.add_field(LocalAuthorWithM2M, new_field)
# Ensure there is now an m2m table there
columns = self.column_classes(new_field.remote_field.through)
self.assertEqual(columns['tagm2mtest_id'][0], "IntegerField")
# "Alter" the field. This should not rename the DB table to itself.
with connection.schema_editor() as editor:
editor.alter_field(LocalAuthorWithM2M, new_field, new_field, strict=True)
# Remove the M2M table again
with connection.schema_editor() as editor:
editor.remove_field(LocalAuthorWithM2M, new_field)
# Ensure there's no m2m table there
with self.assertRaises(DatabaseError):
self.column_classes(new_field.remote_field.through)
# Make sure the model state is coherent with the table one now that
# we've removed the tags field.
opts = LocalAuthorWithM2M._meta
opts.local_many_to_many.remove(new_field)
del new_apps.all_models['schema'][new_field.remote_field.through._meta.model_name]
opts._expire_cache()
def test_m2m(self):
self._test_m2m(ManyToManyField)
def test_m2m_custom(self):
self._test_m2m(CustomManyToManyField)
def test_m2m_inherited(self):
self._test_m2m(InheritedManyToManyField)
def _test_m2m_through_alter(self, M2MFieldClass):
"""
Tests altering M2Ms with explicit through models (should no-op)
"""
class LocalAuthorTag(Model):
author = ForeignKey("schema.LocalAuthorWithM2MThrough", CASCADE)
tag = ForeignKey("schema.TagM2MTest", CASCADE)
class Meta:
app_label = 'schema'
apps = new_apps
class LocalAuthorWithM2MThrough(Model):
name = CharField(max_length=255)
tags = M2MFieldClass("schema.TagM2MTest", related_name="authors", through=LocalAuthorTag)
class Meta:
app_label = 'schema'
apps = new_apps
self.local_models = [LocalAuthorTag, LocalAuthorWithM2MThrough]
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(LocalAuthorTag)
editor.create_model(LocalAuthorWithM2MThrough)
editor.create_model(TagM2MTest)
# Ensure the m2m table is there
self.assertEqual(len(self.column_classes(LocalAuthorTag)), 3)
# "Alter" the field's blankness. This should not actually do anything.
old_field = LocalAuthorWithM2MThrough._meta.get_field("tags")
new_field = M2MFieldClass("schema.TagM2MTest", related_name="authors", through=LocalAuthorTag)
new_field.contribute_to_class(LocalAuthorWithM2MThrough, "tags")
with connection.schema_editor() as editor:
editor.alter_field(LocalAuthorWithM2MThrough, old_field, new_field, strict=True)
# Ensure the m2m table is still there
self.assertEqual(len(self.column_classes(LocalAuthorTag)), 3)
def test_m2m_through_alter(self):
self._test_m2m_through_alter(ManyToManyField)
def test_m2m_through_alter_custom(self):
self._test_m2m_through_alter(CustomManyToManyField)
def test_m2m_through_alter_inherited(self):
self._test_m2m_through_alter(InheritedManyToManyField)
def _test_m2m_repoint(self, M2MFieldClass):
"""
Tests repointing M2M fields
"""
class LocalBookWithM2M(Model):
author = ForeignKey(Author, CASCADE)
title = CharField(max_length=100, db_index=True)
pub_date = DateTimeField()
tags = M2MFieldClass("TagM2MTest", related_name="books")
class Meta:
app_label = 'schema'
apps = new_apps
self.local_models = [LocalBookWithM2M]
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(LocalBookWithM2M)
editor.create_model(TagM2MTest)
editor.create_model(UniqueTest)
# Ensure the M2M exists and points to TagM2MTest
if connection.features.supports_foreign_keys:
self.assertForeignKeyExists(
LocalBookWithM2M._meta.get_field("tags").remote_field.through,
'tagm2mtest_id',
'schema_tagm2mtest',
)
# Repoint the M2M
old_field = LocalBookWithM2M._meta.get_field("tags")
new_field = M2MFieldClass(UniqueTest)
new_field.contribute_to_class(LocalBookWithM2M, "uniques")
with connection.schema_editor() as editor:
editor.alter_field(LocalBookWithM2M, old_field, new_field, strict=True)
# Ensure old M2M is gone
with self.assertRaises(DatabaseError):
self.column_classes(LocalBookWithM2M._meta.get_field("tags").remote_field.through)
# This model looks like the new model and is used for teardown.
opts = LocalBookWithM2M._meta
opts.local_many_to_many.remove(old_field)
# Ensure the new M2M exists and points to UniqueTest
if connection.features.supports_foreign_keys:
self.assertForeignKeyExists(new_field.remote_field.through, 'uniquetest_id', 'schema_uniquetest')
def test_m2m_repoint(self):
self._test_m2m_repoint(ManyToManyField)
def test_m2m_repoint_custom(self):
self._test_m2m_repoint(CustomManyToManyField)
def test_m2m_repoint_inherited(self):
self._test_m2m_repoint(InheritedManyToManyField)
@isolate_apps('schema')
def test_m2m_rename_field_in_target_model(self):
class LocalTagM2MTest(Model):
title = CharField(max_length=255)
class Meta:
app_label = 'schema'
class LocalM2M(Model):
tags = ManyToManyField(LocalTagM2MTest)
class Meta:
app_label = 'schema'
# Create the tables.
with connection.schema_editor() as editor:
editor.create_model(LocalM2M)
editor.create_model(LocalTagM2MTest)
self.isolated_local_models = [LocalM2M, LocalTagM2MTest]
# Ensure the m2m table is there.
self.assertEqual(len(self.column_classes(LocalM2M)), 1)
# Alter a field in LocalTagM2MTest.
old_field = LocalTagM2MTest._meta.get_field('title')
new_field = CharField(max_length=254)
new_field.contribute_to_class(LocalTagM2MTest, 'title1')
# @isolate_apps() and inner models are needed to have the model
# relations populated; otherwise, this doesn't act as a regression test.
self.assertEqual(len(new_field.model._meta.related_objects), 1)
with connection.schema_editor() as editor:
editor.alter_field(LocalTagM2MTest, old_field, new_field, strict=True)
# Ensure the m2m table is still there.
self.assertEqual(len(self.column_classes(LocalM2M)), 1)
@skipUnlessDBFeature('supports_column_check_constraints')
def test_check_constraints(self):
"""
Tests creating/deleting CHECK constraints
"""
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure the constraint exists
constraints = self.get_constraints(Author._meta.db_table)
if not any(details['columns'] == ['height'] and details['check'] for details in constraints.values()):
self.fail("No check constraint for height found")
# Alter the column to remove it
old_field = Author._meta.get_field("height")
new_field = IntegerField(null=True, blank=True)
new_field.set_attributes_from_name("height")
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
constraints = self.get_constraints(Author._meta.db_table)
for details in constraints.values():
if details['columns'] == ["height"] and details['check']:
self.fail("Check constraint for height found")
# Alter the column to re-add it
new_field2 = Author._meta.get_field("height")
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, new_field2, strict=True)
constraints = self.get_constraints(Author._meta.db_table)
if not any(details['columns'] == ['height'] and details['check'] for details in constraints.values()):
self.fail("No check constraint for height found")
def test_unique(self):
"""
Tests removing and adding unique constraints to a single column.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Tag)
# Ensure the field is unique to begin with
Tag.objects.create(title="foo", slug="foo")
with self.assertRaises(IntegrityError):
Tag.objects.create(title="bar", slug="foo")
Tag.objects.all().delete()
# Alter the slug field to be non-unique
old_field = Tag._meta.get_field("slug")
new_field = SlugField(unique=False)
new_field.set_attributes_from_name("slug")
with connection.schema_editor() as editor:
editor.alter_field(Tag, old_field, new_field, strict=True)
# Ensure the field is no longer unique
Tag.objects.create(title="foo", slug="foo")
Tag.objects.create(title="bar", slug="foo")
Tag.objects.all().delete()
# Alter the slug field to be unique
new_field2 = SlugField(unique=True)
new_field2.set_attributes_from_name("slug")
with connection.schema_editor() as editor:
editor.alter_field(Tag, new_field, new_field2, strict=True)
# Ensure the field is unique again
Tag.objects.create(title="foo", slug="foo")
with self.assertRaises(IntegrityError):
Tag.objects.create(title="bar", slug="foo")
Tag.objects.all().delete()
# Rename the field
new_field3 = SlugField(unique=True)
new_field3.set_attributes_from_name("slug2")
with connection.schema_editor() as editor:
editor.alter_field(Tag, new_field2, new_field3, strict=True)
# Ensure the field is still unique
TagUniqueRename.objects.create(title="foo", slug2="foo")
with self.assertRaises(IntegrityError):
TagUniqueRename.objects.create(title="bar", slug2="foo")
Tag.objects.all().delete()
def test_unique_name_quoting(self):
old_table_name = TagUniqueRename._meta.db_table
try:
with connection.schema_editor() as editor:
editor.create_model(TagUniqueRename)
editor.alter_db_table(TagUniqueRename, old_table_name, 'unique-table')
TagUniqueRename._meta.db_table = 'unique-table'
# This fails if the unique index name isn't quoted.
editor.alter_unique_together(TagUniqueRename, [], (('title', 'slug2'),))
finally:
TagUniqueRename._meta.db_table = old_table_name
@isolate_apps('schema')
@unittest.skipIf(connection.vendor == 'sqlite', 'SQLite naively remakes the table on field alteration.')
@skipUnlessDBFeature('supports_foreign_keys')
def test_unique_no_unnecessary_fk_drops(self):
"""
If AlterField isn't selective about dropping foreign key constraints
when modifying a field with a unique constraint, it incorrectly drops
and recreates the Book.author foreign key even though that constraint
doesn't restrict the field being changed (#29193).
"""
class Author(Model):
name = CharField(max_length=254, unique=True)
class Meta:
app_label = 'schema'
class Book(Model):
author = ForeignKey(Author, CASCADE)
class Meta:
app_label = 'schema'
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
new_field = CharField(max_length=255, unique=True)
new_field.model = Author
new_field.set_attributes_from_name('name')
with self.assertLogs('django.db.backends.schema', 'DEBUG') as cm:
with connection.schema_editor() as editor:
editor.alter_field(Author, Author._meta.get_field('name'), new_field)
# One SQL statement is executed to alter the field.
self.assertEqual(len(cm.records), 1)
@isolate_apps('schema')
@unittest.skipIf(connection.vendor == 'sqlite', 'SQLite remakes the table on field alteration.')
def test_unique_and_reverse_m2m(self):
"""
AlterField can modify a unique field when there's a reverse M2M
relation on the model.
"""
class Tag(Model):
title = CharField(max_length=255)
slug = SlugField(unique=True)
class Meta:
app_label = 'schema'
class Book(Model):
tags = ManyToManyField(Tag, related_name='books')
class Meta:
app_label = 'schema'
self.isolated_local_models = [Book._meta.get_field('tags').remote_field.through]
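# Register the auto-created through model so the test's teardown drops its table.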
with connection.schema_editor() as editor:
editor.create_model(Tag)
editor.create_model(Book)
new_field = SlugField(max_length=75, unique=True)
new_field.model = Tag
new_field.set_attributes_from_name('slug')
with self.assertLogs('django.db.backends.schema', 'DEBUG') as cm:
with connection.schema_editor() as editor:
editor.alter_field(Tag, Tag._meta.get_field('slug'), new_field)
# One SQL statement is executed to alter the field.
self.assertEqual(len(cm.records), 1)
# Ensure that the field is still unique.
Tag.objects.create(title='foo', slug='foo')
with self.assertRaises(IntegrityError):
Tag.objects.create(title='bar', slug='foo')
def test_unique_together(self):
"""
Tests removing and adding unique_together constraints on a model.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(UniqueTest)
# Ensure the fields are unique to begin with
UniqueTest.objects.create(year=2012, slug="foo")
UniqueTest.objects.create(year=2011, slug="foo")
UniqueTest.objects.create(year=2011, slug="bar")
with self.assertRaises(IntegrityError):
UniqueTest.objects.create(year=2012, slug="foo")
UniqueTest.objects.all().delete()
# Alter the model to its non-unique-together companion
with connection.schema_editor() as editor:
editor.alter_unique_together(UniqueTest, UniqueTest._meta.unique_together, [])
# Ensure the fields are no longer unique
UniqueTest.objects.create(year=2012, slug="foo")
UniqueTest.objects.create(year=2012, slug="foo")
UniqueTest.objects.all().delete()
# Alter it back
new_field2 = SlugField(unique=True)
new_field2.set_attributes_from_name("slug")
with connection.schema_editor() as editor:
editor.alter_unique_together(UniqueTest, [], UniqueTest._meta.unique_together)
# Ensure the fields are unique again
UniqueTest.objects.create(year=2012, slug="foo")
with self.assertRaises(IntegrityError):
UniqueTest.objects.create(year=2012, slug="foo")
UniqueTest.objects.all().delete()
def test_unique_together_with_fk(self):
"""
Tests removing and adding unique_together constraints that include
a foreign key.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
# Ensure there's no unique_together to begin with
self.assertEqual(Book._meta.unique_together, ())
# Add the unique_together constraint
with connection.schema_editor() as editor:
editor.alter_unique_together(Book, [], [['author', 'title']])
# Alter it back
with connection.schema_editor() as editor:
editor.alter_unique_together(Book, [['author', 'title']], [])
def test_unique_together_with_fk_with_existing_index(self):
"""
Tests removing and adding unique_together constraints that include
a foreign key, where the foreign key is added after the model is
created.
"""
# Create the tables
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(BookWithoutAuthor)
new_field = ForeignKey(Author, CASCADE)
new_field.set_attributes_from_name('author')
editor.add_field(BookWithoutAuthor, new_field)
# Ensure the fields aren't unique to begin with
self.assertEqual(Book._meta.unique_together, ())
# Add the unique_together constraint
with connection.schema_editor() as editor:
editor.alter_unique_together(Book, [], [['author', 'title']])
# Alter it back
with connection.schema_editor() as editor:
editor.alter_unique_together(Book, [['author', 'title']], [])
def test_index_together(self):
"""
Tests removing and adding index_together constraints on a model.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Tag)
# Ensure there's no index on the slug/title columns first
self.assertEqual(
False,
any(
c["index"]
for c in self.get_constraints("schema_tag").values()
if c['columns'] == ["slug", "title"]
),
)
# Alter the model to add an index
with connection.schema_editor() as editor:
editor.alter_index_together(Tag, [], [("slug", "title")])
# Ensure there is now an index
self.assertEqual(
True,
any(
c["index"]
for c in self.get_constraints("schema_tag").values()
if c['columns'] == ["slug", "title"]
),
)
# Alter it back
new_field2 = SlugField(unique=True)
new_field2.set_attributes_from_name("slug")
with connection.schema_editor() as editor:
editor.alter_index_together(Tag, [("slug", "title")], [])
# Ensure there's no index
self.assertEqual(
False,
any(
c["index"]
for c in self.get_constraints("schema_tag").values()
if c['columns'] == ["slug", "title"]
),
)
def test_index_together_with_fk(self):
"""
Tests removing and adding index_together constraints that include
a foreign key.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
# Ensure there's no index_together to begin with
self.assertEqual(Book._meta.index_together, ())
# Add the index_together
with connection.schema_editor() as editor:
editor.alter_index_together(Book, [], [['author', 'title']])
# Alter it back
with connection.schema_editor() as editor:
editor.alter_index_together(Book, [['author', 'title']], [])
def test_create_index_together(self):
"""
Tests creating models with index_together already defined
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(TagIndexed)
# Ensure there is an index
self.assertEqual(
True,
any(
c["index"]
for c in self.get_constraints("schema_tagindexed").values()
if c['columns'] == ["slug", "title"]
),
)
@isolate_apps('schema')
def test_db_table(self):
"""
Tests renaming of the table
"""
class Author(Model):
name = CharField(max_length=255)
class Meta:
app_label = 'schema'
class Book(Model):
author = ForeignKey(Author, CASCADE)
class Meta:
app_label = 'schema'
# Create the table and one referring it.
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
# Ensure the table is there to begin with
columns = self.column_classes(Author)
self.assertEqual(columns['name'][0], "CharField")
# Alter the table
with connection.schema_editor(atomic=connection.features.supports_atomic_references_rename) as editor:
editor.alter_db_table(Author, "schema_author", "schema_otherauthor")
# Ensure the table is there afterwards
Author._meta.db_table = "schema_otherauthor"
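# Point the model class at the new table name so introspection helpers query the renamed table.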
columns = self.column_classes(Author)
self.assertEqual(columns['name'][0], "CharField")
# Ensure the foreign key reference was updated
self.assertForeignKeyExists(Book, "author_id", "schema_otherauthor")
# Alter the table again
with connection.schema_editor(atomic=connection.features.supports_atomic_references_rename) as editor:
editor.alter_db_table(Author, "schema_otherauthor", "schema_author")
# Ensure the table is still there
Author._meta.db_table = "schema_author"
columns = self.column_classes(Author)
self.assertEqual(columns['name'][0], "CharField")
def test_add_remove_index(self):
"""
Tests index addition and removal
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure the table is there and has no index
self.assertNotIn('name', self.get_indexes(Author._meta.db_table))
# Add the index
index = Index(fields=['name'], name='author_title_idx')
with connection.schema_editor() as editor:
editor.add_index(Author, index)
self.assertIn('name', self.get_indexes(Author._meta.db_table))
# Drop the index
with connection.schema_editor() as editor:
editor.remove_index(Author, index)
self.assertNotIn('name', self.get_indexes(Author._meta.db_table))
def test_remove_db_index_doesnt_remove_custom_indexes(self):
"""
Changing db_index to False doesn't remove indexes from Meta.indexes.
"""
with connection.schema_editor() as editor:
editor.create_model(AuthorWithIndexedName)
# Ensure the table has its index
self.assertIn('name', self.get_indexes(AuthorWithIndexedName._meta.db_table))
# Add the custom index
index = Index(fields=['-name'], name='author_name_idx')
author_index_name = index.name
with connection.schema_editor() as editor:
db_index_name = editor._create_index_name(
table_name=AuthorWithIndexedName._meta.db_table,
column_names=('name',),
)
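# _create_index_name() reproduces the auto-generated name used for
# db_index=True, so it can be told apart from the Meta.indexes index below.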
try:
AuthorWithIndexedName._meta.indexes = [index]
with connection.schema_editor() as editor:
editor.add_index(AuthorWithIndexedName, index)
old_constraints = self.get_constraints(AuthorWithIndexedName._meta.db_table)
self.assertIn(author_index_name, old_constraints)
self.assertIn(db_index_name, old_constraints)
# Change name field to db_index=False
old_field = AuthorWithIndexedName._meta.get_field('name')
new_field = CharField(max_length=255)
new_field.set_attributes_from_name('name')
with connection.schema_editor() as editor:
editor.alter_field(AuthorWithIndexedName, old_field, new_field, strict=True)
new_constraints = self.get_constraints(AuthorWithIndexedName._meta.db_table)
self.assertNotIn(db_index_name, new_constraints)
# The index from Meta.indexes is still in the database.
self.assertIn(author_index_name, new_constraints)
# Drop the index
with connection.schema_editor() as editor:
editor.remove_index(AuthorWithIndexedName, index)
finally:
AuthorWithIndexedName._meta.indexes = []
def test_order_index(self):
"""
Indexes defined with a column ordering (ASC/DESC) are created with that ordering.
"""
with connection.schema_editor() as editor:
editor.create_model(Author)
# The table doesn't have an index
self.assertNotIn('name', self.get_indexes(Author._meta.db_table))
index_name = 'author_name_idx'
# Add the index
index = Index(fields=['name', '-weight'], name=index_name)
with connection.schema_editor() as editor:
editor.add_index(Author, index)
if connection.features.supports_index_column_ordering:
self.assertIndexOrder(Author._meta.db_table, index_name, ['ASC', 'DESC'])
# Drop the index
with connection.schema_editor() as editor:
editor.remove_index(Author, index)
def test_indexes(self):
"""
Tests creation/altering of indexes
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
# Ensure the table is there and has the right index
self.assertIn(
"title",
self.get_indexes(Book._meta.db_table),
)
# Alter to remove the index
old_field = Book._meta.get_field("title")
new_field = CharField(max_length=100, db_index=False)
new_field.set_attributes_from_name("title")
with connection.schema_editor() as editor:
editor.alter_field(Book, old_field, new_field, strict=True)
# Ensure the table is there and has no index
self.assertNotIn(
"title",
self.get_indexes(Book._meta.db_table),
)
# Alter to re-add the index
new_field2 = Book._meta.get_field("title")
with connection.schema_editor() as editor:
editor.alter_field(Book, new_field, new_field2, strict=True)
# Ensure the table is there and has the index again
self.assertIn(
"title",
self.get_indexes(Book._meta.db_table),
)
# Add a unique column and verify that it creates an implicit index
new_field3 = BookWithSlug._meta.get_field("slug")
with connection.schema_editor() as editor:
editor.add_field(Book, new_field3)
self.assertIn(
"slug",
self.get_indexes(Book._meta.db_table),
)
# Remove the unique constraint and check that the index goes with it
new_field4 = CharField(max_length=20, unique=False)
new_field4.set_attributes_from_name("slug")
with connection.schema_editor() as editor:
editor.alter_field(BookWithSlug, new_field3, new_field4, strict=True)
self.assertNotIn(
"slug",
self.get_indexes(Book._meta.db_table),
)
def test_text_field_with_db_index(self):
with connection.schema_editor() as editor:
editor.create_model(AuthorTextFieldWithIndex)
# The text_field index is present if the database supports it.
assertion = self.assertIn if connection.features.supports_index_on_text_field else self.assertNotIn
assertion('text_field', self.get_indexes(AuthorTextFieldWithIndex._meta.db_table))
def test_primary_key(self):
"""
Tests altering of the primary key
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Tag)
# Ensure the table is there and has the right PK
self.assertEqual(self.get_primary_key(Tag._meta.db_table), 'id')
# Alter to change the PK
id_field = Tag._meta.get_field("id")
old_field = Tag._meta.get_field("slug")
new_field = SlugField(primary_key=True)
new_field.set_attributes_from_name("slug")
new_field.model = Tag
with connection.schema_editor() as editor:
editor.remove_field(Tag, id_field)
editor.alter_field(Tag, old_field, new_field)
# Ensure the PK changed
self.assertNotIn(
'id',
self.get_indexes(Tag._meta.db_table),
)
self.assertEqual(self.get_primary_key(Tag._meta.db_table), 'slug')
def test_context_manager_exit(self):
"""
Ensures transaction is correctly closed when an error occurs
inside a SchemaEditor context.
"""
class SomeError(Exception):
pass
try:
with connection.schema_editor():
raise SomeError
except SomeError:
self.assertFalse(connection.in_atomic_block)
@skipIfDBFeature('can_rollback_ddl')
def test_unsupported_transactional_ddl_disallowed(self):
message = (
"Executing DDL statements while in a transaction on databases "
"that can't perform a rollback is prohibited."
)
with atomic(), connection.schema_editor() as editor:
with self.assertRaisesMessage(TransactionManagementError, message):
editor.execute(editor.sql_create_table % {'table': 'foo', 'definition': ''})
@skipUnlessDBFeature('supports_foreign_keys')
def test_foreign_key_index_long_names_regression(self):
"""
Regression test for #21497.
Only affects databases that support foreign keys.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(AuthorWithEvenLongerName)
editor.create_model(BookWithLongName)
# Find the properly shortened column name
column_name = connection.ops.quote_name("author_foreign_key_with_really_long_field_name_id")
column_name = column_name[1:-1].lower() # unquote, and, for Oracle, un-upcase
# Ensure the table is there and has an index on the column
self.assertIn(
column_name,
self.get_indexes(BookWithLongName._meta.db_table),
)
@skipUnlessDBFeature('supports_foreign_keys')
def test_add_foreign_key_long_names(self):
"""
Regression test for #23009.
Only affects databases that support foreign keys.
"""
# Create the initial tables
with connection.schema_editor() as editor:
editor.create_model(AuthorWithEvenLongerName)
editor.create_model(BookWithLongName)
# Add a second FK, this would fail due to long ref name before the fix
new_field = ForeignKey(AuthorWithEvenLongerName, CASCADE, related_name="something")
new_field.set_attributes_from_name("author_other_really_long_named_i_mean_so_long_fk")
with connection.schema_editor() as editor:
editor.add_field(BookWithLongName, new_field)
@isolate_apps('schema')
@skipUnlessDBFeature('supports_foreign_keys')
def test_add_foreign_key_quoted_db_table(self):
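# A db_table wrapped in quote characters is treated as already quoted
# (quote_name() returns it unchanged); FK creation must cope with that.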
class Author(Model):
class Meta:
db_table = '"table_author_double_quoted"'
app_label = 'schema'
class Book(Model):
author = ForeignKey(Author, CASCADE)
class Meta:
app_label = 'schema'
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
if connection.vendor == 'mysql':
self.assertForeignKeyExists(Book, 'author_id', '"table_author_double_quoted"')
else:
self.assertForeignKeyExists(Book, 'author_id', 'table_author_double_quoted')
def test_add_foreign_object(self):
with connection.schema_editor() as editor:
editor.create_model(BookForeignObj)
new_field = ForeignObject(Author, on_delete=CASCADE, from_fields=['author_id'], to_fields=['id'])
new_field.set_attributes_from_name('author')
with connection.schema_editor() as editor:
editor.add_field(BookForeignObj, new_field)
def test_creation_deletion_reserved_names(self):
"""
Tries creating a model's table, and then deleting it when it has a
SQL reserved name.
"""
# Create the table
with connection.schema_editor() as editor:
try:
editor.create_model(Thing)
except OperationalError as e:
self.fail("Errors when applying initial migration for a model "
"with a table named after an SQL reserved word: %s" % e)
# The table is there
list(Thing.objects.all())
# Clean up that table
with connection.schema_editor() as editor:
editor.delete_model(Thing)
# The table is gone
with self.assertRaises(DatabaseError):
list(Thing.objects.all())
def test_remove_constraints_capital_letters(self):
"""
#23065 - Constraint names must be quoted if they contain capital letters.
"""
def get_field(*args, field_class=IntegerField, **kwargs):
kwargs['db_column'] = "CamelCase"
field = field_class(*args, **kwargs)
field.set_attributes_from_name("CamelCase")
return field
model = Author
field = get_field()
table = model._meta.db_table
column = field.column
identifier_converter = connection.introspection.identifier_converter
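# identifier_converter() normalizes names to the case the backend stores
# them in (e.g. Oracle uppercases identifiers), so the expected names
# match introspection output.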
with connection.schema_editor() as editor:
editor.create_model(model)
editor.add_field(model, field)
constraint_name = 'CamelCaseIndex'
expected_constraint_name = identifier_converter(constraint_name)
editor.execute(
editor.sql_create_index % {
"table": editor.quote_name(table),
"name": editor.quote_name(constraint_name),
"using": "",
"columns": editor.quote_name(column),
"extra": "",
"condition": "",
}
)
self.assertIn(expected_constraint_name, self.get_constraints(model._meta.db_table))
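# Altering away from db_index=True must drop the index created above;
# that only works if the CamelCase name is quoted in the DROP statement.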
editor.alter_field(model, get_field(db_index=True), field, strict=True)
self.assertNotIn(expected_constraint_name, self.get_constraints(model._meta.db_table))
constraint_name = 'CamelCaseUniqConstraint'
expected_constraint_name = identifier_converter(constraint_name)
editor.execute(editor._create_unique_sql(model, [field.column], constraint_name))
self.assertIn(expected_constraint_name, self.get_constraints(model._meta.db_table))
editor.alter_field(model, get_field(unique=True), field, strict=True)
self.assertNotIn(expected_constraint_name, self.get_constraints(model._meta.db_table))
if editor.sql_create_fk:
constraint_name = 'CamelCaseFKConstraint'
expected_constraint_name = identifier_converter(constraint_name)
editor.execute(
editor.sql_create_fk % {
"table": editor.quote_name(table),
"name": editor.quote_name(constraint_name),
"column": editor.quote_name(column),
"to_table": editor.quote_name(table),
"to_column": editor.quote_name(model._meta.auto_field.column),
"deferrable": connection.ops.deferrable_sql(),
}
)
self.assertIn(expected_constraint_name, self.get_constraints(model._meta.db_table))
editor.alter_field(model, get_field(Author, CASCADE, field_class=ForeignKey), field, strict=True)
self.assertNotIn(expected_constraint_name, self.get_constraints(model._meta.db_table))
def test_add_field_use_effective_default(self):
"""
#23987 - effective_default() should be used as the field default when
adding a new field.
"""
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure there's no surname field
columns = self.column_classes(Author)
self.assertNotIn("surname", columns)
# Create a row
Author.objects.create(name='Anonymous1')
# Add a new CharField so the column default is taken from effective_default()
new_field = CharField(max_length=15, blank=True)
new_field.set_attributes_from_name("surname")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
# Ensure field was added with the right default
with connection.cursor() as cursor:
cursor.execute("SELECT surname FROM schema_author;")
item = cursor.fetchall()[0]
self.assertEqual(item[0], None if connection.features.interprets_empty_strings_as_nulls else '')
def test_add_field_default_dropped(self):
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Ensure there's no surname field
columns = self.column_classes(Author)
self.assertNotIn("surname", columns)
# Create a row
Author.objects.create(name='Anonymous1')
# Add new CharField with a default
new_field = CharField(max_length=15, blank=True, default='surname default')
new_field.set_attributes_from_name("surname")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
# Ensure field was added with the right default
with connection.cursor() as cursor:
cursor.execute("SELECT surname FROM schema_author;")
item = cursor.fetchall()[0]
self.assertEqual(item[0], 'surname default')
# And that the default is no longer set in the database.
field = next(
f for f in connection.introspection.get_table_description(cursor, "schema_author")
if f.name == "surname"
)
if connection.features.can_introspect_default:
self.assertIsNone(field.default)
def test_alter_field_default_dropped(self):
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Create a row
Author.objects.create(name='Anonymous1')
self.assertIsNone(Author.objects.get().height)
old_field = Author._meta.get_field('height')
# The default from the new field is used in updating existing rows.
new_field = IntegerField(blank=True, default=42)
new_field.set_attributes_from_name('height')
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
self.assertEqual(Author.objects.get().height, 42)
# The database default should be removed.
with connection.cursor() as cursor:
field = next(
f for f in connection.introspection.get_table_description(cursor, "schema_author")
if f.name == "height"
)
if connection.features.can_introspect_default:
self.assertIsNone(field.default)
@unittest.skipIf(connection.vendor == 'sqlite', 'SQLite naively remakes the table on field alteration.')
def test_alter_field_default_doesnt_perform_queries(self):
"""
No queries are performed if only a field's default changes and the
field isn't changing from null to non-null.
"""
with connection.schema_editor() as editor:
editor.create_model(AuthorWithDefaultHeight)
old_field = AuthorWithDefaultHeight._meta.get_field('height')
new_default = old_field.default * 2
new_field = PositiveIntegerField(null=True, blank=True, default=new_default)
new_field.set_attributes_from_name('height')
with connection.schema_editor() as editor, self.assertNumQueries(0):
editor.alter_field(AuthorWithDefaultHeight, old_field, new_field, strict=True)
def test_add_textfield_unhashable_default(self):
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Create a row
Author.objects.create(name='Anonymous1')
# Create a field that has an unhashable default
new_field = TextField(default={})
new_field.set_attributes_from_name("info")
with connection.schema_editor() as editor:
editor.add_field(Author, new_field)
@unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific")
def test_add_indexed_charfield(self):
field = CharField(max_length=255, db_index=True)
field.set_attributes_from_name('nom_de_plume')
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.add_field(Author, field)
# Should create two indexes, one of them for the LIKE operator.
self.assertEqual(
self.get_constraints_for_column(Author, 'nom_de_plume'),
['schema_author_nom_de_plume_7570a851', 'schema_author_nom_de_plume_7570a851_like'],
)
@unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific")
def test_add_unique_charfield(self):
field = CharField(max_length=255, unique=True)
field.set_attributes_from_name('nom_de_plume')
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.add_field(Author, field)
# Should create two indexes, one of them for the LIKE operator.
self.assertEqual(
self.get_constraints_for_column(Author, 'nom_de_plume'),
['schema_author_nom_de_plume_7570a851_like', 'schema_author_nom_de_plume_key']
)
@unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific")
def test_alter_field_add_index_to_charfield(self):
# Create the table and verify no initial indexes.
with connection.schema_editor() as editor:
editor.create_model(Author)
self.assertEqual(self.get_constraints_for_column(Author, 'name'), [])
# Alter to add db_index=True and create 2 indexes.
old_field = Author._meta.get_field('name')
new_field = CharField(max_length=255, db_index=True)
new_field.set_attributes_from_name('name')
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(Author, 'name'),
['schema_author_name_1fbc5617', 'schema_author_name_1fbc5617_like']
)
# Remove db_index=True to drop both indexes.
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, old_field, strict=True)
self.assertEqual(self.get_constraints_for_column(Author, 'name'), [])
@unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific")
def test_alter_field_add_unique_to_charfield(self):
# Create the table and verify no initial indexes.
with connection.schema_editor() as editor:
editor.create_model(Author)
self.assertEqual(self.get_constraints_for_column(Author, 'name'), [])
# Alter to add unique=True and create 2 indexes.
old_field = Author._meta.get_field('name')
new_field = CharField(max_length=255, unique=True)
new_field.set_attributes_from_name('name')
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(Author, 'name'),
['schema_author_name_1fbc5617_like', 'schema_author_name_1fbc5617_uniq']
)
# Remove unique=True to drop both indexes.
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, old_field, strict=True)
self.assertEqual(self.get_constraints_for_column(Author, 'name'), [])
@unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific")
def test_alter_field_add_index_to_textfield(self):
# Create the table and verify no initial indexes.
with connection.schema_editor() as editor:
editor.create_model(Note)
self.assertEqual(self.get_constraints_for_column(Note, 'info'), [])
# Alter to add db_index=True and create 2 indexes.
old_field = Note._meta.get_field('info')
new_field = TextField(db_index=True)
new_field.set_attributes_from_name('info')
with connection.schema_editor() as editor:
editor.alter_field(Note, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(Note, 'info'),
['schema_note_info_4b0ea695', 'schema_note_info_4b0ea695_like']
)
# Remove db_index=True to drop both indexes.
with connection.schema_editor() as editor:
editor.alter_field(Note, new_field, old_field, strict=True)
self.assertEqual(self.get_constraints_for_column(Note, 'info'), [])
@unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific")
def test_alter_field_add_unique_to_charfield_with_db_index(self):
# Create the table and verify initial indexes.
with connection.schema_editor() as editor:
editor.create_model(BookWithoutAuthor)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, 'title'),
['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like']
)
# Alter to add unique=True (should replace the index)
old_field = BookWithoutAuthor._meta.get_field('title')
new_field = CharField(max_length=100, db_index=True, unique=True)
new_field.set_attributes_from_name('title')
with connection.schema_editor() as editor:
editor.alter_field(BookWithoutAuthor, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, 'title'),
['schema_book_title_2dfb2dff_like', 'schema_book_title_2dfb2dff_uniq']
)
# Alter to remove unique=True (should drop unique index)
new_field2 = CharField(max_length=100, db_index=True)
new_field2.set_attributes_from_name('title')
with connection.schema_editor() as editor:
editor.alter_field(BookWithoutAuthor, new_field, new_field2, strict=True)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, 'title'),
['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like']
)
@unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific")
def test_alter_field_remove_unique_and_db_index_from_charfield(self):
# Create the table and verify initial indexes.
with connection.schema_editor() as editor:
editor.create_model(BookWithoutAuthor)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, 'title'),
['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like']
)
# Alter to add unique=True (should replace the index)
old_field = BookWithoutAuthor._meta.get_field('title')
new_field = CharField(max_length=100, db_index=True, unique=True)
new_field.set_attributes_from_name('title')
with connection.schema_editor() as editor:
editor.alter_field(BookWithoutAuthor, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, 'title'),
['schema_book_title_2dfb2dff_like', 'schema_book_title_2dfb2dff_uniq']
)
# Alter to remove both unique=True and db_index=True (should drop all indexes)
new_field2 = CharField(max_length=100)
new_field2.set_attributes_from_name('title')
with connection.schema_editor() as editor:
editor.alter_field(BookWithoutAuthor, new_field, new_field2, strict=True)
self.assertEqual(self.get_constraints_for_column(BookWithoutAuthor, 'title'), [])
@unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific")
def test_alter_field_swap_unique_and_db_index_with_charfield(self):
# Create the table and verify initial indexes.
with connection.schema_editor() as editor:
editor.create_model(BookWithoutAuthor)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, 'title'),
['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like']
)
# Alter to set unique=True and remove db_index=True (should replace the index)
old_field = BookWithoutAuthor._meta.get_field('title')
new_field = CharField(max_length=100, unique=True)
new_field.set_attributes_from_name('title')
with connection.schema_editor() as editor:
editor.alter_field(BookWithoutAuthor, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, 'title'),
['schema_book_title_2dfb2dff_like', 'schema_book_title_2dfb2dff_uniq']
)
# Alter to set db_index=True and remove unique=True (should restore index)
new_field2 = CharField(max_length=100, db_index=True)
new_field2.set_attributes_from_name('title')
with connection.schema_editor() as editor:
editor.alter_field(BookWithoutAuthor, new_field, new_field2, strict=True)
self.assertEqual(
self.get_constraints_for_column(BookWithoutAuthor, 'title'),
['schema_book_title_2dfb2dff', 'schema_book_title_2dfb2dff_like']
)
@unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific")
def test_alter_field_add_db_index_to_charfield_with_unique(self):
# Create the table and verify initial indexes.
with connection.schema_editor() as editor:
editor.create_model(Tag)
self.assertEqual(
self.get_constraints_for_column(Tag, 'slug'),
['schema_tag_slug_2c418ba3_like', 'schema_tag_slug_key']
)
# Alter to add db_index=True
old_field = Tag._meta.get_field('slug')
new_field = SlugField(db_index=True, unique=True)
new_field.set_attributes_from_name('slug')
with connection.schema_editor() as editor:
editor.alter_field(Tag, old_field, new_field, strict=True)
self.assertEqual(
self.get_constraints_for_column(Tag, 'slug'),
['schema_tag_slug_2c418ba3_like', 'schema_tag_slug_key']
)
# Alter to remove db_index=True
new_field2 = SlugField(unique=True)
new_field2.set_attributes_from_name('slug')
with connection.schema_editor() as editor:
editor.alter_field(Tag, new_field, new_field2, strict=True)
self.assertEqual(
self.get_constraints_for_column(Tag, 'slug'),
['schema_tag_slug_2c418ba3_like', 'schema_tag_slug_key']
)
def test_alter_field_add_index_to_integerfield(self):
# Create the table and verify no initial indexes.
with connection.schema_editor() as editor:
editor.create_model(Author)
self.assertEqual(self.get_constraints_for_column(Author, 'weight'), [])
# Alter to add db_index=True and create index.
old_field = Author._meta.get_field('weight')
new_field = IntegerField(null=True, db_index=True)
new_field.set_attributes_from_name('weight')
with connection.schema_editor() as editor:
editor.alter_field(Author, old_field, new_field, strict=True)
self.assertEqual(self.get_constraints_for_column(Author, 'weight'), ['schema_author_weight_587740f9'])
# Remove db_index=True to drop index.
with connection.schema_editor() as editor:
editor.alter_field(Author, new_field, old_field, strict=True)
self.assertEqual(self.get_constraints_for_column(Author, 'weight'), [])
def test_alter_pk_with_self_referential_field(self):
"""
Changing the primary key field name of a model with a self-referential
foreign key (#26384).
"""
with connection.schema_editor() as editor:
editor.create_model(Node)
old_field = Node._meta.get_field('node_id')
new_field = AutoField(primary_key=True)
new_field.set_attributes_from_name('id')
with connection.schema_editor() as editor:
editor.alter_field(Node, old_field, new_field, strict=True)
self.assertForeignKeyExists(Node, 'parent_id', Node._meta.db_table)
@mock.patch('django.db.backends.base.schema.datetime')
@mock.patch('django.db.backends.base.schema.timezone')
def test_add_datefield_and_datetimefield_use_effective_default(self, mocked_datetime, mocked_tz):
"""
effective_default() should be used for DateField, DateTimeField, and
        TimeField if auto_now or auto_now_add is set (#25005).
"""
now = datetime.datetime(month=1, day=1, year=2000, hour=1, minute=1)
now_tz = datetime.datetime(month=1, day=1, year=2000, hour=1, minute=1, tzinfo=timezone.utc)
mocked_datetime.now = mock.MagicMock(return_value=now)
mocked_tz.now = mock.MagicMock(return_value=now_tz)
# Create the table
with connection.schema_editor() as editor:
editor.create_model(Author)
# Check auto_now/auto_now_add attributes are not defined
columns = self.column_classes(Author)
self.assertNotIn("dob_auto_now", columns)
self.assertNotIn("dob_auto_now_add", columns)
self.assertNotIn("dtob_auto_now", columns)
self.assertNotIn("dtob_auto_now_add", columns)
self.assertNotIn("tob_auto_now", columns)
self.assertNotIn("tob_auto_now_add", columns)
# Create a row
Author.objects.create(name='Anonymous1')
# Ensure fields were added with the correct defaults
dob_auto_now = DateField(auto_now=True)
dob_auto_now.set_attributes_from_name('dob_auto_now')
self.check_added_field_default(
editor, Author, dob_auto_now, 'dob_auto_now', now.date(),
cast_function=lambda x: x.date(),
)
dob_auto_now_add = DateField(auto_now_add=True)
dob_auto_now_add.set_attributes_from_name('dob_auto_now_add')
self.check_added_field_default(
editor, Author, dob_auto_now_add, 'dob_auto_now_add', now.date(),
cast_function=lambda x: x.date(),
)
dtob_auto_now = DateTimeField(auto_now=True)
dtob_auto_now.set_attributes_from_name('dtob_auto_now')
self.check_added_field_default(
editor, Author, dtob_auto_now, 'dtob_auto_now', now,
)
dt_tm_of_birth_auto_now_add = DateTimeField(auto_now_add=True)
dt_tm_of_birth_auto_now_add.set_attributes_from_name('dtob_auto_now_add')
self.check_added_field_default(
editor, Author, dt_tm_of_birth_auto_now_add, 'dtob_auto_now_add', now,
)
tob_auto_now = TimeField(auto_now=True)
tob_auto_now.set_attributes_from_name('tob_auto_now')
self.check_added_field_default(
editor, Author, tob_auto_now, 'tob_auto_now', now.time(),
cast_function=lambda x: x.time(),
)
tob_auto_now_add = TimeField(auto_now_add=True)
tob_auto_now_add.set_attributes_from_name('tob_auto_now_add')
self.check_added_field_default(
editor, Author, tob_auto_now_add, 'tob_auto_now_add', now.time(),
cast_function=lambda x: x.time(),
)
def test_namespaced_db_table_create_index_name(self):
"""
Table names are stripped of their namespace/schema before being used to
generate index names.
"""
with connection.schema_editor() as editor:
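            # Use the backend's identifier length limit, falling back to 200
            # characters when no limit is reported.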
max_name_length = connection.ops.max_name_length() or 200
namespace = 'n' * max_name_length
table_name = 't' * max_name_length
namespaced_table_name = '"%s"."%s"' % (namespace, table_name)
self.assertEqual(
editor._create_index_name(table_name, []),
editor._create_index_name(namespaced_table_name, []),
)
@unittest.skipUnless(connection.vendor == 'oracle', 'Oracle specific db_table syntax')
def test_creation_with_db_table_double_quotes(self):
oracle_user = connection.creation._test_database_user()
class Student(Model):
name = CharField(max_length=30)
class Meta:
app_label = 'schema'
apps = new_apps
db_table = '"%s"."DJANGO_STUDENT_TABLE"' % oracle_user
class Document(Model):
name = CharField(max_length=30)
students = ManyToManyField(Student)
class Meta:
app_label = 'schema'
apps = new_apps
db_table = '"%s"."DJANGO_DOCUMENT_TABLE"' % oracle_user
self.local_models = [Student, Document]
with connection.schema_editor() as editor:
editor.create_model(Student)
editor.create_model(Document)
doc = Document.objects.create(name='Test Name')
student = Student.objects.create(name='Some man')
doc.students.add(student)
def test_rename_table_renames_deferred_sql_references(self):
atomic_rename = connection.features.supports_atomic_references_rename
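        # Wrap the renames in an atomic block only when the backend supports
        # atomic renames of references.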
with connection.schema_editor(atomic=atomic_rename) as editor:
editor.create_model(Author)
editor.create_model(Book)
editor.alter_db_table(Author, 'schema_author', 'schema_renamed_author')
editor.alter_db_table(Author, 'schema_book', 'schema_renamed_book')
self.assertGreater(len(editor.deferred_sql), 0)
for statement in editor.deferred_sql:
self.assertIs(statement.references_table('schema_author'), False)
self.assertIs(statement.references_table('schema_book'), False)
@unittest.skipIf(connection.vendor == 'sqlite', 'SQLite naively remakes the table on field alteration.')
def test_rename_column_renames_deferred_sql_references(self):
with connection.schema_editor() as editor:
editor.create_model(Author)
editor.create_model(Book)
old_title = Book._meta.get_field('title')
new_title = CharField(max_length=100, db_index=True)
new_title.set_attributes_from_name('renamed_title')
editor.alter_field(Book, old_title, new_title)
old_author = Book._meta.get_field('author')
new_author = ForeignKey(Author, CASCADE)
new_author.set_attributes_from_name('renamed_author')
editor.alter_field(Book, old_author, new_author)
self.assertGreater(len(editor.deferred_sql), 0)
for statement in editor.deferred_sql:
self.assertIs(statement.references_column('book', 'title'), False)
self.assertIs(statement.references_column('book', 'author_id'), False)
@isolate_apps('schema')
def test_referenced_field_without_constraint_rename_inside_atomic_block(self):
"""
Foreign keys without database level constraint don't prevent the field
they reference from being renamed in an atomic block.
"""
class Foo(Model):
field = CharField(max_length=255, unique=True)
class Meta:
app_label = 'schema'
class Bar(Model):
foo = ForeignKey(Foo, CASCADE, to_field='field', db_constraint=False)
class Meta:
app_label = 'schema'
self.isolated_local_models = [Foo, Bar]
with connection.schema_editor() as editor:
editor.create_model(Foo)
editor.create_model(Bar)
new_field = CharField(max_length=255, unique=True)
new_field.set_attributes_from_name('renamed')
with connection.schema_editor(atomic=True) as editor:
editor.alter_field(Foo, Foo._meta.get_field('field'), new_field)
@isolate_apps('schema')
def test_referenced_table_without_constraint_rename_inside_atomic_block(self):
"""
Foreign keys without database level constraint don't prevent the table
they reference from being renamed in an atomic block.
"""
class Foo(Model):
field = CharField(max_length=255, unique=True)
class Meta:
app_label = 'schema'
class Bar(Model):
foo = ForeignKey(Foo, CASCADE, to_field='field', db_constraint=False)
class Meta:
app_label = 'schema'
self.isolated_local_models = [Foo, Bar]
with connection.schema_editor() as editor:
editor.create_model(Foo)
editor.create_model(Bar)
new_field = CharField(max_length=255, unique=True)
new_field.set_attributes_from_name('renamed')
with connection.schema_editor(atomic=True) as editor:
editor.alter_db_table(Foo, Foo._meta.db_table, 'renamed_table')
Foo._meta.db_table = 'renamed_table'
|
28062509e2b8e4c4ff69c9db3c4b88ba86fd56ca048aabcd187e9b68f3b8bb02 | from django.contrib.admin import ModelAdmin, TabularInline
from django.contrib.admin.helpers import InlineAdminForm
from django.contrib.admin.tests import AdminSeleniumTestCase
from django.contrib.auth.models import Permission, User
from django.contrib.contenttypes.models import ContentType
from django.test import RequestFactory, TestCase, override_settings
from django.urls import reverse
from .admin import InnerInline, site as admin_site
from .models import (
Author, BinaryTree, Book, Chapter, Child, ChildModel1, ChildModel2,
Fashionista, FootNote, Holder, Holder2, Holder3, Holder4, Inner, Inner2,
Inner3, Inner4Stacked, Inner4Tabular, Novel, OutfitItem, Parent,
ParentModelWithCustomPk, Person, Poll, Profile, ProfileCollection,
Question, Sighting, SomeChildModel, SomeParentModel, Teacher,
)
INLINE_CHANGELINK_HTML = 'class="inlinechangelink">Change</a>'
class TestDataMixin:
@classmethod
def setUpTestData(cls):
cls.superuser = User.objects.create_superuser(username='super', email='[email protected]', password='secret')
@override_settings(ROOT_URLCONF='admin_inlines.urls')
class TestInline(TestDataMixin, TestCase):
factory = RequestFactory()
@classmethod
def setUpTestData(cls):
super().setUpTestData()
cls.holder = Holder.objects.create(dummy=13)
Inner.objects.create(dummy=42, holder=cls.holder)
def setUp(self):
self.client.force_login(self.superuser)
def test_can_delete(self):
"""
can_delete should be passed to inlineformset factory.
"""
response = self.client.get(
reverse('admin:admin_inlines_holder_change', args=(self.holder.id,))
)
inner_formset = response.context['inline_admin_formsets'][0].formset
expected = InnerInline.can_delete
actual = inner_formset.can_delete
self.assertEqual(expected, actual, 'can_delete must be equal')
def test_readonly_stacked_inline_label(self):
"""Bug #13174."""
holder = Holder.objects.create(dummy=42)
Inner.objects.create(holder=holder, dummy=42, readonly='')
response = self.client.get(
reverse('admin:admin_inlines_holder_change', args=(holder.id,))
)
self.assertContains(response, '<label>Inner readonly label:</label>')
def test_many_to_many_inlines(self):
"Autogenerated many-to-many inlines are displayed correctly (#13407)"
response = self.client.get(reverse('admin:admin_inlines_author_add'))
# The heading for the m2m inline block uses the right text
self.assertContains(response, '<h2>Author-book relationships</h2>')
# The "add another" label is correct
self.assertContains(response, 'Add another Author-book relationship')
# The '+' is dropped from the autogenerated form prefix (Author_books+)
self.assertContains(response, 'id="id_Author_books-TOTAL_FORMS"')
def test_inline_primary(self):
person = Person.objects.create(firstname='Imelda')
item = OutfitItem.objects.create(name='Shoes')
# Imelda likes shoes, but can't carry her own bags.
data = {
'shoppingweakness_set-TOTAL_FORMS': 1,
'shoppingweakness_set-INITIAL_FORMS': 0,
'shoppingweakness_set-MAX_NUM_FORMS': 0,
'_save': 'Save',
'person': person.id,
'max_weight': 0,
'shoppingweakness_set-0-item': item.id,
}
response = self.client.post(reverse('admin:admin_inlines_fashionista_add'), data)
self.assertEqual(response.status_code, 302)
self.assertEqual(len(Fashionista.objects.filter(person__firstname='Imelda')), 1)
def test_tabular_inline_column_css_class(self):
"""
Field names are included in the context to output a field-specific
CSS class name in the column headers.
"""
response = self.client.get(reverse('admin:admin_inlines_poll_add'))
text_field, call_me_field = list(response.context['inline_admin_formset'].fields())
# Editable field.
self.assertEqual(text_field['name'], 'text')
self.assertContains(response, '<th class="column-text required">')
# Read-only field.
self.assertEqual(call_me_field['name'], 'call_me')
self.assertContains(response, '<th class="column-call_me">')
def test_custom_form_tabular_inline_label(self):
"""
A model form with a form field specified (TitleForm.title1) should have
its label rendered in the tabular inline.
"""
response = self.client.get(reverse('admin:admin_inlines_titlecollection_add'))
self.assertContains(response, '<th class="column-title1 required">Title1</th>', html=True)
def test_custom_form_tabular_inline_overridden_label(self):
"""
SomeChildModelForm.__init__() overrides the label of a form field.
That label is displayed in the TabularInline.
"""
response = self.client.get(reverse('admin:admin_inlines_someparentmodel_add'))
field = list(response.context['inline_admin_formset'].fields())[0]
self.assertEqual(field['label'], 'new label')
self.assertContains(response, '<th class="column-name required">New label</th>', html=True)
def test_tabular_non_field_errors(self):
"""
non_field_errors are displayed correctly, including the correct value
for colspan.
"""
data = {
'title_set-TOTAL_FORMS': 1,
'title_set-INITIAL_FORMS': 0,
'title_set-MAX_NUM_FORMS': 0,
'_save': 'Save',
'title_set-0-title1': 'a title',
'title_set-0-title2': 'a different title',
}
response = self.client.post(reverse('admin:admin_inlines_titlecollection_add'), data)
# Here colspan is "4": two fields (title1 and title2), one hidden field and the delete checkbox.
self.assertContains(
response,
'<tr><td colspan="4"><ul class="errorlist nonfield">'
'<li>The two titles must be the same</li></ul></td></tr>'
)
def test_no_parent_callable_lookup(self):
"""Admin inline `readonly_field` shouldn't invoke parent ModelAdmin callable"""
        # An identically named callable isn't present in the parent ModelAdmin,
        # so rendering the add view shouldn't explode.
response = self.client.get(reverse('admin:admin_inlines_novel_add'))
self.assertEqual(response.status_code, 200)
# View should have the child inlines section
self.assertContains(
response,
'<div class="js-inline-admin-formset inline-group" id="chapter_set-group"'
)
def test_callable_lookup(self):
"""Admin inline should invoke local callable when its name is listed in readonly_fields"""
response = self.client.get(reverse('admin:admin_inlines_poll_add'))
self.assertEqual(response.status_code, 200)
# Add parent object view should have the child inlines section
self.assertContains(
response,
'<div class="js-inline-admin-formset inline-group" id="question_set-group"'
)
# The right callable should be used for the inline readonly_fields
# column cells
self.assertContains(response, '<p>Callable in QuestionInline</p>')
def test_help_text(self):
"""
The inlines' model field help texts are displayed when using both the
stacked and tabular layouts.
"""
response = self.client.get(reverse('admin:admin_inlines_holder4_add'))
self.assertContains(response, '<div class="help">Awesome stacked help text is awesome.</div>', 4)
self.assertContains(
response,
'<img src="/static/admin/img/icon-unknown.svg" '
'class="help help-tooltip" width="10" height="10" '
'alt="(Awesome tabular help text is awesome.)" '
'title="Awesome tabular help text is awesome.">',
1
)
# ReadOnly fields
response = self.client.get(reverse('admin:admin_inlines_capofamiglia_add'))
self.assertContains(
response,
'<img src="/static/admin/img/icon-unknown.svg" '
'class="help help-tooltip" width="10" height="10" '
'alt="(Help text for ReadOnlyInline)" '
'title="Help text for ReadOnlyInline">',
1
)
def test_tabular_model_form_meta_readonly_field(self):
"""
Tabular inlines use ModelForm.Meta.help_texts and labels for read-only
fields.
"""
response = self.client.get(reverse('admin:admin_inlines_someparentmodel_add'))
self.assertContains(
response,
'<img src="/static/admin/img/icon-unknown.svg" '
'class="help help-tooltip" width="10" height="10" '
'alt="(Help text from ModelForm.Meta)" '
'title="Help text from ModelForm.Meta">'
)
self.assertContains(response, 'Label from ModelForm.Meta')
def test_inline_hidden_field_no_column(self):
"""#18263 -- Make sure hidden fields don't get a column in tabular inlines"""
parent = SomeParentModel.objects.create(name='a')
SomeChildModel.objects.create(name='b', position='0', parent=parent)
SomeChildModel.objects.create(name='c', position='1', parent=parent)
response = self.client.get(reverse('admin:admin_inlines_someparentmodel_change', args=(parent.pk,)))
self.assertNotContains(response, '<td class="field-position">')
self.assertInHTML(
'<input id="id_somechildmodel_set-1-position" '
'name="somechildmodel_set-1-position" type="hidden" value="1">',
response.rendered_content,
)
def test_non_related_name_inline(self):
"""
Multiple inlines with related_name='+' have correct form prefixes.
"""
response = self.client.get(reverse('admin:admin_inlines_capofamiglia_add'))
self.assertContains(response, '<input type="hidden" name="-1-0-id" id="id_-1-0-id">', html=True)
self.assertContains(
response,
'<input type="hidden" name="-1-0-capo_famiglia" id="id_-1-0-capo_famiglia">',
html=True
)
self.assertContains(
response,
'<input id="id_-1-0-name" type="text" class="vTextField" name="-1-0-name" maxlength="100">',
html=True
)
self.assertContains(response, '<input type="hidden" name="-2-0-id" id="id_-2-0-id">', html=True)
self.assertContains(
response,
'<input type="hidden" name="-2-0-capo_famiglia" id="id_-2-0-capo_famiglia">',
html=True
)
self.assertContains(
response,
'<input id="id_-2-0-name" type="text" class="vTextField" name="-2-0-name" maxlength="100">',
html=True
)
@override_settings(USE_L10N=True, USE_THOUSAND_SEPARATOR=True)
def test_localize_pk_shortcut(self):
"""
The "View on Site" link is correct for locales that use thousand
separators.
"""
holder = Holder.objects.create(pk=123456789, dummy=42)
inner = Inner.objects.create(pk=987654321, holder=holder, dummy=42, readonly='')
response = self.client.get(reverse('admin:admin_inlines_holder_change', args=(holder.id,)))
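        # The "View on site" shortcut URL has the form r/<content_type_pk>/<object_pk>/.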
inner_shortcut = 'r/%s/%s/' % (ContentType.objects.get_for_model(inner).pk, inner.pk)
self.assertContains(response, inner_shortcut)
def test_custom_pk_shortcut(self):
"""
The "View on Site" link is correct for models with a custom primary key
field.
"""
parent = ParentModelWithCustomPk.objects.create(my_own_pk="foo", name="Foo")
child1 = ChildModel1.objects.create(my_own_pk="bar", name="Bar", parent=parent)
child2 = ChildModel2.objects.create(my_own_pk="baz", name="Baz", parent=parent)
response = self.client.get(reverse('admin:admin_inlines_parentmodelwithcustompk_change', args=('foo',)))
child1_shortcut = 'r/%s/%s/' % (ContentType.objects.get_for_model(child1).pk, child1.pk)
child2_shortcut = 'r/%s/%s/' % (ContentType.objects.get_for_model(child2).pk, child2.pk)
self.assertContains(response, child1_shortcut)
self.assertContains(response, child2_shortcut)
def test_create_inlines_on_inherited_model(self):
"""
An object can be created with inlines when it inherits another class.
"""
data = {
'name': 'Martian',
'sighting_set-TOTAL_FORMS': 1,
'sighting_set-INITIAL_FORMS': 0,
'sighting_set-MAX_NUM_FORMS': 0,
'sighting_set-0-place': 'Zone 51',
'_save': 'Save',
}
response = self.client.post(reverse('admin:admin_inlines_extraterrestrial_add'), data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Sighting.objects.filter(et__name='Martian').count(), 1)
def test_custom_get_extra_form(self):
bt_head = BinaryTree.objects.create(name="Tree Head")
BinaryTree.objects.create(name="First Child", parent=bt_head)
# The maximum number of forms should respect 'get_max_num' on the
# ModelAdmin
max_forms_input = (
'<input id="id_binarytree_set-MAX_NUM_FORMS" '
'name="binarytree_set-MAX_NUM_FORMS" type="hidden" value="%d">'
)
# The total number of forms will remain the same in either case
total_forms_hidden = (
'<input id="id_binarytree_set-TOTAL_FORMS" '
'name="binarytree_set-TOTAL_FORMS" type="hidden" value="2">'
)
response = self.client.get(reverse('admin:admin_inlines_binarytree_add'))
self.assertInHTML(max_forms_input % 3, response.rendered_content)
self.assertInHTML(total_forms_hidden, response.rendered_content)
response = self.client.get(reverse('admin:admin_inlines_binarytree_change', args=(bt_head.id,)))
self.assertInHTML(max_forms_input % 2, response.rendered_content)
self.assertInHTML(total_forms_hidden, response.rendered_content)
def test_min_num(self):
"""
min_num and extra determine number of forms.
"""
class MinNumInline(TabularInline):
model = BinaryTree
min_num = 2
extra = 3
modeladmin = ModelAdmin(BinaryTree, admin_site)
modeladmin.inlines = [MinNumInline]
min_forms = (
'<input id="id_binarytree_set-MIN_NUM_FORMS" '
'name="binarytree_set-MIN_NUM_FORMS" type="hidden" value="2">'
)
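        # With min_num=2 and extra=3, the add view renders 5 forms in total.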
total_forms = (
'<input id="id_binarytree_set-TOTAL_FORMS" '
'name="binarytree_set-TOTAL_FORMS" type="hidden" value="5">'
)
request = self.factory.get(reverse('admin:admin_inlines_binarytree_add'))
request.user = User(username='super', is_superuser=True)
response = modeladmin.changeform_view(request)
self.assertInHTML(min_forms, response.rendered_content)
self.assertInHTML(total_forms, response.rendered_content)
def test_custom_min_num(self):
bt_head = BinaryTree.objects.create(name="Tree Head")
BinaryTree.objects.create(name="First Child", parent=bt_head)
class MinNumInline(TabularInline):
model = BinaryTree
extra = 3
def get_min_num(self, request, obj=None, **kwargs):
if obj:
return 5
return 2
modeladmin = ModelAdmin(BinaryTree, admin_site)
modeladmin.inlines = [MinNumInline]
min_forms = (
'<input id="id_binarytree_set-MIN_NUM_FORMS" '
'name="binarytree_set-MIN_NUM_FORMS" type="hidden" value="%d">'
)
total_forms = (
'<input id="id_binarytree_set-TOTAL_FORMS" '
'name="binarytree_set-TOTAL_FORMS" type="hidden" value="%d">'
)
request = self.factory.get(reverse('admin:admin_inlines_binarytree_add'))
request.user = User(username='super', is_superuser=True)
response = modeladmin.changeform_view(request)
self.assertInHTML(min_forms % 2, response.rendered_content)
self.assertInHTML(total_forms % 5, response.rendered_content)
request = self.factory.get(reverse('admin:admin_inlines_binarytree_change', args=(bt_head.id,)))
request.user = User(username='super', is_superuser=True)
response = modeladmin.changeform_view(request, object_id=str(bt_head.id))
self.assertInHTML(min_forms % 5, response.rendered_content)
self.assertInHTML(total_forms % 8, response.rendered_content)
def test_inline_nonauto_noneditable_pk(self):
response = self.client.get(reverse('admin:admin_inlines_author_add'))
self.assertContains(
response,
'<input id="id_nonautopkbook_set-0-rand_pk" '
'name="nonautopkbook_set-0-rand_pk" type="hidden">',
html=True
)
self.assertContains(
response,
'<input id="id_nonautopkbook_set-2-0-rand_pk" '
'name="nonautopkbook_set-2-0-rand_pk" type="hidden">',
html=True
)
def test_inline_nonauto_noneditable_inherited_pk(self):
response = self.client.get(reverse('admin:admin_inlines_author_add'))
self.assertContains(
response,
'<input id="id_nonautopkbookchild_set-0-nonautopkbook_ptr" '
'name="nonautopkbookchild_set-0-nonautopkbook_ptr" type="hidden">',
html=True
)
self.assertContains(
response,
'<input id="id_nonautopkbookchild_set-2-nonautopkbook_ptr" '
'name="nonautopkbookchild_set-2-nonautopkbook_ptr" type="hidden">',
html=True
)
def test_inline_editable_pk(self):
response = self.client.get(reverse('admin:admin_inlines_author_add'))
self.assertContains(
response,
'<input class="vIntegerField" id="id_editablepkbook_set-0-manual_pk" '
'name="editablepkbook_set-0-manual_pk" type="number">',
html=True, count=1
)
self.assertContains(
response,
'<input class="vIntegerField" id="id_editablepkbook_set-2-0-manual_pk" '
'name="editablepkbook_set-2-0-manual_pk" type="number">',
html=True, count=1
)
def test_stacked_inline_edit_form_contains_has_original_class(self):
holder = Holder.objects.create(dummy=1)
holder.inner_set.create(dummy=1)
response = self.client.get(reverse('admin:admin_inlines_holder_change', args=(holder.pk,)))
self.assertContains(
response,
'<div class="inline-related has_original" id="inner_set-0">',
count=1
)
self.assertContains(
response,
'<div class="inline-related" id="inner_set-1">',
count=1
)
def test_inlines_show_change_link_registered(self):
"Inlines `show_change_link` for registered models when enabled."
holder = Holder4.objects.create(dummy=1)
item1 = Inner4Stacked.objects.create(dummy=1, holder=holder)
item2 = Inner4Tabular.objects.create(dummy=1, holder=holder)
items = (
('inner4stacked', item1.pk),
('inner4tabular', item2.pk),
)
response = self.client.get(reverse('admin:admin_inlines_holder4_change', args=(holder.pk,)))
self.assertTrue(response.context['inline_admin_formset'].opts.has_registered_model)
for model, pk in items:
url = reverse('admin:admin_inlines_%s_change' % model, args=(pk,))
self.assertContains(response, '<a href="%s" %s' % (url, INLINE_CHANGELINK_HTML))
def test_inlines_show_change_link_unregistered(self):
"Inlines `show_change_link` disabled for unregistered models."
parent = ParentModelWithCustomPk.objects.create(my_own_pk="foo", name="Foo")
ChildModel1.objects.create(my_own_pk="bar", name="Bar", parent=parent)
ChildModel2.objects.create(my_own_pk="baz", name="Baz", parent=parent)
response = self.client.get(reverse('admin:admin_inlines_parentmodelwithcustompk_change', args=('foo',)))
self.assertFalse(response.context['inline_admin_formset'].opts.has_registered_model)
self.assertNotContains(response, INLINE_CHANGELINK_HTML)
def test_tabular_inline_show_change_link_false_registered(self):
"Inlines `show_change_link` disabled by default."
poll = Poll.objects.create(name="New poll")
Question.objects.create(poll=poll)
response = self.client.get(reverse('admin:admin_inlines_poll_change', args=(poll.pk,)))
self.assertTrue(response.context['inline_admin_formset'].opts.has_registered_model)
self.assertNotContains(response, INLINE_CHANGELINK_HTML)
def test_noneditable_inline_has_field_inputs(self):
"""Inlines without change permission shows field inputs on add form."""
response = self.client.get(reverse('admin:admin_inlines_novelreadonlychapter_add'))
self.assertContains(
response,
'<input type="text" name="chapter_set-0-name" '
'class="vTextField" maxlength="40" id="id_chapter_set-0-name">',
html=True
)
@override_settings(ROOT_URLCONF='admin_inlines.urls')
class TestInlineMedia(TestDataMixin, TestCase):
def setUp(self):
self.client.force_login(self.superuser)
def test_inline_media_only_base(self):
holder = Holder(dummy=13)
holder.save()
Inner(dummy=42, holder=holder).save()
change_url = reverse('admin:admin_inlines_holder_change', args=(holder.id,))
response = self.client.get(change_url)
self.assertContains(response, 'my_awesome_admin_scripts.js')
def test_inline_media_only_inline(self):
holder = Holder3(dummy=13)
holder.save()
Inner3(dummy=42, holder=holder).save()
change_url = reverse('admin:admin_inlines_holder3_change', args=(holder.id,))
response = self.client.get(change_url)
self.assertEqual(
response.context['inline_admin_formsets'][0].media._js,
[
'admin/js/vendor/jquery/jquery.min.js',
'admin/js/jquery.init.js',
'admin/js/inlines.min.js',
'my_awesome_inline_scripts.js',
'custom_number.js',
]
)
self.assertContains(response, 'my_awesome_inline_scripts.js')
def test_all_inline_media(self):
holder = Holder2(dummy=13)
holder.save()
Inner2(dummy=42, holder=holder).save()
change_url = reverse('admin:admin_inlines_holder2_change', args=(holder.id,))
response = self.client.get(change_url)
self.assertContains(response, 'my_awesome_admin_scripts.js')
self.assertContains(response, 'my_awesome_inline_scripts.js')
@override_settings(ROOT_URLCONF='admin_inlines.urls')
class TestInlineAdminForm(TestCase):
def test_immutable_content_type(self):
"""Regression for #9362
The problem depends only on InlineAdminForm and its "original"
argument, so we can safely set the other arguments to None/{}. We just
need to check that the content_type argument of Child isn't altered by
the internals of the inline form."""
sally = Teacher.objects.create(name='Sally')
john = Parent.objects.create(name='John')
joe = Child.objects.create(name='Joe', teacher=sally, parent=john)
iaf = InlineAdminForm(None, None, {}, {}, joe)
parent_ct = ContentType.objects.get_for_model(Parent)
self.assertEqual(iaf.original.content_type, parent_ct)
@override_settings(ROOT_URLCONF='admin_inlines.urls')
class TestInlineProtectedOnDelete(TestDataMixin, TestCase):
def setUp(self):
self.client.force_login(self.superuser)
def test_deleting_inline_with_protected_delete_does_not_validate(self):
lotr = Novel.objects.create(name='Lord of the rings')
chapter = Chapter.objects.create(novel=lotr, name='Many Meetings')
foot_note = FootNote.objects.create(chapter=chapter, note='yadda yadda')
change_url = reverse('admin:admin_inlines_novel_change', args=(lotr.id,))
response = self.client.get(change_url)
data = {
'name': lotr.name,
'chapter_set-TOTAL_FORMS': 1,
'chapter_set-INITIAL_FORMS': 1,
'chapter_set-MAX_NUM_FORMS': 1000,
'_save': 'Save',
'chapter_set-0-id': chapter.id,
'chapter_set-0-name': chapter.name,
'chapter_set-0-novel': lotr.id,
'chapter_set-0-DELETE': 'on'
}
response = self.client.post(change_url, data)
self.assertContains(response, "Deleting chapter %s would require deleting "
"the following protected related objects: foot note %s"
% (chapter, foot_note))
@override_settings(ROOT_URLCONF='admin_inlines.urls')
class TestInlinePermissions(TestCase):
"""
Make sure the admin respects permissions for objects that are edited
inline. Refs #8060.
"""
@classmethod
def setUpTestData(cls):
cls.user = User(username='admin', is_staff=True, is_active=True)
cls.user.set_password('secret')
cls.user.save()
cls.author_ct = ContentType.objects.get_for_model(Author)
cls.holder_ct = ContentType.objects.get_for_model(Holder2)
cls.book_ct = ContentType.objects.get_for_model(Book)
cls.inner_ct = ContentType.objects.get_for_model(Inner2)
        # The user always has permissions to add and change Authors and
        # Holders, the main (parent) models of the inlines. Permissions on the
        # inlines vary per test.
permission = Permission.objects.get(codename='add_author', content_type=cls.author_ct)
cls.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='change_author', content_type=cls.author_ct)
cls.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='add_holder2', content_type=cls.holder_ct)
cls.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='change_holder2', content_type=cls.holder_ct)
cls.user.user_permissions.add(permission)
author = Author.objects.create(pk=1, name='The Author')
book = author.books.create(name='The inline Book')
cls.author_change_url = reverse('admin:admin_inlines_author_change', args=(author.id,))
# Get the ID of the automatically created intermediate model for the Author-Book m2m
author_book_auto_m2m_intermediate = Author.books.through.objects.get(author=author, book=book)
cls.author_book_auto_m2m_intermediate_id = author_book_auto_m2m_intermediate.pk
cls.holder = Holder2.objects.create(dummy=13)
cls.inner2 = Inner2.objects.create(dummy=42, holder=cls.holder)
def setUp(self):
self.holder_change_url = reverse('admin:admin_inlines_holder2_change', args=(self.holder.id,))
self.client.force_login(self.user)
def test_inline_add_m2m_noperm(self):
response = self.client.get(reverse('admin:admin_inlines_author_add'))
# No change permission on books, so no inline
self.assertNotContains(response, '<h2>Author-book relationships</h2>')
self.assertNotContains(response, 'Add another Author-Book Relationship')
self.assertNotContains(response, 'id="id_Author_books-TOTAL_FORMS"')
def test_inline_add_fk_noperm(self):
response = self.client.get(reverse('admin:admin_inlines_holder2_add'))
# No permissions on Inner2s, so no inline
self.assertNotContains(response, '<h2>Inner2s</h2>')
self.assertNotContains(response, 'Add another Inner2')
self.assertNotContains(response, 'id="id_inner2_set-TOTAL_FORMS"')
def test_inline_change_m2m_noperm(self):
response = self.client.get(self.author_change_url)
# No change permission on books, so no inline
self.assertNotContains(response, '<h2>Author-book relationships</h2>')
self.assertNotContains(response, 'Add another Author-Book Relationship')
self.assertNotContains(response, 'id="id_Author_books-TOTAL_FORMS"')
def test_inline_change_fk_noperm(self):
response = self.client.get(self.holder_change_url)
# No permissions on Inner2s, so no inline
self.assertNotContains(response, '<h2>Inner2s</h2>')
self.assertNotContains(response, 'Add another Inner2')
self.assertNotContains(response, 'id="id_inner2_set-TOTAL_FORMS"')
def test_inline_add_m2m_add_perm(self):
permission = Permission.objects.get(codename='add_book', content_type=self.book_ct)
self.user.user_permissions.add(permission)
response = self.client.get(reverse('admin:admin_inlines_author_add'))
# No change permission on Books, so no inline
self.assertNotContains(response, '<h2>Author-book relationships</h2>')
self.assertNotContains(response, 'Add another Author-Book Relationship')
self.assertNotContains(response, 'id="id_Author_books-TOTAL_FORMS"')
def test_inline_add_fk_add_perm(self):
permission = Permission.objects.get(codename='add_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
response = self.client.get(reverse('admin:admin_inlines_holder2_add'))
# Add permission on inner2s, so we get the inline
self.assertContains(response, '<h2>Inner2s</h2>')
self.assertContains(response, 'Add another Inner2')
self.assertContains(response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" '
'value="3" name="inner2_set-TOTAL_FORMS">', html=True)
def test_inline_change_m2m_add_perm(self):
permission = Permission.objects.get(codename='add_book', content_type=self.book_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.author_change_url)
# No change permission on books, so no inline
self.assertNotContains(response, '<h2>Author-book relationships</h2>')
self.assertNotContains(response, 'Add another Author-Book Relationship')
self.assertNotContains(response, 'id="id_Author_books-TOTAL_FORMS"')
self.assertNotContains(response, 'id="id_Author_books-0-DELETE"')
def test_inline_change_m2m_change_perm(self):
permission = Permission.objects.get(codename='change_book', content_type=self.book_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.author_change_url)
# We have change perm on books, so we can add/change/delete inlines
self.assertContains(response, '<h2>Author-book relationships</h2>')
self.assertContains(response, 'Add another Author-book relationship')
self.assertContains(response, '<input type="hidden" id="id_Author_books-TOTAL_FORMS" '
'value="4" name="Author_books-TOTAL_FORMS">', html=True)
self.assertContains(
response,
'<input type="hidden" id="id_Author_books-0-id" value="%i" '
'name="Author_books-0-id">' % self.author_book_auto_m2m_intermediate_id,
html=True
)
self.assertContains(response, 'id="id_Author_books-0-DELETE"')
def test_inline_change_fk_add_perm(self):
permission = Permission.objects.get(codename='add_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.holder_change_url)
# Add permission on inner2s, so we can add but not modify existing
self.assertContains(response, '<h2>Inner2s</h2>')
self.assertContains(response, 'Add another Inner2')
# 3 extra forms only, not the existing instance form
self.assertContains(
response,
'<input type="hidden" id="id_inner2_set-TOTAL_FORMS" value="3" '
'name="inner2_set-TOTAL_FORMS">',
html=True
)
self.assertNotContains(
response,
'<input type="hidden" id="id_inner2_set-0-id" value="%i" name="inner2_set-0-id">' % self.inner2.id,
html=True
)
def test_inline_change_fk_change_perm(self):
permission = Permission.objects.get(codename='change_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.holder_change_url)
# Change permission on inner2s, so we can change existing but not add new
self.assertContains(response, '<h2>Inner2s</h2>', count=2)
# Just the one form for existing instances
self.assertContains(
response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" value="1" name="inner2_set-TOTAL_FORMS">',
html=True
)
self.assertContains(
response,
'<input type="hidden" id="id_inner2_set-0-id" value="%i" name="inner2_set-0-id">' % self.inner2.id,
html=True
)
# max-num 0 means we can't add new ones
self.assertContains(
response,
'<input type="hidden" id="id_inner2_set-MAX_NUM_FORMS" value="0" name="inner2_set-MAX_NUM_FORMS">',
html=True
)
# TabularInline
self.assertContains(response, '<th class="column-dummy required">Dummy</th>', html=True)
self.assertContains(
response,
'<input type="number" name="inner2_set-2-0-dummy" value="%s" '
'class="vIntegerField" id="id_inner2_set-2-0-dummy">' % self.inner2.dummy,
html=True,
)
def test_inline_change_fk_add_change_perm(self):
permission = Permission.objects.get(codename='add_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='change_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.holder_change_url)
# Add/change perm, so we can add new and change existing
self.assertContains(response, '<h2>Inner2s</h2>')
# One form for existing instance and three extra for new
self.assertContains(
response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" value="4" name="inner2_set-TOTAL_FORMS">',
html=True
)
self.assertContains(
response,
'<input type="hidden" id="id_inner2_set-0-id" value="%i" name="inner2_set-0-id">' % self.inner2.id,
html=True
)
def test_inline_change_fk_change_del_perm(self):
permission = Permission.objects.get(codename='change_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='delete_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.holder_change_url)
# Change/delete perm on inner2s, so we can change/delete existing
self.assertContains(response, '<h2>Inner2s</h2>')
# One form for existing instance only, no new
self.assertContains(
response,
'<input type="hidden" id="id_inner2_set-TOTAL_FORMS" value="1" name="inner2_set-TOTAL_FORMS">',
html=True
)
self.assertContains(
response,
'<input type="hidden" id="id_inner2_set-0-id" value="%i" name="inner2_set-0-id">' % self.inner2.id,
html=True
)
self.assertContains(response, 'id="id_inner2_set-0-DELETE"')
def test_inline_change_fk_all_perms(self):
permission = Permission.objects.get(codename='add_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='change_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='delete_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.holder_change_url)
# All perms on inner2s, so we can add/change/delete
self.assertContains(response, '<h2>Inner2s</h2>', count=2)
# One form for existing instance only, three for new
self.assertContains(
response,
'<input type="hidden" id="id_inner2_set-TOTAL_FORMS" value="4" name="inner2_set-TOTAL_FORMS">',
html=True
)
self.assertContains(
response,
'<input type="hidden" id="id_inner2_set-0-id" value="%i" name="inner2_set-0-id">' % self.inner2.id,
html=True
)
self.assertContains(response, 'id="id_inner2_set-0-DELETE"')
# TabularInline
self.assertContains(response, '<th class="column-dummy required">Dummy</th>', html=True)
self.assertContains(
response,
'<input type="number" name="inner2_set-2-0-dummy" value="%s" '
'class="vIntegerField" id="id_inner2_set-2-0-dummy">' % self.inner2.dummy,
html=True,
)
@override_settings(ROOT_URLCONF='admin_inlines.urls')
class SeleniumTests(AdminSeleniumTestCase):
available_apps = ['admin_inlines'] + AdminSeleniumTestCase.available_apps
def setUp(self):
User.objects.create_superuser(username='super', password='secret', email='[email protected]')
def test_add_stackeds(self):
"""
The "Add another XXX" link correctly adds items to the stacked formset.
"""
self.admin_login(username='super', password='secret')
self.selenium.get(self.live_server_url + reverse('admin:admin_inlines_holder4_add'))
inline_id = '#inner4stacked_set-group'
def rows_length():
return len(self.selenium.find_elements_by_css_selector('%s .dynamic-inner4stacked_set' % inline_id))
self.assertEqual(rows_length(), 3)
add_button = self.selenium.find_element_by_link_text(
'Add another Inner4 stacked')
add_button.click()
self.assertEqual(rows_length(), 4)
def test_delete_stackeds(self):
self.admin_login(username='super', password='secret')
self.selenium.get(self.live_server_url + reverse('admin:admin_inlines_holder4_add'))
inline_id = '#inner4stacked_set-group'
def rows_length():
return len(self.selenium.find_elements_by_css_selector('%s .dynamic-inner4stacked_set' % inline_id))
self.assertEqual(rows_length(), 3)
add_button = self.selenium.find_element_by_link_text(
'Add another Inner4 stacked')
add_button.click()
add_button.click()
self.assertEqual(rows_length(), 5, msg="sanity check")
for delete_link in self.selenium.find_elements_by_css_selector('%s .inline-deletelink' % inline_id):
delete_link.click()
self.assertEqual(rows_length(), 3)
def test_add_inlines(self):
"""
The "Add another XXX" link correctly adds items to the inline form.
"""
self.admin_login(username='super', password='secret')
self.selenium.get(self.live_server_url + reverse('admin:admin_inlines_profilecollection_add'))
# There's only one inline to start with and it has the correct ID.
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set')), 1)
self.assertEqual(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set')[0].get_attribute('id'),
'profile_set-0')
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set#profile_set-0 input[name=profile_set-0-first_name]')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set#profile_set-0 input[name=profile_set-0-last_name]')), 1)
# Add an inline
self.selenium.find_element_by_link_text('Add another Profile').click()
# The inline has been added, it has the right id, and it contains the
# correct fields.
self.assertEqual(len(self.selenium.find_elements_by_css_selector('.dynamic-profile_set')), 2)
self.assertEqual(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set')[1].get_attribute('id'), 'profile_set-1')
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set#profile_set-1 input[name=profile_set-1-first_name]')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set#profile_set-1 input[name=profile_set-1-last_name]')), 1)
# Let's add another one to be sure
self.selenium.find_element_by_link_text('Add another Profile').click()
self.assertEqual(len(self.selenium.find_elements_by_css_selector('.dynamic-profile_set')), 3)
self.assertEqual(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set')[2].get_attribute('id'), 'profile_set-2')
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set#profile_set-2 input[name=profile_set-2-first_name]')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set#profile_set-2 input[name=profile_set-2-last_name]')), 1)
# Enter some data and click 'Save'
self.selenium.find_element_by_name('profile_set-0-first_name').send_keys('0 first name 1')
self.selenium.find_element_by_name('profile_set-0-last_name').send_keys('0 last name 2')
self.selenium.find_element_by_name('profile_set-1-first_name').send_keys('1 first name 1')
self.selenium.find_element_by_name('profile_set-1-last_name').send_keys('1 last name 2')
self.selenium.find_element_by_name('profile_set-2-first_name').send_keys('2 first name 1')
self.selenium.find_element_by_name('profile_set-2-last_name').send_keys('2 last name 2')
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
self.wait_page_loaded()
# The objects have been created in the database
self.assertEqual(ProfileCollection.objects.all().count(), 1)
self.assertEqual(Profile.objects.all().count(), 3)
def test_delete_inlines(self):
self.admin_login(username='super', password='secret')
self.selenium.get(self.live_server_url + reverse('admin:admin_inlines_profilecollection_add'))
# Add a few inlines
self.selenium.find_element_by_link_text('Add another Profile').click()
self.selenium.find_element_by_link_text('Add another Profile').click()
self.selenium.find_element_by_link_text('Add another Profile').click()
self.selenium.find_element_by_link_text('Add another Profile').click()
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'#profile_set-group table tr.dynamic-profile_set')), 5)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-0')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-1')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-2')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-3')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-4')), 1)
# Click on a few delete buttons
self.selenium.find_element_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-1 td.delete a').click()
self.selenium.find_element_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-2 td.delete a').click()
# The rows are gone and the IDs have been re-sequenced
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'#profile_set-group table tr.dynamic-profile_set')), 3)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-0')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-1')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-2')), 1)
def test_alternating_rows(self):
self.admin_login(username='super', password='secret')
self.selenium.get(self.live_server_url + reverse('admin:admin_inlines_profilecollection_add'))
# Add a few inlines
self.selenium.find_element_by_link_text('Add another Profile').click()
self.selenium.find_element_by_link_text('Add another Profile').click()
row_selector = 'form#profilecollection_form tr.dynamic-profile_set'
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
"%s.row1" % row_selector)), 2, msg="Expect two row1 styled rows")
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
"%s.row2" % row_selector)), 1, msg="Expect one row2 styled row")
def test_collapsed_inlines(self):
# Collapsed inlines have SHOW/HIDE links.
self.admin_login(username='super', password='secret')
self.selenium.get(self.live_server_url + reverse('admin:admin_inlines_author_add'))
# One field is in a stacked inline, other in a tabular one.
test_fields = ['#id_nonautopkbook_set-0-title', '#id_nonautopkbook_set-2-0-title']
show_links = self.selenium.find_elements_by_link_text('SHOW')
self.assertEqual(len(show_links), 3)
for show_index, field_name in enumerate(test_fields, 0):
self.wait_until_invisible(field_name)
show_links[show_index].click()
self.wait_until_visible(field_name)
hide_links = self.selenium.find_elements_by_link_text('HIDE')
self.assertEqual(len(hide_links), 2)
for hide_index, field_name in enumerate(test_fields, 0):
self.wait_until_visible(field_name)
hide_links[hide_index].click()
self.wait_until_invisible(field_name)
|
517bff27e9a2b90b36444bc54b1131f59492e989c67153d9426fba11d786478e | """
Testing of admin inline formsets.
"""
import random
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.db import models
class Parent(models.Model):
name = models.CharField(max_length=50)
def __str__(self):
return self.name
class Teacher(models.Model):
name = models.CharField(max_length=50)
def __str__(self):
return self.name
class Child(models.Model):
name = models.CharField(max_length=50)
teacher = models.ForeignKey(Teacher, models.CASCADE)
content_type = models.ForeignKey(ContentType, models.CASCADE)
object_id = models.PositiveIntegerField()
parent = GenericForeignKey()
def __str__(self):
return 'I am %s, a child of %s' % (self.name, self.parent)
class Book(models.Model):
name = models.CharField(max_length=50)
class Author(models.Model):
name = models.CharField(max_length=50)
books = models.ManyToManyField(Book)
class NonAutoPKBook(models.Model):
rand_pk = models.IntegerField(primary_key=True, editable=False)
author = models.ForeignKey(Author, models.CASCADE)
title = models.CharField(max_length=50)
def save(self, *args, **kwargs):
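        # Keep trying random values until an unused primary key is found.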
while not self.rand_pk:
test_pk = random.randint(1, 99999)
if not NonAutoPKBook.objects.filter(rand_pk=test_pk).exists():
self.rand_pk = test_pk
super().save(*args, **kwargs)
class NonAutoPKBookChild(NonAutoPKBook):
pass
class EditablePKBook(models.Model):
manual_pk = models.IntegerField(primary_key=True)
author = models.ForeignKey(Author, models.CASCADE)
title = models.CharField(max_length=50)
class Holder(models.Model):
dummy = models.IntegerField()
class Inner(models.Model):
dummy = models.IntegerField()
holder = models.ForeignKey(Holder, models.CASCADE)
readonly = models.CharField("Inner readonly label", max_length=1)
def get_absolute_url(self):
return '/inner/'
class Holder2(models.Model):
dummy = models.IntegerField()
class Inner2(models.Model):
dummy = models.IntegerField()
holder = models.ForeignKey(Holder2, models.CASCADE)
class Holder3(models.Model):
dummy = models.IntegerField()
class Inner3(models.Model):
dummy = models.IntegerField()
holder = models.ForeignKey(Holder3, models.CASCADE)
# Models for ticket #8190
class Holder4(models.Model):
dummy = models.IntegerField()
class Inner4Stacked(models.Model):
dummy = models.IntegerField(help_text="Awesome stacked help text is awesome.")
holder = models.ForeignKey(Holder4, models.CASCADE)
class Inner4Tabular(models.Model):
dummy = models.IntegerField(help_text="Awesome tabular help text is awesome.")
holder = models.ForeignKey(Holder4, models.CASCADE)
# Models for #12749
class Person(models.Model):
firstname = models.CharField(max_length=15)
class OutfitItem(models.Model):
name = models.CharField(max_length=15)
class Fashionista(models.Model):
person = models.OneToOneField(Person, models.CASCADE, primary_key=True)
weaknesses = models.ManyToManyField(OutfitItem, through='ShoppingWeakness', blank=True)
class ShoppingWeakness(models.Model):
fashionista = models.ForeignKey(Fashionista, models.CASCADE)
item = models.ForeignKey(OutfitItem, models.CASCADE)
# Models for #13510
class TitleCollection(models.Model):
pass
class Title(models.Model):
collection = models.ForeignKey(TitleCollection, models.SET_NULL, blank=True, null=True)
title1 = models.CharField(max_length=100)
title2 = models.CharField(max_length=100)
# Models for #15424
class Poll(models.Model):
name = models.CharField(max_length=40)
class Question(models.Model):
text = models.CharField(max_length=40)
poll = models.ForeignKey(Poll, models.CASCADE)
class Novel(models.Model):
name = models.CharField(max_length=40)
class NovelReadonlyChapter(Novel):
class Meta:
proxy = True
class Chapter(models.Model):
name = models.CharField(max_length=40)
novel = models.ForeignKey(Novel, models.CASCADE)
class FootNote(models.Model):
"""
Model added for ticket 19838
"""
chapter = models.ForeignKey(Chapter, models.PROTECT)
note = models.CharField(max_length=40)
# Models for #16838
class CapoFamiglia(models.Model):
name = models.CharField(max_length=100)
class Consigliere(models.Model):
name = models.CharField(max_length=100, help_text='Help text for Consigliere')
capo_famiglia = models.ForeignKey(CapoFamiglia, models.CASCADE, related_name='+')
class SottoCapo(models.Model):
name = models.CharField(max_length=100)
capo_famiglia = models.ForeignKey(CapoFamiglia, models.CASCADE, related_name='+')
class ReadOnlyInline(models.Model):
name = models.CharField(max_length=100, help_text='Help text for ReadOnlyInline')
capo_famiglia = models.ForeignKey(CapoFamiglia, models.CASCADE)
# Models for #18433
class ParentModelWithCustomPk(models.Model):
my_own_pk = models.CharField(max_length=100, primary_key=True)
name = models.CharField(max_length=100)
class ChildModel1(models.Model):
my_own_pk = models.CharField(max_length=100, primary_key=True)
name = models.CharField(max_length=100)
parent = models.ForeignKey(ParentModelWithCustomPk, models.CASCADE)
def get_absolute_url(self):
return '/child_model1/'
class ChildModel2(models.Model):
my_own_pk = models.CharField(max_length=100, primary_key=True)
name = models.CharField(max_length=100)
parent = models.ForeignKey(ParentModelWithCustomPk, models.CASCADE)
def get_absolute_url(self):
return '/child_model2/'
# Models for #19425
class BinaryTree(models.Model):
name = models.CharField(max_length=100)
parent = models.ForeignKey('self', models.SET_NULL, null=True, blank=True)
# Models for #19524
class LifeForm(models.Model):
pass
class ExtraTerrestrial(LifeForm):
name = models.CharField(max_length=100)
class Sighting(models.Model):
et = models.ForeignKey(ExtraTerrestrial, models.CASCADE)
place = models.CharField(max_length=100)
# Models for #18263
class SomeParentModel(models.Model):
name = models.CharField(max_length=1)
class SomeChildModel(models.Model):
name = models.CharField(max_length=1)
position = models.PositiveIntegerField()
parent = models.ForeignKey(SomeParentModel, models.CASCADE)
readonly_field = models.CharField(max_length=1)
# Other models
class ProfileCollection(models.Model):
pass
class Profile(models.Model):
collection = models.ForeignKey(ProfileCollection, models.SET_NULL, blank=True, null=True)
first_name = models.CharField(max_length=100)
last_name = models.CharField(max_length=100)
|