hash
stringlengths 64
64
| content
stringlengths 0
1.51M
|
---|---|
c9487a829bb4e04cb29ee0ee6c884f5c05520aa71bfb983d924de699ecfacfd7 | import json
import os
import sys
import uuid
from ctypes import (
addressof,
byref,
c_buffer,
c_char_p,
c_double,
c_int,
c_void_p,
string_at,
)
from django.contrib.gis.gdal.driver import Driver
from django.contrib.gis.gdal.error import GDALException
from django.contrib.gis.gdal.prototypes import raster as capi
from django.contrib.gis.gdal.raster.band import BandList
from django.contrib.gis.gdal.raster.base import GDALRasterBase
from django.contrib.gis.gdal.raster.const import (
GDAL_RESAMPLE_ALGORITHMS,
VSI_DELETE_BUFFER_ON_READ,
VSI_FILESYSTEM_PREFIX,
VSI_MEM_FILESYSTEM_BASE_PATH,
VSI_TAKE_BUFFER_OWNERSHIP,
)
from django.contrib.gis.gdal.srs import SpatialReference, SRSException
from django.contrib.gis.geometry import json_regex
from django.utils.encoding import force_bytes, force_str
from django.utils.functional import cached_property
class TransformPoint(list):
    """
    A two-element list view onto one named pair of geotransform
    coefficients (origin, scale, or skew) of a raster, with x/y accessors
    that write changes back through the raster's geotransform property.
    """

    # Positions of the (x, y) coefficients for each property within the
    # 6-element GDAL geotransform array.
    indices = {
        "origin": (0, 3),
        "scale": (1, 5),
        "skew": (2, 4),
    }

    def __init__(self, raster, prop):
        x_index, y_index = self.indices[prop]
        super().__init__(
            [raster.geotransform[x_index], raster.geotransform[y_index]]
        )
        self._raster = raster
        self._prop = prop

    @property
    def x(self):
        """The x coefficient of this transform property."""
        return self[0]

    @x.setter
    def x(self, value):
        # Read-modify-write the full geotransform so the raster's setter
        # (which persists the change) is triggered.
        transform = self._raster.geotransform
        transform[self.indices[self._prop][0]] = value
        self._raster.geotransform = transform

    @property
    def y(self):
        """The y coefficient of this transform property."""
        return self[1]

    @y.setter
    def y(self, value):
        transform = self._raster.geotransform
        transform[self.indices[self._prop][1]] = value
        self._raster.geotransform = transform
class GDALRaster(GDALRasterBase):
    """
    Wrap a raster GDAL Data Source object.
    """

    # ctypes routine used by the base class to release the dataset handle.
    destructor = capi.close_ds

    def __init__(self, ds_input, write=False):
        """
        Open or create a GDAL raster from ``ds_input``, which may be a
        file path (or /vsi path), a JSON string or dict describing the
        raster, a bytes buffer holding raster file contents, or an
        existing GDAL dataset pointer (c_void_p).
        """
        self._write = 1 if write else 0
        Driver.ensure_registered()

        # Preprocess json inputs. This converts json strings to dictionaries,
        # which are parsed below the same way as direct dictionary inputs.
        if isinstance(ds_input, str) and json_regex.match(ds_input):
            ds_input = json.loads(ds_input)

        # If input is a valid file path, try setting file as source.
        if isinstance(ds_input, str):
            # Virtual filesystem paths (/vsi...) are not checked on disk.
            if not ds_input.startswith(VSI_FILESYSTEM_PREFIX) and not os.path.exists(
                ds_input
            ):
                raise GDALException(
                    'Unable to read raster source input "%s".' % ds_input
                )
            try:
                # GDALOpen will auto-detect the data source type.
                self._ptr = capi.open_ds(force_bytes(ds_input), self._write)
            except GDALException as err:
                raise GDALException(
                    'Could not open the datasource at "{}" ({}).'.format(ds_input, err)
                )
        elif isinstance(ds_input, bytes):
            # Create a new raster in write mode.
            self._write = 1
            # Get size of buffer.
            size = sys.getsizeof(ds_input)
            # Pass data to ctypes, keeping a reference to the ctypes object so
            # that the vsimem file remains available until the GDALRaster is
            # deleted.
            self._ds_input = c_buffer(ds_input)
            # Create random name to reference in vsimem filesystem.
            vsi_path = os.path.join(VSI_MEM_FILESYSTEM_BASE_PATH, str(uuid.uuid4()))
            # Create vsimem file from buffer.
            capi.create_vsi_file_from_mem_buffer(
                force_bytes(vsi_path),
                byref(self._ds_input),
                size,
                VSI_TAKE_BUFFER_OWNERSHIP,
            )
            # Open the new vsimem file as a GDALRaster.
            try:
                self._ptr = capi.open_ds(force_bytes(vsi_path), self._write)
            except GDALException:
                # Remove the broken file from the VSI filesystem.
                capi.unlink_vsi_file(force_bytes(vsi_path))
                raise GDALException("Failed creating VSI raster from the input buffer.")
        elif isinstance(ds_input, dict):
            # A new raster needs to be created in write mode
            self._write = 1
            # Create driver (in memory by default)
            driver = Driver(ds_input.get("driver", "MEM"))
            # For out of memory drivers, check filename argument
            if driver.name != "MEM" and "name" not in ds_input:
                raise GDALException(
                    'Specify name for creation of raster with driver "{}".'.format(
                        driver.name
                    )
                )
            # Check if width and height where specified
            if "width" not in ds_input or "height" not in ds_input:
                raise GDALException(
                    "Specify width and height attributes for JSON or dict input."
                )
            # Check if srid was specified
            if "srid" not in ds_input:
                raise GDALException("Specify srid for JSON or dict input.")
            # Create null terminated gdal options array.
            papsz_options = []
            for key, val in ds_input.get("papsz_options", {}).items():
                option = "{}={}".format(key, val)
                papsz_options.append(option.upper().encode())
            papsz_options.append(None)
            # Convert papszlist to ctypes array.
            papsz_options = (c_char_p * len(papsz_options))(*papsz_options)
            # Create GDAL Raster
            self._ptr = capi.create_ds(
                driver._ptr,
                force_bytes(ds_input.get("name", "")),
                ds_input["width"],
                ds_input["height"],
                # Band count defaults to the number of band definitions given.
                ds_input.get("nr_of_bands", len(ds_input.get("bands", []))),
                # Datatype 6 is GDT_Float32 (see GDAL_PIXEL_TYPES).
                ds_input.get("datatype", 6),
                byref(papsz_options),
            )
            # Set band data if provided
            for i, band_input in enumerate(ds_input.get("bands", [])):
                band = self.bands[i]
                if "nodata_value" in band_input:
                    band.nodata_value = band_input["nodata_value"]
                    # Instantiate band filled with nodata values if only
                    # partial input data has been provided.
                    if band.nodata_value is not None and (
                        "data" not in band_input
                        or "size" in band_input
                        or "shape" in band_input
                    ):
                        band.data(data=(band.nodata_value,), shape=(1, 1))
                # Set band data values from input.
                band.data(
                    data=band_input.get("data"),
                    size=band_input.get("size"),
                    shape=band_input.get("shape"),
                    offset=band_input.get("offset"),
                )
            # Set SRID
            self.srs = ds_input.get("srid")
            # Set additional properties if provided
            if "origin" in ds_input:
                self.origin.x, self.origin.y = ds_input["origin"]
            if "scale" in ds_input:
                self.scale.x, self.scale.y = ds_input["scale"]
            if "skew" in ds_input:
                self.skew.x, self.skew.y = ds_input["skew"]
        elif isinstance(ds_input, c_void_p):
            # Instantiate the object using an existing pointer to a gdal raster.
            self._ptr = ds_input
        else:
            raise GDALException(
                'Invalid data source input type: "{}".'.format(type(ds_input))
            )

    def __del__(self):
        """Clean up any vsimem file backing this raster before closing."""
        if self.is_vsi_based:
            # Remove the temporary file from the VSI in-memory filesystem.
            capi.unlink_vsi_file(force_bytes(self.name))
        super().__del__()

    def __str__(self):
        """Return the raster's name (filename for file-based rasters)."""
        return self.name

    def __repr__(self):
        """
        Short-hand representation because WKB may be very large.
        """
        return "<Raster object at %s>" % hex(addressof(self._ptr))

    def _flush(self):
        """
        Flush all data from memory into the source file if it exists.
        The data that needs flushing are geotransforms, coordinate systems,
        nodata_values and pixel values. This function will be called
        automatically wherever it is needed.
        """
        # Raise an Exception if the value is being changed in read mode.
        if not self._write:
            raise GDALException(
                "Raster needs to be opened in write mode to change values."
            )
        capi.flush_ds(self._ptr)

    @property
    def vsi_buffer(self):
        """
        Return the raster file contents as bytes for vsimem-based rasters,
        or None for rasters not backed by an in-memory VSI file.
        """
        if not (
            self.is_vsi_based and self.name.startswith(VSI_MEM_FILESYSTEM_BASE_PATH)
        ):
            return None
        # Prepare an integer that will contain the buffer length.
        out_length = c_int()
        # Get the data using the vsi file name.
        dat = capi.get_mem_buffer_from_vsi_file(
            force_bytes(self.name),
            byref(out_length),
            VSI_DELETE_BUFFER_ON_READ,
        )
        # Read the full buffer pointer.
        return string_at(dat, out_length.value)

    @cached_property
    def is_vsi_based(self):
        # A raster is VSI-based when its name lives under a /vsi prefix.
        return self._ptr and self.name.startswith(VSI_FILESYSTEM_PREFIX)

    @property
    def name(self):
        """
        Return the name of this raster. Corresponds to filename
        for file-based rasters.
        """
        return force_str(capi.get_ds_description(self._ptr))

    @cached_property
    def driver(self):
        """
        Return the GDAL Driver used for this raster.
        """
        ds_driver = capi.get_ds_driver(self._ptr)
        return Driver(ds_driver)

    @property
    def width(self):
        """
        Width (X axis) in pixels.
        """
        return capi.get_ds_xsize(self._ptr)

    @property
    def height(self):
        """
        Height (Y axis) in pixels.
        """
        return capi.get_ds_ysize(self._ptr)

    @property
    def srs(self):
        """
        Return the SpatialReference used in this GDALRaster.
        """
        try:
            wkt = capi.get_ds_projection_ref(self._ptr)
            if not wkt:
                return None
            return SpatialReference(wkt, srs_type="wkt")
        except SRSException:
            return None

    @srs.setter
    def srs(self, value):
        """
        Set the spatial reference used in this GDALRaster. The input can be
        a SpatialReference or any parameter accepted by the SpatialReference
        constructor.
        """
        if isinstance(value, SpatialReference):
            srs = value
        elif isinstance(value, (int, str)):
            srs = SpatialReference(value)
        else:
            raise ValueError("Could not create a SpatialReference from input.")
        capi.set_ds_projection_ref(self._ptr, srs.wkt.encode())
        self._flush()

    @property
    def srid(self):
        """
        Shortcut to access the srid of this GDALRaster.
        """
        return self.srs.srid

    @srid.setter
    def srid(self, value):
        """
        Shortcut to set this GDALRaster's srs from an srid.
        """
        self.srs = value

    @property
    def geotransform(self):
        """
        Return the geotransform of the data source.
        Return the default geotransform if it does not exist or has not been
        set previously. The default is [0.0, 1.0, 0.0, 0.0, 0.0, -1.0].
        """
        # Create empty ctypes double array for data
        gtf = (c_double * 6)()
        capi.get_ds_geotransform(self._ptr, byref(gtf))
        return list(gtf)

    @geotransform.setter
    def geotransform(self, values):
        "Set the geotransform for the data source."
        if len(values) != 6 or not all(isinstance(x, (int, float)) for x in values):
            raise ValueError("Geotransform must consist of 6 numeric values.")
        # Create ctypes double array with input and write data
        values = (c_double * 6)(*values)
        capi.set_ds_geotransform(self._ptr, byref(values))
        self._flush()

    @property
    def origin(self):
        """
        Coordinates of the raster origin.
        """
        return TransformPoint(self, "origin")

    @property
    def scale(self):
        """
        Pixel scale in units of the raster projection.
        """
        return TransformPoint(self, "scale")

    @property
    def skew(self):
        """
        Skew of pixels (rotation parameters).
        """
        return TransformPoint(self, "skew")

    @property
    def extent(self):
        """
        Return the extent as a 4-tuple (xmin, ymin, xmax, ymax).
        """
        # Calculate boundary values based on scale and size
        xval = self.origin.x + self.scale.x * self.width
        yval = self.origin.y + self.scale.y * self.height
        # Calculate min and max values
        xmin = min(xval, self.origin.x)
        xmax = max(xval, self.origin.x)
        ymin = min(yval, self.origin.y)
        ymax = max(yval, self.origin.y)
        return xmin, ymin, xmax, ymax

    @property
    def bands(self):
        """Return a BandList giving 0-indexed access to this raster's bands."""
        return BandList(self)

    def warp(self, ds_input, resampling="NearestNeighbour", max_error=0.0):
        """
        Return a warped GDALRaster with the given input characteristics.
        The input is expected to be a dictionary containing the parameters
        of the target raster. Allowed values are width, height, SRID, origin,
        scale, skew, datatype, driver, and name (filename).
        By default, the warp functions keeps all parameters equal to the values
        of the original source raster. For the name of the target raster, the
        name of the source raster will be used and appended with
        _copy. + source_driver_name.
        In addition, the resampling algorithm can be specified with the "resampling"
        input parameter. The default is NearestNeighbor. For a list of all options
        consult the GDAL_RESAMPLE_ALGORITHMS constant.
        """
        # Get the parameters defining the geotransform, srid, and size of the raster
        ds_input.setdefault("width", self.width)
        ds_input.setdefault("height", self.height)
        ds_input.setdefault("srid", self.srs.srid)
        ds_input.setdefault("origin", self.origin)
        ds_input.setdefault("scale", self.scale)
        ds_input.setdefault("skew", self.skew)
        # Get the driver, name, and datatype of the target raster
        ds_input.setdefault("driver", self.driver.name)

        if "name" not in ds_input:
            ds_input["name"] = self.name + "_copy." + self.driver.name

        if "datatype" not in ds_input:
            ds_input["datatype"] = self.bands[0].datatype()

        # Instantiate raster bands filled with nodata values.
        ds_input["bands"] = [{"nodata_value": bnd.nodata_value} for bnd in self.bands]

        # Create target raster
        target = GDALRaster(ds_input, write=True)

        # Select resampling algorithm
        algorithm = GDAL_RESAMPLE_ALGORITHMS[resampling]

        # Reproject image
        capi.reproject_image(
            self._ptr,
            self.srs.wkt.encode(),
            target._ptr,
            target.srs.wkt.encode(),
            algorithm,
            0.0,
            max_error,
            c_void_p(),
            c_void_p(),
            c_void_p(),
        )
        # Make sure all data is written to file
        target._flush()

        return target

    def clone(self, name=None):
        """Return a clone of this GDALRaster."""
        if name:
            clone_name = name
        elif self.driver.name != "MEM":
            clone_name = self.name + "_copy." + self.driver.name
        else:
            # In-memory rasters are cloned to a fresh vsimem path.
            clone_name = os.path.join(VSI_MEM_FILESYSTEM_BASE_PATH, str(uuid.uuid4()))
        return GDALRaster(
            capi.copy_ds(
                self.driver._ptr,
                force_bytes(clone_name),
                self._ptr,
                c_int(),
                c_char_p(),
                c_void_p(),
                c_void_p(),
            ),
            write=self._write,
        )

    def transform(
        self, srs, driver=None, name=None, resampling="NearestNeighbour", max_error=0.0
    ):
        """
        Return a copy of this raster reprojected into the given spatial
        reference system.
        """
        # Convert the resampling algorithm name into an algorithm id
        algorithm = GDAL_RESAMPLE_ALGORITHMS[resampling]

        if isinstance(srs, SpatialReference):
            target_srs = srs
        elif isinstance(srs, (int, str)):
            target_srs = SpatialReference(srs)
        else:
            raise TypeError(
                "Transform only accepts SpatialReference, string, and integer "
                "objects."
            )

        # No reprojection needed when the SRID (and driver) already match.
        if target_srs.srid == self.srid and (not driver or driver == self.driver.name):
            return self.clone(name)
        # Create warped virtual dataset in the target reference system
        target = capi.auto_create_warped_vrt(
            self._ptr,
            self.srs.wkt.encode(),
            target_srs.wkt.encode(),
            algorithm,
            max_error,
            c_void_p(),
        )
        target = GDALRaster(target)

        # Construct the target warp dictionary from the virtual raster
        data = {
            "srid": target_srs.srid,
            "width": target.width,
            "height": target.height,
            "origin": [target.origin.x, target.origin.y],
            "scale": [target.scale.x, target.scale.y],
            "skew": [target.skew.x, target.skew.y],
        }

        # Set the driver and filepath if provided
        if driver:
            data["driver"] = driver

        if name:
            data["name"] = name

        # Warp the raster into new srid
        return self.warp(data, resampling=resampling, max_error=max_error)

    @property
    def info(self):
        """
        Return information about this raster in a string format equivalent
        to the output of the gdalinfo command line utility.
        """
        return capi.get_ds_info(self.ptr, None).decode()
|
c41a0c5ba3deca45a0dff21f54811a19daca4da87108c10d0ad0d5cc7381f15f | """
GDAL - Constant definitions
"""
from ctypes import c_double, c_float, c_int16, c_int32, c_ubyte, c_uint16, c_uint32
# See https://gdal.org/api/raster_c_api.html#_CPPv412GDALDataType
GDAL_PIXEL_TYPES = {
    0: "GDT_Unknown",  # Unknown or unspecified type
    1: "GDT_Byte",  # Eight bit unsigned integer
    2: "GDT_UInt16",  # Sixteen bit unsigned integer
    3: "GDT_Int16",  # Sixteen bit signed integer
    4: "GDT_UInt32",  # Thirty-two bit unsigned integer
    5: "GDT_Int32",  # Thirty-two bit signed integer
    6: "GDT_Float32",  # Thirty-two bit floating point
    7: "GDT_Float64",  # Sixty-four bit floating point
    8: "GDT_CInt16",  # Complex Int16
    9: "GDT_CInt32",  # Complex Int32
    10: "GDT_CFloat32",  # Complex Float32
    11: "GDT_CFloat64",  # Complex Float64
}

# A list of gdal datatypes that are integers.
GDAL_INTEGER_TYPES = [1, 2, 3, 4, 5]

# Lookup values to convert GDAL pixel type indices into ctypes objects.
# The GDAL band-io works with ctypes arrays to hold data to be written
# or to hold the space for data to be read into. The lookup below helps
# selecting the right ctypes object for a given gdal pixel type.
# Entries are positional: index N corresponds to pixel type N in
# GDAL_PIXEL_TYPES; the complex types (8-11) have no ctypes equivalent.
GDAL_TO_CTYPES = [
    None,
    c_ubyte,
    c_uint16,
    c_int16,
    c_uint32,
    c_int32,
    c_float,
    c_double,
    None,
    None,
    None,
    None,
]

# List of resampling algorithms that can be used to warp a GDALRaster.
GDAL_RESAMPLE_ALGORITHMS = {
    "NearestNeighbour": 0,
    "Bilinear": 1,
    "Cubic": 2,
    "CubicSpline": 3,
    "Lanczos": 4,
    "Average": 5,
    "Mode": 6,
}

# See https://gdal.org/api/raster_c_api.html#_CPPv415GDALColorInterp
GDAL_COLOR_TYPES = {
    0: "GCI_Undefined",  # Undefined, default value, i.e. not known
    1: "GCI_GrayIndex",  # Grayscale
    2: "GCI_PaletteIndex",  # Paletted
    3: "GCI_RedBand",  # Red band of RGBA image
    4: "GCI_GreenBand",  # Green band of RGBA image
    5: "GCI_BlueBand",  # Blue band of RGBA image
    6: "GCI_AlphaBand",  # Alpha (0=transparent, 255=opaque)
    7: "GCI_HueBand",  # Hue band of HLS image
    8: "GCI_SaturationBand",  # Saturation band of HLS image
    9: "GCI_LightnessBand",  # Lightness band of HLS image
    10: "GCI_CyanBand",  # Cyan band of CMYK image
    11: "GCI_MagentaBand",  # Magenta band of CMYK image
    12: "GCI_YellowBand",  # Yellow band of CMYK image
    13: "GCI_BlackBand",  # Black band of CMLY image
    14: "GCI_YCbCr_YBand",  # Y Luminance
    15: "GCI_YCbCr_CbBand",  # Cb Chroma
    16: "GCI_YCbCr_CrBand",  # Cr Chroma, also GCI_Max
}

# GDAL virtual filesystems prefix.
VSI_FILESYSTEM_PREFIX = "/vsi"

# Fixed base path for buffer-based GDAL in-memory files.
VSI_MEM_FILESYSTEM_BASE_PATH = "/vsimem/"

# Should the memory file system take ownership of the buffer, freeing it when
# the file is deleted? (No, GDALRaster.__del__() will delete the buffer.)
VSI_TAKE_BUFFER_OWNERSHIP = False

# Should a VSI file be removed when retrieving its buffer?
VSI_DELETE_BUFFER_ON_READ = False
|
d861a52fdd7d94fafb615185b5d2329cb3c81fe40a621ceb529a17c154653359 | from django.contrib.gis.gdal.base import GDALBase
from django.contrib.gis.gdal.prototypes import raster as capi
class GDALRasterBase(GDALBase):
    """
    Attributes that exist on both GDALRaster and GDALBand.
    """

    @property
    def metadata(self):
        """
        Return the metadata for this raster or band. The return value is a
        nested dictionary, where the first-level key is the metadata domain and
        the second-level is the metadata item names and values for that domain.
        """
        # The initial metadata domain list contains the default domain.
        # The default is returned if domain name is None.
        domain_list = ["DEFAULT"]

        # Get additional metadata domains from the raster.
        meta_list = capi.get_ds_metadata_domain_list(self._ptr)
        if meta_list:
            # The number of domains is unknown, so retrieve data until there
            # are no more values in the ctypes array.
            counter = 0
            domain = meta_list[counter]
            while domain:
                domain_list.append(domain.decode())
                counter += 1
                domain = meta_list[counter]

            # Free domain list array.
            capi.free_dsl(meta_list)

        # Retrieve metadata values for each domain.
        result = {}
        for domain in domain_list:
            # Get metadata for this domain.
            data = capi.get_ds_metadata(
                self._ptr,
                (None if domain == "DEFAULT" else domain.encode()),
            )
            if not data:
                continue
            # The number of metadata items is unknown, so retrieve data until
            # there are no more values in the ctypes array.
            domain_meta = {}
            counter = 0
            item = data[counter]
            while item:
                # Entries have the form "KEY=value". Split on the first "="
                # only: values themselves may legitimately contain "=" and
                # a plain split() would raise ValueError for those entries.
                key, val = item.decode().split("=", 1)
                domain_meta[key] = val
                counter += 1
                item = data[counter]
            # The default domain values are returned if domain is None.
            result[domain or "DEFAULT"] = domain_meta

        return result

    @metadata.setter
    def metadata(self, value):
        """
        Set the metadata. Update only the domains that are contained in the
        value dictionary.
        """
        # Loop through domains.
        for domain, metadata in value.items():
            # Set the domain to None for the default, otherwise encode.
            domain = None if domain == "DEFAULT" else domain.encode()
            # Set each metadata entry separately.
            for meta_name, meta_value in metadata.items():
                capi.set_ds_metadata_item(
                    self._ptr,
                    meta_name.encode(),
                    meta_value.encode() if meta_value else None,
                    domain,
                )
|
44f76eb7a05e43dbd6ef5aeb3ccc1bd829d7adb0b2b3c600b75040f00868972d | from ctypes import byref, c_double, c_int, c_void_p
from django.contrib.gis.gdal.error import GDALException
from django.contrib.gis.gdal.prototypes import raster as capi
from django.contrib.gis.gdal.raster.base import GDALRasterBase
from django.contrib.gis.shortcuts import numpy
from django.utils.encoding import force_str
from .const import (
GDAL_COLOR_TYPES,
GDAL_INTEGER_TYPES,
GDAL_PIXEL_TYPES,
GDAL_TO_CTYPES,
)
class GDALBand(GDALRasterBase):
    """
    Wrap a GDAL raster band, needs to be obtained from a GDALRaster object.
    """

    def __init__(self, source, index):
        # The owning GDALRaster; GDAL band indices are 1-based.
        self.source = source
        self._ptr = capi.get_ds_raster_band(source._ptr, index)

    def _flush(self):
        """
        Call the flush method on the Band's parent raster and force a refresh
        of the statistics attribute when requested the next time.
        """
        self.source._flush()
        self._stats_refresh = True

    @property
    def description(self):
        """
        Return the description string of the band.
        """
        return force_str(capi.get_band_description(self._ptr))

    @property
    def width(self):
        """
        Width (X axis) in pixels of the band.
        """
        return capi.get_band_xsize(self._ptr)

    @property
    def height(self):
        """
        Height (Y axis) in pixels of the band.
        """
        return capi.get_band_ysize(self._ptr)

    @property
    def pixel_count(self):
        """
        Return the total number of pixels in this band.
        """
        return self.width * self.height

    # Set to True by _flush() to force recomputation on the next
    # statistics() call.
    _stats_refresh = False

    def statistics(self, refresh=False, approximate=False):
        """
        Compute statistics on the pixel values of this band.
        The return value is a tuple with the following structure:
        (minimum, maximum, mean, standard deviation).
        If approximate=True, the statistics may be computed based on overviews
        or a subset of image tiles.
        If refresh=True, the statistics will be computed from the data directly,
        and the cache will be updated where applicable.
        For empty bands (where all pixel values are nodata), all statistics
        values are returned as None.
        For raster formats using Persistent Auxiliary Metadata (PAM) services,
        the statistics might be cached in an auxiliary file.
        """
        # Prepare array with arguments for capi function
        smin, smax, smean, sstd = c_double(), c_double(), c_double(), c_double()
        stats_args = [
            self._ptr,
            c_int(approximate),
            byref(smin),
            byref(smax),
            byref(smean),
            byref(sstd),
            c_void_p(),
            c_void_p(),
        ]

        if refresh or self._stats_refresh:
            func = capi.compute_band_statistics
        else:
            # Add additional argument to force computation if there is no
            # existing PAM file to take the values from.
            force = True
            stats_args.insert(2, c_int(force))
            func = capi.get_band_statistics

        # Computation of statistics fails for empty bands.
        try:
            func(*stats_args)
            result = smin.value, smax.value, smean.value, sstd.value
        except GDALException:
            result = (None, None, None, None)

        self._stats_refresh = False

        return result

    @property
    def min(self):
        """
        Return the minimum pixel value for this band.
        """
        return self.statistics()[0]

    @property
    def max(self):
        """
        Return the maximum pixel value for this band.
        """
        return self.statistics()[1]

    @property
    def mean(self):
        """
        Return the mean of all pixel values of this band.
        """
        return self.statistics()[2]

    @property
    def std(self):
        """
        Return the standard deviation of all pixel values of this band.
        """
        return self.statistics()[3]

    @property
    def nodata_value(self):
        """
        Return the nodata value for this band, or None if it isn't set.
        """
        # Get value and nodata exists flag
        nodata_exists = c_int()
        value = capi.get_band_nodata_value(self._ptr, nodata_exists)
        if not nodata_exists:
            value = None
        # If the pixeltype is an integer, convert to int
        elif self.datatype() in GDAL_INTEGER_TYPES:
            value = int(value)
        return value

    @nodata_value.setter
    def nodata_value(self, value):
        """
        Set the nodata value for this band.
        """
        if value is None:
            capi.delete_band_nodata_value(self._ptr)
        elif not isinstance(value, (int, float)):
            raise ValueError("Nodata value must be numeric or None.")
        else:
            capi.set_band_nodata_value(self._ptr, value)
        self._flush()

    def datatype(self, as_string=False):
        """
        Return the GDAL Pixel Datatype for this band.
        """
        dtype = capi.get_band_datatype(self._ptr)
        if as_string:
            dtype = GDAL_PIXEL_TYPES[dtype]
        return dtype

    def color_interp(self, as_string=False):
        """Return the GDAL color interpretation for this band."""
        color = capi.get_band_color_interp(self._ptr)
        if as_string:
            color = GDAL_COLOR_TYPES[color]
        return color

    def data(self, data=None, offset=None, size=None, shape=None, as_memoryview=False):
        """
        Read or writes pixel values for this band. Blocks of data can
        be accessed by specifying the width, height and offset of the
        desired block. The same specification can be used to update
        parts of a raster by providing an array of values.
        Allowed input data types are bytes, memoryview, list, tuple, and array.
        """
        offset = offset or (0, 0)
        size = size or (self.width - offset[0], self.height - offset[1])
        shape = shape or size
        if any(x <= 0 for x in size):
            raise ValueError("Offset too big for this raster.")

        if size[0] > self.width or size[1] > self.height:
            raise ValueError("Size is larger than raster.")

        # Create ctypes type array generator
        ctypes_array = GDAL_TO_CTYPES[self.datatype()] * (shape[0] * shape[1])

        if data is None:
            # Set read mode
            access_flag = 0
            # Prepare empty ctypes array
            data_array = ctypes_array()
        else:
            # Set write mode
            access_flag = 1
            # Instantiate ctypes array holding the input data
            if isinstance(data, (bytes, memoryview)) or (
                numpy and isinstance(data, numpy.ndarray)
            ):
                data_array = ctypes_array.from_buffer_copy(data)
            else:
                data_array = ctypes_array(*data)

        # Access band
        capi.band_io(
            self._ptr,
            access_flag,
            offset[0],
            offset[1],
            size[0],
            size[1],
            byref(data_array),
            shape[0],
            shape[1],
            self.datatype(),
            0,
            0,
        )

        # Return data as numpy array if possible, otherwise as list
        if data is None:
            if as_memoryview:
                return memoryview(data_array)
            elif numpy:
                # reshape() needs a reshape parameter with the height first.
                return numpy.frombuffer(
                    data_array, dtype=numpy.dtype(data_array)
                ).reshape(tuple(reversed(size)))
            else:
                return list(data_array)
        else:
            self._flush()
class BandList(list):
    """Sequence-like accessor for the bands of a GDALRaster."""

    def __init__(self, source):
        """Keep a reference to the owning raster; the list itself stays empty."""
        self.source = source
        super().__init__()

    def __iter__(self):
        """Yield a GDALBand wrapper per band, using GDAL's 1-based indices."""
        band_count = len(self)
        for band_index in range(band_count):
            yield GDALBand(self.source, band_index + 1)

    def __len__(self):
        """Return the band count GDAL reports for the source raster."""
        return capi.get_ds_raster_count(self.source._ptr)

    def __getitem__(self, index):
        """Return the band at the given 0-based index."""
        try:
            return GDALBand(self.source, index + 1)
        except GDALException:
            raise GDALException("Unable to get band index %d" % index)
|
2e3ca0292f966cd3178f863c91a6064a7d0e539f94950a630a6a668f768f8e16 | from ctypes import POINTER, c_char_p, c_double, c_int, c_void_p
from django.contrib.gis.gdal.envelope import OGREnvelope
from django.contrib.gis.gdal.libgdal import lgdal
from django.contrib.gis.gdal.prototypes.errcheck import check_envelope
from django.contrib.gis.gdal.prototypes.generation import (
const_string_output,
double_output,
geom_output,
int_output,
srs_output,
string_output,
void_output,
)
# ### Generation routines specific to this module ###
def env_func(f, argtypes):
    "For getting OGREnvelopes."
    # The C routine writes the envelope into an output argument; there is
    # no meaningful return value, so errcheck extracts the envelope.
    f.argtypes = argtypes
    f.restype = None
    f.errcheck = check_envelope
    return f
def pnt_func(f):
    "For accessing point information."
    # Point accessors take (geometry pointer, coordinate index) and
    # return a double.
    return double_output(f, [c_void_p, c_int])
def topology_func(f):
    """
    Configure *f* as a binary topology predicate: the OGR routine takes
    two geometry pointers and returns a C int, which is coerced to a
    Python bool.
    """
    f.argtypes = [c_void_p, c_void_p]
    f.restype = c_int

    def to_bool(result, func, cargs):
        return bool(result)

    f.errcheck = to_bool
    return f
# ### OGR_G ctypes function prototypes ###

# GeoJSON routines.
from_json = geom_output(lgdal.OGR_G_CreateGeometryFromJson, [c_char_p])
to_json = string_output(
    lgdal.OGR_G_ExportToJson, [c_void_p], str_result=True, decoding="ascii"
)
to_kml = string_output(
    lgdal.OGR_G_ExportToKML, [c_void_p, c_char_p], str_result=True, decoding="ascii"
)

# GetX, GetY, GetZ all return doubles.
getx = pnt_func(lgdal.OGR_G_GetX)
gety = pnt_func(lgdal.OGR_G_GetY)
getz = pnt_func(lgdal.OGR_G_GetZ)

# Geometry creation routines. The `offset` keyword locates the
# by-reference geometry output argument (see geom_output).
from_wkb = geom_output(
    lgdal.OGR_G_CreateFromWkb, [c_char_p, c_void_p, POINTER(c_void_p), c_int], offset=-2
)
from_wkt = geom_output(
    lgdal.OGR_G_CreateFromWkt,
    [POINTER(c_char_p), c_void_p, POINTER(c_void_p)],
    offset=-1,
)
from_gml = geom_output(lgdal.OGR_G_CreateFromGML, [c_char_p])
create_geom = geom_output(lgdal.OGR_G_CreateGeometry, [c_int])
clone_geom = geom_output(lgdal.OGR_G_Clone, [c_void_p])
get_geom_ref = geom_output(lgdal.OGR_G_GetGeometryRef, [c_void_p, c_int])
get_boundary = geom_output(lgdal.OGR_G_GetBoundary, [c_void_p])
geom_convex_hull = geom_output(lgdal.OGR_G_ConvexHull, [c_void_p])
geom_diff = geom_output(lgdal.OGR_G_Difference, [c_void_p, c_void_p])
geom_intersection = geom_output(lgdal.OGR_G_Intersection, [c_void_p, c_void_p])
geom_sym_diff = geom_output(lgdal.OGR_G_SymmetricDifference, [c_void_p, c_void_p])
geom_union = geom_output(lgdal.OGR_G_Union, [c_void_p, c_void_p])

# Geometry modification routines.
add_geom = void_output(lgdal.OGR_G_AddGeometry, [c_void_p, c_void_p])
import_wkt = void_output(lgdal.OGR_G_ImportFromWkt, [c_void_p, POINTER(c_char_p)])

# Destroys a geometry
destroy_geom = void_output(lgdal.OGR_G_DestroyGeometry, [c_void_p], errcheck=False)

# Geometry export routines.
to_wkb = void_output(
    lgdal.OGR_G_ExportToWkb, None, errcheck=True
)  # special handling for WKB.
to_wkt = string_output(
    lgdal.OGR_G_ExportToWkt, [c_void_p, POINTER(c_char_p)], decoding="ascii"
)
to_gml = string_output(
    lgdal.OGR_G_ExportToGML, [c_void_p], str_result=True, decoding="ascii"
)
get_wkbsize = int_output(lgdal.OGR_G_WkbSize, [c_void_p])

# Geometry spatial-reference related routines.
assign_srs = void_output(
    lgdal.OGR_G_AssignSpatialReference, [c_void_p, c_void_p], errcheck=False
)
get_geom_srs = srs_output(lgdal.OGR_G_GetSpatialReference, [c_void_p])

# Geometry properties
get_area = double_output(lgdal.OGR_G_GetArea, [c_void_p])
get_centroid = void_output(lgdal.OGR_G_Centroid, [c_void_p, c_void_p])
get_dims = int_output(lgdal.OGR_G_GetDimension, [c_void_p])
get_coord_dim = int_output(lgdal.OGR_G_GetCoordinateDimension, [c_void_p])
set_coord_dim = void_output(
    lgdal.OGR_G_SetCoordinateDimension, [c_void_p, c_int], errcheck=False
)
is_empty = int_output(
    lgdal.OGR_G_IsEmpty, [c_void_p], errcheck=lambda result, func, cargs: bool(result)
)

get_geom_count = int_output(lgdal.OGR_G_GetGeometryCount, [c_void_p])
get_geom_name = const_string_output(
    lgdal.OGR_G_GetGeometryName, [c_void_p], decoding="ascii"
)
get_geom_type = int_output(lgdal.OGR_G_GetGeometryType, [c_void_p])
get_point_count = int_output(lgdal.OGR_G_GetPointCount, [c_void_p])
get_point = void_output(
    lgdal.OGR_G_GetPoint,
    [c_void_p, c_int, POINTER(c_double), POINTER(c_double), POINTER(c_double)],
    errcheck=False,
)
geom_close_rings = void_output(lgdal.OGR_G_CloseRings, [c_void_p], errcheck=False)

# Topology routines. Each returns a Python bool (see topology_func).
ogr_contains = topology_func(lgdal.OGR_G_Contains)
ogr_crosses = topology_func(lgdal.OGR_G_Crosses)
ogr_disjoint = topology_func(lgdal.OGR_G_Disjoint)
ogr_equals = topology_func(lgdal.OGR_G_Equals)
ogr_intersects = topology_func(lgdal.OGR_G_Intersects)
ogr_overlaps = topology_func(lgdal.OGR_G_Overlaps)
ogr_touches = topology_func(lgdal.OGR_G_Touches)
ogr_within = topology_func(lgdal.OGR_G_Within)

# Transformation routines.
geom_transform = void_output(lgdal.OGR_G_Transform, [c_void_p, c_void_p])
geom_transform_to = void_output(lgdal.OGR_G_TransformTo, [c_void_p, c_void_p])

# For retrieving the envelope of the geometry.
get_envelope = env_func(lgdal.OGR_G_GetEnvelope, [c_void_p, POINTER(OGREnvelope)])
|
7389b7c744240e10c3698c5abc672f30bb3744d35bf44cdf293cc7b9d585d5ff | """
This module contains functions that generate ctypes prototypes for the
GDAL routines.
"""
from ctypes import POINTER, c_bool, c_char_p, c_double, c_int, c_int64, c_void_p
from functools import partial
from django.contrib.gis.gdal.prototypes.errcheck import (
check_arg_errcode,
check_const_string,
check_errcode,
check_geom,
check_geom_offset,
check_pointer,
check_srs,
check_str_arg,
check_string,
)
class gdal_char_p(c_char_p):
    # Distinct c_char_p subclass used as a restype so the error-checking
    # routine can free the memory at the pointer's address (see
    # string_output below).
    pass
def bool_output(func, argtypes, errcheck=None):
    """Configure a ctypes prototype whose C result maps to a Python bool."""
    func.restype = c_bool
    func.argtypes = argtypes
    if errcheck:
        func.errcheck = errcheck
    return func
def double_output(func, argtypes, errcheck=False, strarg=False, cpl=False):
    """Configure a ctypes prototype returning a C double value."""
    func.restype = c_double
    func.argtypes = argtypes
    if errcheck:
        # Check an error-code argument, optionally via CPL error handling.
        func.errcheck = partial(check_arg_errcode, cpl=cpl)
    if strarg:
        # NOTE: when both flags are set, strarg wins (overwrites errcheck).
        func.errcheck = check_str_arg
    return func
def geom_output(func, argtypes, offset=None):
    """
    Configure a prototype that produces a geometry, either as the direct
    return value (offset is None/0) or written by-reference into the
    argument located by `offset`, with the C function returning an error
    code instead.
    """
    func.argtypes = argtypes
    if offset:
        # Error code returned; the geometry comes back by-reference.
        func.restype = c_int

        def geomerrcheck(result, func, cargs):
            return check_geom_offset(result, func, cargs, offset)

        func.errcheck = geomerrcheck
    else:
        # A geometry pointer is returned directly.
        func.restype = c_void_p
        func.errcheck = check_geom
    return func
def int_output(func, argtypes, errcheck=None):
    """Configure `func` as a ctypes routine returning a C int."""
    func.restype = c_int
    func.argtypes = argtypes
    if errcheck:
        func.errcheck = errcheck
    return func
def int64_output(func, argtypes):
    """Configure `func` as a ctypes routine returning a 64-bit integer."""
    func.restype = c_int64
    func.argtypes = argtypes
    return func
def srs_output(func, argtypes):
    """
    Configure `func` as a ctypes routine returning a pointer to an OGR
    Spatial Reference System, validated through `check_srs`.
    """
    func.restype = c_void_p
    func.argtypes = argtypes
    func.errcheck = check_srs
    return func
def const_string_output(func, argtypes, offset=None, decoding=None, cpl=False):
    """
    Configure `func` as a ctypes routine returning a constant string
    (one the caller must not free).

    With `offset`, the C function returns a status code and the string
    is read from the by-reference argument at that offset; otherwise the
    string is the return value itself.  When `decoding` is given, decode
    the raw bytes before returning them.
    """
    func.argtypes = argtypes
    func.restype = c_int if offset else c_char_p

    def _wrap(result, func, cargs):
        value = check_const_string(result, func, cargs, offset=offset, cpl=cpl)
        return value.decode(decoding) if value and decoding else value

    func.errcheck = _wrap
    return func
def string_output(func, argtypes, offset=-1, str_result=False, decoding=None):
    """
    Configure `func` as a ctypes routine that returns a string allocated
    by GDAL; the attached error-checking hook frees that allocation via
    VSIFree after copying the value out.

    With `str_result` True, the C function returns the string pointer
    itself (typed as gdal_char_p so its address survives for freeing);
    otherwise it returns a status code and the string pointer is read
    from the by-reference argument at `offset`.  When `decoding` is
    given, decode the raw bytes before they are returned.
    """
    func.argtypes = argtypes
    # gdal_char_p keeps the raw pointer so it can be freed later.
    func.restype = gdal_char_p if str_result else c_int

    def _wrap(result, func, cargs):
        value = check_string(result, func, cargs, offset=offset, str_result=str_result)
        return value.decode(decoding) if value and decoding else value

    func.errcheck = _wrap
    return func
def void_output(func, argtypes, errcheck=True, cpl=False):
    """
    Configure `func` as a ctypes routine used for its side effects.

    By default the C function is assumed to return a status code, which
    is validated via `check_errcode`.  Pass errcheck=False for routines
    that genuinely return void.
    """
    if argtypes:
        func.argtypes = argtypes
    if not errcheck:
        func.restype = None
    else:
        func.restype = c_int
        func.errcheck = partial(check_errcode, cpl=cpl)
    return func
def voidptr_output(func, argtypes, errcheck=True):
    """Configure `func` as a ctypes routine returning a raw c_void_p."""
    func.restype = c_void_p
    func.argtypes = argtypes
    if errcheck:
        func.errcheck = check_pointer
    return func
def chararray_output(func, argtypes, errcheck=True):
    """Configure `func` as a ctypes routine returning a c_char_p array."""
    func.restype = POINTER(c_char_p)
    func.argtypes = argtypes
    if errcheck:
        func.errcheck = check_pointer
    return func
|
c2546aad59e8ccc8a782b608047ffda0c33363d0ccad7d34058ccffe7e788aed | """
This module houses the error-checking routines used by the GDAL
ctypes prototypes.
"""
from ctypes import c_void_p, string_at
from django.contrib.gis.gdal.error import GDALException, SRSException, check_err
from django.contrib.gis.gdal.libgdal import lgdal
# Helper routines for retrieving pointers and/or values from
# arguments passed in by reference.
def arg_byref(args, offset=-1):
    """Return the value held by the by-reference argument at `offset`."""
    ref = args[offset]
    return ref._obj.value
def ptr_byref(args, offset=-1):
    """Return the ctypes object behind the by-reference argument at `offset`."""
    ref = args[offset]
    return ref._obj
# ### String checking Routines ###
def check_const_string(result, func, cargs, offset=None, cpl=False):
    """
    Validate a constant-string result without freeing the pointer.

    When `offset` is given, `result` is a status code (validated with
    check_err) and the string is read from the by-reference argument at
    that offset; otherwise `result` already is the string.
    """
    if not offset:
        return result
    check_err(result, cpl=cpl)
    return ptr_byref(cargs, offset).value
def check_string(result, func, cargs, offset=-1, str_result=False):
    """
    Validate a string returned from GDAL and free the GDAL-allocated
    pointer with VSIFree after copying its contents.

    When `str_result` is True, `result` itself is the string pointer;
    otherwise `result` is an error code and the string pointer is read
    by reference from the argument at `offset`.
    """
    if str_result:
        # The routine returned the string pointer directly.
        ptr = result
        s = string_at(ptr) if ptr else None
    else:
        # An error code was returned; validate it, then pull the string
        # out of the by-reference argument.
        check_err(result)
        ptr = ptr_byref(cargs, offset)
        s = ptr.value
    # Release the memory GDAL allocated behind the pointer.
    if ptr:
        lgdal.VSIFree(ptr)
    return s
# ### DataSource, Layer error-checking ###
# ### Envelope checking ###
def check_envelope(result, func, cargs, offset=-1):
    """Return the OGREnvelope structure filled in by reference."""
    envelope = ptr_byref(cargs, offset)
    return envelope
# ### Geometry error-checking routines ###
def check_geom(result, func, cargs):
    """Validate a geometry pointer returned from an OGR routine."""
    # Some routines (e.g. OGR_G_Clone) may hand back a bare integer
    # address despite the c_void_p restype; normalize it first.
    if isinstance(result, int):
        result = c_void_p(result)
    if result:
        return result
    raise GDALException(
        'Invalid geometry pointer returned from "%s".' % func.__name__
    )
def check_geom_offset(result, func, cargs, offset=-1):
    """Validate the geometry passed back by reference at `offset`."""
    check_err(result)
    return check_geom(ptr_byref(cargs, offset=offset), func, cargs)
# ### Spatial Reference error-checking routines ###
def check_srs(result, func, cargs):
    """Validate a spatial reference pointer returned from OSR."""
    # Normalize bare integer addresses into c_void_p.
    if isinstance(result, int):
        result = c_void_p(result)
    if result:
        return result
    raise SRSException(
        'Invalid spatial reference pointer returned from "%s".' % func.__name__
    )
# ### Other error-checking routines ###
def check_arg_errcode(result, func, cargs, cpl=False):
    """
    Validate the error code passed back (by reference) in the last C
    argument, then return the routine's actual result.
    """
    code = arg_byref(cargs)
    check_err(code, cpl=cpl)
    return result
def check_errcode(result, func, cargs, cpl=False):
    """
    Validate the c_int error code returned by the routine; check_err
    raises on failure and nothing is returned on success.
    """
    check_err(result, cpl=cpl)
def check_pointer(result, func, cargs):
    """Ensure the returned pointer is non-NULL."""
    # Normalize bare integer addresses into c_void_p.
    if isinstance(result, int):
        result = c_void_p(result)
    if not result:
        raise GDALException('Invalid pointer returned from "%s"' % func.__name__)
    return result
def check_str_arg(result, func, cargs):
    """
    For the OSRGet[Angular|Linear]Units routines: the double result is
    passed through unchanged, while the units name is read from the
    final string argument, which must not be freed here.
    """
    units_name = cargs[-1]._obj.value.decode()
    return result, units_name
|
1cf2dcda002c19184dc248d382d6731dd8d61bc2ca6dc49dc11975037aa34039 | """
This module houses the ctypes function prototypes for GDAL DataSource (raster)
related data structures.
"""
from ctypes import POINTER, c_bool, c_char_p, c_double, c_int, c_void_p
from functools import partial
from django.contrib.gis.gdal.libgdal import std_call
from django.contrib.gis.gdal.prototypes.generation import (
chararray_output,
const_string_output,
double_output,
int_output,
void_output,
voidptr_output,
)
# For more detail about c function names and definitions see
# https://gdal.org/api/raster_c_api.html
# https://gdal.org/doxygen/gdalwarper_8h.html
# https://gdal.org/api/gdal_utils.html
# Prepare partial functions that use cpl error codes
# (rebind the imported generators with cpl=True so every prototype below
# checks CPL-style errors).
void_output = partial(void_output, cpl=True)
const_string_output = partial(const_string_output, cpl=True)
double_output = partial(double_output, cpl=True)
# Raster Driver Routines
register_all = void_output(std_call("GDALAllRegister"), [], errcheck=False)
get_driver = voidptr_output(std_call("GDALGetDriver"), [c_int])
get_driver_by_name = voidptr_output(
    std_call("GDALGetDriverByName"), [c_char_p], errcheck=False
)
get_driver_count = int_output(std_call("GDALGetDriverCount"), [])
get_driver_description = const_string_output(std_call("GDALGetDescription"), [c_void_p])
# Raster Data Source Routines
create_ds = voidptr_output(
    std_call("GDALCreate"), [c_void_p, c_char_p, c_int, c_int, c_int, c_int, c_void_p]
)
open_ds = voidptr_output(std_call("GDALOpen"), [c_char_p, c_int])
close_ds = void_output(std_call("GDALClose"), [c_void_p], errcheck=False)
flush_ds = int_output(std_call("GDALFlushCache"), [c_void_p])
copy_ds = voidptr_output(
    std_call("GDALCreateCopy"),
    [c_void_p, c_char_p, c_void_p, c_int, POINTER(c_char_p), c_void_p, c_void_p],
)
add_band_ds = void_output(std_call("GDALAddBand"), [c_void_p, c_int])
get_ds_description = const_string_output(std_call("GDALGetDescription"), [c_void_p])
get_ds_driver = voidptr_output(std_call("GDALGetDatasetDriver"), [c_void_p])
get_ds_info = const_string_output(std_call("GDALInfo"), [c_void_p, c_void_p])
get_ds_xsize = int_output(std_call("GDALGetRasterXSize"), [c_void_p])
get_ds_ysize = int_output(std_call("GDALGetRasterYSize"), [c_void_p])
get_ds_raster_count = int_output(std_call("GDALGetRasterCount"), [c_void_p])
get_ds_raster_band = voidptr_output(std_call("GDALGetRasterBand"), [c_void_p, c_int])
get_ds_projection_ref = const_string_output(
    std_call("GDALGetProjectionRef"), [c_void_p]
)
set_ds_projection_ref = void_output(std_call("GDALSetProjection"), [c_void_p, c_char_p])
get_ds_geotransform = void_output(
    std_call("GDALGetGeoTransform"), [c_void_p, POINTER(c_double * 6)], errcheck=False
)
set_ds_geotransform = void_output(
    std_call("GDALSetGeoTransform"), [c_void_p, POINTER(c_double * 6)]
)
get_ds_metadata = chararray_output(
    std_call("GDALGetMetadata"), [c_void_p, c_char_p], errcheck=False
)
set_ds_metadata = void_output(
    std_call("GDALSetMetadata"), [c_void_p, POINTER(c_char_p), c_char_p]
)
get_ds_metadata_domain_list = chararray_output(
    std_call("GDALGetMetadataDomainList"), [c_void_p], errcheck=False
)
get_ds_metadata_item = const_string_output(
    std_call("GDALGetMetadataItem"), [c_void_p, c_char_p, c_char_p]
)
set_ds_metadata_item = const_string_output(
    std_call("GDALSetMetadataItem"), [c_void_p, c_char_p, c_char_p, c_char_p]
)
# CSLDestroy frees char** string lists, matching the POINTER(c_char_p)
# results of the chararray_output metadata routines above.
free_dsl = void_output(std_call("CSLDestroy"), [POINTER(c_char_p)], errcheck=False)
# Raster Band Routines
band_io = void_output(
    std_call("GDALRasterIO"),
    [
        c_void_p,
        c_int,
        c_int,
        c_int,
        c_int,
        c_int,
        c_void_p,
        c_int,
        c_int,
        c_int,
        c_int,
        c_int,
    ],
)
get_band_xsize = int_output(std_call("GDALGetRasterBandXSize"), [c_void_p])
get_band_ysize = int_output(std_call("GDALGetRasterBandYSize"), [c_void_p])
get_band_index = int_output(std_call("GDALGetBandNumber"), [c_void_p])
get_band_description = const_string_output(std_call("GDALGetDescription"), [c_void_p])
get_band_ds = voidptr_output(std_call("GDALGetBandDataset"), [c_void_p])
get_band_datatype = int_output(std_call("GDALGetRasterDataType"), [c_void_p])
get_band_color_interp = int_output(
    std_call("GDALGetRasterColorInterpretation"), [c_void_p]
)
get_band_nodata_value = double_output(
    std_call("GDALGetRasterNoDataValue"), [c_void_p, POINTER(c_int)]
)
set_band_nodata_value = void_output(
    std_call("GDALSetRasterNoDataValue"), [c_void_p, c_double]
)
delete_band_nodata_value = void_output(
    std_call("GDALDeleteRasterNoDataValue"), [c_void_p]
)
get_band_statistics = void_output(
    std_call("GDALGetRasterStatistics"),
    [
        c_void_p,
        c_int,
        c_int,
        POINTER(c_double),
        POINTER(c_double),
        POINTER(c_double),
        POINTER(c_double),
        c_void_p,
        c_void_p,
    ],
)
compute_band_statistics = void_output(
    std_call("GDALComputeRasterStatistics"),
    [
        c_void_p,
        c_int,
        POINTER(c_double),
        POINTER(c_double),
        POINTER(c_double),
        POINTER(c_double),
        c_void_p,
        c_void_p,
    ],
)
# Reprojection routine
reproject_image = void_output(
    std_call("GDALReprojectImage"),
    [
        c_void_p,
        c_char_p,
        c_void_p,
        c_char_p,
        c_int,
        c_double,
        c_double,
        c_void_p,
        c_void_p,
        c_void_p,
    ],
)
auto_create_warped_vrt = voidptr_output(
    std_call("GDALAutoCreateWarpedVRT"),
    [c_void_p, c_char_p, c_char_p, c_int, c_double, c_void_p],
)
# Create VSI gdal raster files from in-memory buffers.
# https://gdal.org/api/cpl.html#cpl-vsi-h
create_vsi_file_from_mem_buffer = voidptr_output(
    std_call("VSIFileFromMemBuffer"), [c_char_p, c_void_p, c_int, c_int]
)
get_mem_buffer_from_vsi_file = voidptr_output(
    std_call("VSIGetMemFileBuffer"), [c_char_p, POINTER(c_int), c_bool]
)
unlink_vsi_file = int_output(std_call("VSIUnlink"), [c_char_p])
|
696788b6e44b1abf4ddea72707bbeea2835b7867ab922c670d1523b5a5fbce4d | """
This module houses the ctypes function prototypes for OGR DataSource
related data structures. OGR_Dr_*, OGR_DS_*, OGR_L_*, OGR_F_*,
OGR_Fld_* routines are relevant here.
"""
from ctypes import POINTER, c_char_p, c_double, c_int, c_long, c_void_p
from django.contrib.gis.gdal.envelope import OGREnvelope
from django.contrib.gis.gdal.libgdal import lgdal
from django.contrib.gis.gdal.prototypes.generation import (
bool_output,
const_string_output,
double_output,
geom_output,
int64_output,
int_output,
srs_output,
void_output,
voidptr_output,
)
c_int_p = POINTER(c_int) # shortcut type
# Driver Routines
register_all = void_output(lgdal.OGRRegisterAll, [], errcheck=False)
cleanup_all = void_output(lgdal.OGRCleanupAll, [], errcheck=False)
get_driver = voidptr_output(lgdal.OGRGetDriver, [c_int])
get_driver_by_name = voidptr_output(
    lgdal.OGRGetDriverByName, [c_char_p], errcheck=False
)
get_driver_count = int_output(lgdal.OGRGetDriverCount, [])
get_driver_name = const_string_output(
    lgdal.OGR_Dr_GetName, [c_void_p], decoding="ascii"
)
# DataSource
open_ds = voidptr_output(lgdal.OGROpen, [c_char_p, c_int, POINTER(c_void_p)])
destroy_ds = void_output(lgdal.OGR_DS_Destroy, [c_void_p], errcheck=False)
release_ds = void_output(lgdal.OGRReleaseDataSource, [c_void_p])
get_ds_name = const_string_output(lgdal.OGR_DS_GetName, [c_void_p])
get_layer = voidptr_output(lgdal.OGR_DS_GetLayer, [c_void_p, c_int])
get_layer_by_name = voidptr_output(lgdal.OGR_DS_GetLayerByName, [c_void_p, c_char_p])
get_layer_count = int_output(lgdal.OGR_DS_GetLayerCount, [c_void_p])
# Layer Routines
get_extent = void_output(lgdal.OGR_L_GetExtent, [c_void_p, POINTER(OGREnvelope), c_int])
get_feature = voidptr_output(lgdal.OGR_L_GetFeature, [c_void_p, c_long])
get_feature_count = int_output(lgdal.OGR_L_GetFeatureCount, [c_void_p, c_int])
get_layer_defn = voidptr_output(lgdal.OGR_L_GetLayerDefn, [c_void_p])
get_layer_srs = srs_output(lgdal.OGR_L_GetSpatialRef, [c_void_p])
get_next_feature = voidptr_output(lgdal.OGR_L_GetNextFeature, [c_void_p])
reset_reading = void_output(lgdal.OGR_L_ResetReading, [c_void_p], errcheck=False)
test_capability = int_output(lgdal.OGR_L_TestCapability, [c_void_p, c_char_p])
get_spatial_filter = geom_output(lgdal.OGR_L_GetSpatialFilter, [c_void_p])
set_spatial_filter = void_output(
    lgdal.OGR_L_SetSpatialFilter, [c_void_p, c_void_p], errcheck=False
)
set_spatial_filter_rect = void_output(
    lgdal.OGR_L_SetSpatialFilterRect,
    [c_void_p, c_double, c_double, c_double, c_double],
    errcheck=False,
)
# Feature Definition Routines
get_fd_geom_type = int_output(lgdal.OGR_FD_GetGeomType, [c_void_p])
get_fd_name = const_string_output(lgdal.OGR_FD_GetName, [c_void_p])
# NOTE(review): duplicate binding of OGR_FD_GetName (same as get_fd_name),
# apparently kept for backward compatibility -- confirm before removing.
get_feat_name = const_string_output(lgdal.OGR_FD_GetName, [c_void_p])
get_field_count = int_output(lgdal.OGR_FD_GetFieldCount, [c_void_p])
get_field_defn = voidptr_output(lgdal.OGR_FD_GetFieldDefn, [c_void_p, c_int])
# Feature Routines
clone_feature = voidptr_output(lgdal.OGR_F_Clone, [c_void_p])
destroy_feature = void_output(lgdal.OGR_F_Destroy, [c_void_p], errcheck=False)
feature_equal = int_output(lgdal.OGR_F_Equal, [c_void_p, c_void_p])
get_feat_geom_ref = geom_output(lgdal.OGR_F_GetGeometryRef, [c_void_p])
get_feat_field_count = int_output(lgdal.OGR_F_GetFieldCount, [c_void_p])
get_feat_field_defn = voidptr_output(lgdal.OGR_F_GetFieldDefnRef, [c_void_p, c_int])
get_fid = int_output(lgdal.OGR_F_GetFID, [c_void_p])
get_field_as_datetime = int_output(
    lgdal.OGR_F_GetFieldAsDateTime,
    [c_void_p, c_int, c_int_p, c_int_p, c_int_p, c_int_p, c_int_p, c_int_p],
)
get_field_as_double = double_output(lgdal.OGR_F_GetFieldAsDouble, [c_void_p, c_int])
get_field_as_integer = int_output(lgdal.OGR_F_GetFieldAsInteger, [c_void_p, c_int])
get_field_as_integer64 = int64_output(
    lgdal.OGR_F_GetFieldAsInteger64, [c_void_p, c_int]
)
is_field_set = bool_output(lgdal.OGR_F_IsFieldSetAndNotNull, [c_void_p, c_int])
get_field_as_string = const_string_output(
    lgdal.OGR_F_GetFieldAsString, [c_void_p, c_int]
)
get_field_index = int_output(lgdal.OGR_F_GetFieldIndex, [c_void_p, c_char_p])
# Field Routines
get_field_name = const_string_output(lgdal.OGR_Fld_GetNameRef, [c_void_p])
get_field_precision = int_output(lgdal.OGR_Fld_GetPrecision, [c_void_p])
get_field_type = int_output(lgdal.OGR_Fld_GetType, [c_void_p])
get_field_type_name = const_string_output(lgdal.OGR_GetFieldTypeName, [c_int])
get_field_width = int_output(lgdal.OGR_Fld_GetWidth, [c_void_p])
|
b89ed7827ad7ed3bafa6026ef2ec1eb3b09689d0bbf82e8f492a8d784a49babf | from ctypes import POINTER, c_char_p, c_int, c_void_p
from django.contrib.gis.gdal.libgdal import GDAL_VERSION, lgdal, std_call
from django.contrib.gis.gdal.prototypes.generation import (
const_string_output,
double_output,
int_output,
srs_output,
string_output,
void_output,
)
# Shortcut generation for routines with known parameters.
def srs_double(f):
    """
    Prototype an OSR routine that takes an OSRSpatialReference handle
    and returns a double, with error checking enabled.
    """
    argtypes = [c_void_p, POINTER(c_int)]
    return double_output(f, argtypes, errcheck=True)
def units_func(f):
    """
    Prototype an OSR units routine (e.g. OSRGetAngularUnits,
    OSRGetLinearUnits): the conversion factor is returned as a double
    and the unit name comes back through the final string argument.
    """
    argtypes = [c_void_p, POINTER(c_char_p)]
    return double_output(f, argtypes, strarg=True)
# Creation & destruction.
clone_srs = srs_output(std_call("OSRClone"), [c_void_p])
new_srs = srs_output(std_call("OSRNewSpatialReference"), [c_char_p])
release_srs = void_output(lgdal.OSRRelease, [c_void_p], errcheck=False)
destroy_srs = void_output(
    std_call("OSRDestroySpatialReference"), [c_void_p], errcheck=False
)
srs_validate = void_output(lgdal.OSRValidate, [c_void_p])
# Axis mapping strategies are only available from GDAL 3.0 on.
if GDAL_VERSION >= (3, 0):
    set_axis_strategy = void_output(
        lgdal.OSRSetAxisMappingStrategy, [c_void_p, c_int], errcheck=False
    )
# Getting the semi_major, semi_minor, and flattening functions.
semi_major = srs_double(lgdal.OSRGetSemiMajor)
semi_minor = srs_double(lgdal.OSRGetSemiMinor)
invflattening = srs_double(lgdal.OSRGetInvFlattening)
# WKT, PROJ, EPSG, XML importation routines.
from_wkt = void_output(lgdal.OSRImportFromWkt, [c_void_p, POINTER(c_char_p)])
from_proj = void_output(lgdal.OSRImportFromProj4, [c_void_p, c_char_p])
from_epsg = void_output(std_call("OSRImportFromEPSG"), [c_void_p, c_int])
from_xml = void_output(lgdal.OSRImportFromXML, [c_void_p, c_char_p])
from_user_input = void_output(std_call("OSRSetFromUserInput"), [c_void_p, c_char_p])
# Morphing to/from ESRI WKT.
morph_to_esri = void_output(lgdal.OSRMorphToESRI, [c_void_p])
morph_from_esri = void_output(lgdal.OSRMorphFromESRI, [c_void_p])
# Identifying the EPSG
identify_epsg = void_output(lgdal.OSRAutoIdentifyEPSG, [c_void_p])
# Getting the angular_units, linear_units functions
linear_units = units_func(lgdal.OSRGetLinearUnits)
angular_units = units_func(lgdal.OSRGetAngularUnits)
# For exporting to WKT, PROJ, "Pretty" WKT, and XML.
to_wkt = string_output(
    std_call("OSRExportToWkt"), [c_void_p, POINTER(c_char_p)], decoding="utf-8"
)
to_proj = string_output(
    std_call("OSRExportToProj4"), [c_void_p, POINTER(c_char_p)], decoding="ascii"
)
to_pretty_wkt = string_output(
    std_call("OSRExportToPrettyWkt"),
    [c_void_p, POINTER(c_char_p), c_int],
    offset=-2,
    decoding="utf-8",
)
to_xml = string_output(
    lgdal.OSRExportToXML,
    [c_void_p, POINTER(c_char_p), c_char_p],
    offset=-2,
    decoding="utf-8",
)
# String attribute retrieval routines.
get_attr_value = const_string_output(
    std_call("OSRGetAttrValue"), [c_void_p, c_char_p, c_int], decoding="utf-8"
)
get_auth_name = const_string_output(
    lgdal.OSRGetAuthorityName, [c_void_p, c_char_p], decoding="ascii"
)
get_auth_code = const_string_output(
    lgdal.OSRGetAuthorityCode, [c_void_p, c_char_p], decoding="ascii"
)
# SRS Properties
isgeographic = int_output(lgdal.OSRIsGeographic, [c_void_p])
islocal = int_output(lgdal.OSRIsLocal, [c_void_p])
isprojected = int_output(lgdal.OSRIsProjected, [c_void_p])
# Coordinate transformation
new_ct = srs_output(std_call("OCTNewCoordinateTransformation"), [c_void_p, c_void_p])
destroy_ct = void_output(
    std_call("OCTDestroyCoordinateTransformation"), [c_void_p], errcheck=False
)
|
9f5c8262f402b5cee9885ae18de642587f0f7f4f8088e1811f7dd566eb138162 | import threading
from django.contrib.gis.geos.base import GEOSBase
from django.contrib.gis.geos.libgeos import CONTEXT_PTR, error_h, lgeos, notice_h
class GEOSContextHandle(GEOSBase):
    """Represent a GEOS context handle."""
    # Pointer type used by GEOSBase for validity checking; `destructor` is
    # the C routine that releases the handle (presumably invoked by the
    # base class when this object is freed -- confirm in GEOSBase).
    ptr_type = CONTEXT_PTR
    destructor = lgeos.finishGEOS_r
    def __init__(self):
        # Initializing the context handler for this thread with
        # the notice and error handler.
        self.ptr = lgeos.initGEOS_r(notice_h, error_h)
# Defining a thread-local object and creating an instance
# to hold a reference to GEOSContextHandle for this thread.
class GEOSContext(threading.local):
    # Per-thread GEOSContextHandle; populated lazily on the first GEOS
    # call made from each thread (see GEOSFunc.__call__).
    handle = None
thread_context = GEOSContext()
class GEOSFunc:
    """
    Callable wrapper around a GEOS C function.

    Always bind the reentrant ('_r') variant of the named routine and
    prepend the calling thread's context handle to every invocation.
    """

    def __init__(self, func_name):
        # GEOS thread-safe function signatures end with '_r' and take an
        # additional context handle parameter.
        self.cfunc = getattr(lgeos, func_name + "_r")
        # Keep a reference to thread_context so it isn't garbage-collected
        # before an attempt to call this object.
        self.thread_context = thread_context

    def __call__(self, *args):
        # Lazily create this thread's context handle on first use.
        self.thread_context.handle = self.thread_context.handle or GEOSContextHandle()
        # The context handle pointer is always the first C argument.
        return self.cfunc(self.thread_context.handle.ptr, *args)

    def __str__(self):
        return self.cfunc.__name__

    # The attributes below forward to the underlying ctypes function so a
    # GEOSFunc can be configured like a plain ctypes routine.
    @property
    def argtypes(self):
        return self.cfunc.argtypes

    @argtypes.setter
    def argtypes(self, argtypes):
        # The context handle argument is implicit; prepend its type.
        self.cfunc.argtypes = [CONTEXT_PTR, *argtypes]

    @property
    def restype(self):
        return self.cfunc.restype

    @restype.setter
    def restype(self, restype):
        self.cfunc.restype = restype

    @property
    def errcheck(self):
        return self.cfunc.errcheck

    @errcheck.setter
    def errcheck(self, errcheck):
        self.cfunc.errcheck = errcheck
|
36547ead41428ff577969a52998488d9b6a92e29bf554257001c0495395d64cd | from ctypes import POINTER, c_char_p, c_int, c_ubyte, c_uint
from django.contrib.gis.geos.libgeos import CS_PTR, GEOM_PTR, GEOSFuncFactory
from django.contrib.gis.geos.prototypes.errcheck import (
check_geom,
check_minus_one,
check_string,
)
# This is the return type used by binary output (WKB, HEX) routines.
c_uchar_p = POINTER(c_ubyte)
# We create a simple subclass of c_char_p here because when the response
# type is set to c_char_p, you get a _Python_ string and there's no way
# to access the string's address inside the error checking function.
# In other words, you can't free the memory allocated inside GEOS. Previously,
# the return type would just be omitted and the integer address would be
# used -- but this allows us to be specific in the function definition and
# keeps the reference so it may be free'd.
class geos_char_p(c_char_p):
    # Intentionally empty: the subclass only exists so ctypes hands back
    # the pointer object (address intact) instead of auto-converting the
    # result to Python bytes -- see the explanatory comment above.
    pass
# ### ctypes factory classes ###
class GeomOutput(GEOSFuncFactory):
    "For GEOS routines that return a geometry."
    # Raw GEOS geometry pointer; check_geom raises GEOSException on NULL.
    restype = GEOM_PTR
    errcheck = staticmethod(check_geom)
class IntFromGeom(GEOSFuncFactory):
    "Argument is a geometry, return type is an integer."
    argtypes = [GEOM_PTR]
    restype = c_int
    # check_minus_one raises GEOSException when the routine returns -1.
    errcheck = staticmethod(check_minus_one)
class StringFromGeom(GEOSFuncFactory):
    "Argument is a Geometry, return type is a string."
    argtypes = [GEOM_PTR]
    # geos_char_p keeps the raw pointer so check_string can free the
    # GEOS-allocated buffer after copying its contents.
    restype = geos_char_p
    errcheck = staticmethod(check_string)
# ### ctypes prototypes ###
# The GEOS geometry type, typeid, num_coordinates and number of geometries
geos_makevalid = GeomOutput("GEOSMakeValid", argtypes=[GEOM_PTR])
# NOTE(review): GEOSNormalize returns an int status rather than a new
# geometry (hence IntFromGeom) -- presumably it normalizes in place.
geos_normalize = IntFromGeom("GEOSNormalize")
geos_type = StringFromGeom("GEOSGeomType")
geos_typeid = IntFromGeom("GEOSGeomTypeId")
get_dims = GEOSFuncFactory("GEOSGeom_getDimensions", argtypes=[GEOM_PTR], restype=c_int)
get_num_coords = IntFromGeom("GEOSGetNumCoordinates")
get_num_geoms = IntFromGeom("GEOSGetNumGeometries")
# Geometry creation factories
create_point = GeomOutput("GEOSGeom_createPoint", argtypes=[CS_PTR])
create_linestring = GeomOutput("GEOSGeom_createLineString", argtypes=[CS_PTR])
create_linearring = GeomOutput("GEOSGeom_createLinearRing", argtypes=[CS_PTR])
# Polygon and collection creation routines need argument types defined
# for compatibility with some platforms, e.g. macOS ARM64. With argtypes
# defined, arrays are automatically cast and byref() calls are not needed.
create_polygon = GeomOutput(
    "GEOSGeom_createPolygon",
    argtypes=[GEOM_PTR, POINTER(GEOM_PTR), c_uint],
)
create_empty_polygon = GeomOutput("GEOSGeom_createEmptyPolygon", argtypes=[])
create_collection = GeomOutput(
    "GEOSGeom_createCollection",
    argtypes=[c_int, POINTER(GEOM_PTR), c_uint],
)
# Ring routines
get_extring = GeomOutput("GEOSGetExteriorRing", argtypes=[GEOM_PTR])
get_intring = GeomOutput("GEOSGetInteriorRingN", argtypes=[GEOM_PTR, c_int])
get_nrings = IntFromGeom("GEOSGetNumInteriorRings")
# Collection Routines
get_geomn = GeomOutput("GEOSGetGeometryN", argtypes=[GEOM_PTR, c_int])
# Cloning
geom_clone = GEOSFuncFactory("GEOSGeom_clone", argtypes=[GEOM_PTR], restype=GEOM_PTR)
# Destruction routine.
destroy_geom = GEOSFuncFactory("GEOSGeom_destroy", argtypes=[GEOM_PTR])
# SRID routines
geos_get_srid = GEOSFuncFactory("GEOSGetSRID", argtypes=[GEOM_PTR], restype=c_int)
geos_set_srid = GEOSFuncFactory("GEOSSetSRID", argtypes=[GEOM_PTR, c_int])
|
60483c05b32a1d1331ab2f5a416c48b6a7daf34873ac6a6ef46687e83ddf8288 | """
This module contains all of the GEOS ctypes function prototypes. Each
prototype handles the interaction between the GEOS library and Python
via ctypes.
"""
from django.contrib.gis.geos.prototypes.coordseq import ( # NOQA
create_cs,
cs_clone,
cs_getdims,
cs_getordinate,
cs_getsize,
cs_getx,
cs_gety,
cs_getz,
cs_is_ccw,
cs_setordinate,
cs_setx,
cs_sety,
cs_setz,
get_cs,
)
from django.contrib.gis.geos.prototypes.geom import ( # NOQA
create_collection,
create_empty_polygon,
create_linearring,
create_linestring,
create_point,
create_polygon,
destroy_geom,
geom_clone,
geos_get_srid,
geos_makevalid,
geos_normalize,
geos_set_srid,
geos_type,
geos_typeid,
get_dims,
get_extring,
get_geomn,
get_intring,
get_nrings,
get_num_coords,
get_num_geoms,
)
from django.contrib.gis.geos.prototypes.misc import * # NOQA
from django.contrib.gis.geos.prototypes.predicates import ( # NOQA
geos_contains,
geos_covers,
geos_crosses,
geos_disjoint,
geos_equals,
geos_equalsexact,
geos_hasz,
geos_intersects,
geos_isclosed,
geos_isempty,
geos_isring,
geos_issimple,
geos_isvalid,
geos_overlaps,
geos_relatepattern,
geos_touches,
geos_within,
)
from django.contrib.gis.geos.prototypes.topology import * # NOQA
|
ebb1d68b07f935615636388327c1af7654b9ac2c3405c3bb6eba9c0ccc2fff9b | """
This module houses the GEOS ctypes prototype functions for the
unary and binary predicate operations on geometries.
"""
from ctypes import c_byte, c_char_p, c_double
from django.contrib.gis.geos.libgeos import GEOM_PTR, GEOSFuncFactory
from django.contrib.gis.geos.prototypes.errcheck import check_predicate
# ## Binary & unary predicate factories ##
class UnaryPredicate(GEOSFuncFactory):
    "For GEOS unary predicate functions."
    # Predicates return a C byte; check_predicate converts 1/0 to
    # True/False and raises GEOSException on any other value.
    argtypes = [GEOM_PTR]
    restype = c_byte
    errcheck = staticmethod(check_predicate)
class BinaryPredicate(UnaryPredicate):
    "For GEOS binary predicate functions."
    # Same byte-result convention, but with two geometry arguments.
    argtypes = [GEOM_PTR, GEOM_PTR]
# ## Unary Predicates ##
geos_hasz = UnaryPredicate("GEOSHasZ")
geos_isclosed = UnaryPredicate("GEOSisClosed")
geos_isempty = UnaryPredicate("GEOSisEmpty")
geos_isring = UnaryPredicate("GEOSisRing")
geos_issimple = UnaryPredicate("GEOSisSimple")
geos_isvalid = UnaryPredicate("GEOSisValid")
# ## Binary Predicates ##
geos_contains = BinaryPredicate("GEOSContains")
geos_covers = BinaryPredicate("GEOSCovers")
geos_crosses = BinaryPredicate("GEOSCrosses")
geos_disjoint = BinaryPredicate("GEOSDisjoint")
geos_equals = BinaryPredicate("GEOSEquals")
# Takes an extra double tolerance argument.
geos_equalsexact = BinaryPredicate(
    "GEOSEqualsExact", argtypes=[GEOM_PTR, GEOM_PTR, c_double]
)
geos_intersects = BinaryPredicate("GEOSIntersects")
geos_overlaps = BinaryPredicate("GEOSOverlaps")
# Takes an extra DE-9IM pattern string argument.
geos_relatepattern = BinaryPredicate(
    "GEOSRelatePattern", argtypes=[GEOM_PTR, GEOM_PTR, c_char_p]
)
geos_touches = BinaryPredicate("GEOSTouches")
geos_within = BinaryPredicate("GEOSWithin")
|
696e242dec37b5d5d9e0d98984e17634563cdfb00289de959befdad74113e50f | """
Error checking functions for GEOS ctypes prototype functions.
"""
from ctypes import c_void_p, string_at
from django.contrib.gis.geos.error import GEOSException
from django.contrib.gis.geos.libgeos import GEOSFuncFactory
# Getting the `free` routine used to free the memory allocated for
# string pointers returned by GEOS.
# (Only the argument type is declared; restype is left to the factory
# default -- GEOSFree presumably returns void.)
free = GEOSFuncFactory("GEOSFree")
free.argtypes = [c_void_p]
def last_arg_byref(args):
    """Return the value of the final by-reference C argument."""
    ref = args[-1]
    return ref._obj.value
def check_dbl(result, func, cargs):
    """
    Validate the status code of a double-returning routine and fetch the
    double value that was passed in by reference.
    """
    # Any status other than 1 means the value is unavailable.
    if result != 1:
        return None
    return last_arg_byref(cargs)
def check_geom(result, func, cargs):
    """Validate a geometry pointer returned from GEOS."""
    if result:
        return result
    raise GEOSException(
        'Error encountered checking Geometry returned from GEOS C function "%s".'
        % func.__name__
    )
def check_minus_one(result, func, cargs):
    """Raise on the -1 error sentinel; otherwise pass the result through."""
    if result == -1:
        raise GEOSException(
            'Error encountered in GEOS C function "%s".' % func.__name__
        )
    return result
def check_predicate(result, func, cargs):
    """Convert a GEOS predicate byte result into a Python boolean."""
    if result == 1:
        return True
    if result == 0:
        return False
    # Any other value signals an error inside GEOS.
    raise GEOSException(
        'Error encountered on GEOS C predicate function "%s".' % func.__name__
    )
def check_sized_string(result, func, cargs):
    """
    Validate an explicitly sized string result and free the GEOS-owned
    buffer at the returned pointer after copying its contents.
    """
    if not result:
        raise GEOSException(
            'Invalid string pointer returned by GEOS C function "%s"' % func.__name__
        )
    # The final argument is a by-reference c_size_t holding the length of
    # the string; use it to copy the exact number of bytes.
    size = last_arg_byref(cargs)
    s = string_at(result, size)
    # Release the memory GEOS allocated for the string.
    free(result)
    return s
def check_string(result, func, cargs):
    """
    Validate a string result and free the GEOS-allocated buffer after
    copying its contents.
    """
    if not result:
        raise GEOSException(
            'Error encountered checking string return value in GEOS C function "%s".'
            % func.__name__
        )
    # Copy the NUL-terminated string at the pointer address, then release
    # the memory GEOS allocated for it.
    s = string_at(result)
    free(result)
    return s
|
ed3360bd353c2f7732a14d153176e8c77440dea99478f0d77a32621cc9ed5e55 | """
This module houses the GEOS ctypes prototype functions for the
topological operations on geometries.
"""
from ctypes import c_double, c_int
from django.contrib.gis.geos.libgeos import GEOM_PTR, GEOSFuncFactory
from django.contrib.gis.geos.prototypes.errcheck import (
check_geom,
check_minus_one,
check_string,
)
from django.contrib.gis.geos.prototypes.geom import geos_char_p
class Topology(GEOSFuncFactory):
    "For GEOS unary topology functions."
    # Default prototype: take one geometry pointer, return a newly
    # allocated geometry pointer validated by check_geom. Routines with
    # extra parameters override `argtypes` at instantiation time.
    argtypes = [GEOM_PTR]
    restype = GEOM_PTR
    errcheck = staticmethod(check_geom)
# Topology Routines
# Each call below builds a ctypes prototype for the named GEOS C function;
# passing `argtypes` overrides the single-geometry default for routines
# that take additional arguments (buffer widths, join styles, etc.).
geos_boundary = Topology("GEOSBoundary")
geos_buffer = Topology("GEOSBuffer", argtypes=[GEOM_PTR, c_double, c_int])
geos_bufferwithstyle = Topology(
    "GEOSBufferWithStyle", argtypes=[GEOM_PTR, c_double, c_int, c_int, c_int, c_double]
)
geos_centroid = Topology("GEOSGetCentroid")
geos_convexhull = Topology("GEOSConvexHull")
geos_difference = Topology("GEOSDifference", argtypes=[GEOM_PTR, GEOM_PTR])
geos_envelope = Topology("GEOSEnvelope")
geos_intersection = Topology("GEOSIntersection", argtypes=[GEOM_PTR, GEOM_PTR])
geos_linemerge = Topology("GEOSLineMerge")
geos_pointonsurface = Topology("GEOSPointOnSurface")
geos_preservesimplify = Topology(
    "GEOSTopologyPreserveSimplify", argtypes=[GEOM_PTR, c_double]
)
geos_simplify = Topology("GEOSSimplify", argtypes=[GEOM_PTR, c_double])
geos_symdifference = Topology("GEOSSymDifference", argtypes=[GEOM_PTR, GEOM_PTR])
geos_union = Topology("GEOSUnion", argtypes=[GEOM_PTR, GEOM_PTR])
geos_unary_union = GEOSFuncFactory(
    "GEOSUnaryUnion", argtypes=[GEOM_PTR], restype=GEOM_PTR
)
# GEOSRelate returns a string, not a geometry.
geos_relate = GEOSFuncFactory(
    "GEOSRelate",
    argtypes=[GEOM_PTR, GEOM_PTR],
    restype=geos_char_p,
    errcheck=check_string,
)
# Linear referencing routines
# GEOSProject returns a double and signals failure with -1, hence
# check_minus_one instead of the geometry check.
geos_project = GEOSFuncFactory(
    "GEOSProject",
    argtypes=[GEOM_PTR, GEOM_PTR],
    restype=c_double,
    errcheck=check_minus_one,
)
geos_interpolate = Topology("GEOSInterpolate", argtypes=[GEOM_PTR, c_double])
geos_project_normalized = GEOSFuncFactory(
    "GEOSProjectNormalized",
    argtypes=[GEOM_PTR, GEOM_PTR],
    restype=c_double,
    errcheck=check_minus_one,
)
geos_interpolate_normalized = Topology(
    "GEOSInterpolateNormalized", argtypes=[GEOM_PTR, c_double]
)
|
83288e3ab5f1637afb27d0d6376a55bccebe80ad932fdf7babf92811a066ad60 | import threading
from ctypes import POINTER, Structure, byref, c_byte, c_char_p, c_int, c_size_t
from django.contrib.gis.geos.base import GEOSBase
from django.contrib.gis.geos.libgeos import (
GEOM_PTR,
GEOSFuncFactory,
geos_version_tuple,
)
from django.contrib.gis.geos.prototypes.errcheck import (
check_geom,
check_sized_string,
check_string,
)
from django.contrib.gis.geos.prototypes.geom import c_uchar_p, geos_char_p
from django.utils.encoding import force_bytes
# ### The WKB/WKT Reader/Writer structures and pointers ###
# Opaque ctypes stand-ins for the GEOS reader/writer handle types. GEOS
# only ever hands back pointers to these, so their internal layout is
# irrelevant; distinct classes exist purely for prototype type-safety.
class WKTReader_st(Structure):
    pass


class WKTWriter_st(Structure):
    pass


class WKBReader_st(Structure):
    pass


class WKBWriter_st(Structure):
    pass


# Pointer types used by the ctypes prototypes below.
WKT_READ_PTR = POINTER(WKTReader_st)
WKT_WRITE_PTR = POINTER(WKTWriter_st)
WKB_READ_PTR = POINTER(WKBReader_st)
# Bug fix: this previously pointed at WKBReader_st, conflating the WKB
# reader and writer handle types in every writer prototype.
WKB_WRITE_PTR = POINTER(WKBWriter_st)
# WKTReader routines
wkt_reader_create = GEOSFuncFactory("GEOSWKTReader_create", restype=WKT_READ_PTR)
wkt_reader_destroy = GEOSFuncFactory("GEOSWKTReader_destroy", argtypes=[WKT_READ_PTR])
# Parse WKT text into a new geometry pointer (validated by check_geom).
wkt_reader_read = GEOSFuncFactory(
    "GEOSWKTReader_read",
    argtypes=[WKT_READ_PTR, c_char_p],
    restype=GEOM_PTR,
    errcheck=check_geom,
)
# WKTWriter routines
wkt_writer_create = GEOSFuncFactory("GEOSWKTWriter_create", restype=WKT_WRITE_PTR)
wkt_writer_destroy = GEOSFuncFactory("GEOSWKTWriter_destroy", argtypes=[WKT_WRITE_PTR])
# Serialize a geometry to WKT; GEOS allocates the string, which
# check_string copies and frees.
wkt_writer_write = GEOSFuncFactory(
    "GEOSWKTWriter_write",
    argtypes=[WKT_WRITE_PTR, GEOM_PTR],
    restype=geos_char_p,
    errcheck=check_string,
)
# Writer configuration: output dimension (2 or 3), trailing-zero
# trimming, and rounding precision.
wkt_writer_get_outdim = GEOSFuncFactory(
    "GEOSWKTWriter_getOutputDimension", argtypes=[WKT_WRITE_PTR], restype=c_int
)
wkt_writer_set_outdim = GEOSFuncFactory(
    "GEOSWKTWriter_setOutputDimension", argtypes=[WKT_WRITE_PTR, c_int]
)
wkt_writer_set_trim = GEOSFuncFactory(
    "GEOSWKTWriter_setTrim", argtypes=[WKT_WRITE_PTR, c_byte]
)
wkt_writer_set_precision = GEOSFuncFactory(
    "GEOSWKTWriter_setRoundingPrecision", argtypes=[WKT_WRITE_PTR, c_int]
)
# WKBReader routines
wkb_reader_create = GEOSFuncFactory("GEOSWKBReader_create", restype=WKB_READ_PTR)
wkb_reader_destroy = GEOSFuncFactory("GEOSWKBReader_destroy", argtypes=[WKB_READ_PTR])
class WKBReadFunc(GEOSFuncFactory):
    # Although the function definitions take `const unsigned char *`
    # as their parameter, we use c_char_p here so the function may
    # take Python strings directly as parameters. Inside Python there
    # is not a difference between signed and unsigned characters, so
    # it is not a problem.
    # Signature: (reader handle, buffer, buffer length) -> new geometry.
    argtypes = [WKB_READ_PTR, c_char_p, c_size_t]
    restype = GEOM_PTR
    errcheck = staticmethod(check_geom)
# Binary and hex-encoded WKB parsing entry points.
wkb_reader_read = WKBReadFunc("GEOSWKBReader_read")
wkb_reader_read_hex = WKBReadFunc("GEOSWKBReader_readHEX")
# WKBWriter routines
wkb_writer_create = GEOSFuncFactory("GEOSWKBWriter_create", restype=WKB_WRITE_PTR)
wkb_writer_destroy = GEOSFuncFactory("GEOSWKBWriter_destroy", argtypes=[WKB_WRITE_PTR])
# WKB Writing prototypes.
class WKBWriteFunc(GEOSFuncFactory):
    # Signature: (writer handle, geometry, out-param size_t) -> sized
    # byte string; check_sized_string copies and frees the GEOS buffer.
    argtypes = [WKB_WRITE_PTR, GEOM_PTR, POINTER(c_size_t)]
    restype = c_uchar_p
    errcheck = staticmethod(check_sized_string)
wkb_writer_write = WKBWriteFunc("GEOSWKBWriter_write")
wkb_writer_write_hex = WKBWriteFunc("GEOSWKBWriter_writeHEX")
# WKBWriter property getter/setter prototypes.
class WKBWriterGet(GEOSFuncFactory):
    # Getters take only the writer handle and return an int by default.
    argtypes = [WKB_WRITE_PTR]
    restype = c_int
class WKBWriterSet(GEOSFuncFactory):
    # Setters take the writer handle plus the new int value by default.
    argtypes = [WKB_WRITE_PTR, c_int]
wkb_writer_get_byteorder = WKBWriterGet("GEOSWKBWriter_getByteOrder")
wkb_writer_set_byteorder = WKBWriterSet("GEOSWKBWriter_setByteOrder")
wkb_writer_get_outdim = WKBWriterGet("GEOSWKBWriter_getOutputDimension")
wkb_writer_set_outdim = WKBWriterSet("GEOSWKBWriter_setOutputDimension")
# The include-SRID flag is a byte rather than an int.
wkb_writer_get_include_srid = WKBWriterGet(
    "GEOSWKBWriter_getIncludeSRID", restype=c_byte
)
wkb_writer_set_include_srid = WKBWriterSet(
    "GEOSWKBWriter_setIncludeSRID", argtypes=[WKB_WRITE_PTR, c_byte]
)
# ### Base I/O Class ###
class IOBase(GEOSBase):
    "Base class for GEOS I/O objects."

    def __init__(self):
        # Getting the pointer with the constructor.
        self.ptr = self._constructor()
        # Loading the real destructor function at this point as doing it in
        # __del__ is too late (import error).
        self.destructor.func
# ### Base WKB/WKT Reading and Writing objects ###
# Non-public WKB/WKT reader classes for internal use because
# their `read` methods return _pointers_ instead of GEOSGeometry
# objects.
class _WKTReader(IOBase):
    _constructor = wkt_reader_create
    ptr_type = WKT_READ_PTR
    destructor = wkt_reader_destroy

    def read(self, wkt):
        "Return a _pointer_ to a C GEOS geometry parsed from the given WKT."
        if isinstance(wkt, (bytes, str)):
            return wkt_reader_read(self.ptr, force_bytes(wkt))
        raise TypeError
class _WKBReader(IOBase):
    _constructor = wkb_reader_create
    ptr_type = WKB_READ_PTR
    destructor = wkb_reader_destroy

    def read(self, wkb):
        "Return a _pointer_ to C GEOS Geometry object from the given WKB."
        if isinstance(wkb, memoryview):
            # Raw binary WKB: materialize the buffer before handing it
            # to GEOS along with its length.
            raw = bytes(wkb)
            return wkb_reader_read(self.ptr, raw, len(raw))
        if isinstance(wkb, (bytes, str)):
            # Hex-encoded WKB (HEXEWKB).
            return wkb_reader_read_hex(self.ptr, wkb, len(wkb))
        raise TypeError
# ### WKB/WKT Writer Classes ###
class WKTWriter(IOBase):
    _constructor = wkt_writer_create
    ptr_type = WKT_WRITE_PTR
    destructor = wkt_writer_destroy

    # Class-level defaults matching a freshly created GEOS writer; the
    # setters below only call into GEOS when a value actually changes.
    _trim = False
    _precision = None

    def __init__(self, dim=2, trim=False, precision=None):
        super().__init__()
        if bool(trim) != self._trim:
            self.trim = trim
        if precision is not None:
            self.precision = precision
        self.outdim = dim

    def write(self, geom):
        "Return the WKT representation of the given geometry."
        return wkt_writer_write(self.ptr, geom.ptr)

    @property
    def outdim(self):
        "Output coordinate dimension (2 or 3), read from the GEOS writer."
        return wkt_writer_get_outdim(self.ptr)

    @outdim.setter
    def outdim(self, value):
        if value not in (2, 3):
            raise ValueError("WKT output dimension must be 2 or 3")
        wkt_writer_set_outdim(self.ptr, value)

    @property
    def trim(self):
        "Whether trailing zeros are trimmed from coordinate output."
        return self._trim

    @trim.setter
    def trim(self, flag):
        flag = bool(flag)
        if flag != self._trim:
            self._trim = flag
            wkt_writer_set_trim(self.ptr, flag)

    @property
    def precision(self):
        "Rounding precision (non-negative int) or None for no rounding."
        return self._precision

    @precision.setter
    def precision(self, value):
        if value is not None and (not isinstance(value, int) or value < 0):
            raise AttributeError(
                "WKT output rounding precision must be non-negative integer or None."
            )
        if value != self._precision:
            self._precision = value
            # GEOS uses -1 to disable rounding.
            wkt_writer_set_precision(self.ptr, -1 if value is None else value)
class WKBWriter(IOBase):
    """Serialize geometries to WKB / HEXEWKB via a GEOS writer handle."""

    _constructor = wkb_writer_create
    ptr_type = WKB_WRITE_PTR
    destructor = wkb_writer_destroy
    # Cached once per class; used to work around old-GEOS output bugs below.
    geos_version = geos_version_tuple()

    def __init__(self, dim=2):
        super().__init__()
        self.outdim = dim

    def _handle_empty_point(self, geom):
        # Empty points have no WKB form; substitute the PostGIS NaN
        # convention when writing EWKB (srid flag set), otherwise fail.
        from django.contrib.gis.geos import Point

        if isinstance(geom, Point) and geom.empty:
            if self.srid:
                # PostGIS uses POINT(NaN NaN) for WKB representation of empty
                # points. Use it for EWKB as it's a PostGIS specific format.
                # https://trac.osgeo.org/postgis/ticket/3181
                geom = Point(float("NaN"), float("NaN"), srid=geom.srid)
            else:
                raise ValueError("Empty point is not representable in WKB.")
        return geom

    def write(self, geom):
        "Return the WKB representation of the given geometry."
        from django.contrib.gis.geos import Polygon

        geom = self._handle_empty_point(geom)
        wkb = wkb_writer_write(self.ptr, geom.ptr, byref(c_size_t()))
        if self.geos_version < (3, 6, 1) and isinstance(geom, Polygon) and geom.empty:
            # Fix GEOS output for empty polygon.
            # See https://trac.osgeo.org/geos/ticket/680.
            wkb = wkb[:-8] + b"\0" * 4
        return memoryview(wkb)

    def write_hex(self, geom):
        "Return the HEXEWKB representation of the given geometry."
        from django.contrib.gis.geos.polygon import Polygon

        geom = self._handle_empty_point(geom)
        wkb = wkb_writer_write_hex(self.ptr, geom.ptr, byref(c_size_t()))
        if self.geos_version < (3, 6, 1) and isinstance(geom, Polygon) and geom.empty:
            # Same empty-polygon fix as write(), in hex (2 chars per byte).
            wkb = wkb[:-16] + b"0" * 8
        return wkb

    # ### WKBWriter Properties ###

    # Property for getting/setting the byteorder.
    def _get_byteorder(self):
        return wkb_writer_get_byteorder(self.ptr)

    def _set_byteorder(self, order):
        if order not in (0, 1):
            raise ValueError(
                "Byte order parameter must be 0 (Big Endian) or 1 (Little Endian)."
            )
        wkb_writer_set_byteorder(self.ptr, order)

    byteorder = property(_get_byteorder, _set_byteorder)

    # Property for getting/setting the output dimension.
    @property
    def outdim(self):
        return wkb_writer_get_outdim(self.ptr)

    @outdim.setter
    def outdim(self, new_dim):
        if new_dim not in (2, 3):
            raise ValueError("WKB output dimension must be 2 or 3")
        wkb_writer_set_outdim(self.ptr, new_dim)

    # Property for getting/setting the include srid flag.
    @property
    def srid(self):
        return bool(wkb_writer_get_include_srid(self.ptr))

    @srid.setter
    def srid(self, include):
        wkb_writer_set_include_srid(self.ptr, bool(include))
# `ThreadLocalIO` object holds instances of the WKT and WKB reader/writer
# objects that are local to the thread. The `GEOSGeometry` internals
# access these instances by calling the module-level functions, defined
# below.
class ThreadLocalIO(threading.local):
    # Each slot is lazily populated by the accessor of the same name.
    wkt_r = None
    wkt_w = None
    wkb_r = None
    wkb_w = None
    ewkb_w = None
thread_context = ThreadLocalIO()
# These module-level routines return the I/O object that is local to the
# thread. If the I/O object does not exist yet it will be initialized.
def wkt_r():
    "Return this thread's WKT reader, creating it on first use."
    if thread_context.wkt_r is None:
        thread_context.wkt_r = _WKTReader()
    return thread_context.wkt_r
def wkt_w(dim=2, trim=False, precision=None):
    "Return this thread's WKT writer, reconfigured with the given options."
    writer = thread_context.wkt_w
    if writer is None:
        writer = WKTWriter(dim=dim, trim=trim, precision=precision)
        thread_context.wkt_w = writer
    else:
        # Reuse the cached writer, but apply the requested settings.
        writer.outdim = dim
        writer.trim = trim
        writer.precision = precision
    return writer
def wkb_r():
    "Return this thread's WKB reader, creating it on first use."
    if thread_context.wkb_r is None:
        thread_context.wkb_r = _WKBReader()
    return thread_context.wkb_r
def wkb_w(dim=2):
    "Return this thread's WKB writer with the given output dimension."
    writer = thread_context.wkb_w
    if writer is None:
        writer = WKBWriter(dim=dim)
        thread_context.wkb_w = writer
    else:
        writer.outdim = dim
    return writer
def ewkb_w(dim=2):
    "Return this thread's EWKB writer (WKB with the SRID embedded)."
    writer = thread_context.ewkb_w
    if writer is None:
        writer = WKBWriter(dim=dim)
        writer.srid = True
        thread_context.ewkb_w = writer
    else:
        writer.outdim = dim
    return writer
|
dc49350d3783a38041b12ecb968b1e7921cf073ef456efb8985f9dc528f25cb5 | """
This module is for the miscellaneous GEOS routines, particularly the
ones that return the area, distance, and length.
"""
from ctypes import POINTER, c_double, c_int
from django.contrib.gis.geos.libgeos import GEOM_PTR, GEOSFuncFactory
from django.contrib.gis.geos.prototypes.errcheck import check_dbl, check_string
from django.contrib.gis.geos.prototypes.geom import geos_char_p
__all__ = ["geos_area", "geos_distance", "geos_length", "geos_isvalidreason"]
class DblFromGeom(GEOSFuncFactory):
    """
    Argument is a Geometry, return type is double that is passed
    in by reference as the last argument.
    """

    restype = c_int  # Status code returned
    # check_dbl validates the status and extracts the by-reference double.
    errcheck = staticmethod(check_dbl)
# ### ctypes prototypes ###
# Area, distance, and length prototypes.
# Each writes its double result into the trailing by-reference argument.
geos_area = DblFromGeom("GEOSArea", argtypes=[GEOM_PTR, POINTER(c_double)])
geos_distance = DblFromGeom(
    "GEOSDistance", argtypes=[GEOM_PTR, GEOM_PTR, POINTER(c_double)]
)
geos_length = DblFromGeom("GEOSLength", argtypes=[GEOM_PTR, POINTER(c_double)])
# Returns a human-readable invalidity reason string (freed by check_string).
geos_isvalidreason = GEOSFuncFactory(
    "GEOSisValidReason", restype=geos_char_p, errcheck=check_string, argtypes=[GEOM_PTR]
)
|
e08f694bbe50e4c675cfc035bf498a926742972df72a3ff4b0372bab13a9a733 | from ctypes import c_byte
from django.contrib.gis.geos.libgeos import GEOM_PTR, PREPGEOM_PTR, GEOSFuncFactory
from django.contrib.gis.geos.prototypes.errcheck import check_predicate
# Prepared geometry constructor and destructors.
geos_prepare = GEOSFuncFactory("GEOSPrepare", argtypes=[GEOM_PTR], restype=PREPGEOM_PTR)
prepared_destroy = GEOSFuncFactory("GEOSPreparedGeom_destroy", argtypes=[PREPGEOM_PTR])
# Prepared geometry binary predicate support.
class PreparedPredicate(GEOSFuncFactory):
    # Signature: (prepared geometry, other geometry) -> 0/1/2 status byte,
    # converted to bool (or GEOSException) by check_predicate.
    argtypes = [PREPGEOM_PTR, GEOM_PTR]
    restype = c_byte
    errcheck = staticmethod(check_predicate)
prepared_contains = PreparedPredicate("GEOSPreparedContains")
prepared_contains_properly = PreparedPredicate("GEOSPreparedContainsProperly")
prepared_covers = PreparedPredicate("GEOSPreparedCovers")
prepared_crosses = PreparedPredicate("GEOSPreparedCrosses")
prepared_disjoint = PreparedPredicate("GEOSPreparedDisjoint")
prepared_intersects = PreparedPredicate("GEOSPreparedIntersects")
prepared_overlaps = PreparedPredicate("GEOSPreparedOverlaps")
prepared_touches = PreparedPredicate("GEOSPreparedTouches")
prepared_within = PreparedPredicate("GEOSPreparedWithin")
|
7c87122339b209b6b341247e5ddbc2c2d3f661922d42eaf5636eef7c029735fc | from ctypes import POINTER, c_byte, c_double, c_int, c_uint
from django.contrib.gis.geos.libgeos import CS_PTR, GEOM_PTR, GEOSFuncFactory
from django.contrib.gis.geos.prototypes.errcheck import GEOSException, last_arg_byref
# ## Error-checking routines specific to coordinate sequences. ##
def check_cs_op(result, func, cargs):
    "Check the status code of a coordinate sequence operation."
    # GEOS coordinate-sequence routines return 0 on failure.
    if not result:
        raise GEOSException("Could not set value on coordinate sequence")
    return result
def check_cs_get(result, func, cargs):
    "Check the coordinate sequence retrieval."
    # Validate the status code first (raises GEOSException on failure),
    # then pull the value GEOS wrote into the final by-reference argument.
    check_cs_op(result, func, cargs)
    return last_arg_byref(cargs)
# ## Coordinate sequence prototype factory classes. ##
class CsInt(GEOSFuncFactory):
    "For coordinate sequence routines that return an integer."
    # The unsigned int result is written into the by-reference second
    # argument; check_cs_get validates the status and extracts it.
    argtypes = [CS_PTR, POINTER(c_uint)]
    restype = c_int
    errcheck = staticmethod(check_cs_get)
class CsOperation(GEOSFuncFactory):
    "For coordinate sequence operations."
    restype = c_int

    def __init__(self, *args, ordinate=False, get=False, **kwargs):
        # Getters receive the double by reference and need the extracting
        # error check; setters pass the double by value.
        dbl_param = POINTER(c_double) if get else c_double
        errcheck = check_cs_get if get else check_cs_op
        # Get/Set ordinate routines take an extra uint (the ordinate index).
        if ordinate:
            argtypes = [CS_PTR, c_uint, c_uint, dbl_param]
        else:
            argtypes = [CS_PTR, c_uint, dbl_param]
        kwargs.update(errcheck=errcheck, argtypes=argtypes)
        super().__init__(*args, **kwargs)
class CsOutput(GEOSFuncFactory):
    "For routines returning a new coordinate sequence pointer."
    restype = CS_PTR

    @staticmethod
    def errcheck(result, func, cargs):
        # A NULL pointer signals failure inside GEOS.
        if result:
            return result
        raise GEOSException(
            "Error encountered checking Coordinate Sequence returned from GEOS "
            'C function "%s".' % func.__name__
        )
# ## Coordinate Sequence ctypes prototypes ##
# Coordinate Sequence constructors & cloning.
cs_clone = CsOutput("GEOSCoordSeq_clone", argtypes=[CS_PTR])
create_cs = CsOutput("GEOSCoordSeq_create", argtypes=[c_uint, c_uint])
get_cs = CsOutput("GEOSGeom_getCoordSeq", argtypes=[GEOM_PTR])
# Getting, setting ordinate
cs_getordinate = CsOperation("GEOSCoordSeq_getOrdinate", ordinate=True, get=True)
cs_setordinate = CsOperation("GEOSCoordSeq_setOrdinate", ordinate=True)
# For getting, x, y, z
cs_getx = CsOperation("GEOSCoordSeq_getX", get=True)
cs_gety = CsOperation("GEOSCoordSeq_getY", get=True)
cs_getz = CsOperation("GEOSCoordSeq_getZ", get=True)
# For setting, x, y, z
cs_setx = CsOperation("GEOSCoordSeq_setX")
cs_sety = CsOperation("GEOSCoordSeq_setY")
cs_setz = CsOperation("GEOSCoordSeq_setZ")
# These routines return size & dimensions.
cs_getsize = CsInt("GEOSCoordSeq_getSize")
cs_getdims = CsInt("GEOSCoordSeq_getDimensions")
cs_is_ccw = GEOSFuncFactory(
"GEOSCoordSeq_isCCW", restype=c_int, argtypes=[CS_PTR, POINTER(c_byte)]
)
|
47a76a11d8c441b4d4278d1e0d1b779c2ae8e4adf87441b479ec200ef3d72037 | """
Tests for django.core.servers.
"""
import errno
import os
import socket
import threading
from http.client import HTTPConnection
from urllib.error import HTTPError
from urllib.parse import urlencode
from urllib.request import urlopen
from django.conf import settings
from django.core.servers.basehttp import ThreadedWSGIServer, WSGIServer
from django.db import DEFAULT_DB_ALIAS, connections
from django.test import LiveServerTestCase, override_settings
from django.test.testcases import LiveServerThread, QuietWSGIRequestHandler
from .models import Person
# Static and media files for the live-server tests live alongside this module.
TEST_ROOT = os.path.dirname(__file__)
TEST_SETTINGS = {
    "MEDIA_URL": "media/",
    "MEDIA_ROOT": os.path.join(TEST_ROOT, "media"),
    "STATIC_URL": "static/",
    "STATIC_ROOT": os.path.join(TEST_ROOT, "static"),
}
@override_settings(ROOT_URLCONF="servers.urls", **TEST_SETTINGS)
class LiveServerBase(LiveServerTestCase):
    """Common base: installed apps, fixture data, and a urlopen() helper."""

    available_apps = [
        "servers",
        "django.contrib.auth",
        "django.contrib.contenttypes",
        "django.contrib.sessions",
    ]
    fixtures = ["testdata.json"]

    def urlopen(self, url):
        # Resolve relative URLs against the running live server.
        return urlopen(self.live_server_url + url)
class CloseConnectionTestServer(ThreadedWSGIServer):
    """ThreadedWSGIServer that signals when request DB connections close."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # This event is set right after the first time a request closes its
        # database connections.
        self._connections_closed = threading.Event()

    def _close_connections(self):
        super()._close_connections()
        self._connections_closed.set()
class CloseConnectionTestLiveServerThread(LiveServerThread):
    """Server thread that uses the connection-signaling test server."""

    server_class = CloseConnectionTestServer

    def _create_server(self, connections_override=None):
        # Always forward this thread's connections_override so the server
        # shares the test case's database connection.
        return super()._create_server(connections_override=self.connections_override)
class LiveServerTestCloseConnectionTest(LiveServerBase):
    """Verify that ThreadedWSGIServer closes database connections per request."""

    server_thread_class = CloseConnectionTestLiveServerThread

    @classmethod
    def _make_connections_override(cls):
        conn = connections[DEFAULT_DB_ALIAS]
        cls.conn = conn
        cls.old_conn_max_age = conn.settings_dict["CONN_MAX_AGE"]
        # Set the connection's CONN_MAX_AGE to None to simulate the
        # CONN_MAX_AGE setting being set to None on the server. This prevents
        # Django from closing the connection and allows testing that
        # ThreadedWSGIServer closes connections.
        conn.settings_dict["CONN_MAX_AGE"] = None
        # Pass a database connection through to the server to check it is being
        # closed by ThreadedWSGIServer.
        return {DEFAULT_DB_ALIAS: conn}

    @classmethod
    def tearDownConnectionTest(cls):
        # Restore the CONN_MAX_AGE mutated in _make_connections_override().
        cls.conn.settings_dict["CONN_MAX_AGE"] = cls.old_conn_max_age

    @classmethod
    def tearDownClass(cls):
        cls.tearDownConnectionTest()
        super().tearDownClass()

    def test_closes_connections(self):
        # The server's request thread sets this event after closing
        # its database connections.
        closed_event = self.server_thread.httpd._connections_closed
        conn = self.conn
        # Open a connection to the database.
        conn.connect()
        self.assertIsNotNone(conn.connection)
        with self.urlopen("/model_view/") as f:
            # The server can access the database.
            self.assertEqual(f.read().splitlines(), [b"jane", b"robert"])
        # Wait for the server's request thread to close the connection.
        # A timeout of 0.1 seconds should be more than enough. If the wait
        # times out, the assertion after should fail.
        closed_event.wait(timeout=0.1)
        self.assertIsNone(conn.connection)
class FailingLiveServerThread(LiveServerThread):
    """Server thread whose server creation always fails (setup-error tests)."""

    def _create_server(self):
        raise RuntimeError("Error creating server.")
class LiveServerTestCaseSetupTest(LiveServerBase):
    """setUpClass() failures must revert the ALLOWED_HOSTS modification."""

    server_thread_class = FailingLiveServerThread

    @classmethod
    def check_allowed_hosts(cls, expected):
        if settings.ALLOWED_HOSTS != expected:
            raise RuntimeError(f"{settings.ALLOWED_HOSTS} != {expected}")

    @classmethod
    def setUpClass(cls):
        cls.check_allowed_hosts(["testserver"])
        try:
            super().setUpClass()
        except RuntimeError:
            # LiveServerTestCase's change to ALLOWED_HOSTS should be reverted.
            cls.doClassCleanups()
            cls.check_allowed_hosts(["testserver"])
        else:
            raise RuntimeError("Server did not fail.")
        cls.set_up_called = True

    def test_set_up_class(self):
        self.assertIs(self.set_up_called, True)
class LiveServerAddress(LiveServerBase):
    """live_server_url must be available as a class-level property."""

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        # put it in a list to prevent descriptor lookups in test
        cls.live_server_url_test = [cls.live_server_url]

    def test_live_server_url_is_class_property(self):
        self.assertIsInstance(self.live_server_url_test[0], str)
        self.assertEqual(self.live_server_url_test[0], self.live_server_url)
class LiveServerSingleThread(LiveServerThread):
    """Server thread running a non-threaded WSGIServer (one request at a time)."""

    def _create_server(self):
        return WSGIServer(
            (self.host, self.port), QuietWSGIRequestHandler, allow_reuse_address=False
        )
class SingleThreadLiveServerTestCase(LiveServerTestCase):
    """LiveServerTestCase variant backed by the single-threaded server."""

    server_thread_class = LiveServerSingleThread
class LiveServerViews(LiveServerBase):
    """HTTP-level behavior of the live server: keep-alive, static/media, 404s."""

    def test_protocol(self):
        """Launched server serves with HTTP 1.1."""
        with self.urlopen("/example_view/") as f:
            self.assertEqual(f.version, 11)

    def test_closes_connection_without_content_length(self):
        """
        An HTTP 1.1 server is supposed to support keep-alive. Since our
        development server is rather simple we support it only in cases where
        we can detect a content length from the response. This should be doable
        for all simple views and streaming responses where an iterable with
        length of one is passed. The latter follows as result of `set_content_length`
        from https://github.com/python/cpython/blob/master/Lib/wsgiref/handlers.py.

        If we cannot detect a content length we explicitly set the `Connection`
        header to `close` to notify the client that we do not actually support
        it.
        """
        conn = HTTPConnection(
            LiveServerViews.server_thread.host,
            LiveServerViews.server_thread.port,
            timeout=1,
        )
        try:
            conn.request(
                "GET", "/streaming_example_view/", headers={"Connection": "keep-alive"}
            )
            response = conn.getresponse()
            self.assertTrue(response.will_close)
            self.assertEqual(response.read(), b"Iamastream")
            self.assertEqual(response.status, 200)
            self.assertEqual(response.getheader("Connection"), "close")

            conn.request(
                "GET", "/streaming_example_view/", headers={"Connection": "close"}
            )
            response = conn.getresponse()
            self.assertTrue(response.will_close)
            self.assertEqual(response.read(), b"Iamastream")
            self.assertEqual(response.status, 200)
            self.assertEqual(response.getheader("Connection"), "close")
        finally:
            conn.close()

    def test_keep_alive_on_connection_with_content_length(self):
        """
        See `test_closes_connection_without_content_length` for details. This
        is a follow up test, which ensure that we do not close the connection
        if not needed, hence allowing us to take advantage of keep-alive.
        """
        conn = HTTPConnection(
            LiveServerViews.server_thread.host, LiveServerViews.server_thread.port
        )
        try:
            conn.request("GET", "/example_view/", headers={"Connection": "keep-alive"})
            response = conn.getresponse()
            self.assertFalse(response.will_close)
            self.assertEqual(response.read(), b"example view")
            self.assertEqual(response.status, 200)
            self.assertIsNone(response.getheader("Connection"))

            conn.request("GET", "/example_view/", headers={"Connection": "close"})
            response = conn.getresponse()
            self.assertFalse(response.will_close)
            self.assertEqual(response.read(), b"example view")
            self.assertEqual(response.status, 200)
            self.assertIsNone(response.getheader("Connection"))
        finally:
            conn.close()

    def test_keep_alive_connection_clears_previous_request_data(self):
        # Reusing a kept-alive connection must not leak the prior request body.
        conn = HTTPConnection(
            LiveServerViews.server_thread.host, LiveServerViews.server_thread.port
        )
        try:
            conn.request(
                "POST", "/method_view/", b"{}", headers={"Connection": "keep-alive"}
            )
            response = conn.getresponse()
            self.assertFalse(response.will_close)
            self.assertEqual(response.status, 200)
            self.assertEqual(response.read(), b"POST")

            conn.request(
                "POST", "/method_view/", b"{}", headers={"Connection": "close"}
            )
            response = conn.getresponse()
            self.assertFalse(response.will_close)
            self.assertEqual(response.status, 200)
            self.assertEqual(response.read(), b"POST")
        finally:
            conn.close()

    def test_404(self):
        with self.assertRaises(HTTPError) as err:
            self.urlopen("/")
        err.exception.close()
        self.assertEqual(err.exception.code, 404, "Expected 404 response")

    def test_view(self):
        with self.urlopen("/example_view/") as f:
            self.assertEqual(f.read(), b"example view")

    def test_static_files(self):
        with self.urlopen("/static/example_static_file.txt") as f:
            self.assertEqual(f.read().rstrip(b"\r\n"), b"example static file")

    def test_no_collectstatic_emulation(self):
        """
        LiveServerTestCase reports a 404 status code when HTTP client
        tries to access a static file that isn't explicitly put under
        STATIC_ROOT.
        """
        with self.assertRaises(HTTPError) as err:
            self.urlopen("/static/another_app/another_app_static_file.txt")
        err.exception.close()
        self.assertEqual(err.exception.code, 404, "Expected 404 response")

    def test_media_files(self):
        with self.urlopen("/media/example_media_file.txt") as f:
            self.assertEqual(f.read().rstrip(b"\r\n"), b"example media file")

    def test_environ(self):
        # Non-ASCII query strings must be percent-encoded in QUERY_STRING.
        with self.urlopen("/environ_view/?%s" % urlencode({"q": "тест"})) as f:
            self.assertIn(b"QUERY_STRING: 'q=%D1%82%D0%B5%D1%81%D1%82'", f.read())
@override_settings(ROOT_URLCONF="servers.urls")
class SingleThreadLiveServerViews(SingleThreadLiveServerTestCase):
    """Keep-alive behavior when the server cannot handle concurrent requests."""

    available_apps = ["servers"]

    def test_closes_connection_with_content_length(self):
        """
        Contrast to
        LiveServerViews.test_keep_alive_on_connection_with_content_length().
        Persistent connections require threading server.
        """
        conn = HTTPConnection(
            SingleThreadLiveServerViews.server_thread.host,
            SingleThreadLiveServerViews.server_thread.port,
            timeout=1,
        )
        try:
            conn.request("GET", "/example_view/", headers={"Connection": "keep-alive"})
            response = conn.getresponse()
            self.assertTrue(response.will_close)
            self.assertEqual(response.read(), b"example view")
            self.assertEqual(response.status, 200)
            self.assertEqual(response.getheader("Connection"), "close")
        finally:
            conn.close()
class LiveServerDatabase(LiveServerBase):
    """Database visibility between the test thread and the server thread."""

    def test_fixtures_loaded(self):
        """
        Fixtures are properly loaded and visible to the live server thread.
        """
        with self.urlopen("/model_view/") as f:
            self.assertEqual(f.read().splitlines(), [b"jane", b"robert"])

    def test_database_writes(self):
        """
        Data written to the database by a view can be read.
        """
        with self.urlopen("/create_model_instance/"):
            pass
        self.assertQuerysetEqual(
            Person.objects.order_by("pk"),
            ["jane", "robert", "emily"],
            lambda b: b.name,
        )
class LiveServerPort(LiveServerBase):
    """Port binding behavior of concurrently running live server threads."""

    def test_port_bind(self):
        """
        Each LiveServerTestCase binds to a unique port or fails to start a
        server thread when run concurrently (#26011).
        """
        TestCase = type("TestCase", (LiveServerBase,), {})
        try:
            TestCase._start_server_thread()
        except OSError as e:
            if e.errno == errno.EADDRINUSE:
                # We're out of ports, LiveServerTestCase correctly fails with
                # an OSError.
                return
            # Unexpected error.
            raise
        self.assertNotEqual(
            self.live_server_url,
            TestCase.live_server_url,
            f"Acquired duplicate server addresses for server threads: "
            f"{self.live_server_url}",
        )

    def test_specified_port_bind(self):
        """LiveServerTestCase.port customizes the server's port."""
        TestCase = type("TestCase", (LiveServerBase,), {})
        # Find an open port and tell TestCase to use it.
        s = socket.socket()
        s.bind(("", 0))
        TestCase.port = s.getsockname()[1]
        s.close()
        TestCase._start_server_thread()
        self.assertEqual(
            TestCase.port,
            TestCase.server_thread.port,
            f"Did not use specified port for LiveServerTestCase thread: "
            f"{TestCase.port}",
        )
class LiveServerThreadedTests(LiveServerBase):
    """If LiveServerTestCase isn't threaded, these tests will hang."""

    def test_view_calls_subview(self):
        # The view issues a second HTTP request back to this same server.
        url = "/subview_calling_view/?%s" % urlencode({"url": self.live_server_url})
        with self.urlopen(url) as f:
            self.assertEqual(f.read(), b"subview calling view: subview")

    def test_check_model_instance_from_subview(self):
        url = "/check_model_instance_from_subview/?%s" % urlencode(
            {
                "url": self.live_server_url,
            }
        )
        with self.urlopen(url) as f:
            self.assertIn(b"emily", f.read())
|
5d35774bb1e705a37014c3d7dec41debb62d8ef695ccbebf2f2100a03bdd7625 | from django.urls import path
from . import views
# URLConf used by the live-server tests (see TEST_SETTINGS / LiveServerBase).
urlpatterns = [
    path("example_view/", views.example_view),
    path("streaming_example_view/", views.streaming_example_view),
    path("model_view/", views.model_view),
    path("create_model_instance/", views.create_model_instance),
    path("environ_view/", views.environ_view),
    path("subview_calling_view/", views.subview_calling_view),
    path("subview/", views.subview),
    path("check_model_instance_from_subview/", views.check_model_instance_from_subview),
    path("method_view/", views.method_view),
]
|
12f88536451531e75772e5f9470355ca7d7f868a5055ae0c7070609e4fa736f5 | from io import BytesIO
from django.core.handlers.wsgi import WSGIRequest
from django.core.servers.basehttp import WSGIRequestHandler, WSGIServer
from django.test import SimpleTestCase
from django.test.client import RequestFactory
from django.test.utils import captured_stderr
class Stub:
    """Minimal attribute bag standing in for socket/server objects in tests."""

    def __init__(self, **kwargs):
        # Expose every keyword argument as an instance attribute.
        for name, value in kwargs.items():
            setattr(self, name, value)

    def sendall(self, data):
        # Mimic a socket by writing through the stubbed makefile().
        self.makefile("wb").write(data)
class WSGIRequestHandlerTestCase(SimpleTestCase):
    """Logging and header handling of the development server's request handler."""

    request_factory = RequestFactory()

    def test_log_message(self):
        # Status codes map to log levels: 2xx/3xx info, 4xx warning, 5xx error.
        request = WSGIRequest(self.request_factory.get("/").environ)
        request.makefile = lambda *args, **kwargs: BytesIO()
        handler = WSGIRequestHandler(request, "192.168.0.2", None)
        level_status_codes = {
            "info": [200, 301, 304],
            "warning": [400, 403, 404],
            "error": [500, 503],
        }
        for level, status_codes in level_status_codes.items():
            for status_code in status_codes:
                # The correct level gets the message.
                with self.assertLogs("django.server", level.upper()) as cm:
                    handler.log_message("GET %s %s", "A", str(status_code))
                self.assertIn("GET A %d" % status_code, cm.output[0])
                # Incorrect levels don't have any messages.
                for wrong_level in level_status_codes:
                    if wrong_level != level:
                        with self.assertLogs("django.server", "INFO") as cm:
                            handler.log_message("GET %s %s", "A", str(status_code))
                        self.assertNotEqual(
                            cm.records[0].levelname, wrong_level.upper()
                        )

    def test_https(self):
        # A TLS ClientHello (0x16 0x03) against the plain-HTTP server is
        # reported with a helpful error message.
        request = WSGIRequest(self.request_factory.get("/").environ)
        request.makefile = lambda *args, **kwargs: BytesIO()
        handler = WSGIRequestHandler(request, "192.168.0.2", None)
        with self.assertLogs("django.server", "ERROR") as cm:
            handler.log_message("GET %s %s", "\x16\x03", "4")
        self.assertEqual(
            "You're accessing the development server over HTTPS, "
            "but it only supports HTTP.",
            cm.records[0].getMessage(),
        )

    def test_strips_underscore_headers(self):
        """WSGIRequestHandler ignores headers containing underscores.

        This follows the lead of nginx and Apache 2.4, and is to avoid
        ambiguity between dashes and underscores in mapping to WSGI environ,
        which can have security implications.
        """

        def test_app(environ, start_response):
            """A WSGI app that just reflects its HTTP environ."""
            start_response("200 OK", [])
            http_environ_items = sorted(
                "%s:%s" % (k, v) for k, v in environ.items() if k.startswith("HTTP_")
            )
            yield (",".join(http_environ_items)).encode()

        rfile = BytesIO()
        rfile.write(b"GET / HTTP/1.0\r\n")
        rfile.write(b"Some-Header: good\r\n")
        rfile.write(b"Some_Header: bad\r\n")
        rfile.write(b"Other_Header: bad\r\n")
        rfile.seek(0)

        # WSGIRequestHandler closes the output file; we need to make this a
        # no-op so we can still read its contents.
        class UnclosableBytesIO(BytesIO):
            def close(self):
                pass

        wfile = UnclosableBytesIO()

        def makefile(mode, *a, **kw):
            if mode == "rb":
                return rfile
            elif mode == "wb":
                return wfile

        request = Stub(makefile=makefile)
        server = Stub(base_environ={}, get_app=lambda: test_app)

        # Prevent logging from appearing in test output.
        with self.assertLogs("django.server", "INFO"):
            # instantiating a handler runs the request as side effect
            WSGIRequestHandler(request, "192.168.0.2", server)

        wfile.seek(0)
        body = list(wfile.readlines())[-1]
        self.assertEqual(body, b"HTTP_SOME_HEADER:good")
class WSGIServerTestCase(SimpleTestCase):
    """Tests for WSGIServer's error handling."""

    request_factory = RequestFactory()
    def test_broken_pipe_errors(self):
        """WSGIServer handles broken pipe errors."""
        request = WSGIRequest(self.request_factory.get("/").environ)
        client_address = ("192.168.2.0", 8080)
        msg = f"- Broken pipe from {client_address}"
        # All three connection-teardown exceptions should be handled the
        # same way: logged at INFO, nothing written to stderr.
        tests = [
            BrokenPipeError,
            ConnectionAbortedError,
            ConnectionResetError,
        ]
        for exception in tests:
            with self.subTest(exception=exception):
                try:
                    server = WSGIServer(("localhost", 0), WSGIRequestHandler)
                    # handle_error() inspects the currently-handled
                    # exception, so raise it first and call from the
                    # except block.
                    try:
                        raise exception()
                    except Exception:
                        with captured_stderr() as err:
                            with self.assertLogs("django.server", "INFO") as cm:
                                server.handle_error(request, client_address)
                        # No traceback spills onto stderr; a single log
                        # record describes the broken pipe.
                        self.assertEqual(err.getvalue(), "")
                        self.assertEqual(cm.records[0].getMessage(), msg)
                finally:
                    # Release the listening socket even if assertions fail.
                    server.server_close()
|
d2214dfe8c7b7d3ec8df1e1fc3e73fa836d71746a23bafb2cb1cfabf4c3b142c | from urllib.request import urlopen
from django.http import HttpResponse, StreamingHttpResponse
from django.views.decorators.csrf import csrf_exempt
from .models import Person
def example_view(request):
    """Return a fixed plain-text response for live-server tests."""
    response = HttpResponse("example view")
    return response
def streaming_example_view(request):
    """Return a streaming response built from a fixed sequence of chunks."""
    chunks = (b"I", b"am", b"a", b"stream")
    return StreamingHttpResponse(chunks)
def model_view(request):
    """List the names of all Person rows, one per line."""
    names = [p.name for p in Person.objects.all()]
    return HttpResponse("\n".join(names))
def create_model_instance(request):
    """Insert a fixed Person row as a side effect; return an empty response."""
    Person(name="emily").save()
    return HttpResponse()
def environ_view(request):
    """Dump the WSGI environ as "key: repr(value)" lines."""
    lines = [f"{key}: {value!r}" for key, value in request.environ.items()]
    return HttpResponse("\n".join(lines))
def subview(request):
    """Trivial endpoint targeted by views that make sub-requests."""
    return HttpResponse("subview")
def subview_calling_view(request):
    """Fetch /subview/ on the URL passed in ?url= and echo its body."""
    target = request.GET["url"] + "/subview/"
    with urlopen(target) as response:
        body = response.read().decode()
    return HttpResponse("subview calling view: {}".format(body))
def check_model_instance_from_subview(request):
    """Create a model row via one sub-request, then read it back via another."""
    base = request.GET["url"]
    # First sub-request inserts the row; its (empty) body is discarded.
    with urlopen(base + "/create_model_instance/"):
        pass
    # Second sub-request lists rows, proving the insert is visible.
    with urlopen(base + "/model_view/") as response:
        body = response.read().decode()
    return HttpResponse("subview calling view: {}".format(body))
@csrf_exempt
def method_view(request):
    """Echo the HTTP method; CSRF-exempt so POST works without a token."""
    return HttpResponse(request.method)
|
2f28e5c039b81a22e6e18c89ac94de61c296a238fd269c986d9eb1f75e8aee8f | class DefaultOtherRouter:
def allow_migrate(self, db, app_label, model_name=None, **hints):
return db in {"default", "other"}
class TestRouter:
    def allow_migrate(self, db, app_label, model_name=None, **hints):
        """
        The Tribble model should be the only one to appear in the 'other' db.
        """
        # Tribble migrates only on 'other'.
        if model_name == "tribble":
            return db == "other"
        # Other models are barred from every non-default database; on
        # 'default' return None so other routers may still decide.
        if db == "default":
            return None
        return False
|
bd474b83ee1258610d0c78bd8034caaa439db2593aacfb845ca442414ea5b9ad | from django.db import migrations, models
from django.db.migrations import operations
from django.db.migrations.optimizer import MigrationOptimizer
from django.db.migrations.serializer import serializer_factory
from django.test import SimpleTestCase
from .models import EmptyManager, UnicodeModel
class OptimizerTests(SimpleTestCase):
    """
    Tests the migration autodetector.
    """
    def optimize(self, operations, app_label):
        """
        Handy shortcut for getting results + number of loops
        """
        optimizer = MigrationOptimizer()
        return optimizer.optimize(operations, app_label), optimizer._iterations
    def serialize(self, value):
        """Serialize an operation to its string form for stable comparison."""
        return serializer_factory(value).serialize()[0]
    def assertOptimizesTo(
        self, operations, expected, exact=None, less_than=None, app_label=None
    ):
        """
        Assert that `operations` optimize to `expected`, optionally bounding
        the number of optimizer iterations (`exact` or `less_than`).
        """
        result, iterations = self.optimize(operations, app_label or "migrations")
        # Compare serialized forms since operations lack __eq__.
        result = [self.serialize(f) for f in result]
        expected = [self.serialize(f) for f in expected]
        self.assertEqual(expected, result)
        if exact is not None and iterations != exact:
            raise self.failureException(
                "Optimization did not take exactly %s iterations (it took %s)"
                % (exact, iterations)
            )
        if less_than is not None and iterations >= less_than:
            raise self.failureException(
                "Optimization did not take less than %s iterations (it took %s)"
                % (less_than, iterations)
            )
    def assertDoesNotOptimize(self, operations, **kwargs):
        """Assert the optimizer leaves `operations` unchanged."""
        self.assertOptimizesTo(operations, operations, **kwargs)
    def test_none_app_label(self):
        """A None app_label is rejected with a TypeError."""
        optimizer = MigrationOptimizer()
        with self.assertRaisesMessage(TypeError, "app_label must be a str"):
            optimizer.optimize([], None)
    def test_single(self):
        """
        The optimizer does nothing on a single operation,
        and that it does it in just one pass.
        """
        self.assertOptimizesTo(
            [migrations.DeleteModel("Foo")],
            [migrations.DeleteModel("Foo")],
            exact=1,
        )
    def test_create_delete_model(self):
        """
        CreateModel and DeleteModel should collapse into nothing.
        """
        self.assertOptimizesTo(
            [
                migrations.CreateModel(
                    "Foo", [("name", models.CharField(max_length=255))]
                ),
                migrations.DeleteModel("Foo"),
            ],
            [],
        )
    def test_create_rename_model(self):
        """
        CreateModel should absorb RenameModels.
        """
        managers = [("objects", EmptyManager())]
        self.assertOptimizesTo(
            [
                migrations.CreateModel(
                    name="Foo",
                    fields=[("name", models.CharField(max_length=255))],
                    options={"verbose_name": "Foo"},
                    bases=(UnicodeModel,),
                    managers=managers,
                ),
                migrations.RenameModel("Foo", "Bar"),
            ],
            [
                migrations.CreateModel(
                    "Bar",
                    [("name", models.CharField(max_length=255))],
                    options={"verbose_name": "Foo"},
                    bases=(UnicodeModel,),
                    managers=managers,
                )
            ],
        )
    def test_rename_model_self(self):
        """
        RenameModels should absorb themselves.
        """
        self.assertOptimizesTo(
            [
                migrations.RenameModel("Foo", "Baa"),
                migrations.RenameModel("Baa", "Bar"),
            ],
            [
                migrations.RenameModel("Foo", "Bar"),
            ],
        )
    def test_create_alter_model_options(self):
        """AlterModelOptions should fold into a preceding CreateModel."""
        self.assertOptimizesTo(
            [
                migrations.CreateModel("Foo", fields=[]),
                migrations.AlterModelOptions(
                    name="Foo", options={"verbose_name_plural": "Foozes"}
                ),
            ],
            [
                migrations.CreateModel(
                    "Foo", fields=[], options={"verbose_name_plural": "Foozes"}
                ),
            ],
        )
    def test_create_model_and_remove_model_options(self):
        """AlterModelOptions that clears options folds into CreateModel."""
        self.assertOptimizesTo(
            [
                migrations.CreateModel(
                    "MyModel",
                    fields=[],
                    options={"verbose_name": "My Model"},
                ),
                migrations.AlterModelOptions("MyModel", options={}),
            ],
            [migrations.CreateModel("MyModel", fields=[])],
        )
        # Partial option removal: only the surviving option remains.
        self.assertOptimizesTo(
            [
                migrations.CreateModel(
                    "MyModel",
                    fields=[],
                    options={
                        "verbose_name": "My Model",
                        "verbose_name_plural": "My Model plural",
                    },
                ),
                migrations.AlterModelOptions(
                    "MyModel",
                    options={"verbose_name": "My Model"},
                ),
            ],
            [
                migrations.CreateModel(
                    "MyModel",
                    fields=[],
                    options={"verbose_name": "My Model"},
                ),
            ],
        )
    def _test_create_alter_foo_delete_model(self, alter_foo):
        """
        CreateModel, AlterModelTable, AlterUniqueTogether/AlterIndexTogether/
        AlterOrderWithRespectTo, and DeleteModel should collapse into nothing.
        """
        self.assertOptimizesTo(
            [
                migrations.CreateModel(
                    "Foo", [("name", models.CharField(max_length=255))]
                ),
                migrations.AlterModelTable("Foo", "woohoo"),
                alter_foo,
                migrations.DeleteModel("Foo"),
            ],
            [],
        )
    def test_create_alter_unique_delete_model(self):
        self._test_create_alter_foo_delete_model(
            migrations.AlterUniqueTogether("Foo", [["a", "b"]])
        )
    def test_create_alter_index_delete_model(self):
        self._test_create_alter_foo_delete_model(
            migrations.AlterIndexTogether("Foo", [["a", "b"]])
        )
    def test_create_alter_owrt_delete_model(self):
        self._test_create_alter_foo_delete_model(
            migrations.AlterOrderWithRespectTo("Foo", "a")
        )
    def _test_alter_alter_model(self, alter_foo, alter_bar):
        """
        Two AlterUniqueTogether/AlterIndexTogether/AlterOrderWithRespectTo
        should collapse into the second.
        """
        self.assertOptimizesTo(
            [
                alter_foo,
                alter_bar,
            ],
            [
                alter_bar,
            ],
        )
    def test_alter_alter_table_model(self):
        self._test_alter_alter_model(
            migrations.AlterModelTable("Foo", "a"),
            migrations.AlterModelTable("Foo", "b"),
        )
    def test_alter_alter_unique_model(self):
        self._test_alter_alter_model(
            migrations.AlterUniqueTogether("Foo", [["a", "b"]]),
            migrations.AlterUniqueTogether("Foo", [["a", "c"]]),
        )
    def test_alter_alter_index_model(self):
        self._test_alter_alter_model(
            migrations.AlterIndexTogether("Foo", [["a", "b"]]),
            migrations.AlterIndexTogether("Foo", [["a", "c"]]),
        )
    def test_alter_alter_owrt_model(self):
        self._test_alter_alter_model(
            migrations.AlterOrderWithRespectTo("Foo", "a"),
            migrations.AlterOrderWithRespectTo("Foo", "b"),
        )
    def test_optimize_through_create(self):
        """
        We should be able to optimize away create/delete through a create or
        delete of a different model, but only if the create operation does not
        mention the model at all.
        """
        # These should work
        self.assertOptimizesTo(
            [
                migrations.CreateModel(
                    "Foo", [("name", models.CharField(max_length=255))]
                ),
                migrations.CreateModel("Bar", [("size", models.IntegerField())]),
                migrations.DeleteModel("Foo"),
            ],
            [
                migrations.CreateModel("Bar", [("size", models.IntegerField())]),
            ],
        )
        self.assertOptimizesTo(
            [
                migrations.CreateModel(
                    "Foo", [("name", models.CharField(max_length=255))]
                ),
                migrations.CreateModel("Bar", [("size", models.IntegerField())]),
                migrations.DeleteModel("Bar"),
                migrations.DeleteModel("Foo"),
            ],
            [],
        )
        self.assertOptimizesTo(
            [
                migrations.CreateModel(
                    "Foo", [("name", models.CharField(max_length=255))]
                ),
                migrations.CreateModel("Bar", [("size", models.IntegerField())]),
                migrations.DeleteModel("Foo"),
                migrations.DeleteModel("Bar"),
            ],
            [],
        )
        # Operations should be optimized if the FK references a model from the
        # other app.
        self.assertOptimizesTo(
            [
                migrations.CreateModel(
                    "Foo", [("name", models.CharField(max_length=255))]
                ),
                migrations.CreateModel(
                    "Bar", [("other", models.ForeignKey("testapp.Foo", models.CASCADE))]
                ),
                migrations.DeleteModel("Foo"),
            ],
            [
                migrations.CreateModel(
                    "Bar", [("other", models.ForeignKey("testapp.Foo", models.CASCADE))]
                ),
            ],
            app_label="otherapp",
        )
        # But it shouldn't work if a FK references a model with the same
        # app_label.
        self.assertDoesNotOptimize(
            [
                migrations.CreateModel(
                    "Foo", [("name", models.CharField(max_length=255))]
                ),
                migrations.CreateModel(
                    "Bar", [("other", models.ForeignKey("Foo", models.CASCADE))]
                ),
                migrations.DeleteModel("Foo"),
            ],
        )
        self.assertDoesNotOptimize(
            [
                migrations.CreateModel(
                    "Foo", [("name", models.CharField(max_length=255))]
                ),
                migrations.CreateModel(
                    "Bar", [("other", models.ForeignKey("testapp.Foo", models.CASCADE))]
                ),
                migrations.DeleteModel("Foo"),
            ],
            app_label="testapp",
        )
        # This should not work - bases should block it
        self.assertDoesNotOptimize(
            [
                migrations.CreateModel(
                    "Foo", [("name", models.CharField(max_length=255))]
                ),
                migrations.CreateModel(
                    "Bar", [("size", models.IntegerField())], bases=("Foo",)
                ),
                migrations.DeleteModel("Foo"),
            ],
        )
        self.assertDoesNotOptimize(
            [
                migrations.CreateModel(
                    "Foo", [("name", models.CharField(max_length=255))]
                ),
                migrations.CreateModel(
                    "Bar", [("size", models.IntegerField())], bases=("testapp.Foo",)
                ),
                migrations.DeleteModel("Foo"),
            ],
            app_label="testapp",
        )
        # The same operations should be optimized if app_label and none of
        # bases belong to that app.
        self.assertOptimizesTo(
            [
                migrations.CreateModel(
                    "Foo", [("name", models.CharField(max_length=255))]
                ),
                migrations.CreateModel(
                    "Bar", [("size", models.IntegerField())], bases=("testapp.Foo",)
                ),
                migrations.DeleteModel("Foo"),
            ],
            [
                migrations.CreateModel(
                    "Bar", [("size", models.IntegerField())], bases=("testapp.Foo",)
                ),
            ],
            app_label="otherapp",
        )
        # But it shouldn't work if some of bases belongs to the specified app.
        self.assertDoesNotOptimize(
            [
                migrations.CreateModel(
                    "Foo", [("name", models.CharField(max_length=255))]
                ),
                migrations.CreateModel(
                    "Bar", [("size", models.IntegerField())], bases=("testapp.Foo",)
                ),
                migrations.DeleteModel("Foo"),
            ],
            app_label="testapp",
        )
        self.assertOptimizesTo(
            [
                migrations.CreateModel(
                    "Book", [("name", models.CharField(max_length=255))]
                ),
                migrations.CreateModel(
                    "Person", [("name", models.CharField(max_length=255))]
                ),
                migrations.AddField(
                    "book",
                    "author",
                    models.ForeignKey("test_app.Person", models.CASCADE),
                ),
                migrations.CreateModel(
                    "Review",
                    [("book", models.ForeignKey("test_app.Book", models.CASCADE))],
                ),
                migrations.CreateModel(
                    "Reviewer", [("name", models.CharField(max_length=255))]
                ),
                migrations.AddField(
                    "review",
                    "reviewer",
                    models.ForeignKey("test_app.Reviewer", models.CASCADE),
                ),
                migrations.RemoveField("book", "author"),
                migrations.DeleteModel("Person"),
            ],
            [
                migrations.CreateModel(
                    "Book", [("name", models.CharField(max_length=255))]
                ),
                migrations.CreateModel(
                    "Reviewer", [("name", models.CharField(max_length=255))]
                ),
                migrations.CreateModel(
                    "Review",
                    [
                        ("book", models.ForeignKey("test_app.Book", models.CASCADE)),
                        (
                            "reviewer",
                            models.ForeignKey("test_app.Reviewer", models.CASCADE),
                        ),
                    ],
                ),
            ],
            app_label="test_app",
        )
    def test_create_model_add_field(self):
        """
        AddField should optimize into CreateModel.
        """
        managers = [("objects", EmptyManager())]
        self.assertOptimizesTo(
            [
                migrations.CreateModel(
                    name="Foo",
                    fields=[("name", models.CharField(max_length=255))],
                    options={"verbose_name": "Foo"},
                    bases=(UnicodeModel,),
                    managers=managers,
                ),
                migrations.AddField("Foo", "age", models.IntegerField()),
            ],
            [
                migrations.CreateModel(
                    name="Foo",
                    fields=[
                        ("name", models.CharField(max_length=255)),
                        ("age", models.IntegerField()),
                    ],
                    options={"verbose_name": "Foo"},
                    bases=(UnicodeModel,),
                    managers=managers,
                ),
            ],
        )
    def test_create_model_reordering(self):
        """
        AddField optimizes into CreateModel if it's a FK to a model that's
        between them (and there's no FK in the other direction), by changing
        the order of the CreateModel operations.
        """
        self.assertOptimizesTo(
            [
                migrations.CreateModel(
                    "Foo", [("name", models.CharField(max_length=255))]
                ),
                migrations.CreateModel("Link", [("url", models.TextField())]),
                migrations.AddField(
                    "Foo", "link", models.ForeignKey("migrations.Link", models.CASCADE)
                ),
            ],
            [
                migrations.CreateModel("Link", [("url", models.TextField())]),
                migrations.CreateModel(
                    "Foo",
                    [
                        ("name", models.CharField(max_length=255)),
                        ("link", models.ForeignKey("migrations.Link", models.CASCADE)),
                    ],
                ),
            ],
        )
    def test_create_model_reordering_circular_fk(self):
        """
        CreateModel reordering behavior doesn't result in an infinite loop if
        there are FKs in both directions.
        """
        self.assertOptimizesTo(
            [
                migrations.CreateModel("Bar", [("url", models.TextField())]),
                migrations.CreateModel(
                    "Foo", [("name", models.CharField(max_length=255))]
                ),
                migrations.AddField(
                    "Bar", "foo_fk", models.ForeignKey("migrations.Foo", models.CASCADE)
                ),
                migrations.AddField(
                    "Foo", "bar_fk", models.ForeignKey("migrations.Bar", models.CASCADE)
                ),
            ],
            [
                migrations.CreateModel(
                    "Foo", [("name", models.CharField(max_length=255))]
                ),
                migrations.CreateModel(
                    "Bar",
                    [
                        ("url", models.TextField()),
                        ("foo_fk", models.ForeignKey("migrations.Foo", models.CASCADE)),
                    ],
                ),
                migrations.AddField(
                    "Foo", "bar_fk", models.ForeignKey("migrations.Bar", models.CASCADE)
                ),
            ],
        )
    def test_create_model_no_reordering_for_unrelated_fk(self):
        """
        CreateModel order remains unchanged if the later AddField operation
        isn't a FK between them.
        """
        self.assertDoesNotOptimize(
            [
                migrations.CreateModel(
                    "Foo", [("name", models.CharField(max_length=255))]
                ),
                migrations.CreateModel("Link", [("url", models.TextField())]),
                migrations.AddField(
                    "Other",
                    "link",
                    models.ForeignKey("migrations.Link", models.CASCADE),
                ),
            ],
        )
    def test_create_model_no_reordering_of_inherited_model(self):
        """
        A CreateModel that inherits from another isn't reordered to avoid
        moving it earlier than its parent CreateModel operation.
        """
        self.assertOptimizesTo(
            [
                migrations.CreateModel(
                    "Other", [("foo", models.CharField(max_length=255))]
                ),
                migrations.CreateModel(
                    "ParentModel", [("bar", models.CharField(max_length=255))]
                ),
                migrations.CreateModel(
                    "ChildModel",
                    [("baz", models.CharField(max_length=255))],
                    bases=("migrations.parentmodel",),
                ),
                migrations.AddField(
                    "Other",
                    "fk",
                    models.ForeignKey("migrations.ChildModel", models.CASCADE),
                ),
            ],
            [
                migrations.CreateModel(
                    "ParentModel", [("bar", models.CharField(max_length=255))]
                ),
                migrations.CreateModel(
                    "ChildModel",
                    [("baz", models.CharField(max_length=255))],
                    bases=("migrations.parentmodel",),
                ),
                migrations.CreateModel(
                    "Other",
                    [
                        ("foo", models.CharField(max_length=255)),
                        (
                            "fk",
                            models.ForeignKey("migrations.ChildModel", models.CASCADE),
                        ),
                    ],
                ),
            ],
        )
    def test_create_model_add_field_not_through_m2m_through(self):
        """
        AddField should NOT optimize into CreateModel if it's an M2M using a
        through that's created between them.
        """
        self.assertDoesNotOptimize(
            [
                migrations.CreateModel("Employee", []),
                migrations.CreateModel("Employer", []),
                migrations.CreateModel(
                    "Employment",
                    [
                        (
                            "employee",
                            models.ForeignKey("migrations.Employee", models.CASCADE),
                        ),
                        (
                            "employment",
                            models.ForeignKey("migrations.Employer", models.CASCADE),
                        ),
                    ],
                ),
                migrations.AddField(
                    "Employer",
                    "employees",
                    models.ManyToManyField(
                        "migrations.Employee",
                        through="migrations.Employment",
                    ),
                ),
            ],
        )
    def test_create_model_alter_field(self):
        """
        AlterField should optimize into CreateModel.
        """
        managers = [("objects", EmptyManager())]
        self.assertOptimizesTo(
            [
                migrations.CreateModel(
                    name="Foo",
                    fields=[("name", models.CharField(max_length=255))],
                    options={"verbose_name": "Foo"},
                    bases=(UnicodeModel,),
                    managers=managers,
                ),
                migrations.AlterField("Foo", "name", models.IntegerField()),
            ],
            [
                migrations.CreateModel(
                    name="Foo",
                    fields=[
                        ("name", models.IntegerField()),
                    ],
                    options={"verbose_name": "Foo"},
                    bases=(UnicodeModel,),
                    managers=managers,
                ),
            ],
        )
    def test_create_model_rename_field(self):
        """
        RenameField should optimize into CreateModel.
        """
        managers = [("objects", EmptyManager())]
        self.assertOptimizesTo(
            [
                migrations.CreateModel(
                    name="Foo",
                    fields=[("name", models.CharField(max_length=255))],
                    options={"verbose_name": "Foo"},
                    bases=(UnicodeModel,),
                    managers=managers,
                ),
                migrations.RenameField("Foo", "name", "title"),
            ],
            [
                migrations.CreateModel(
                    name="Foo",
                    fields=[
                        ("title", models.CharField(max_length=255)),
                    ],
                    options={"verbose_name": "Foo"},
                    bases=(UnicodeModel,),
                    managers=managers,
                ),
            ],
        )
    def test_add_field_rename_field(self):
        """
        RenameField should optimize into AddField
        """
        self.assertOptimizesTo(
            [
                migrations.AddField("Foo", "name", models.CharField(max_length=255)),
                migrations.RenameField("Foo", "name", "title"),
            ],
            [
                migrations.AddField("Foo", "title", models.CharField(max_length=255)),
            ],
        )
    def test_alter_field_rename_field(self):
        """
        RenameField should optimize to the other side of AlterField,
        and into itself.
        """
        self.assertOptimizesTo(
            [
                migrations.AlterField("Foo", "name", models.CharField(max_length=255)),
                migrations.RenameField("Foo", "name", "title"),
                migrations.RenameField("Foo", "title", "nom"),
            ],
            [
                migrations.RenameField("Foo", "name", "nom"),
                migrations.AlterField("Foo", "nom", models.CharField(max_length=255)),
            ],
        )
    def test_swapping_fields_names(self):
        """Field-name swaps across a RunPython barrier must not be optimized."""
        self.assertDoesNotOptimize(
            [
                migrations.CreateModel(
                    "MyModel",
                    [
                        ("field_a", models.IntegerField()),
                        ("field_b", models.IntegerField()),
                    ],
                ),
                migrations.RunPython(migrations.RunPython.noop),
                migrations.RenameField("MyModel", "field_a", "field_c"),
                migrations.RenameField("MyModel", "field_b", "field_a"),
                migrations.RenameField("MyModel", "field_c", "field_b"),
            ],
        )
    def test_create_model_remove_field(self):
        """
        RemoveField should optimize into CreateModel.
        """
        managers = [("objects", EmptyManager())]
        self.assertOptimizesTo(
            [
                migrations.CreateModel(
                    name="Foo",
                    fields=[
                        ("name", models.CharField(max_length=255)),
                        ("age", models.IntegerField()),
                    ],
                    options={"verbose_name": "Foo"},
                    bases=(UnicodeModel,),
                    managers=managers,
                ),
                migrations.RemoveField("Foo", "age"),
            ],
            [
                migrations.CreateModel(
                    name="Foo",
                    fields=[
                        ("name", models.CharField(max_length=255)),
                    ],
                    options={"verbose_name": "Foo"},
                    bases=(UnicodeModel,),
                    managers=managers,
                ),
            ],
        )
    def test_add_field_alter_field(self):
        """
        AlterField should optimize into AddField.
        """
        self.assertOptimizesTo(
            [
                migrations.AddField("Foo", "age", models.IntegerField()),
                migrations.AlterField("Foo", "age", models.FloatField(default=2.4)),
            ],
            [
                migrations.AddField(
                    "Foo", name="age", field=models.FloatField(default=2.4)
                ),
            ],
        )
    def test_add_field_delete_field(self):
        """
        RemoveField should cancel AddField
        """
        self.assertOptimizesTo(
            [
                migrations.AddField("Foo", "age", models.IntegerField()),
                migrations.RemoveField("Foo", "age"),
            ],
            [],
        )
    def test_alter_field_delete_field(self):
        """
        RemoveField should absorb AlterField
        """
        self.assertOptimizesTo(
            [
                migrations.AlterField("Foo", "age", models.IntegerField()),
                migrations.RemoveField("Foo", "age"),
            ],
            [
                migrations.RemoveField("Foo", "age"),
            ],
        )
    def _test_create_alter_foo_field(self, alter):
        """
        CreateModel, AlterFooTogether/AlterOrderWithRespectTo followed by an
        add/alter/rename field should optimize to CreateModel with options.
        """
        option_value = getattr(alter, alter.option_name)
        options = {alter.option_name: option_value}
        # AddField
        self.assertOptimizesTo(
            [
                migrations.CreateModel(
                    "Foo",
                    [
                        ("a", models.IntegerField()),
                        ("b", models.IntegerField()),
                    ],
                ),
                alter,
                migrations.AddField("Foo", "c", models.IntegerField()),
            ],
            [
                migrations.CreateModel(
                    "Foo",
                    [
                        ("a", models.IntegerField()),
                        ("b", models.IntegerField()),
                        ("c", models.IntegerField()),
                    ],
                    options=options,
                ),
            ],
        )
        # AlterField
        self.assertOptimizesTo(
            [
                migrations.CreateModel(
                    "Foo",
                    [
                        ("a", models.IntegerField()),
                        ("b", models.IntegerField()),
                    ],
                ),
                alter,
                migrations.AlterField("Foo", "b", models.CharField(max_length=255)),
            ],
            [
                migrations.CreateModel(
                    "Foo",
                    [
                        ("a", models.IntegerField()),
                        ("b", models.CharField(max_length=255)),
                    ],
                    options=options,
                ),
            ],
        )
        self.assertOptimizesTo(
            [
                migrations.CreateModel(
                    "Foo",
                    [
                        ("a", models.IntegerField()),
                        ("b", models.IntegerField()),
                        ("c", models.IntegerField()),
                    ],
                ),
                alter,
                migrations.AlterField("Foo", "c", models.CharField(max_length=255)),
            ],
            [
                migrations.CreateModel(
                    "Foo",
                    [
                        ("a", models.IntegerField()),
                        ("b", models.IntegerField()),
                        ("c", models.CharField(max_length=255)),
                    ],
                    options=options,
                ),
            ],
        )
        # RenameField
        # The option value is either a field name (order_with_respect_to, a
        # str) or a set of tuples of field names (unique/index_together);
        # renaming "b" -> "c" must be mirrored inside the option.
        if isinstance(option_value, str):
            renamed_options = {alter.option_name: "c"}
        else:
            renamed_options = {
                alter.option_name: {
                    tuple("c" if value == "b" else value for value in item)
                    for item in option_value
                }
            }
        self.assertOptimizesTo(
            [
                migrations.CreateModel(
                    "Foo",
                    [
                        ("a", models.IntegerField()),
                        ("b", models.IntegerField()),
                    ],
                ),
                alter,
                migrations.RenameField("Foo", "b", "c"),
            ],
            [
                migrations.CreateModel(
                    "Foo",
                    [
                        ("a", models.IntegerField()),
                        ("c", models.IntegerField()),
                    ],
                    options=renamed_options,
                ),
            ],
        )
        self.assertOptimizesTo(
            [
                migrations.CreateModel(
                    "Foo",
                    [
                        ("a", models.IntegerField()),
                        ("b", models.IntegerField()),
                    ],
                ),
                alter,
                migrations.RenameField("Foo", "b", "x"),
                migrations.RenameField("Foo", "x", "c"),
            ],
            [
                migrations.CreateModel(
                    "Foo",
                    [
                        ("a", models.IntegerField()),
                        ("c", models.IntegerField()),
                    ],
                    options=renamed_options,
                ),
            ],
        )
        self.assertOptimizesTo(
            [
                migrations.CreateModel(
                    "Foo",
                    [
                        ("a", models.IntegerField()),
                        ("b", models.IntegerField()),
                        ("c", models.IntegerField()),
                    ],
                ),
                alter,
                migrations.RenameField("Foo", "c", "d"),
            ],
            [
                migrations.CreateModel(
                    "Foo",
                    [
                        ("a", models.IntegerField()),
                        ("b", models.IntegerField()),
                        ("d", models.IntegerField()),
                    ],
                    options=options,
                ),
            ],
        )
        # RemoveField
        # Dropping "b" clears a str-valued option entirely and filters "b"
        # out of tuple-valued options.
        if isinstance(option_value, str):
            removed_options = None
        else:
            removed_options = {
                alter.option_name: {
                    tuple(value for value in item if value != "b")
                    for item in option_value
                }
            }
        self.assertOptimizesTo(
            [
                migrations.CreateModel(
                    "Foo",
                    [
                        ("a", models.IntegerField()),
                        ("b", models.IntegerField()),
                    ],
                ),
                alter,
                migrations.RemoveField("Foo", "b"),
            ],
            [
                migrations.CreateModel(
                    "Foo",
                    [
                        ("a", models.IntegerField()),
                    ],
                    options=removed_options,
                ),
            ],
        )
        self.assertOptimizesTo(
            [
                migrations.CreateModel(
                    "Foo",
                    [
                        ("a", models.IntegerField()),
                        ("b", models.IntegerField()),
                        ("c", models.IntegerField()),
                    ],
                ),
                alter,
                migrations.RemoveField("Foo", "c"),
            ],
            [
                migrations.CreateModel(
                    "Foo",
                    [
                        ("a", models.IntegerField()),
                        ("b", models.IntegerField()),
                    ],
                    options=options,
                ),
            ],
        )
    def test_create_alter_unique_field(self):
        self._test_create_alter_foo_field(
            migrations.AlterUniqueTogether("Foo", [["a", "b"]])
        )
    def test_create_alter_index_field(self):
        self._test_create_alter_foo_field(
            migrations.AlterIndexTogether("Foo", [["a", "b"]])
        )
    def test_create_alter_owrt_field(self):
        self._test_create_alter_foo_field(
            migrations.AlterOrderWithRespectTo("Foo", "b")
        )
    def test_optimize_through_fields(self):
        """
        field-level through checking is working. This should manage to collapse
        model Foo to nonexistence, and model Bar to a single IntegerField
        called "width".
        """
        self.assertOptimizesTo(
            [
                migrations.CreateModel(
                    "Foo", [("name", models.CharField(max_length=255))]
                ),
                migrations.CreateModel("Bar", [("size", models.IntegerField())]),
                migrations.AddField("Foo", "age", models.IntegerField()),
                migrations.AddField("Bar", "width", models.IntegerField()),
                migrations.AlterField("Foo", "age", models.IntegerField()),
                migrations.RenameField("Bar", "size", "dimensions"),
                migrations.RemoveField("Foo", "age"),
                migrations.RenameModel("Foo", "Phou"),
                migrations.RemoveField("Bar", "dimensions"),
                migrations.RenameModel("Phou", "Fou"),
                migrations.DeleteModel("Fou"),
            ],
            [
                migrations.CreateModel("Bar", [("width", models.IntegerField())]),
            ],
        )
    def test_optimize_elidable_operation(self):
        """Operations marked elidable are dropped during optimization."""
        elidable_operation = operations.base.Operation()
        elidable_operation.elidable = True
        self.assertOptimizesTo(
            [
                elidable_operation,
                migrations.CreateModel(
                    "Foo", [("name", models.CharField(max_length=255))]
                ),
                elidable_operation,
                migrations.CreateModel("Bar", [("size", models.IntegerField())]),
                elidable_operation,
                migrations.RenameModel("Foo", "Phou"),
                migrations.DeleteModel("Bar"),
                elidable_operation,
            ],
            [
                migrations.CreateModel(
                    "Phou", [("name", models.CharField(max_length=255))]
                ),
            ],
        )
|
765567a8862ce5a937b7ee22f640289b7ebcc424c02e5c3051f1ddccfd1b3d8a | import datetime
from io import StringIO
from unittest import mock
from django.core.management.base import OutputWrapper
from django.db.migrations.questioner import (
InteractiveMigrationQuestioner,
MigrationQuestioner,
)
from django.db.models import NOT_PROVIDED
from django.test import SimpleTestCase
from django.test.utils import override_settings
class QuestionerTests(SimpleTestCase):
    """Tests for the non-interactive and interactive migration questioners."""

    @override_settings(
        INSTALLED_APPS=["migrations"],
        MIGRATION_MODULES={"migrations": None},
    )
    def test_ask_initial_with_disabled_migrations(self):
        # Apps with MIGRATION_MODULES set to None have migrations disabled,
        # so no initial migration should be offered.
        questioner = MigrationQuestioner()
        self.assertIs(False, questioner.ask_initial("migrations"))
    def test_ask_not_null_alteration(self):
        # The base (non-interactive) questioner has no answer for a
        # not-null alteration.
        questioner = MigrationQuestioner()
        self.assertIsNone(
            questioner.ask_not_null_alteration("field_name", "model_name")
        )
    @mock.patch("builtins.input", return_value="2")
    def test_ask_not_null_alteration_not_provided(self, mock):
        # Choice "2" means "ignore for now"; the questioner signals that
        # with the NOT_PROVIDED sentinel.
        questioner = InteractiveMigrationQuestioner(
            prompt_output=OutputWrapper(StringIO())
        )
        question = questioner.ask_not_null_alteration("field_name", "model_name")
        self.assertEqual(question, NOT_PROVIDED)
class QuestionerHelperMethodsTests(SimpleTestCase):
    """Tests for InteractiveMigrationQuestioner's low-level input helpers."""

    def setUp(self):
        # Capture all prompt output in memory so assertions can inspect it.
        self.prompt = OutputWrapper(StringIO())
        self.questioner = InteractiveMigrationQuestioner(prompt_output=self.prompt)
    @mock.patch("builtins.input", return_value="datetime.timedelta(days=1)")
    def test_questioner_default_timedelta(self, mock_input):
        # User-entered code is evaluated to produce the default value.
        value = self.questioner._ask_default()
        self.assertEqual(value, datetime.timedelta(days=1))
    @mock.patch("builtins.input", return_value="")
    def test_questioner_default_no_user_entry(self, mock_input):
        # An empty entry falls back to the provided default expression.
        value = self.questioner._ask_default(default="datetime.timedelta(days=1)")
        self.assertEqual(value, datetime.timedelta(days=1))
    @mock.patch("builtins.input", side_effect=["", "exit"])
    def test_questioner_no_default_no_user_entry(self, mock_input):
        # With no default, an empty entry re-prompts; "exit" aborts.
        with self.assertRaises(SystemExit):
            self.questioner._ask_default()
        self.assertIn(
            "Please enter some code, or 'exit' (without quotes) to exit.",
            self.prompt.getvalue(),
        )
    @mock.patch("builtins.input", side_effect=["bad code", "exit"])
    def test_questioner_no_default_bad_user_entry_code(self, mock_input):
        # Unparseable code is reported and the user is re-prompted.
        with self.assertRaises(SystemExit):
            self.questioner._ask_default()
        self.assertIn("Invalid input: ", self.prompt.getvalue())
    @mock.patch("builtins.input", side_effect=["", "n"])
    def test_questioner_no_default_no_user_entry_boolean(self, mock_input):
        # Without a default, an empty entry re-prompts until y/n is given.
        value = self.questioner._boolean_input("Proceed?")
        self.assertIs(value, False)
    @mock.patch("builtins.input", return_value="")
    def test_questioner_default_no_user_entry_boolean(self, mock_input):
        # An empty entry accepts the supplied default.
        value = self.questioner._boolean_input("Proceed?", default=True)
        self.assertIs(value, True)
    @mock.patch("builtins.input", side_effect=[10, "garbage", 1])
    def test_questioner_bad_user_choice(self, mock_input):
        # Out-of-range and non-numeric answers are rejected until a valid
        # 1-based choice is entered.
        question = "Make a choice:"
        value = self.questioner._choice_input(question, choices="abc")
        expected_msg = f"{question}\n" f" 1) a\n" f" 2) b\n" f" 3) c\n"
        self.assertIn(expected_msg, self.prompt.getvalue())
        self.assertEqual(value, 1)
|
418dccf8f6e89239f057fd8d806b3a3f677dc2527614ade950e14f13fa6796e2 | from unittest import mock
from django.apps.registry import apps as global_apps
from django.db import DatabaseError, connection, migrations, models
from django.db.migrations.exceptions import InvalidMigrationPlan
from django.db.migrations.executor import MigrationExecutor
from django.db.migrations.graph import MigrationGraph
from django.db.migrations.recorder import MigrationRecorder
from django.db.migrations.state import ProjectState
from django.test import (
SimpleTestCase,
modify_settings,
override_settings,
skipUnlessDBFeature,
)
from django.test.utils import isolate_lru_cache
from .test_base import MigrationTestBase
@modify_settings(INSTALLED_APPS={"append": "migrations2"})
class ExecutorTests(MigrationTestBase):
    """
    Tests the migration executor (full end-to-end running).
    Bear in mind that if these are failing you should fix the other
    test failures first, as they may be propagating into here.
    """

    available_apps = [
        "migrations",
        "migrations2",
        "django.contrib.auth",
        "django.contrib.contenttypes",
    ]

    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
    def test_run(self):
        """
        Tests running a simple set of migrations.
        """
        executor = MigrationExecutor(connection)
        # Let's look at the plan first and make sure it's up to scratch
        plan = executor.migration_plan([("migrations", "0002_second")])
        self.assertEqual(
            plan,
            [
                (executor.loader.graph.nodes["migrations", "0001_initial"], False),
                (executor.loader.graph.nodes["migrations", "0002_second"], False),
            ],
        )
        # Were the tables there before?
        self.assertTableNotExists("migrations_author")
        self.assertTableNotExists("migrations_book")
        # Alright, let's try running it
        executor.migrate([("migrations", "0002_second")])
        # Are the tables there now?
        self.assertTableExists("migrations_author")
        self.assertTableExists("migrations_book")
        # Rebuild the graph to reflect the new DB state
        executor.loader.build_graph()
        # Alright, let's undo what we did
        plan = executor.migration_plan([("migrations", None)])
        self.assertEqual(
            plan,
            [
                (executor.loader.graph.nodes["migrations", "0002_second"], True),
                (executor.loader.graph.nodes["migrations", "0001_initial"], True),
            ],
        )
        executor.migrate([("migrations", None)])
        # Are the tables gone?
        self.assertTableNotExists("migrations_author")
        self.assertTableNotExists("migrations_book")

    @override_settings(
        MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
    )
    def test_run_with_squashed(self):
        """
        Tests running a squashed migration from zero (should ignore what it replaces)
        """
        executor = MigrationExecutor(connection)
        # Check our leaf node is the squashed one
        leaves = [
            key for key in executor.loader.graph.leaf_nodes() if key[0] == "migrations"
        ]
        self.assertEqual(leaves, [("migrations", "0001_squashed_0002")])
        # Check the plan
        plan = executor.migration_plan([("migrations", "0001_squashed_0002")])
        self.assertEqual(
            plan,
            [
                (
                    executor.loader.graph.nodes["migrations", "0001_squashed_0002"],
                    False,
                ),
            ],
        )
        # Were the tables there before?
        self.assertTableNotExists("migrations_author")
        self.assertTableNotExists("migrations_book")
        # Alright, let's try running it
        executor.migrate([("migrations", "0001_squashed_0002")])
        # Are the tables there now?
        self.assertTableExists("migrations_author")
        self.assertTableExists("migrations_book")
        # Rebuild the graph to reflect the new DB state
        executor.loader.build_graph()
        # Alright, let's undo what we did. Should also just use squashed.
        plan = executor.migration_plan([("migrations", None)])
        self.assertEqual(
            plan,
            [
                (executor.loader.graph.nodes["migrations", "0001_squashed_0002"], True),
            ],
        )
        executor.migrate([("migrations", None)])
        # Are the tables gone?
        self.assertTableNotExists("migrations_author")
        self.assertTableNotExists("migrations_book")

    @override_settings(
        MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"},
    )
    def test_migrate_backward_to_squashed_migration(self):
        """Migrating backward to a migration replaced by a squash keeps the
        tables created by the squashed part up to that point."""
        executor = MigrationExecutor(connection)
        try:
            self.assertTableNotExists("migrations_author")
            self.assertTableNotExists("migrations_book")
            executor.migrate([("migrations", "0001_squashed_0002")])
            self.assertTableExists("migrations_author")
            self.assertTableExists("migrations_book")
            executor.loader.build_graph()
            # Migrate backward to a squashed migration.
            executor.migrate([("migrations", "0001_initial")])
            self.assertTableExists("migrations_author")
            self.assertTableNotExists("migrations_book")
        finally:
            # Unmigrate everything.
            executor = MigrationExecutor(connection)
            executor.migrate([("migrations", None)])
            self.assertTableNotExists("migrations_author")
            self.assertTableNotExists("migrations_book")

    @override_settings(
        MIGRATION_MODULES={"migrations": "migrations.test_migrations_non_atomic"}
    )
    def test_non_atomic_migration(self):
        """
        Applying a non-atomic migration works as expected.
        """
        executor = MigrationExecutor(connection)
        with self.assertRaisesMessage(RuntimeError, "Abort migration"):
            executor.migrate([("migrations", "0001_initial")])
        # The table (and the row inserted before the abort) survive because
        # the migration is non-atomic.
        self.assertTableExists("migrations_publisher")
        migrations_apps = executor.loader.project_state(
            ("migrations", "0001_initial")
        ).apps
        Publisher = migrations_apps.get_model("migrations", "Publisher")
        self.assertTrue(Publisher.objects.exists())
        self.assertTableNotExists("migrations_book")

    @override_settings(
        MIGRATION_MODULES={"migrations": "migrations.test_migrations_atomic_operation"}
    )
    def test_atomic_operation_in_non_atomic_migration(self):
        """
        An atomic operation is properly rolled back inside a non-atomic
        migration.
        """
        executor = MigrationExecutor(connection)
        with self.assertRaisesMessage(RuntimeError, "Abort migration"):
            executor.migrate([("migrations", "0001_initial")])
        migrations_apps = executor.loader.project_state(
            ("migrations", "0001_initial")
        ).apps
        Editor = migrations_apps.get_model("migrations", "Editor")
        self.assertFalse(Editor.objects.exists())
        # Record previous migration as successful.
        executor.migrate([("migrations", "0001_initial")], fake=True)
        # Rebuild the graph to reflect the new DB state.
        executor.loader.build_graph()
        # Migrating backwards is also atomic.
        with self.assertRaisesMessage(RuntimeError, "Abort migration"):
            executor.migrate([("migrations", None)])
        self.assertFalse(Editor.objects.exists())

    @override_settings(
        MIGRATION_MODULES={
            "migrations": "migrations.test_migrations",
            "migrations2": "migrations2.test_migrations_2",
        }
    )
    def test_empty_plan(self):
        """
        Re-planning a full migration of a fully-migrated set doesn't
        perform spurious unmigrations and remigrations.
        There was previously a bug where the executor just always performed the
        backwards plan for applied migrations - which even for the most recent
        migration in an app, might include other, dependent apps, and these
        were being unmigrated.
        """
        # Make the initial plan, check it
        executor = MigrationExecutor(connection)
        plan = executor.migration_plan(
            [
                ("migrations", "0002_second"),
                ("migrations2", "0001_initial"),
            ]
        )
        self.assertEqual(
            plan,
            [
                (executor.loader.graph.nodes["migrations", "0001_initial"], False),
                (executor.loader.graph.nodes["migrations", "0002_second"], False),
                (executor.loader.graph.nodes["migrations2", "0001_initial"], False),
            ],
        )
        # Fake-apply all migrations
        executor.migrate(
            [("migrations", "0002_second"), ("migrations2", "0001_initial")], fake=True
        )
        # Rebuild the graph to reflect the new DB state
        executor.loader.build_graph()
        # Now plan a second time and make sure it's empty
        plan = executor.migration_plan(
            [
                ("migrations", "0002_second"),
                ("migrations2", "0001_initial"),
            ]
        )
        self.assertEqual(plan, [])
        # The resulting state should include applied migrations.
        state = executor.migrate(
            [
                ("migrations", "0002_second"),
                ("migrations2", "0001_initial"),
            ]
        )
        self.assertIn(("migrations", "book"), state.models)
        self.assertIn(("migrations", "author"), state.models)
        self.assertIn(("migrations2", "otherauthor"), state.models)
        # Erase all the fake records
        executor.recorder.record_unapplied("migrations2", "0001_initial")
        executor.recorder.record_unapplied("migrations", "0002_second")
        executor.recorder.record_unapplied("migrations", "0001_initial")

    @override_settings(
        MIGRATION_MODULES={
            "migrations": "migrations.test_migrations",
            "migrations2": "migrations2.test_migrations_2_no_deps",
        }
    )
    def test_mixed_plan_not_supported(self):
        """
        Although the MigrationExecutor interfaces allows for mixed migration
        plans (combined forwards and backwards migrations) this is not
        supported.
        """
        # Prepare for mixed plan
        executor = MigrationExecutor(connection)
        plan = executor.migration_plan([("migrations", "0002_second")])
        self.assertEqual(
            plan,
            [
                (executor.loader.graph.nodes["migrations", "0001_initial"], False),
                (executor.loader.graph.nodes["migrations", "0002_second"], False),
            ],
        )
        executor.migrate(None, plan)
        # Rebuild the graph to reflect the new DB state
        executor.loader.build_graph()
        self.assertIn(
            ("migrations", "0001_initial"), executor.loader.applied_migrations
        )
        self.assertIn(("migrations", "0002_second"), executor.loader.applied_migrations)
        self.assertNotIn(
            ("migrations2", "0001_initial"), executor.loader.applied_migrations
        )
        # Generate mixed plan
        plan = executor.migration_plan(
            [
                ("migrations", None),
                ("migrations2", "0001_initial"),
            ]
        )
        msg = (
            "Migration plans with both forwards and backwards migrations are "
            "not supported. Please split your migration process into separate "
            "plans of only forwards OR backwards migrations."
        )
        with self.assertRaisesMessage(InvalidMigrationPlan, msg) as cm:
            executor.migrate(None, plan)
        # The offending plan is attached to the exception for inspection.
        self.assertEqual(
            cm.exception.args[1],
            [
                (executor.loader.graph.nodes["migrations", "0002_second"], True),
                (executor.loader.graph.nodes["migrations", "0001_initial"], True),
                (executor.loader.graph.nodes["migrations2", "0001_initial"], False),
            ],
        )
        # Rebuild the graph to reflect the new DB state
        executor.loader.build_graph()
        executor.migrate(
            [
                ("migrations", None),
                ("migrations2", None),
            ]
        )
        # Are the tables gone?
        self.assertTableNotExists("migrations_author")
        self.assertTableNotExists("migrations_book")
        self.assertTableNotExists("migrations2_otherauthor")

    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
    def test_soft_apply(self):
        """
        Tests detection of initial migrations already having been applied.
        """
        state = {"faked": None}

        def fake_storer(phase, migration=None, fake=None):
            # Records whether the last migration was faked; reads the
            # enclosing `state` name at call time, so rebinding it below
            # resets what gets written.
            state["faked"] = fake

        executor = MigrationExecutor(connection, progress_callback=fake_storer)
        # Were the tables there before?
        self.assertTableNotExists("migrations_author")
        self.assertTableNotExists("migrations_tribble")
        # Run it normally
        self.assertEqual(
            executor.migration_plan([("migrations", "0001_initial")]),
            [
                (executor.loader.graph.nodes["migrations", "0001_initial"], False),
            ],
        )
        executor.migrate([("migrations", "0001_initial")])
        # Are the tables there now?
        self.assertTableExists("migrations_author")
        self.assertTableExists("migrations_tribble")
        # We shouldn't have faked that one
        self.assertIs(state["faked"], False)
        # Rebuild the graph to reflect the new DB state
        executor.loader.build_graph()
        # Fake-reverse that
        executor.migrate([("migrations", None)], fake=True)
        # Are the tables still there?
        self.assertTableExists("migrations_author")
        self.assertTableExists("migrations_tribble")
        # Make sure that was faked
        self.assertIs(state["faked"], True)
        # Finally, migrate forwards; this should fake-apply our initial migration
        executor.loader.build_graph()
        self.assertEqual(
            executor.migration_plan([("migrations", "0001_initial")]),
            [
                (executor.loader.graph.nodes["migrations", "0001_initial"], False),
            ],
        )
        # Applying the migration should raise a database level error
        # because we haven't given the --fake-initial option
        with self.assertRaises(DatabaseError):
            executor.migrate([("migrations", "0001_initial")])
        # Reset the faked state
        state = {"faked": None}
        # Allow faking of initial CreateModel operations
        executor.migrate([("migrations", "0001_initial")], fake_initial=True)
        self.assertIs(state["faked"], True)
        # And migrate back to clean up the database
        executor.loader.build_graph()
        executor.migrate([("migrations", None)])
        self.assertTableNotExists("migrations_author")
        self.assertTableNotExists("migrations_tribble")

    @override_settings(
        MIGRATION_MODULES={
            "migrations": "migrations.test_migrations_custom_user",
            "django.contrib.auth": "django.contrib.auth.migrations",
        },
        AUTH_USER_MODEL="migrations.Author",
    )
    def test_custom_user(self):
        """
        Regression test for #22325 - references to a custom user model defined in the
        same app are not resolved correctly.
        """
        with isolate_lru_cache(global_apps.get_swappable_settings_name):
            executor = MigrationExecutor(connection)
            self.assertTableNotExists("migrations_author")
            self.assertTableNotExists("migrations_tribble")
            # Migrate forwards
            executor.migrate([("migrations", "0001_initial")])
            self.assertTableExists("migrations_author")
            self.assertTableExists("migrations_tribble")
            # The soft-application detection works.
            # Change table_names to not return auth_user during this as it
            # wouldn't be there in a normal run, and ensure migrations.Author
            # exists in the global app registry temporarily.
            old_table_names = connection.introspection.table_names
            connection.introspection.table_names = lambda c: [
                x for x in old_table_names(c) if x != "auth_user"
            ]
            migrations_apps = executor.loader.project_state(
                ("migrations", "0001_initial"),
            ).apps
            global_apps.get_app_config("migrations").models[
                "author"
            ] = migrations_apps.get_model("migrations", "author")
            try:
                migration = executor.loader.get_migration("auth", "0001_initial")
                self.assertIs(executor.detect_soft_applied(None, migration)[0], True)
            finally:
                # Restore the patched introspection and registry entries.
                connection.introspection.table_names = old_table_names
                del global_apps.get_app_config("migrations").models["author"]
                # Migrate back to clean up the database.
                executor.loader.build_graph()
                executor.migrate([("migrations", None)])
                self.assertTableNotExists("migrations_author")
                self.assertTableNotExists("migrations_tribble")

    @override_settings(
        MIGRATION_MODULES={
            "migrations": "migrations.test_add_many_to_many_field_initial",
        },
    )
    def test_detect_soft_applied_add_field_manytomanyfield(self):
        """
        executor.detect_soft_applied() detects ManyToManyField tables from an
        AddField operation. This checks the case of AddField in a migration
        with other operations (0001) and the case of AddField in its own
        migration (0002).
        """
        tables = [
            # from 0001
            "migrations_project",
            "migrations_task",
            "migrations_project_tasks",
            # from 0002
            "migrations_task_projects",
        ]
        executor = MigrationExecutor(connection)
        # Create the tables for 0001 but make it look like the migration hasn't
        # been applied.
        executor.migrate([("migrations", "0001_initial")])
        executor.migrate([("migrations", None)], fake=True)
        for table in tables[:3]:
            self.assertTableExists(table)
        # Table detection sees 0001 is applied but not 0002.
        migration = executor.loader.get_migration("migrations", "0001_initial")
        self.assertIs(executor.detect_soft_applied(None, migration)[0], True)
        migration = executor.loader.get_migration("migrations", "0002_initial")
        self.assertIs(executor.detect_soft_applied(None, migration)[0], False)
        # Create the tables for both migrations but make it look like neither
        # has been applied.
        executor.loader.build_graph()
        executor.migrate([("migrations", "0001_initial")], fake=True)
        executor.migrate([("migrations", "0002_initial")])
        executor.loader.build_graph()
        executor.migrate([("migrations", None)], fake=True)
        # Table detection sees 0002 is applied.
        migration = executor.loader.get_migration("migrations", "0002_initial")
        self.assertIs(executor.detect_soft_applied(None, migration)[0], True)
        # Leave the tables for 0001 except the many-to-many table. That missing
        # table should cause detect_soft_applied() to return False.
        with connection.schema_editor() as editor:
            for table in tables[2:]:
                editor.execute(editor.sql_delete_table % {"table": table})
        migration = executor.loader.get_migration("migrations", "0001_initial")
        self.assertIs(executor.detect_soft_applied(None, migration)[0], False)
        # Cleanup by removing the remaining tables.
        with connection.schema_editor() as editor:
            for table in tables[:2]:
                editor.execute(editor.sql_delete_table % {"table": table})
        for table in tables:
            self.assertTableNotExists(table)

    @override_settings(
        INSTALLED_APPS=[
            "migrations.migrations_test_apps.lookuperror_a",
            "migrations.migrations_test_apps.lookuperror_b",
            "migrations.migrations_test_apps.lookuperror_c",
        ]
    )
    def test_unrelated_model_lookups_forwards(self):
        """
        #24123 - All models of apps already applied which are
        unrelated to the first app being applied are part of the initial model
        state.
        """
        try:
            executor = MigrationExecutor(connection)
            self.assertTableNotExists("lookuperror_a_a1")
            self.assertTableNotExists("lookuperror_b_b1")
            self.assertTableNotExists("lookuperror_c_c1")
            executor.migrate([("lookuperror_b", "0003_b3")])
            self.assertTableExists("lookuperror_b_b3")
            # Rebuild the graph to reflect the new DB state
            executor.loader.build_graph()
            # Migrate forwards -- This led to a lookup LookupErrors because
            # lookuperror_b.B2 is already applied
            executor.migrate(
                [
                    ("lookuperror_a", "0004_a4"),
                    ("lookuperror_c", "0003_c3"),
                ]
            )
            self.assertTableExists("lookuperror_a_a4")
            self.assertTableExists("lookuperror_c_c3")
            # Rebuild the graph to reflect the new DB state
            executor.loader.build_graph()
        finally:
            # Cleanup
            executor.migrate(
                [
                    ("lookuperror_a", None),
                    ("lookuperror_b", None),
                    ("lookuperror_c", None),
                ]
            )
            self.assertTableNotExists("lookuperror_a_a1")
            self.assertTableNotExists("lookuperror_b_b1")
            self.assertTableNotExists("lookuperror_c_c1")

    @override_settings(
        INSTALLED_APPS=[
            "migrations.migrations_test_apps.lookuperror_a",
            "migrations.migrations_test_apps.lookuperror_b",
            "migrations.migrations_test_apps.lookuperror_c",
        ]
    )
    def test_unrelated_model_lookups_backwards(self):
        """
        #24123 - All models of apps being unapplied which are
        unrelated to the first app being unapplied are part of the initial
        model state.
        """
        try:
            executor = MigrationExecutor(connection)
            self.assertTableNotExists("lookuperror_a_a1")
            self.assertTableNotExists("lookuperror_b_b1")
            self.assertTableNotExists("lookuperror_c_c1")
            executor.migrate(
                [
                    ("lookuperror_a", "0004_a4"),
                    ("lookuperror_b", "0003_b3"),
                    ("lookuperror_c", "0003_c3"),
                ]
            )
            self.assertTableExists("lookuperror_b_b3")
            self.assertTableExists("lookuperror_a_a4")
            self.assertTableExists("lookuperror_c_c3")
            # Rebuild the graph to reflect the new DB state
            executor.loader.build_graph()
            # Migrate backwards -- This led to a lookup LookupErrors because
            # lookuperror_b.B2 is not in the initial state (unrelated to app c)
            executor.migrate([("lookuperror_a", None)])
            # Rebuild the graph to reflect the new DB state
            executor.loader.build_graph()
        finally:
            # Cleanup
            executor.migrate([("lookuperror_b", None), ("lookuperror_c", None)])
            self.assertTableNotExists("lookuperror_a_a1")
            self.assertTableNotExists("lookuperror_b_b1")
            self.assertTableNotExists("lookuperror_c_c1")

    @override_settings(
        INSTALLED_APPS=[
            "migrations.migrations_test_apps.mutate_state_a",
            "migrations.migrations_test_apps.mutate_state_b",
        ]
    )
    def test_unrelated_applied_migrations_mutate_state(self):
        """
        #26647 - Unrelated applied migrations should be part of the final
        state in both directions.
        """
        executor = MigrationExecutor(connection)
        executor.migrate(
            [
                ("mutate_state_b", "0002_add_field"),
            ]
        )
        # Migrate forward.
        executor.loader.build_graph()
        state = executor.migrate(
            [
                ("mutate_state_a", "0001_initial"),
            ]
        )
        self.assertIn("added", state.models["mutate_state_b", "b"].fields)
        executor.loader.build_graph()
        # Migrate backward.
        state = executor.migrate(
            [
                ("mutate_state_a", None),
            ]
        )
        self.assertIn("added", state.models["mutate_state_b", "b"].fields)
        executor.migrate(
            [
                ("mutate_state_b", None),
            ]
        )

    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
    def test_process_callback(self):
        """
        #24129 - Tests callback process
        """
        call_args_list = []

        def callback(*args):
            call_args_list.append(args)

        executor = MigrationExecutor(connection, progress_callback=callback)
        # Were the tables there before?
        self.assertTableNotExists("migrations_author")
        self.assertTableNotExists("migrations_tribble")
        executor.migrate(
            [
                ("migrations", "0001_initial"),
                ("migrations", "0002_second"),
            ]
        )
        # Rebuild the graph to reflect the new DB state
        executor.loader.build_graph()
        # NOTE(review): the target is listed twice here — appears harmless
        # since both entries resolve to the same backwards plan; confirm.
        executor.migrate(
            [
                ("migrations", None),
                ("migrations", None),
            ]
        )
        self.assertTableNotExists("migrations_author")
        self.assertTableNotExists("migrations_tribble")
        migrations = executor.loader.graph.nodes
        expected = [
            ("render_start",),
            ("render_success",),
            ("apply_start", migrations["migrations", "0001_initial"], False),
            ("apply_success", migrations["migrations", "0001_initial"], False),
            ("apply_start", migrations["migrations", "0002_second"], False),
            ("apply_success", migrations["migrations", "0002_second"], False),
            ("render_start",),
            ("render_success",),
            ("unapply_start", migrations["migrations", "0002_second"], False),
            ("unapply_success", migrations["migrations", "0002_second"], False),
            ("unapply_start", migrations["migrations", "0001_initial"], False),
            ("unapply_success", migrations["migrations", "0001_initial"], False),
        ]
        self.assertEqual(call_args_list, expected)

    @override_settings(
        INSTALLED_APPS=[
            "migrations.migrations_test_apps.alter_fk.author_app",
            "migrations.migrations_test_apps.alter_fk.book_app",
        ]
    )
    def test_alter_id_type_with_fk(self):
        """Altering the primary key type of a model referenced by a foreign
        key applies cleanly; cleanup drops tables by hand (see comment)."""
        try:
            executor = MigrationExecutor(connection)
            self.assertTableNotExists("author_app_author")
            self.assertTableNotExists("book_app_book")
            # Apply initial migrations
            executor.migrate(
                [
                    ("author_app", "0001_initial"),
                    ("book_app", "0001_initial"),
                ]
            )
            self.assertTableExists("author_app_author")
            self.assertTableExists("book_app_book")
            # Rebuild the graph to reflect the new DB state
            executor.loader.build_graph()
            # Apply PK type alteration
            executor.migrate([("author_app", "0002_alter_id")])
            # Rebuild the graph to reflect the new DB state
            executor.loader.build_graph()
        finally:
            # We can't simply unapply the migrations here because there is no
            # implicit cast from VARCHAR to INT on the database level.
            with connection.schema_editor() as editor:
                editor.execute(editor.sql_delete_table % {"table": "book_app_book"})
                editor.execute(editor.sql_delete_table % {"table": "author_app_author"})
            self.assertTableNotExists("author_app_author")
            self.assertTableNotExists("book_app_book")
            executor.migrate([("author_app", None)], fake=True)

    @override_settings(
        MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
    )
    def test_apply_all_replaced_marks_replacement_as_applied(self):
        """
        Applying all replaced migrations marks replacement as applied (#24628).
        """
        recorder = MigrationRecorder(connection)
        # Place the database in a state where the replaced migrations are
        # partially applied: 0001 is applied, 0002 is not.
        recorder.record_applied("migrations", "0001_initial")
        executor = MigrationExecutor(connection)
        # Use fake because we don't actually have the first migration
        # applied, so the second will fail. And there's no need to actually
        # create/modify tables here, we're just testing the
        # MigrationRecord, which works the same with or without fake.
        executor.migrate([("migrations", "0002_second")], fake=True)
        # Because we've now applied 0001 and 0002 both, their squashed
        # replacement should be marked as applied.
        self.assertIn(
            ("migrations", "0001_squashed_0002"),
            recorder.applied_migrations(),
        )

    @override_settings(
        MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
    )
    def test_migrate_marks_replacement_applied_even_if_it_did_nothing(self):
        """
        A new squash migration will be marked as applied even if all its
        replaced migrations were previously already applied (#24628).
        """
        recorder = MigrationRecorder(connection)
        # Record all replaced migrations as applied
        recorder.record_applied("migrations", "0001_initial")
        recorder.record_applied("migrations", "0002_second")
        executor = MigrationExecutor(connection)
        executor.migrate([("migrations", "0001_squashed_0002")])
        # Because 0001 and 0002 are both applied, even though this migrate run
        # didn't apply anything new, their squashed replacement should be
        # marked as applied.
        self.assertIn(
            ("migrations", "0001_squashed_0002"),
            recorder.applied_migrations(),
        )

    @override_settings(
        MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
    )
    def test_migrate_marks_replacement_unapplied(self):
        """Unapplying a squashed migration removes it from the applied
        migrations record."""
        executor = MigrationExecutor(connection)
        executor.migrate([("migrations", "0001_squashed_0002")])
        try:
            self.assertIn(
                ("migrations", "0001_squashed_0002"),
                executor.recorder.applied_migrations(),
            )
        finally:
            executor.loader.build_graph()
            executor.migrate([("migrations", None)])
            self.assertNotIn(
                ("migrations", "0001_squashed_0002"),
                executor.recorder.applied_migrations(),
            )

    # When the feature is False, the operation and the record won't be
    # performed in a transaction and the test will systematically pass.
    @skipUnlessDBFeature("can_rollback_ddl")
    def test_migrations_applied_and_recorded_atomically(self):
        """Migrations are applied and recorded atomically."""

        class Migration(migrations.Migration):
            operations = [
                migrations.CreateModel(
                    "model",
                    [
                        ("id", models.AutoField(primary_key=True)),
                    ],
                ),
            ]

        executor = MigrationExecutor(connection)
        with mock.patch(
            "django.db.migrations.executor.MigrationExecutor.record_migration"
        ) as record_migration:
            record_migration.side_effect = RuntimeError("Recording migration failed.")
            with self.assertRaisesMessage(RuntimeError, "Recording migration failed."):
                executor.apply_migration(
                    ProjectState(),
                    Migration("0001_initial", "record_migration"),
                )
                executor.migrate([("migrations", "0001_initial")])
        # The migration isn't recorded as applied since it failed.
        migration_recorder = MigrationRecorder(connection)
        self.assertIs(
            migration_recorder.migration_qs.filter(
                app="record_migration",
                name="0001_initial",
            ).exists(),
            False,
        )
        self.assertTableNotExists("record_migration_model")

    def test_migrations_not_applied_on_deferred_sql_failure(self):
        """Migrations are not recorded if deferred SQL application fails."""

        class DeferredSQL:
            # Raising from __str__ makes executing the deferred SQL fail.
            def __str__(self):
                raise DatabaseError("Failed to apply deferred SQL")

        class Migration(migrations.Migration):
            atomic = False

            def apply(self, project_state, schema_editor, collect_sql=False):
                schema_editor.deferred_sql.append(DeferredSQL())

        executor = MigrationExecutor(connection)
        with self.assertRaisesMessage(DatabaseError, "Failed to apply deferred SQL"):
            executor.apply_migration(
                ProjectState(),
                Migration("0001_initial", "deferred_sql"),
            )
        # The migration isn't recorded as applied since it failed.
        migration_recorder = MigrationRecorder(connection)
        self.assertIs(
            migration_recorder.migration_qs.filter(
                app="deferred_sql",
                name="0001_initial",
            ).exists(),
            False,
        )

    @mock.patch.object(MigrationRecorder, "has_table", return_value=False)
    def test_migrate_skips_schema_creation(self, mocked_has_table):
        """
        The django_migrations table is not created if there are no migrations
        to record.
        """
        executor = MigrationExecutor(connection)
        # 0 queries, since the query for has_table is being mocked.
        with self.assertNumQueries(0):
            executor.migrate([], plan=[])
class FakeLoader:
    """
    Bare-bones stand-in for MigrationLoader, exposing only the attributes
    the executor reads: a graph, the applied-migrations mapping, and the
    replace_migrations flag (always enabled here).
    """

    def __init__(self, graph, applied):
        self.replace_migrations = True
        self.applied_migrations = applied
        self.graph = graph
class FakeMigration:
    """Stand-in migration: any object with a debug-friendly repr will do."""

    def __init__(self, name):
        self.name = name

    def __repr__(self):
        return f"M<{self.name}>"
class ExecutorUnitTests(SimpleTestCase):
    """(More) isolated unit tests for executor methods."""

    def test_minimize_rollbacks(self):
        """
        Minimize unnecessary rollbacks in connected apps.
        When you say "./manage.py migrate appA 0001", rather than migrating to
        just after appA-0001 in the linearized migration plan (which could roll
        back migrations in other apps that depend on appA 0001, but don't need
        to be rolled back since we're not rolling back appA 0001), we migrate
        to just before appA-0002.
        """
        a1_impl = FakeMigration("a1")
        a1 = ("a", "1")
        a2_impl = FakeMigration("a2")
        a2 = ("a", "2")
        b1_impl = FakeMigration("b1")
        b1 = ("b", "1")
        graph = MigrationGraph()
        graph.add_node(a1, a1_impl)
        graph.add_node(a2, a2_impl)
        graph.add_node(b1, b1_impl)
        graph.add_dependency(None, b1, a1)
        graph.add_dependency(None, a2, a1)
        executor = MigrationExecutor(None)
        executor.loader = FakeLoader(
            graph,
            {
                a1: a1_impl,
                b1: b1_impl,
                a2: a2_impl,
            },
        )
        plan = executor.migration_plan({a1})
        # Only a2 is rolled back; b1 (dependent on a1 but not a2) stays.
        self.assertEqual(plan, [(a2_impl, True)])

    def test_minimize_rollbacks_branchy(self):
        r"""
        Minimize rollbacks when target has multiple in-app children.
        a: 1 <---- 3 <--\
              \ \- 2 <--- 4
               \         \
        b:      \- 1 <--- 2
        """
        a1_impl = FakeMigration("a1")
        a1 = ("a", "1")
        a2_impl = FakeMigration("a2")
        a2 = ("a", "2")
        a3_impl = FakeMigration("a3")
        a3 = ("a", "3")
        a4_impl = FakeMigration("a4")
        a4 = ("a", "4")
        b1_impl = FakeMigration("b1")
        b1 = ("b", "1")
        b2_impl = FakeMigration("b2")
        b2 = ("b", "2")
        graph = MigrationGraph()
        graph.add_node(a1, a1_impl)
        graph.add_node(a2, a2_impl)
        graph.add_node(a3, a3_impl)
        graph.add_node(a4, a4_impl)
        graph.add_node(b1, b1_impl)
        graph.add_node(b2, b2_impl)
        graph.add_dependency(None, a2, a1)
        graph.add_dependency(None, a3, a1)
        graph.add_dependency(None, a4, a2)
        graph.add_dependency(None, a4, a3)
        graph.add_dependency(None, b2, b1)
        graph.add_dependency(None, b1, a1)
        graph.add_dependency(None, b2, a2)
        executor = MigrationExecutor(None)
        executor.loader = FakeLoader(
            graph,
            {
                a1: a1_impl,
                b1: b1_impl,
                a2: a2_impl,
                b2: b2_impl,
                a3: a3_impl,
                a4: a4_impl,
            },
        )
        plan = executor.migration_plan({a1})
        # Everything after a1 is rolled back, but b1 is kept.
        should_be_rolled_back = [b2_impl, a4_impl, a2_impl, a3_impl]
        exp = [(m, True) for m in should_be_rolled_back]
        self.assertEqual(plan, exp)

    def test_backwards_nothing_to_do(self):
        r"""
        If the current state satisfies the given target, do nothing.
        a: 1 <--- 2
        b:    \- 1
        c:     \- 1
        If a1 is applied already and a2 is not, and we're asked to migrate to
        a1, don't apply or unapply b1 or c1, regardless of their current state.
        """
        a1_impl = FakeMigration("a1")
        a1 = ("a", "1")
        a2_impl = FakeMigration("a2")
        a2 = ("a", "2")
        b1_impl = FakeMigration("b1")
        b1 = ("b", "1")
        c1_impl = FakeMigration("c1")
        c1 = ("c", "1")
        graph = MigrationGraph()
        graph.add_node(a1, a1_impl)
        graph.add_node(a2, a2_impl)
        graph.add_node(b1, b1_impl)
        graph.add_node(c1, c1_impl)
        graph.add_dependency(None, a2, a1)
        graph.add_dependency(None, b1, a1)
        graph.add_dependency(None, c1, a1)
        executor = MigrationExecutor(None)
        executor.loader = FakeLoader(
            graph,
            {
                a1: a1_impl,
                b1: b1_impl,
            },
        )
        plan = executor.migration_plan({a1})
        self.assertEqual(plan, [])
|
7d47fa9d9ffeed408daf0e5c1e75c992a1fca47d207a8af3b1440e786270009a | import datetime
import importlib
import io
import os
import shutil
import sys
from unittest import mock
from django.apps import apps
from django.core.management import CommandError, call_command
from django.db import (
ConnectionHandler,
DatabaseError,
OperationalError,
connection,
connections,
models,
)
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from django.db.backends.utils import truncate_name
from django.db.migrations.exceptions import InconsistentMigrationHistory
from django.db.migrations.recorder import MigrationRecorder
from django.test import TestCase, override_settings, skipUnlessDBFeature
from django.test.utils import captured_stdout
from django.utils import timezone
from django.utils.version import get_docs_version
from .models import UnicodeModel, UnserializableModel
from .routers import TestRouter
from .test_base import MigrationTestBase
# Path to the "black" executable if it is on PATH, else None; presumably
# used to skip formatting-sensitive tests — confirm at the usage sites.
HAS_BLACK = shutil.which("black")
class MigrateTests(MigrationTestBase):
    """
    Tests running the migrate command.
    """

    # Opt in to both configured database aliases; some tests pass
    # database="other" to call_command.
    databases = {"default", "other"}
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_migrate(self):
    """
    Tests basic usage of the migrate command.
    """
    # No tables are created
    self.assertTableNotExists("migrations_author")
    self.assertTableNotExists("migrations_tribble")
    self.assertTableNotExists("migrations_book")
    # Run the migrations to 0001 only
    stdout = io.StringIO()
    call_command(
        "migrate", "migrations", "0001", verbosity=2, stdout=stdout, no_color=True
    )
    stdout = stdout.getvalue()
    self.assertIn(
        "Target specific migration: 0001_initial, from migrations", stdout
    )
    self.assertIn("Applying migrations.0001_initial... OK", stdout)
    self.assertIn("Running pre-migrate handlers for application migrations", stdout)
    self.assertIn(
        "Running post-migrate handlers for application migrations", stdout
    )
    # The correct tables exist
    self.assertTableExists("migrations_author")
    self.assertTableExists("migrations_tribble")
    self.assertTableNotExists("migrations_book")
    # Run migrations all the way
    call_command("migrate", verbosity=0)
    # The correct tables exist
    self.assertTableExists("migrations_author")
    self.assertTableNotExists("migrations_tribble")
    self.assertTableExists("migrations_book")
    # Unmigrate everything
    stdout = io.StringIO()
    call_command(
        "migrate", "migrations", "zero", verbosity=2, stdout=stdout, no_color=True
    )
    stdout = stdout.getvalue()
    self.assertIn("Unapply all migrations: migrations", stdout)
    self.assertIn("Unapplying migrations.0002_second... OK", stdout)
    self.assertIn("Running pre-migrate handlers for application migrations", stdout)
    self.assertIn(
        "Running post-migrate handlers for application migrations", stdout
    )
    # Tables are gone
    self.assertTableNotExists("migrations_author")
    self.assertTableNotExists("migrations_tribble")
    self.assertTableNotExists("migrations_book")
@override_settings(
    INSTALLED_APPS=[
        "django.contrib.auth",
        "django.contrib.contenttypes",
        "migrations.migrations_test_apps.migrated_app",
    ]
)
def test_migrate_with_system_checks(self):
    """migrate runs normally when system checks are not skipped."""
    captured = io.StringIO()
    call_command("migrate", skip_checks=False, no_color=True, stdout=captured)
    self.assertIn("Apply all migrations: migrated_app", captured.getvalue())
@override_settings(
    INSTALLED_APPS=[
        "migrations",
        "migrations.migrations_test_apps.unmigrated_app_syncdb",
    ]
)
def test_app_without_migrations(self):
    """Targeting an app without a migrations module raises CommandError."""
    expected = "App 'unmigrated_app_syncdb' does not have migrations."
    with self.assertRaisesMessage(CommandError, expected):
        call_command("migrate", app_label="unmigrated_app_syncdb")
@override_settings(
    MIGRATION_MODULES={"migrations": "migrations.test_migrations_clashing_prefix"}
)
def test_ambiguous_prefix(self):
    """A name prefix matching more than one migration raises CommandError."""
    expected = (
        "More than one migration matches 'a' in app 'migrations'. Please "
        "be more specific."
    )
    with self.assertRaisesMessage(CommandError, expected):
        call_command("migrate", app_label="migrations", migration_name="a")
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_unknown_prefix(self):
msg = "Cannot find a migration matching 'nonexistent' from app 'migrations'."
with self.assertRaisesMessage(CommandError, msg):
call_command(
"migrate", app_label="migrations", migration_name="nonexistent"
)
    @override_settings(
        MIGRATION_MODULES={"migrations": "migrations.test_migrations_initial_false"}
    )
    def test_migrate_initial_false(self):
        """
        `Migration.initial = False` skips fake-initial detection.

        After a fake rollback leaves the tables behind, --fake-initial must
        NOT silently fake the migration (the re-apply hits the existing
        tables and raises DatabaseError).
        """
        # Make sure no tables are created
        self.assertTableNotExists("migrations_author")
        self.assertTableNotExists("migrations_tribble")
        # Run the migrations to 0001 only
        call_command("migrate", "migrations", "0001", verbosity=0)
        # Fake rollback
        call_command("migrate", "migrations", "zero", fake=True, verbosity=0)
        # Make sure fake-initial detection does not run
        with self.assertRaises(DatabaseError):
            call_command(
                "migrate", "migrations", "0001", fake_initial=True, verbosity=0
            )
        call_command("migrate", "migrations", "0001", fake=True, verbosity=0)
        # Real rollback
        call_command("migrate", "migrations", "zero", verbosity=0)
        # Make sure it's all gone
        self.assertTableNotExists("migrations_author")
        self.assertTableNotExists("migrations_tribble")
        self.assertTableNotExists("migrations_book")
    @override_settings(
        MIGRATION_MODULES={"migrations": "migrations.test_migrations"},
        DATABASE_ROUTERS=["migrations.routers.TestRouter"],
    )
    def test_migrate_fake_initial(self):
        """
        --fake-initial only works if all tables created in the initial
        migration of an app exists. Database routers must be obeyed when doing
        that check.
        """
        # Make sure no tables are created
        for db in self.databases:
            self.assertTableNotExists("migrations_author", using=db)
            self.assertTableNotExists("migrations_tribble", using=db)
        # Run the migrations to 0001 only
        call_command("migrate", "migrations", "0001", verbosity=0)
        call_command("migrate", "migrations", "0001", verbosity=0, database="other")
        # The router splits the models: Author lands on the default database,
        # Tribble on "other" (per the assertions below).
        # Make sure the right tables exist
        self.assertTableExists("migrations_author")
        self.assertTableNotExists("migrations_tribble")
        # Also check the "other" database
        self.assertTableNotExists("migrations_author", using="other")
        self.assertTableExists("migrations_tribble", using="other")
        # Fake a roll-back
        call_command("migrate", "migrations", "zero", fake=True, verbosity=0)
        call_command(
            "migrate", "migrations", "zero", fake=True, verbosity=0, database="other"
        )
        # Make sure the tables still exist
        self.assertTableExists("migrations_author")
        self.assertTableExists("migrations_tribble", using="other")
        # Try to run initial migration
        with self.assertRaises(DatabaseError):
            call_command("migrate", "migrations", "0001", verbosity=0)
        # Run initial migration with an explicit --fake-initial
        out = io.StringIO()
        with mock.patch(
            "django.core.management.color.supports_color", lambda *args: False
        ):
            call_command(
                "migrate",
                "migrations",
                "0001",
                fake_initial=True,
                stdout=out,
                verbosity=1,
            )
            call_command(
                "migrate",
                "migrations",
                "0001",
                fake_initial=True,
                verbosity=0,
                database="other",
            )
        self.assertIn("migrations.0001_initial... faked", out.getvalue().lower())
        # Repeat the cycle with the full migration set; clean up in finally so
        # later tests start from an empty schema even if an assertion fails.
        try:
            # Run migrations all the way.
            call_command("migrate", verbosity=0)
            call_command("migrate", verbosity=0, database="other")
            self.assertTableExists("migrations_author")
            self.assertTableNotExists("migrations_tribble")
            self.assertTableExists("migrations_book")
            self.assertTableNotExists("migrations_author", using="other")
            self.assertTableNotExists("migrations_tribble", using="other")
            self.assertTableNotExists("migrations_book", using="other")
            # Fake a roll-back.
            call_command("migrate", "migrations", "zero", fake=True, verbosity=0)
            call_command(
                "migrate",
                "migrations",
                "zero",
                fake=True,
                verbosity=0,
                database="other",
            )
            self.assertTableExists("migrations_author")
            self.assertTableNotExists("migrations_tribble")
            self.assertTableExists("migrations_book")
            # Run initial migration.
            with self.assertRaises(DatabaseError):
                call_command("migrate", "migrations", verbosity=0)
            # Run initial migration with an explicit --fake-initial.
            with self.assertRaises(DatabaseError):
                # Fails because "migrations_tribble" does not exist but needs
                # to in order to make --fake-initial work.
                call_command("migrate", "migrations", fake_initial=True, verbosity=0)
            # Fake an apply.
            call_command("migrate", "migrations", fake=True, verbosity=0)
            call_command(
                "migrate", "migrations", fake=True, verbosity=0, database="other"
            )
        finally:
            # Unmigrate everything.
            call_command("migrate", "migrations", "zero", verbosity=0)
            call_command("migrate", "migrations", "zero", verbosity=0, database="other")
        # Make sure it's all gone
        for db in self.databases:
            self.assertTableNotExists("migrations_author", using=db)
            self.assertTableNotExists("migrations_tribble", using=db)
            self.assertTableNotExists("migrations_book", using=db)
    @skipUnlessDBFeature("ignores_table_name_case")
    def test_migrate_fake_initial_case_insensitive(self):
        """
        --fake-initial detects the initial migration's tables even when the
        fake-initial migration spells their names with different case, on
        backends that ignore table-name case.
        """
        # Create the tables with the "initial" spelling, then fake-roll back
        # so they remain in the database.
        with override_settings(
            MIGRATION_MODULES={
                "migrations": "migrations.test_fake_initial_case_insensitive.initial",
            }
        ):
            call_command("migrate", "migrations", "0001", verbosity=0)
            call_command("migrate", "migrations", "zero", fake=True, verbosity=0)
        # A differently-cased variant of the same migration must still be
        # detected as already applied under --fake-initial.
        with override_settings(
            MIGRATION_MODULES={
                "migrations": (
                    "migrations.test_fake_initial_case_insensitive.fake_initial"
                ),
            }
        ):
            out = io.StringIO()
            call_command(
                "migrate",
                "migrations",
                "0001",
                fake_initial=True,
                stdout=out,
                verbosity=1,
                no_color=True,
            )
            self.assertIn(
                "migrations.0001_initial... faked",
                out.getvalue().lower(),
            )
    @override_settings(
        MIGRATION_MODULES={
            "migrations": "migrations.test_migrations_fake_split_initial"
        }
    )
    def test_migrate_fake_split_initial(self):
        """
        Split initial migrations can be faked with --fake-initial.
        """
        try:
            # Apply both migrations for real, then fake-roll them back so the
            # tables remain for --fake-initial to detect.
            call_command("migrate", "migrations", "0002", verbosity=0)
            call_command("migrate", "migrations", "zero", fake=True, verbosity=0)
            out = io.StringIO()
            with mock.patch(
                "django.core.management.color.supports_color", lambda *args: False
            ):
                call_command(
                    "migrate",
                    "migrations",
                    "0002",
                    fake_initial=True,
                    stdout=out,
                    verbosity=1,
                )
            value = out.getvalue().lower()
            # Both halves of the split initial migration are faked.
            self.assertIn("migrations.0001_initial... faked", value)
            self.assertIn("migrations.0002_second... faked", value)
        finally:
            # Fake an apply.
            call_command("migrate", "migrations", fake=True, verbosity=0)
            # Unmigrate everything.
            call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_conflict"}
)
def test_migrate_conflict_exit(self):
"""
migrate exits if it detects a conflict.
"""
msg = (
"Conflicting migrations detected; multiple leaf nodes in the "
"migration graph: (0002_conflicting_second, 0002_second in "
"migrations).\n"
"To fix them run 'python manage.py makemigrations --merge'"
)
with self.assertRaisesMessage(CommandError, msg):
call_command("migrate", "migrations")
@override_settings(
MIGRATION_MODULES={
"migrations": "migrations.test_migrations",
}
)
def test_migrate_check(self):
with self.assertRaises(SystemExit):
call_command("migrate", "migrations", "0001", check_unapplied=True)
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableNotExists("migrations_book")
@override_settings(
MIGRATION_MODULES={
"migrations": "migrations.test_migrations_plan",
}
)
def test_migrate_check_plan(self):
out = io.StringIO()
with self.assertRaises(SystemExit):
call_command(
"migrate",
"migrations",
"0001",
check_unapplied=True,
plan=True,
stdout=out,
no_color=True,
)
self.assertEqual(
"Planned operations:\n"
"migrations.0001_initial\n"
" Create model Salamander\n"
" Raw Python operation -> Grow salamander tail.\n",
out.getvalue(),
)
    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
    def test_showmigrations_list(self):
        """
        showmigrations --list displays migrations and whether or not they're
        applied.
        """
        out = io.StringIO()
        # Force color support on so the bold (ANSI-coded) app label in the
        # expected output is exercised.
        with mock.patch(
            "django.core.management.color.supports_color", lambda *args: True
        ):
            call_command(
                "showmigrations", format="list", stdout=out, verbosity=0, no_color=False
            )
        self.assertEqual(
            "\x1b[1mmigrations\n\x1b[0m [ ] 0001_initial\n [ ] 0002_second\n",
            out.getvalue().lower(),
        )
        call_command("migrate", "migrations", "0001", verbosity=0)
        out = io.StringIO()
        # Giving the explicit app_label tests for selective `show_list` in the command
        call_command(
            "showmigrations",
            "migrations",
            format="list",
            stdout=out,
            verbosity=0,
            no_color=True,
        )
        self.assertEqual(
            "migrations\n [x] 0001_initial\n [ ] 0002_second\n", out.getvalue().lower()
        )
        out = io.StringIO()
        # Applied datetimes are displayed at verbosity 2+.
        call_command(
            "showmigrations", "migrations", stdout=out, verbosity=2, no_color=True
        )
        migration1 = MigrationRecorder(connection).migration_qs.get(
            app="migrations", name="0001_initial"
        )
        self.assertEqual(
            "migrations\n"
            " [x] 0001_initial (applied at %s)\n"
            " [ ] 0002_second\n" % migration1.applied.strftime("%Y-%m-%d %H:%M:%S"),
            out.getvalue().lower(),
        )
        # Cleanup by unmigrating everything
        call_command("migrate", "migrations", "zero", verbosity=0)
    @override_settings(
        MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
    )
    def test_showmigrations_list_squashed(self):
        """
        showmigrations --list shows a squashed migration with the number of
        migrations it replaces, and reflects its state once applied.
        """
        out = io.StringIO()
        call_command(
            "showmigrations", format="list", stdout=out, verbosity=2, no_color=True
        )
        self.assertEqual(
            "migrations\n [ ] 0001_squashed_0002 (2 squashed migrations)\n",
            out.getvalue().lower(),
        )
        out = io.StringIO()
        call_command(
            "migrate",
            "migrations",
            "0001_squashed_0002",
            stdout=out,
            verbosity=2,
            no_color=True,
        )
        try:
            self.assertIn(
                "operations to perform:\n"
                " target specific migration: 0001_squashed_0002, from migrations\n"
                "running pre-migrate handlers for application migrations\n"
                "running migrations:\n"
                " applying migrations.0001_squashed_0002... ok (",
                out.getvalue().lower(),
            )
            out = io.StringIO()
            call_command(
                "showmigrations", format="list", stdout=out, verbosity=2, no_color=True
            )
            self.assertEqual(
                "migrations\n [x] 0001_squashed_0002 (2 squashed migrations)\n",
                out.getvalue().lower(),
            )
        finally:
            # Unmigrate everything.
            call_command("migrate", "migrations", "zero", verbosity=0)
    @override_settings(
        MIGRATION_MODULES={"migrations": "migrations.test_migrations_run_before"}
    )
    def test_showmigrations_plan(self):
        """
        Tests --plan output of showmigrations command
        """
        out = io.StringIO()
        call_command("showmigrations", format="plan", stdout=out)
        # The run_before dependency orders 0003 ahead of 0002 in the plan.
        self.assertEqual(
            "[ ] migrations.0001_initial\n"
            "[ ] migrations.0003_third\n"
            "[ ] migrations.0002_second\n",
            out.getvalue().lower(),
        )
        out = io.StringIO()
        # At verbosity 2+, each migration's dependencies are listed after
        # "...".
        call_command("showmigrations", format="plan", stdout=out, verbosity=2)
        self.assertEqual(
            "[ ] migrations.0001_initial\n"
            "[ ] migrations.0003_third ... (migrations.0001_initial)\n"
            "[ ] migrations.0002_second ... (migrations.0001_initial, "
            "migrations.0003_third)\n",
            out.getvalue().lower(),
        )
        call_command("migrate", "migrations", "0003", verbosity=0)
        out = io.StringIO()
        call_command("showmigrations", format="plan", stdout=out)
        self.assertEqual(
            "[x] migrations.0001_initial\n"
            "[x] migrations.0003_third\n"
            "[ ] migrations.0002_second\n",
            out.getvalue().lower(),
        )
        out = io.StringIO()
        call_command("showmigrations", format="plan", stdout=out, verbosity=2)
        self.assertEqual(
            "[x] migrations.0001_initial\n"
            "[x] migrations.0003_third ... (migrations.0001_initial)\n"
            "[ ] migrations.0002_second ... (migrations.0001_initial, "
            "migrations.0003_third)\n",
            out.getvalue().lower(),
        )
        # Cleanup by unmigrating everything
        call_command("migrate", "migrations", "zero", verbosity=0)
    @override_settings(
        MIGRATION_MODULES={"migrations": "migrations.test_migrations_plan"}
    )
    def test_migrate_plan(self):
        """
        Tests migrate --plan output.

        Covers the forward plan, the empty plan, the reverse plan, truncation
        of long operation descriptions (trailing "…"), and the IRREVERSIBLE
        marker shown when reverting operations with no reverse.
        """
        out = io.StringIO()
        # Show the plan up to the third migration.
        call_command(
            "migrate", "migrations", "0003", plan=True, stdout=out, no_color=True
        )
        self.assertEqual(
            "Planned operations:\n"
            "migrations.0001_initial\n"
            "    Create model Salamander\n"
            "    Raw Python operation -> Grow salamander tail.\n"
            "migrations.0002_second\n"
            "    Create model Book\n"
            "    Raw SQL operation -> ['SELECT * FROM migrations_book']\n"
            "migrations.0003_third\n"
            "    Create model Author\n"
            "    Raw SQL operation -> ['SELECT * FROM migrations_author']\n",
            out.getvalue(),
        )
        try:
            # Migrate to the third migration.
            call_command("migrate", "migrations", "0003", verbosity=0)
            out = io.StringIO()
            # Show the plan for when there is nothing to apply.
            call_command(
                "migrate", "migrations", "0003", plan=True, stdout=out, no_color=True
            )
            self.assertEqual(
                "Planned operations:\n  No planned migration operations.\n",
                out.getvalue(),
            )
            out = io.StringIO()
            # Show the plan for reverse migration back to 0001.
            call_command(
                "migrate", "migrations", "0001", plan=True, stdout=out, no_color=True
            )
            self.assertEqual(
                "Planned operations:\n"
                "migrations.0003_third\n"
                "    Undo Create model Author\n"
                "    Raw SQL operation -> ['SELECT * FROM migrations_book']\n"
                "migrations.0002_second\n"
                "    Undo Create model Book\n"
                "    Raw SQL operation -> ['SELECT * FROM migrations_salamand…\n",
                out.getvalue(),
            )
            out = io.StringIO()
            # Show the migration plan to fourth, with truncated details.
            call_command(
                "migrate", "migrations", "0004", plan=True, stdout=out, no_color=True
            )
            self.assertEqual(
                "Planned operations:\n"
                "migrations.0004_fourth\n"
                "    Raw SQL operation -> SELECT * FROM migrations_author WHE…\n",
                out.getvalue(),
            )
            # Show the plan when an operation is irreversible.
            # Migrate to the fourth migration.
            call_command("migrate", "migrations", "0004", verbosity=0)
            out = io.StringIO()
            call_command(
                "migrate", "migrations", "0003", plan=True, stdout=out, no_color=True
            )
            self.assertEqual(
                "Planned operations:\n"
                "migrations.0004_fourth\n"
                "    Raw SQL operation -> IRREVERSIBLE\n",
                out.getvalue(),
            )
            out = io.StringIO()
            call_command(
                "migrate", "migrations", "0005", plan=True, stdout=out, no_color=True
            )
            # Operation is marked as irreversible only in the revert plan.
            self.assertEqual(
                "Planned operations:\n"
                "migrations.0005_fifth\n"
                "    Raw Python operation\n"
                "    Raw Python operation\n"
                "    Raw Python operation -> Feed salamander.\n",
                out.getvalue(),
            )
            call_command("migrate", "migrations", "0005", verbosity=0)
            out = io.StringIO()
            call_command(
                "migrate", "migrations", "0004", plan=True, stdout=out, no_color=True
            )
            self.assertEqual(
                "Planned operations:\n"
                "migrations.0005_fifth\n"
                "    Raw Python operation -> IRREVERSIBLE\n"
                "    Raw Python operation -> IRREVERSIBLE\n"
                "    Raw Python operation\n",
                out.getvalue(),
            )
        finally:
            # Cleanup by unmigrating everything: fake the irreversible, then
            # migrate all to zero.
            call_command("migrate", "migrations", "0003", fake=True, verbosity=0)
            call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_empty"}
)
def test_showmigrations_no_migrations(self):
out = io.StringIO()
call_command("showmigrations", stdout=out, no_color=True)
self.assertEqual("migrations\n (no migrations)\n", out.getvalue().lower())
@override_settings(
INSTALLED_APPS=["migrations.migrations_test_apps.unmigrated_app"]
)
def test_showmigrations_unmigrated_app(self):
out = io.StringIO()
call_command("showmigrations", "unmigrated_app", stdout=out, no_color=True)
try:
self.assertEqual(
"unmigrated_app\n (no migrations)\n", out.getvalue().lower()
)
finally:
# unmigrated_app.SillyModel has a foreign key to
# 'migrations.Tribble', but that model is only defined in a
# migration, so the global app registry never sees it and the
# reference is left dangling. Remove it to avoid problems in
# subsequent tests.
apps._pending_operations.pop(("migrations", "tribble"), None)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_empty"}
)
def test_showmigrations_plan_no_migrations(self):
"""
Tests --plan output of showmigrations command without migrations
"""
out = io.StringIO()
call_command("showmigrations", format="plan", stdout=out, no_color=True)
self.assertEqual("(no migrations)\n", out.getvalue().lower())
out = io.StringIO()
call_command(
"showmigrations", format="plan", stdout=out, verbosity=2, no_color=True
)
self.assertEqual("(no migrations)\n", out.getvalue().lower())
    @override_settings(
        MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed_complex"}
    )
    def test_showmigrations_plan_squashed(self):
        """
        Tests --plan output of showmigrations command with squashed migrations.
        """
        out = io.StringIO()
        call_command("showmigrations", format="plan", stdout=out)
        self.assertEqual(
            "[ ] migrations.1_auto\n"
            "[ ] migrations.2_auto\n"
            "[ ] migrations.3_squashed_5\n"
            "[ ] migrations.6_auto\n"
            "[ ] migrations.7_auto\n",
            out.getvalue().lower(),
        )
        out = io.StringIO()
        call_command("showmigrations", format="plan", stdout=out, verbosity=2)
        self.assertEqual(
            "[ ] migrations.1_auto\n"
            "[ ] migrations.2_auto ... (migrations.1_auto)\n"
            "[ ] migrations.3_squashed_5 ... (migrations.2_auto)\n"
            "[ ] migrations.6_auto ... (migrations.3_squashed_5)\n"
            "[ ] migrations.7_auto ... (migrations.6_auto)\n",
            out.getvalue().lower(),
        )
        # Applying up to the squashed migration marks it and everything before
        # it as applied ([x]) in both output formats.
        call_command("migrate", "migrations", "3_squashed_5", verbosity=0)
        out = io.StringIO()
        call_command("showmigrations", format="plan", stdout=out)
        self.assertEqual(
            "[x] migrations.1_auto\n"
            "[x] migrations.2_auto\n"
            "[x] migrations.3_squashed_5\n"
            "[ ] migrations.6_auto\n"
            "[ ] migrations.7_auto\n",
            out.getvalue().lower(),
        )
        out = io.StringIO()
        call_command("showmigrations", format="plan", stdout=out, verbosity=2)
        self.assertEqual(
            "[x] migrations.1_auto\n"
            "[x] migrations.2_auto ... (migrations.1_auto)\n"
            "[x] migrations.3_squashed_5 ... (migrations.2_auto)\n"
            "[ ] migrations.6_auto ... (migrations.3_squashed_5)\n"
            "[ ] migrations.7_auto ... (migrations.6_auto)\n",
            out.getvalue().lower(),
        )
    @override_settings(
        INSTALLED_APPS=[
            "migrations.migrations_test_apps.mutate_state_b",
            "migrations.migrations_test_apps.alter_fk.author_app",
            "migrations.migrations_test_apps.alter_fk.book_app",
        ]
    )
    def test_showmigrations_plan_single_app_label(self):
        """
        `showmigrations --plan app_label` output with a single app_label.

        Migrations from other apps that the requested app depends on
        (book_app) are pulled into the plan.
        """
        # Single app with no dependencies on other apps.
        out = io.StringIO()
        call_command("showmigrations", "mutate_state_b", format="plan", stdout=out)
        self.assertEqual(
            "[ ] mutate_state_b.0001_initial\n[ ] mutate_state_b.0002_add_field\n",
            out.getvalue(),
        )
        # Single app with dependencies.
        out = io.StringIO()
        call_command("showmigrations", "author_app", format="plan", stdout=out)
        self.assertEqual(
            "[ ] author_app.0001_initial\n"
            "[ ] book_app.0001_initial\n"
            "[ ] author_app.0002_alter_id\n",
            out.getvalue(),
        )
        # Some migrations already applied.
        call_command("migrate", "author_app", "0001", verbosity=0)
        out = io.StringIO()
        call_command("showmigrations", "author_app", format="plan", stdout=out)
        self.assertEqual(
            "[X] author_app.0001_initial\n"
            "[ ] book_app.0001_initial\n"
            "[ ] author_app.0002_alter_id\n",
            out.getvalue(),
        )
        # Cleanup by unmigrating author_app.
        call_command("migrate", "author_app", "zero", verbosity=0)
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.mutate_state_b",
"migrations.migrations_test_apps.alter_fk.author_app",
"migrations.migrations_test_apps.alter_fk.book_app",
]
)
def test_showmigrations_plan_multiple_app_labels(self):
"""
`showmigrations --plan app_label` output with multiple app_labels.
"""
# Multiple apps: author_app depends on book_app; mutate_state_b doesn't
# depend on other apps.
out = io.StringIO()
call_command(
"showmigrations", "mutate_state_b", "author_app", format="plan", stdout=out
)
self.assertEqual(
"[ ] author_app.0001_initial\n"
"[ ] book_app.0001_initial\n"
"[ ] author_app.0002_alter_id\n"
"[ ] mutate_state_b.0001_initial\n"
"[ ] mutate_state_b.0002_add_field\n",
out.getvalue(),
)
# Multiple apps: args order shouldn't matter (the same result is
# expected as above).
out = io.StringIO()
call_command(
"showmigrations", "author_app", "mutate_state_b", format="plan", stdout=out
)
self.assertEqual(
"[ ] author_app.0001_initial\n"
"[ ] book_app.0001_initial\n"
"[ ] author_app.0002_alter_id\n"
"[ ] mutate_state_b.0001_initial\n"
"[ ] mutate_state_b.0002_add_field\n",
out.getvalue(),
)
@override_settings(
INSTALLED_APPS=["migrations.migrations_test_apps.unmigrated_app"]
)
def test_showmigrations_plan_app_label_no_migrations(self):
out = io.StringIO()
call_command(
"showmigrations", "unmigrated_app", format="plan", stdout=out, no_color=True
)
try:
self.assertEqual("(no migrations)\n", out.getvalue())
finally:
# unmigrated_app.SillyModel has a foreign key to
# 'migrations.Tribble', but that model is only defined in a
# migration, so the global app registry never sees it and the
# reference is left dangling. Remove it to avoid problems in
# subsequent tests.
apps._pending_operations.pop(("migrations", "tribble"), None)
    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
    def test_sqlmigrate_forwards(self):
        """
        sqlmigrate outputs forward looking SQL.
        """
        out = io.StringIO()
        call_command("sqlmigrate", "migrations", "0001", stdout=out)
        output = out.getvalue().lower()
        # str.find() returns -1 for a missing marker, so the relative-order
        # assertions below also fail when a marker is absent.
        index_tx_start = output.find(connection.ops.start_transaction_sql().lower())
        index_op_desc_author = output.find("-- create model author")
        index_create_table = output.find("create table")
        index_op_desc_tribble = output.find("-- create model tribble")
        index_op_desc_unique_together = output.find("-- alter unique_together")
        index_tx_end = output.find(connection.ops.end_transaction_sql().lower())
        # Transaction wrappers are only emitted when DDL can be rolled back.
        if connection.features.can_rollback_ddl:
            self.assertGreater(index_tx_start, -1, "Transaction start not found")
            self.assertGreater(
                index_tx_end,
                index_op_desc_unique_together,
                "Transaction end not found or found before operation description "
                "(unique_together)",
            )
        self.assertGreater(
            index_op_desc_author,
            index_tx_start,
            "Operation description (author) not found or found before transaction "
            "start",
        )
        self.assertGreater(
            index_create_table,
            index_op_desc_author,
            "CREATE TABLE not found or found before operation description (author)",
        )
        self.assertGreater(
            index_op_desc_tribble,
            index_create_table,
            "Operation description (tribble) not found or found before CREATE TABLE "
            "(author)",
        )
        self.assertGreater(
            index_op_desc_unique_together,
            index_op_desc_tribble,
            "Operation description (unique_together) not found or found before "
            "operation description (tribble)",
        )
    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
    def test_sqlmigrate_backwards(self):
        """
        sqlmigrate outputs reverse looking SQL.
        """
        # Cannot generate the reverse SQL unless we've applied the migration.
        call_command("migrate", "migrations", verbosity=0)
        out = io.StringIO()
        call_command("sqlmigrate", "migrations", "0001", stdout=out, backwards=True)
        output = out.getvalue().lower()
        # str.find() returns -1 for a missing marker, so the relative-order
        # assertions below also fail when a marker is absent.
        index_tx_start = output.find(connection.ops.start_transaction_sql().lower())
        index_op_desc_unique_together = output.find("-- alter unique_together")
        index_op_desc_tribble = output.find("-- create model tribble")
        index_op_desc_author = output.find("-- create model author")
        index_drop_table = output.rfind("drop table")
        index_tx_end = output.find(connection.ops.end_transaction_sql().lower())
        if connection.features.can_rollback_ddl:
            self.assertGreater(index_tx_start, -1, "Transaction start not found")
            # NOTE(review): the failure message mentions DROP TABLE but the
            # comparison operand is the unique_together description index —
            # confirm which operand was intended.
            self.assertGreater(
                index_tx_end,
                index_op_desc_unique_together,
                "Transaction end not found or found before DROP TABLE",
            )
        self.assertGreater(
            index_op_desc_unique_together,
            index_tx_start,
            "Operation description (unique_together) not found or found before "
            "transaction start",
        )
        self.assertGreater(
            index_op_desc_tribble,
            index_op_desc_unique_together,
            "Operation description (tribble) not found or found before operation "
            "description (unique_together)",
        )
        self.assertGreater(
            index_op_desc_author,
            index_op_desc_tribble,
            "Operation description (author) not found or found before operation "
            "description (tribble)",
        )
        self.assertGreater(
            index_drop_table,
            index_op_desc_author,
            "DROP TABLE not found or found before operation description (author)",
        )
        # Cleanup by unmigrating everything
        call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_non_atomic"}
)
def test_sqlmigrate_for_non_atomic_migration(self):
"""
Transaction wrappers aren't shown for non-atomic migrations.
"""
out = io.StringIO()
call_command("sqlmigrate", "migrations", "0001", stdout=out)
output = out.getvalue().lower()
queries = [q.strip() for q in output.splitlines()]
if connection.ops.start_transaction_sql():
self.assertNotIn(connection.ops.start_transaction_sql().lower(), queries)
self.assertNotIn(connection.ops.end_transaction_sql().lower(), queries)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_sqlmigrate_for_non_transactional_databases(self):
"""
Transaction wrappers aren't shown for databases that don't support
transactional DDL.
"""
out = io.StringIO()
with mock.patch.object(connection.features, "can_rollback_ddl", False):
call_command("sqlmigrate", "migrations", "0001", stdout=out)
output = out.getvalue().lower()
queries = [q.strip() for q in output.splitlines()]
start_transaction_sql = connection.ops.start_transaction_sql()
if start_transaction_sql:
self.assertNotIn(start_transaction_sql.lower(), queries)
self.assertNotIn(connection.ops.end_transaction_sql().lower(), queries)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
)
def test_sqlmigrate_ambiguous_prefix_squashed_migrations(self):
msg = (
"More than one migration matches '0001' in app 'migrations'. "
"Please be more specific."
)
with self.assertRaisesMessage(CommandError, msg):
call_command("sqlmigrate", "migrations", "0001")
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
)
def test_sqlmigrate_squashed_migration(self):
out = io.StringIO()
call_command("sqlmigrate", "migrations", "0001_squashed_0002", stdout=out)
output = out.getvalue().lower()
self.assertIn("-- create model author", output)
self.assertIn("-- create model book", output)
self.assertNotIn("-- create model tribble", output)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
)
def test_sqlmigrate_replaced_migration(self):
out = io.StringIO()
call_command("sqlmigrate", "migrations", "0001_initial", stdout=out)
output = out.getvalue().lower()
self.assertIn("-- create model author", output)
self.assertIn("-- create model tribble", output)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_no_operations"}
)
def test_migrations_no_operations(self):
err = io.StringIO()
call_command("sqlmigrate", "migrations", "0001_initial", stderr=err)
self.assertEqual(err.getvalue(), "No operations found.\n")
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.migrated_app",
"migrations.migrations_test_apps.migrated_unapplied_app",
"migrations.migrations_test_apps.unmigrated_app",
],
)
def test_regression_22823_unmigrated_fk_to_migrated_model(self):
"""
Assuming you have 3 apps, `A`, `B`, and `C`, such that:
* `A` has migrations
* `B` has a migration we want to apply
* `C` has no migrations, but has an FK to `A`
When we try to migrate "B", an exception occurs because the
"B" was not included in the ProjectState that is used to detect
soft-applied migrations (#22823).
"""
call_command("migrate", "migrated_unapplied_app", verbosity=0)
# unmigrated_app.SillyModel has a foreign key to 'migrations.Tribble',
# but that model is only defined in a migration, so the global app
# registry never sees it and the reference is left dangling. Remove it
# to avoid problems in subsequent tests.
apps._pending_operations.pop(("migrations", "tribble"), None)
    @override_settings(
        INSTALLED_APPS=["migrations.migrations_test_apps.unmigrated_app_syncdb"]
    )
    def test_migrate_syncdb_deferred_sql_executed_with_schemaeditor(self):
        """
        For an app without migrations, editor.execute() is used for executing
        the syncdb deferred SQL.
        """
        stdout = io.StringIO()
        with mock.patch.object(BaseDatabaseSchemaEditor, "execute") as execute:
            call_command(
                "migrate", run_syncdb=True, verbosity=1, stdout=stdout, no_color=True
            )
            # Two CREATE TABLE statements are expected; one of them is the
            # unmigrated_app_syncdb_classroom table checked in the output
            # below.
            create_table_count = len(
                [call for call in execute.mock_calls if "CREATE TABLE" in str(call)]
            )
            self.assertEqual(create_table_count, 2)
            # There's at least one deferred SQL for creating the foreign key
            # index.
            self.assertGreater(len(execute.mock_calls), 2)
        stdout = stdout.getvalue()
        self.assertIn("Synchronize unmigrated apps: unmigrated_app_syncdb", stdout)
        self.assertIn("Creating tables...", stdout)
        # Table names may be truncated by the backend's identifier limit.
        table_name = truncate_name(
            "unmigrated_app_syncdb_classroom", connection.ops.max_name_length()
        )
        self.assertIn("Creating table %s" % table_name, stdout)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_migrate_syncdb_app_with_migrations(self):
msg = "Can't use run_syncdb with app 'migrations' as it has migrations."
with self.assertRaisesMessage(CommandError, msg):
call_command("migrate", "migrations", run_syncdb=True, verbosity=0)
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.unmigrated_app_syncdb",
"migrations.migrations_test_apps.unmigrated_app_simple",
]
)
def test_migrate_syncdb_app_label(self):
"""
Running migrate --run-syncdb with an app_label only creates tables for
the specified app.
"""
stdout = io.StringIO()
with mock.patch.object(BaseDatabaseSchemaEditor, "execute") as execute:
call_command(
"migrate", "unmigrated_app_syncdb", run_syncdb=True, stdout=stdout
)
create_table_count = len(
[call for call in execute.mock_calls if "CREATE TABLE" in str(call)]
)
self.assertEqual(create_table_count, 2)
self.assertGreater(len(execute.mock_calls), 2)
self.assertIn(
"Synchronize unmigrated app: unmigrated_app_syncdb", stdout.getvalue()
)
    @override_settings(
        MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
    )
    def test_migrate_record_replaced(self):
        """
        Running a single squashed migration should record all of the original
        replaced migrations as run.
        """
        recorder = MigrationRecorder(connection)
        out = io.StringIO()
        call_command("migrate", "migrations", verbosity=0)
        call_command("showmigrations", "migrations", stdout=out, no_color=True)
        self.assertEqual(
            "migrations\n [x] 0001_squashed_0002 (2 squashed migrations)\n",
            out.getvalue().lower(),
        )
        # Applying the squashed migration also records both migrations it
        # replaces.
        applied_migrations = recorder.applied_migrations()
        self.assertIn(("migrations", "0001_initial"), applied_migrations)
        self.assertIn(("migrations", "0002_second"), applied_migrations)
        self.assertIn(("migrations", "0001_squashed_0002"), applied_migrations)
        # Rollback changes
        call_command("migrate", "migrations", "zero", verbosity=0)
    @override_settings(
        MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
    )
    def test_migrate_record_squashed(self):
        """
        Running migrate for a squashed migration should record as run
        if all of the replaced migrations have been run (#25231).
        """
        recorder = MigrationRecorder(connection)
        recorder.record_applied("migrations", "0001_initial")
        recorder.record_applied("migrations", "0002_second")
        out = io.StringIO()
        call_command("showmigrations", "migrations", stdout=out, no_color=True)
        # "[-]" marks a squashed migration whose replaced migrations are all
        # applied but which is not yet recorded itself.
        self.assertEqual(
            "migrations\n"
            " [-] 0001_squashed_0002 (2 squashed migrations) "
            "run 'manage.py migrate' to finish recording.\n",
            out.getvalue().lower(),
        )
        out = io.StringIO()
        call_command("migrate", "migrations", verbosity=0)
        call_command("showmigrations", "migrations", stdout=out, no_color=True)
        self.assertEqual(
            "migrations\n [x] 0001_squashed_0002 (2 squashed migrations)\n",
            out.getvalue().lower(),
        )
        self.assertIn(
            ("migrations", "0001_squashed_0002"), recorder.applied_migrations()
        )
        # No changes were actually applied so there is nothing to rollback
    def test_migrate_partially_applied_squashed_migration(self):
        """
        Migrating to a squashed migration specified by name should succeed
        even if it is partially applied.
        """
        with self.temporary_migration_module(module="migrations.test_migrations"):
            recorder = MigrationRecorder(connection)
            try:
                # Apply only 0001, then squash 0001+0002 so the resulting
                # squashed migration is partially applied.
                call_command("migrate", "migrations", "0001_initial", verbosity=0)
                call_command(
                    "squashmigrations",
                    "migrations",
                    "0002",
                    interactive=False,
                    verbosity=0,
                )
                # Migrating to the squashed migration by name applies the
                # remaining replaced migration (0002_second).
                call_command(
                    "migrate",
                    "migrations",
                    "0001_squashed_0002_second",
                    verbosity=0,
                )
                applied_migrations = recorder.applied_migrations()
                self.assertIn(("migrations", "0002_second"), applied_migrations)
            finally:
                # Unmigrate everything.
                call_command("migrate", "migrations", "zero", verbosity=0)
    @override_settings(
        MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
    )
    def test_migrate_backward_to_squashed_migration(self):
        """
        Migrating backwards to a squashed migration unapplies only what comes
        after it: the book table is dropped while the author table remains.
        """
        try:
            call_command("migrate", "migrations", "0001_squashed_0002", verbosity=0)
            self.assertTableExists("migrations_author")
            self.assertTableExists("migrations_book")
            call_command("migrate", "migrations", "0001_initial", verbosity=0)
            self.assertTableExists("migrations_author")
            self.assertTableNotExists("migrations_book")
        finally:
            # Unmigrate everything.
            call_command("migrate", "migrations", "zero", verbosity=0)
    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
    def test_migrate_inconsistent_history(self):
        """
        Running migrate with some migrations applied before their dependencies
        should not be allowed.
        """
        recorder = MigrationRecorder(connection)
        # 0002_second is recorded as applied without its dependency
        # 0001_initial, producing an inconsistent history.
        recorder.record_applied("migrations", "0002_second")
        msg = (
            "Migration migrations.0002_second is applied before its dependency "
            "migrations.0001_initial"
        )
        with self.assertRaisesMessage(InconsistentMigrationHistory, msg):
            call_command("migrate")
        # The failed migrate must not have recorded anything new.
        applied_migrations = recorder.applied_migrations()
        self.assertNotIn(("migrations", "0001_initial"), applied_migrations)
    @override_settings(
        INSTALLED_APPS=[
            "migrations.migrations_test_apps.migrated_unapplied_app",
            "migrations.migrations_test_apps.migrated_app",
        ]
    )
    def test_migrate_not_reflected_changes(self):
        """
        migrate warns about model changes that aren't reflected in a
        migration yet and suggests running makemigrations.
        """
        # Register models that have no corresponding migrations.
        class NewModel1(models.Model):
            class Meta:
                app_label = "migrated_app"

        class NewModel2(models.Model):
            class Meta:
                app_label = "migrated_unapplied_app"

        out = io.StringIO()
        try:
            call_command("migrate", verbosity=0)
            call_command("migrate", stdout=out, no_color=True)
            self.assertEqual(
                "operations to perform:\n"
                " apply all migrations: migrated_app, migrated_unapplied_app\n"
                "running migrations:\n"
                " no migrations to apply.\n"
                " your models in app(s): 'migrated_app', "
                "'migrated_unapplied_app' have changes that are not yet "
                "reflected in a migration, and so won't be applied.\n"
                " run 'manage.py makemigrations' to make new migrations, and "
                "then re-run 'manage.py migrate' to apply them.\n",
                out.getvalue().lower(),
            )
        finally:
            # Unmigrate everything.
            call_command("migrate", "migrated_app", "zero", verbosity=0)
            call_command("migrate", "migrated_unapplied_app", "zero", verbosity=0)
@override_settings(
MIGRATION_MODULES={
"migrations": "migrations.test_migrations_squashed_no_replaces",
}
)
def test_migrate_prune(self):
"""
With prune=True, references to migration files deleted from the
migrations module (such as after being squashed) are removed from the
django_migrations table.
"""
recorder = MigrationRecorder(connection)
recorder.record_applied("migrations", "0001_initial")
recorder.record_applied("migrations", "0002_second")
recorder.record_applied("migrations", "0001_squashed_0002")
out = io.StringIO()
try:
call_command("migrate", "migrations", prune=True, stdout=out, no_color=True)
self.assertEqual(
out.getvalue(),
"Pruning migrations:\n"
" Pruning migrations.0001_initial OK\n"
" Pruning migrations.0002_second OK\n",
)
applied_migrations = [
migration
for migration in recorder.applied_migrations()
if migration[0] == "migrations"
]
self.assertEqual(applied_migrations, [("migrations", "0001_squashed_0002")])
finally:
recorder.record_unapplied("migrations", "0001_initial")
recorder.record_unapplied("migrations", "0001_second")
recorder.record_unapplied("migrations", "0001_squashed_0002")
    @override_settings(
        MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
    )
    def test_prune_deleted_squashed_migrations_in_replaces(self):
        """
        --prune refuses to prune while a squashed migration still has its
        'replaces' attribute, and explains how to proceed.
        """
        out = io.StringIO()
        with self.temporary_migration_module(
            module="migrations.test_migrations_squashed"
        ) as migration_dir:
            try:
                call_command("migrate", "migrations", verbosity=0)
                # Delete the replaced migrations.
                os.remove(os.path.join(migration_dir, "0001_initial.py"))
                os.remove(os.path.join(migration_dir, "0002_second.py"))
                # --prune cannot be used before removing the "replaces"
                # attribute.
                call_command(
                    "migrate",
                    "migrations",
                    prune=True,
                    stdout=out,
                    no_color=True,
                )
                self.assertEqual(
                    out.getvalue(),
                    "Pruning migrations:\n"
                    " Cannot use --prune because the following squashed "
                    "migrations have their 'replaces' attributes and may not "
                    "be recorded as applied:\n"
                    " migrations.0001_squashed_0002\n"
                    " Re-run 'manage.py migrate' if they are not marked as "
                    "applied, and remove 'replaces' attributes in their "
                    "Migration classes.\n",
                )
            finally:
                # Unmigrate everything.
                call_command("migrate", "migrations", "zero", verbosity=0)
    @override_settings(
        MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
    )
    def test_prune_no_migrations_to_prune(self):
        """
        --prune reports when there is nothing to prune, and prints nothing
        at verbosity 0.
        """
        out = io.StringIO()
        call_command("migrate", "migrations", prune=True, stdout=out, no_color=True)
        self.assertEqual(
            out.getvalue(),
            "Pruning migrations:\n No migrations to prune.\n",
        )
        # The same invocation is silent at verbosity 0.
        out = io.StringIO()
        call_command(
            "migrate",
            "migrations",
            prune=True,
            stdout=out,
            no_color=True,
            verbosity=0,
        )
        self.assertEqual(out.getvalue(), "")
    def test_prune_no_app_label(self):
        """migrate --prune raises CommandError without an app label."""
        msg = "Migrations can be pruned only when an app is specified."
        with self.assertRaisesMessage(CommandError, msg):
            call_command("migrate", prune=True)
class MakeMigrationsTests(MigrationTestBase):
"""
Tests running the makemigrations command.
"""
    def setUp(self):
        super().setUp()
        # Snapshot the models registered for the "migrations" app so tests
        # that register temporary models can be undone in tearDown().
        self._old_models = apps.app_configs["migrations"].models.copy()
    def tearDown(self):
        # Restore the model registry snapshot taken in setUp() and clear the
        # app registry caches so temporary test models don't leak.
        apps.app_configs["migrations"].models = self._old_models
        apps.all_models["migrations"] = self._old_models
        apps.clear_cache()
        super().tearDown()
    def test_files_content(self):
        """
        makemigrations writes an empty __init__.py and a UTF-8 encoded
        0001_initial.py containing the model's non-ASCII strings.
        """
        self.assertTableNotExists("migrations_unicodemodel")
        apps.register_model("migrations", UnicodeModel)
        with self.temporary_migration_module() as migration_dir:
            call_command("makemigrations", "migrations", verbosity=0)

            # Check for empty __init__.py file in migrations folder
            init_file = os.path.join(migration_dir, "__init__.py")
            self.assertTrue(os.path.exists(init_file))

            with open(init_file) as fp:
                content = fp.read()
            self.assertEqual(content, "")

            # Check for existing 0001_initial.py file in migration folder
            initial_file = os.path.join(migration_dir, "0001_initial.py")
            self.assertTrue(os.path.exists(initial_file))

            with open(initial_file, encoding="utf-8") as fp:
                content = fp.read()
                self.assertIn("migrations.CreateModel", content)
                self.assertIn("initial = True", content)

                self.assertIn("úñí©óðé µóðéø", content)  # Meta.verbose_name
                self.assertIn("úñí©óðé µóðéøß", content)  # Meta.verbose_name_plural
                self.assertIn("ÚÑÍ¢ÓÐÉ", content)  # title.verbose_name
                self.assertIn("“Ðjáñgó”", content)  # title.default
    def test_makemigrations_order(self):
        """
        makemigrations should recognize number-only migrations (0001.py).
        """
        module = "migrations.test_migrations_order"
        with self.temporary_migration_module(module=module) as migration_dir:
            if hasattr(importlib, "invalidate_caches"):
                # importlib caches os.listdir() on some platforms like macOS
                # (#23850).
                importlib.invalidate_caches()
            call_command(
                "makemigrations", "migrations", "--empty", "-n", "a", "-v", "0"
            )
            # The new migration is numbered after the existing "0001.py".
            self.assertTrue(os.path.exists(os.path.join(migration_dir, "0002_a.py")))
    def test_makemigrations_empty_connections(self):
        """
        makemigrations still works when the 'default' connection has empty
        settings (no database to run consistency checks against).
        """
        empty_connections = ConnectionHandler({"default": {}})
        with mock.patch(
            "django.core.management.commands.makemigrations.connections",
            new=empty_connections,
        ):
            # with no apps
            out = io.StringIO()
            call_command("makemigrations", stdout=out)
            self.assertIn("No changes detected", out.getvalue())
            # with an app
            with self.temporary_migration_module() as migration_dir:
                call_command("makemigrations", "migrations", verbosity=0)
                init_file = os.path.join(migration_dir, "__init__.py")
                self.assertTrue(os.path.exists(init_file))
    @override_settings(INSTALLED_APPS=["migrations", "migrations2"])
    def test_makemigrations_consistency_checks_respect_routers(self):
        """
        The history consistency checks in makemigrations respect
        settings.DATABASE_ROUTERS.
        """

        # Raise only when the consistency check reaches the 'other'
        # connection; mock.DEFAULT keeps the normal behavior otherwise.
        def patched_has_table(migration_recorder):
            if migration_recorder.connection is connections["other"]:
                raise Exception("Other connection")
            else:
                return mock.DEFAULT

        self.assertTableNotExists("migrations_unicodemodel")
        apps.register_model("migrations", UnicodeModel)
        with mock.patch.object(
            MigrationRecorder, "has_table", autospec=True, side_effect=patched_has_table
        ) as has_table:
            with self.temporary_migration_module() as migration_dir:
                call_command("makemigrations", "migrations", verbosity=0)
                initial_file = os.path.join(migration_dir, "0001_initial.py")
                self.assertTrue(os.path.exists(initial_file))
                self.assertEqual(has_table.call_count, 1)  # 'default' is checked

                # Router says not to migrate 'other' so consistency shouldn't
                # be checked.
                with self.settings(DATABASE_ROUTERS=["migrations.routers.TestRouter"]):
                    call_command("makemigrations", "migrations", verbosity=0)
                self.assertEqual(has_table.call_count, 2)  # 'default' again

                # With a router that doesn't prohibit migrating 'other',
                # consistency is checked.
                with self.settings(
                    DATABASE_ROUTERS=["migrations.routers.DefaultOtherRouter"]
                ):
                    with self.assertRaisesMessage(Exception, "Other connection"):
                        call_command("makemigrations", "migrations", verbosity=0)
                self.assertEqual(has_table.call_count, 4)  # 'default' and 'other'

                # With a router that doesn't allow migrating on any database,
                # no consistency checks are made.
                with self.settings(DATABASE_ROUTERS=["migrations.routers.TestRouter"]):
                    with mock.patch.object(
                        TestRouter, "allow_migrate", return_value=False
                    ) as allow_migrate:
                        call_command("makemigrations", "migrations", verbosity=0)
                    allow_migrate.assert_any_call(
                        "other", "migrations", model_name="UnicodeModel"
                    )
                    # allow_migrate() is called with the correct arguments.
                    self.assertGreater(len(allow_migrate.mock_calls), 0)
                    called_aliases = set()
                    for mock_call in allow_migrate.mock_calls:
                        _, call_args, call_kwargs = mock_call
                        connection_alias, app_name = call_args
                        called_aliases.add(connection_alias)
                        # Raises an error if invalid app_name/model_name occurs.
                        apps.get_app_config(app_name).get_model(call_kwargs["model_name"])
                    # Every configured connection alias was consulted.
                    self.assertEqual(called_aliases, set(connections))
                self.assertEqual(has_table.call_count, 4)
    def test_failing_migration(self):
        """
        A migration that fails to serialize must not leave a partial
        migration file behind.
        """
        # If a migration fails to serialize, it shouldn't generate an empty file. #21280
        apps.register_model("migrations", UnserializableModel)
        with self.temporary_migration_module() as migration_dir:
            with self.assertRaisesMessage(ValueError, "Cannot serialize"):
                call_command("makemigrations", "migrations", verbosity=0)

            initial_file = os.path.join(migration_dir, "0001_initial.py")
            self.assertFalse(os.path.exists(initial_file))
    def test_makemigrations_conflict_exit(self):
        """
        makemigrations exits if it detects a conflict.
        """
        with self.temporary_migration_module(
            module="migrations.test_migrations_conflict"
        ):
            with self.assertRaises(CommandError) as context:
                call_command("makemigrations")
            # The error names both conflicting leaf migrations and suggests
            # --merge.
            self.assertEqual(
                str(context.exception),
                "Conflicting migrations detected; multiple leaf nodes in the "
                "migration graph: (0002_conflicting_second, 0002_second in "
                "migrations).\n"
                "To fix them run 'python manage.py makemigrations --merge'",
            )
    def test_makemigrations_merge_no_conflict(self):
        """
        makemigrations exits if in merge mode with no conflicts.
        """
        out = io.StringIO()
        with self.temporary_migration_module(module="migrations.test_migrations"):
            call_command("makemigrations", merge=True, stdout=out)
            self.assertIn("No conflicts detected to merge.", out.getvalue())
    def test_makemigrations_empty_no_app_specified(self):
        """
        makemigrations exits if no app is specified with 'empty' mode.
        """
        msg = "You must supply at least one app label when using --empty."
        with self.assertRaisesMessage(CommandError, msg):
            call_command("makemigrations", empty=True)
    def test_makemigrations_empty_migration(self):
        """
        makemigrations properly constructs an empty migration.
        """
        with self.temporary_migration_module() as migration_dir:
            call_command("makemigrations", "migrations", empty=True, verbosity=0)

            # Check for existing 0001_initial.py file in migration folder
            initial_file = os.path.join(migration_dir, "0001_initial.py")
            self.assertTrue(os.path.exists(initial_file))

            with open(initial_file, encoding="utf-8") as fp:
                content = fp.read()

                # Remove all whitespace to check for empty dependencies and operations
                content = content.replace(" ", "")
                # The serialized form differs depending on whether the black
                # formatter is installed.
                self.assertIn(
                    "dependencies=[]" if HAS_BLACK else "dependencies=[\n]", content
                )
                self.assertIn(
                    "operations=[]" if HAS_BLACK else "operations=[\n]", content
                )
    @override_settings(MIGRATION_MODULES={"migrations": None})
    def test_makemigrations_disabled_migrations_for_app(self):
        """
        makemigrations raises a nice error when migrations are disabled for an
        app.
        """
        msg = (
            "Django can't create migrations for app 'migrations' because migrations "
            "have been disabled via the MIGRATION_MODULES setting."
        )
        with self.assertRaisesMessage(ValueError, msg):
            call_command("makemigrations", "migrations", empty=True, verbosity=0)
    def test_makemigrations_no_changes_no_apps(self):
        """
        makemigrations exits when there are no changes and no apps are specified.
        """
        out = io.StringIO()
        call_command("makemigrations", stdout=out)
        self.assertIn("No changes detected", out.getvalue())
    def test_makemigrations_no_changes(self):
        """
        makemigrations exits when there are no changes to an app.
        """
        out = io.StringIO()
        with self.temporary_migration_module(
            module="migrations.test_migrations_no_changes"
        ):
            call_command("makemigrations", "migrations", stdout=out)
            # The message names the specific app when one was given.
            self.assertIn("No changes detected in app 'migrations'", out.getvalue())
    def test_makemigrations_no_apps_initial(self):
        """
        makemigrations should detect initial is needed on empty migration
        modules if no app provided.
        """
        out = io.StringIO()
        with self.temporary_migration_module(module="migrations.test_migrations_empty"):
            call_command("makemigrations", stdout=out)
            self.assertIn("0001_initial.py", out.getvalue())
    def test_makemigrations_no_init(self):
        """Migration directories without an __init__.py file are allowed."""
        out = io.StringIO()
        with self.temporary_migration_module(
            module="migrations.test_migrations_no_init"
        ):
            call_command("makemigrations", stdout=out)
            self.assertIn("0001_initial.py", out.getvalue())
    def test_makemigrations_migrations_announce(self):
        """
        makemigrations announces the migration at the default verbosity level.
        """
        out = io.StringIO()
        with self.temporary_migration_module():
            call_command("makemigrations", "migrations", stdout=out)
            self.assertIn("Migrations for 'migrations'", out.getvalue())
    def test_makemigrations_no_common_ancestor(self):
        """
        makemigrations fails to merge migrations with no common ancestor.
        """
        with self.assertRaises(ValueError) as context:
            with self.temporary_migration_module(
                module="migrations.test_migrations_no_ancestor"
            ):
                call_command("makemigrations", "migrations", merge=True)
        # The error names both un-mergeable branches.
        exception_message = str(context.exception)
        self.assertIn("Could not find common ancestor of", exception_message)
        self.assertIn("0002_second", exception_message)
        self.assertIn("0002_conflicting_second", exception_message)
    def test_makemigrations_interactive_reject(self):
        """
        makemigrations enters and exits interactive mode properly.
        """
        # Monkeypatch interactive questioner to auto reject
        with mock.patch("builtins.input", mock.Mock(return_value="N")):
            with self.temporary_migration_module(
                module="migrations.test_migrations_conflict"
            ) as migration_dir:
                with captured_stdout():
                    call_command(
                        "makemigrations",
                        "migrations",
                        name="merge",
                        merge=True,
                        interactive=True,
                        verbosity=0,
                    )
                # The rejected merge must not have been written to disk.
                merge_file = os.path.join(migration_dir, "0003_merge.py")
                self.assertFalse(os.path.exists(merge_file))
    def test_makemigrations_interactive_accept(self):
        """
        makemigrations enters interactive mode and merges properly.
        """
        # Monkeypatch interactive questioner to auto accept
        with mock.patch("builtins.input", mock.Mock(return_value="y")):
            out = io.StringIO()
            with self.temporary_migration_module(
                module="migrations.test_migrations_conflict"
            ) as migration_dir:
                call_command(
                    "makemigrations",
                    "migrations",
                    name="merge",
                    merge=True,
                    interactive=True,
                    stdout=out,
                )
                # The accepted merge is written under the requested name.
                merge_file = os.path.join(migration_dir, "0003_merge.py")
                self.assertTrue(os.path.exists(merge_file))
            self.assertIn("Created new merge migration", out.getvalue())
    def test_makemigrations_default_merge_name(self):
        """
        Without an explicit name, the merge migration's filename includes the
        names of the migrations being merged.
        """
        out = io.StringIO()
        with self.temporary_migration_module(
            module="migrations.test_migrations_conflict"
        ) as migration_dir:
            call_command(
                "makemigrations",
                "migrations",
                merge=True,
                interactive=False,
                stdout=out,
            )
            merge_file = os.path.join(
                migration_dir,
                "0003_merge_0002_conflicting_second_0002_second.py",
            )
            self.assertIs(os.path.exists(merge_file), True)
            with open(merge_file, encoding="utf-8") as fp:
                content = fp.read()
            # Serialized dependency quoting differs with/without black.
            if HAS_BLACK:
                target_str = '("migrations", "0002_conflicting_second")'
            else:
                target_str = "('migrations', '0002_conflicting_second')"
            self.assertIn(target_str, content)
        self.assertIn("Created new merge migration %s" % merge_file, out.getvalue())
    @mock.patch("django.db.migrations.utils.datetime")
    def test_makemigrations_auto_merge_name(self, mock_datetime):
        """
        When the merged migration names are too long for a combined filename,
        a timestamp-based merge name is used (datetime is mocked so the name
        is deterministic).
        """
        mock_datetime.datetime.now.return_value = datetime.datetime(2016, 1, 2, 3, 4)
        with mock.patch("builtins.input", mock.Mock(return_value="y")):
            out = io.StringIO()
            with self.temporary_migration_module(
                module="migrations.test_migrations_conflict_long_name"
            ) as migration_dir:
                call_command(
                    "makemigrations",
                    "migrations",
                    merge=True,
                    interactive=True,
                    stdout=out,
                )
                merge_file = os.path.join(migration_dir, "0003_merge_20160102_0304.py")
                self.assertTrue(os.path.exists(merge_file))
            self.assertIn("Created new merge migration", out.getvalue())
    def test_makemigrations_non_interactive_not_null_addition(self):
        """
        Non-interactive makemigrations fails when a default is missing on a
        new not-null field.
        """

        # silly_int has no default, which can't be resolved without a prompt.
        class SillyModel(models.Model):
            silly_field = models.BooleanField(default=False)
            silly_int = models.IntegerField()

            class Meta:
                app_label = "migrations"

        with self.assertRaises(SystemExit):
            with self.temporary_migration_module(
                module="migrations.test_migrations_no_default"
            ):
                with captured_stdout() as out:
                    call_command("makemigrations", "migrations", interactive=False)
        self.assertIn(
            "Field 'silly_int' on model 'sillymodel' not migrated: it is "
            "impossible to add a non-nullable field without specifying a "
            "default.",
            out.getvalue(),
        )
    def test_makemigrations_interactive_not_null_addition(self):
        """
        makemigrations messages when adding a NOT NULL field in interactive
        mode.
        """

        class Author(models.Model):
            silly_field = models.BooleanField(null=False)

            class Meta:
                app_label = "migrations"

        input_msg = (
            "It is impossible to add a non-nullable field 'silly_field' to "
            "author without specifying a default. This is because the "
            "database needs something to populate existing rows.\n"
            "Please select a fix:\n"
            " 1) Provide a one-off default now (will be set on all existing "
            "rows with a null value for this column)\n"
            " 2) Quit and manually define a default value in models.py."
        )
        with self.temporary_migration_module(module="migrations.test_migrations"):
            # 2 - quit.
            with mock.patch("builtins.input", return_value="2"):
                with captured_stdout() as out, self.assertRaises(SystemExit):
                    call_command("makemigrations", "migrations", interactive=True)
            self.assertIn(input_msg, out.getvalue())
            # 1 - provide a default.
            with mock.patch("builtins.input", return_value="1"):
                with captured_stdout() as out:
                    call_command("makemigrations", "migrations", interactive=True)
            output = out.getvalue()
            self.assertIn(input_msg, output)
            # Choosing option 1 triggers the default-value prompt with its
            # help text.
            self.assertIn("Please enter the default value as valid Python.", output)
            self.assertIn(
                "The datetime and django.utils.timezone modules are "
                "available, so it is possible to provide e.g. timezone.now as "
                "a value",
                output,
            )
            self.assertIn("Type 'exit' to exit this prompt", output)
    def test_makemigrations_non_interactive_not_null_alteration(self):
        """
        Non-interactive makemigrations fails when a default is missing on a
        field changed to not-null.
        """

        class Author(models.Model):
            name = models.CharField(max_length=255)
            slug = models.SlugField()
            age = models.IntegerField(default=0)

            class Meta:
                app_label = "migrations"

        with self.temporary_migration_module(module="migrations.test_migrations"):
            with captured_stdout() as out:
                call_command("makemigrations", "migrations", interactive=False)
        # The alteration is still generated, but with a placeholder default
        # that the user must fix.
        self.assertIn("Alter field slug on author", out.getvalue())
        self.assertIn(
            "Field 'slug' on model 'author' given a default of NOT PROVIDED "
            "and must be corrected.",
            out.getvalue(),
        )
    def test_makemigrations_interactive_not_null_alteration(self):
        """
        makemigrations messages when changing a NULL field to NOT NULL in
        interactive mode.
        """

        class Author(models.Model):
            slug = models.SlugField(null=False)

            class Meta:
                app_label = "migrations"

        input_msg = (
            "It is impossible to change a nullable field 'slug' on author to "
            "non-nullable without providing a default. This is because the "
            "database needs something to populate existing rows.\n"
            "Please select a fix:\n"
            " 1) Provide a one-off default now (will be set on all existing "
            "rows with a null value for this column)\n"
            " 2) Ignore for now. Existing rows that contain NULL values will "
            "have to be handled manually, for example with a RunPython or "
            "RunSQL operation.\n"
            " 3) Quit and manually define a default value in models.py."
        )
        with self.temporary_migration_module(module="migrations.test_migrations"):
            # No message appears if --dry-run.
            with captured_stdout() as out:
                call_command(
                    "makemigrations",
                    "migrations",
                    interactive=True,
                    dry_run=True,
                )
            self.assertNotIn(input_msg, out.getvalue())
            # 3 - quit.
            with mock.patch("builtins.input", return_value="3"):
                with captured_stdout() as out, self.assertRaises(SystemExit):
                    call_command("makemigrations", "migrations", interactive=True)
            self.assertIn(input_msg, out.getvalue())
            # 1 - provide a default.
            with mock.patch("builtins.input", return_value="1"):
                with captured_stdout() as out:
                    call_command("makemigrations", "migrations", interactive=True)
            output = out.getvalue()
            self.assertIn(input_msg, output)
            # Choosing option 1 triggers the default-value prompt with its
            # help text.
            self.assertIn("Please enter the default value as valid Python.", output)
            self.assertIn(
                "The datetime and django.utils.timezone modules are "
                "available, so it is possible to provide e.g. timezone.now as "
                "a value",
                output,
            )
            self.assertIn("Type 'exit' to exit this prompt", output)
    def test_makemigrations_non_interactive_no_model_rename(self):
        """
        makemigrations adds and removes a possible model rename in
        non-interactive mode.
        """

        # Without a prompt, the rename can't be confirmed, so a
        # delete + create pair is generated instead.
        class RenamedModel(models.Model):
            silly_field = models.BooleanField(default=False)

            class Meta:
                app_label = "migrations"

        out = io.StringIO()
        with self.temporary_migration_module(
            module="migrations.test_migrations_no_default"
        ):
            call_command("makemigrations", "migrations", interactive=False, stdout=out)
            self.assertIn("Delete model SillyModel", out.getvalue())
            self.assertIn("Create model RenamedModel", out.getvalue())
    def test_makemigrations_non_interactive_no_field_rename(self):
        """
        makemigrations adds and removes a possible field rename in
        non-interactive mode.
        """

        # Without a prompt, the rename can't be confirmed, so a
        # remove + add pair is generated instead.
        class SillyModel(models.Model):
            silly_rename = models.BooleanField(default=False)

            class Meta:
                app_label = "migrations"

        out = io.StringIO()
        with self.temporary_migration_module(
            module="migrations.test_migrations_no_default"
        ):
            call_command("makemigrations", "migrations", interactive=False, stdout=out)
            self.assertIn("Remove field silly_field from sillymodel", out.getvalue())
            self.assertIn("Add field silly_rename to sillymodel", out.getvalue())
    @mock.patch("builtins.input", return_value="Y")
    def test_makemigrations_model_rename_interactive(self, mock_input):
        """
        Interactive makemigrations generates a RenameModel operation when the
        user confirms the rename prompt.
        """

        class RenamedModel(models.Model):
            silly_field = models.BooleanField(default=False)

            class Meta:
                app_label = "migrations"

        with self.temporary_migration_module(
            module="migrations.test_migrations_no_default",
        ):
            with captured_stdout() as out:
                call_command("makemigrations", "migrations", interactive=True)
        self.assertIn("Rename model SillyModel to RenamedModel", out.getvalue())
    @mock.patch("builtins.input", return_value="Y")
    def test_makemigrations_field_rename_interactive(self, mock_input):
        """
        Interactive makemigrations generates a RenameField operation when the
        user confirms the rename prompt.
        """

        class SillyModel(models.Model):
            silly_rename = models.BooleanField(default=False)

            class Meta:
                app_label = "migrations"

        with self.temporary_migration_module(
            module="migrations.test_migrations_no_default",
        ):
            with captured_stdout() as out:
                call_command("makemigrations", "migrations", interactive=True)
        self.assertIn(
            "Rename field silly_field on sillymodel to silly_rename",
            out.getvalue(),
        )
    def test_makemigrations_handle_merge(self):
        """
        makemigrations properly merges the conflicting migrations with --noinput.
        """
        out = io.StringIO()
        with self.temporary_migration_module(
            module="migrations.test_migrations_conflict"
        ) as migration_dir:
            call_command(
                "makemigrations",
                "migrations",
                name="merge",
                merge=True,
                interactive=False,
                stdout=out,
            )
            # The merge file is created without any user prompt.
            merge_file = os.path.join(migration_dir, "0003_merge.py")
            self.assertTrue(os.path.exists(merge_file))
        output = out.getvalue()
        self.assertIn("Merging migrations", output)
        self.assertIn("Branch 0002_second", output)
        self.assertIn("Branch 0002_conflicting_second", output)
        self.assertIn("Created new merge migration", output)
    def test_makemigration_merge_dry_run(self):
        """
        makemigrations respects --dry-run option when fixing migration
        conflicts (#24427).
        """
        out = io.StringIO()
        with self.temporary_migration_module(
            module="migrations.test_migrations_conflict"
        ) as migration_dir:
            call_command(
                "makemigrations",
                "migrations",
                name="merge",
                dry_run=True,
                merge=True,
                interactive=False,
                stdout=out,
            )
            # The merge is announced but no file is written.
            merge_file = os.path.join(migration_dir, "0003_merge.py")
            self.assertFalse(os.path.exists(merge_file))
        output = out.getvalue()
        self.assertIn("Merging migrations", output)
        self.assertIn("Branch 0002_second", output)
        self.assertIn("Branch 0002_conflicting_second", output)
        self.assertNotIn("Created new merge migration", output)
    def test_makemigration_merge_dry_run_verbosity_3(self):
        """
        `makemigrations --merge --dry-run` writes the merge migration file to
        stdout with `verbosity == 3` (#24427).
        """
        out = io.StringIO()
        with self.temporary_migration_module(
            module="migrations.test_migrations_conflict"
        ) as migration_dir:
            call_command(
                "makemigrations",
                "migrations",
                name="merge",
                dry_run=True,
                merge=True,
                interactive=False,
                stdout=out,
                verbosity=3,
            )
            # Still a dry run: nothing is written to disk.
            merge_file = os.path.join(migration_dir, "0003_merge.py")
            self.assertFalse(os.path.exists(merge_file))
        output = out.getvalue()
        self.assertIn("Merging migrations", output)
        self.assertIn("Branch 0002_second", output)
        self.assertIn("Branch 0002_conflicting_second", output)
        self.assertNotIn("Created new merge migration", output)

        # Additional output caused by verbosity 3
        # The complete merge migration file that would be written
        self.assertIn("class Migration(migrations.Migration):", output)
        self.assertIn("dependencies = [", output)
        self.assertIn("('migrations', '0002_second')", output)
        self.assertIn("('migrations', '0002_conflicting_second')", output)
        self.assertIn("operations = [", output)
        self.assertIn("]", output)
    def test_makemigrations_dry_run(self):
        """
        `makemigrations --dry-run` should not ask for defaults.
        """

        class SillyModel(models.Model):
            silly_field = models.BooleanField(default=False)
            silly_date = models.DateField()  # Added field without a default
            silly_auto_now = models.DateTimeField(auto_now_add=True)

            class Meta:
                app_label = "migrations"

        out = io.StringIO()
        with self.temporary_migration_module(
            module="migrations.test_migrations_no_default"
        ):
            call_command("makemigrations", "migrations", dry_run=True, stdout=out)
        # Output the expected changes directly, without asking for defaults
        self.assertIn("Add field silly_date to sillymodel", out.getvalue())
    def test_makemigrations_dry_run_verbosity_3(self):
        """
        Allow `makemigrations --dry-run` to output the migrations file to
        stdout (with verbosity == 3).
        """

        class SillyModel(models.Model):
            silly_field = models.BooleanField(default=False)
            silly_char = models.CharField(default="")

            class Meta:
                app_label = "migrations"

        out = io.StringIO()
        with self.temporary_migration_module(
            module="migrations.test_migrations_no_default"
        ):
            call_command(
                "makemigrations", "migrations", dry_run=True, stdout=out, verbosity=3
            )

        # Normal --dry-run output
        self.assertIn("- Add field silly_char to sillymodel", out.getvalue())

        # Additional output caused by verbosity 3
        # The complete migrations file that would be written
        self.assertIn("class Migration(migrations.Migration):", out.getvalue())
        self.assertIn("dependencies = [", out.getvalue())
        self.assertIn("('migrations', '0001_initial'),", out.getvalue())
        self.assertIn("migrations.AddField(", out.getvalue())
        self.assertIn("model_name='sillymodel',", out.getvalue())
        self.assertIn("name='silly_char',", out.getvalue())
    def test_makemigrations_scriptable(self):
        """
        With scriptable=True, log output is diverted to stderr, and only the
        paths of generated migration files are written to stdout.
        """
        out = io.StringIO()
        err = io.StringIO()
        # NOTE(review): "migrations.migrations.test_migrations" is not an
        # importable module — presumably relied on so the temporary migration
        # dir starts empty and 0001_initial is freshly generated; confirm
        # against temporary_migration_module()'s import-failure fallback.
        with self.temporary_migration_module(
            module="migrations.migrations.test_migrations",
        ) as migration_dir:
            call_command(
                "makemigrations",
                "migrations",
                scriptable=True,
                stdout=out,
                stderr=err,
            )
        initial_file = os.path.join(migration_dir, "0001_initial.py")
        # stdout carries exactly the created file path; logs go to stderr.
        self.assertEqual(out.getvalue(), f"{initial_file}\n")
        self.assertIn("    - Create model ModelWithCustomBase\n", err.getvalue())
    @mock.patch("builtins.input", return_value="Y")
    def test_makemigrations_scriptable_merge(self, mock_input):
        """
        With scriptable=True, a merge writes only the merge migration's path
        to stdout; log output goes to stderr.
        """
        out = io.StringIO()
        err = io.StringIO()
        with self.temporary_migration_module(
            module="migrations.test_migrations_conflict",
        ) as migration_dir:
            call_command(
                "makemigrations",
                "migrations",
                merge=True,
                name="merge",
                scriptable=True,
                stdout=out,
                stderr=err,
            )
        merge_file = os.path.join(migration_dir, "0003_merge.py")
        self.assertEqual(out.getvalue(), f"{merge_file}\n")
        self.assertIn(f"Created new merge migration {merge_file}", err.getvalue())
    def test_makemigrations_migrations_modules_path_not_exist(self):
        """
        makemigrations creates migrations when specifying a custom location
        for migration files using MIGRATION_MODULES if the custom path
        doesn't already exist.
        """

        class SillyModel(models.Model):
            silly_field = models.BooleanField(default=False)

            class Meta:
                app_label = "migrations"

        out = io.StringIO()
        # Nested package path that doesn't exist yet under the toplevel.
        migration_module = "migrations.test_migrations_path_doesnt_exist.foo.bar"
        with self.temporary_migration_module(module=migration_module) as migration_dir:
            call_command("makemigrations", "migrations", stdout=out)

            # Migrations file is actually created in the expected path.
            initial_file = os.path.join(migration_dir, "0001_initial.py")
            self.assertTrue(os.path.exists(initial_file))

        # Command output indicates the migration is created.
        self.assertIn(" - Create model SillyModel", out.getvalue())
    @override_settings(MIGRATION_MODULES={"migrations": "some.nonexistent.path"})
    def test_makemigrations_migrations_modules_nonexistent_toplevel_package(self):
        """
        makemigrations raises when MIGRATION_MODULES points under a toplevel
        package that cannot be imported.
        """
        msg = (
            "Could not locate an appropriate location to create migrations "
            "package some.nonexistent.path. Make sure the toplevel package "
            "exists and can be imported."
        )
        with self.assertRaisesMessage(ValueError, msg):
            call_command("makemigrations", "migrations", empty=True, verbosity=0)
def test_makemigrations_interactive_by_default(self):
    """
    The user is prompted to merge by default if there are conflicts and
    merge is True. Answer negative to differentiate it from behavior when
    --noinput is specified.
    """
    # Monkeypatch interactive questioner to auto reject
    out = io.StringIO()
    with mock.patch("builtins.input", mock.Mock(return_value="N")):
        with self.temporary_migration_module(
            module="migrations.test_migrations_conflict"
        ) as migration_dir:
            call_command(
                "makemigrations", "migrations", name="merge", merge=True, stdout=out
            )
            merge_file = os.path.join(migration_dir, "0003_merge.py")
            # This will fail if interactive is False by default
            self.assertFalse(os.path.exists(merge_file))
        self.assertNotIn("Created new merge migration", out.getvalue())
@override_settings(
    INSTALLED_APPS=[
        "migrations",
        "migrations.migrations_test_apps.unspecified_app_with_conflict",
    ]
)
def test_makemigrations_unspecified_app_with_conflict_no_merge(self):
    """
    makemigrations does not raise a CommandError when an unspecified app
    has conflicting migrations.
    """
    # The conflict lives in an installed app not named on the command line.
    with self.temporary_migration_module(
        module="migrations.test_migrations_no_changes"
    ):
        call_command("makemigrations", "migrations", merge=False, verbosity=0)
@override_settings(
    INSTALLED_APPS=[
        "migrations.migrations_test_apps.migrated_app",
        "migrations.migrations_test_apps.unspecified_app_with_conflict",
    ]
)
def test_makemigrations_unspecified_app_with_conflict_merge(self):
    """
    makemigrations does not create a merge for an unspecified app even if
    it has conflicting migrations.
    """
    # Monkeypatch interactive questioner to auto accept
    with mock.patch("builtins.input", mock.Mock(return_value="y")):
        out = io.StringIO()
        with self.temporary_migration_module(
            app_label="migrated_app"
        ) as migration_dir:
            call_command(
                "makemigrations",
                "migrated_app",
                name="merge",
                merge=True,
                interactive=True,
                stdout=out,
            )
            merge_file = os.path.join(migration_dir, "0003_merge.py")
            # No merge file for the app that was actually specified.
            self.assertFalse(os.path.exists(merge_file))
        self.assertIn("No conflicts detected to merge.", out.getvalue())
@override_settings(
    INSTALLED_APPS=[
        "migrations.migrations_test_apps.migrated_app",
        "migrations.migrations_test_apps.conflicting_app_with_dependencies",
    ]
)
def test_makemigrations_merge_dont_output_dependency_operations(self):
    """
    makemigrations --merge does not output any operations from apps that
    don't belong to a given app.
    """
    # Monkeypatch interactive questioner to auto accept
    with mock.patch("builtins.input", mock.Mock(return_value="N")):
        out = io.StringIO()
        # Disable color so the captured output can be compared literally.
        with mock.patch(
            "django.core.management.color.supports_color", lambda *args: False
        ):
            call_command(
                "makemigrations",
                "conflicting_app_with_dependencies",
                merge=True,
                interactive=True,
                stdout=out,
            )
        self.assertEqual(
            out.getvalue().lower(),
            "merging conflicting_app_with_dependencies\n"
            " branch 0002_conflicting_second\n"
            " - create model something\n"
            " branch 0002_second\n"
            " - delete model tribble\n"
            " - remove field silly_field from author\n"
            " - add field rating to author\n"
            " - create model book\n"
            "\n"
            "merging will only work if the operations printed above do not "
            "conflict\n"
            "with each other (working on different fields or models)\n"
            "should these migration branches be merged? [y/n] ",
        )
def test_makemigrations_with_custom_name(self):
    """
    makemigrations --name generate a custom migration name.
    """
    with self.temporary_migration_module() as migration_dir:

        def cmd(migration_count, migration_name, *args):
            # Run makemigrations and return the (space-stripped) content
            # of the migration file it should have written.
            call_command(
                "makemigrations",
                "migrations",
                "--verbosity",
                "0",
                "--name",
                migration_name,
                *args,
            )
            migration_file = os.path.join(
                migration_dir, "%s_%s.py" % (migration_count, migration_name)
            )
            # Check for existing migration file in migration folder
            self.assertTrue(os.path.exists(migration_file))
            with open(migration_file, encoding="utf-8") as fp:
                content = fp.read()
            content = content.replace(" ", "")
            return content

        # generate an initial migration
        migration_name_0001 = "my_initial_migration"
        content = cmd("0001", migration_name_0001)
        self.assertIn(
            "dependencies=[]" if HAS_BLACK else "dependencies=[\n]", content
        )
        # importlib caches os.listdir() on some platforms like macOS
        # (#23850).
        if hasattr(importlib, "invalidate_caches"):
            importlib.invalidate_caches()
        # generate an empty migration
        migration_name_0002 = "my_custom_migration"
        content = cmd("0002", migration_name_0002, "--empty")
        if HAS_BLACK:
            template_str = 'dependencies=[\n("migrations","0001_%s"),\n]'
        else:
            template_str = "dependencies=[\n('migrations','0001_%s'),\n]"
        self.assertIn(
            template_str % migration_name_0001,
            content,
        )
        self.assertIn("operations=[]" if HAS_BLACK else "operations=[\n]", content)
def test_makemigrations_with_invalid_custom_name(self):
    """makemigrations --name rejects names that aren't Python identifiers."""
    msg = "The migration name must be a valid Python identifier."
    with self.assertRaisesMessage(CommandError, msg):
        call_command(
            "makemigrations", "migrations", "--name", "invalid name", "--empty"
        )
def test_makemigrations_check(self):
    """
    makemigrations --check should exit with a non-zero status when
    there are changes to an app requiring migrations.
    """
    with self.temporary_migration_module():
        with self.assertRaises(SystemExit):
            call_command("makemigrations", "--check", "migrations", verbosity=0)
    # With no pending changes, --check exits normally.
    with self.temporary_migration_module(
        module="migrations.test_migrations_no_changes"
    ):
        call_command("makemigrations", "--check", "migrations", verbosity=0)
def test_makemigrations_migration_path_output(self):
    """
    makemigrations should print the relative paths to the migrations unless
    they are outside of the current tree, in which case the absolute path
    should be shown.
    """
    out = io.StringIO()
    apps.register_model("migrations", UnicodeModel)
    with self.temporary_migration_module() as migration_dir:
        call_command("makemigrations", "migrations", stdout=out)
        self.assertIn(
            os.path.join(migration_dir, "0001_initial.py"), out.getvalue()
        )
def test_makemigrations_migration_path_output_valueerror(self):
    """
    makemigrations prints the absolute path if os.path.relpath() raises a
    ValueError when it's impossible to obtain a relative path, e.g. on
    Windows if Django is installed on a different drive than where the
    migration files are created.
    """
    out = io.StringIO()
    with self.temporary_migration_module() as migration_dir:
        # Simulate the cross-drive failure mode of os.path.relpath().
        with mock.patch("os.path.relpath", side_effect=ValueError):
            call_command("makemigrations", "migrations", stdout=out)
        self.assertIn(os.path.join(migration_dir, "0001_initial.py"), out.getvalue())
def test_makemigrations_inconsistent_history(self):
    """
    makemigrations should raise InconsistentMigrationHistory exception if
    there are some migrations applied before their dependencies.
    """
    recorder = MigrationRecorder(connection)
    # Record 0002 as applied while its dependency 0001 is not.
    recorder.record_applied("migrations", "0002_second")
    msg = (
        "Migration migrations.0002_second is applied before its dependency "
        "migrations.0001_initial"
    )
    with self.temporary_migration_module(module="migrations.test_migrations"):
        with self.assertRaisesMessage(InconsistentMigrationHistory, msg):
            call_command("makemigrations")
def test_makemigrations_inconsistent_history_db_failure(self):
    """
    A database error during the history consistency check is downgraded
    to a RuntimeWarning rather than aborting makemigrations.
    """
    msg = (
        "Got an error checking a consistent migration history performed "
        "for database connection 'default': could not connect to server"
    )
    with mock.patch(
        "django.db.migrations.loader.MigrationLoader.check_consistent_history",
        side_effect=OperationalError("could not connect to server"),
    ):
        with self.temporary_migration_module():
            with self.assertWarns(RuntimeWarning) as cm:
                call_command("makemigrations", verbosity=0)
            self.assertEqual(str(cm.warning), msg)
@mock.patch("builtins.input", return_value="1")
@mock.patch(
"django.db.migrations.questioner.sys.stdin",
mock.MagicMock(encoding=sys.getdefaultencoding()),
)
def test_makemigrations_auto_now_add_interactive(self, *args):
"""
makemigrations prompts the user when adding auto_now_add to an existing
model.
"""
class Entry(models.Model):
title = models.CharField(max_length=255)
creation_date = models.DateTimeField(auto_now_add=True)
class Meta:
app_label = "migrations"
input_msg = (
"It is impossible to add the field 'creation_date' with "
"'auto_now_add=True' to entry without providing a default. This "
"is because the database needs something to populate existing "
"rows.\n"
" 1) Provide a one-off default now which will be set on all "
"existing rows\n"
" 2) Quit and manually define a default value in models.py."
)
# Monkeypatch interactive questioner to auto accept
prompt_stdout = io.StringIO()
with self.temporary_migration_module(module="migrations.test_auto_now_add"):
call_command(
"makemigrations", "migrations", interactive=True, stdout=prompt_stdout
)
prompt_output = prompt_stdout.getvalue()
self.assertIn(input_msg, prompt_output)
self.assertIn("Please enter the default value as valid Python.", prompt_output)
self.assertIn(
"Accept the default 'timezone.now' by pressing 'Enter' or provide "
"another value.",
prompt_output,
)
self.assertIn("Type 'exit' to exit this prompt", prompt_output)
self.assertIn("Add field creation_date to entry", prompt_output)
@mock.patch("builtins.input", return_value="2")
def test_makemigrations_auto_now_add_interactive_quit(self, mock_input):
class Author(models.Model):
publishing_date = models.DateField(auto_now_add=True)
class Meta:
app_label = "migrations"
with self.temporary_migration_module(module="migrations.test_migrations"):
with captured_stdout():
with self.assertRaises(SystemExit):
call_command("makemigrations", "migrations", interactive=True)
def test_makemigrations_non_interactive_auto_now_add_addition(self):
    """
    Non-interactive makemigrations fails when a default is missing on a
    new field when auto_now_add=True.
    """

    class Entry(models.Model):
        creation_date = models.DateTimeField(auto_now_add=True)

        class Meta:
            app_label = "migrations"

    with self.temporary_migration_module(module="migrations.test_auto_now_add"):
        with self.assertRaises(SystemExit), captured_stdout() as out:
            call_command("makemigrations", "migrations", interactive=False)
    self.assertIn(
        "Field 'creation_date' on model 'entry' not migrated: it is "
        "impossible to add a field with 'auto_now_add=True' without "
        "specifying a default.",
        out.getvalue(),
    )
def test_makemigrations_interactive_unique_callable_default_addition(self):
    """
    makemigrations prompts the user when adding a unique field with
    a callable default.
    """

    class Book(models.Model):
        created = models.DateTimeField(unique=True, default=timezone.now)

        class Meta:
            app_label = "migrations"

    version = get_docs_version()
    input_msg = (
        f"Callable default on unique field book.created will not generate "
        f"unique values upon migrating.\n"
        f"Please choose how to proceed:\n"
        f" 1) Continue making this migration as the first step in writing "
        f"a manual migration to generate unique values described here: "
        f"https://docs.djangoproject.com/en/{version}/howto/"
        f"writing-migrations/#migrations-that-add-unique-fields.\n"
        f" 2) Quit and edit field options in models.py.\n"
    )
    with self.temporary_migration_module(module="migrations.test_migrations"):
        # 2 - quit.
        with mock.patch("builtins.input", return_value="2"):
            with captured_stdout() as out, self.assertRaises(SystemExit):
                call_command("makemigrations", "migrations", interactive=True)
        out_value = out.getvalue()
        self.assertIn(input_msg, out_value)
        self.assertNotIn("Add field created to book", out_value)
        # 1 - continue.
        with mock.patch("builtins.input", return_value="1"):
            with captured_stdout() as out:
                call_command("makemigrations", "migrations", interactive=True)
        out_value = out.getvalue()
        self.assertIn(input_msg, out_value)
        self.assertIn("Add field created to book", out_value)
def test_makemigrations_non_interactive_unique_callable_default_addition(self):
    """
    Non-interactive makemigrations adds a unique field with a callable
    default without prompting.
    """

    class Book(models.Model):
        created = models.DateTimeField(unique=True, default=timezone.now)

        class Meta:
            app_label = "migrations"

    with self.temporary_migration_module(module="migrations.test_migrations"):
        with captured_stdout() as out:
            call_command("makemigrations", "migrations", interactive=False)
    out_value = out.getvalue()
    self.assertIn("Add field created to book", out_value)
@override_settings(
    MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"},
)
def test_makemigrations_continues_number_sequence_after_squash(self):
    """A new migration is numbered after the squashed migration it follows."""
    with self.temporary_migration_module(
        module="migrations.test_migrations_squashed"
    ):
        with captured_stdout() as out:
            call_command(
                "makemigrations",
                "migrations",
                interactive=False,
                empty=True,
            )
    out_value = out.getvalue()
    self.assertIn("0003_auto", out_value)
class SquashMigrationsTests(MigrationTestBase):
    """
    Tests running the squashmigrations command.
    """

    def test_squashmigrations_squashes(self):
        """
        squashmigrations squashes migrations.
        """
        out = io.StringIO()
        with self.temporary_migration_module(
            module="migrations.test_migrations"
        ) as migration_dir:
            call_command(
                "squashmigrations",
                "migrations",
                "0002",
                interactive=False,
                stdout=out,
                no_color=True,
            )
            squashed_migration_file = os.path.join(
                migration_dir, "0001_squashed_0002_second.py"
            )
            self.assertTrue(os.path.exists(squashed_migration_file))
            self.assertEqual(
                out.getvalue(),
                "Will squash the following migrations:\n"
                " - 0001_initial\n"
                " - 0002_second\n"
                "Optimizing...\n"
                " Optimized from 8 operations to 2 operations.\n"
                "Created new squashed migration %s\n"
                " You should commit this migration but leave the old ones in place;\n"
                " the new migration will be used for new installs. Once you are sure\n"
                " all instances of the codebase have applied the migrations you "
                "squashed,\n"
                " you can delete them.\n" % squashed_migration_file,
            )

    def test_squashmigrations_initial_attribute(self):
        """A squash that includes 0001 keeps initial = True on the result."""
        with self.temporary_migration_module(
            module="migrations.test_migrations"
        ) as migration_dir:
            call_command(
                "squashmigrations", "migrations", "0002", interactive=False, verbosity=0
            )
            squashed_migration_file = os.path.join(
                migration_dir, "0001_squashed_0002_second.py"
            )
            with open(squashed_migration_file, encoding="utf-8") as fp:
                content = fp.read()
            self.assertIn("initial = True", content)

    def test_squashmigrations_optimizes(self):
        """
        squashmigrations optimizes operations.
        """
        out = io.StringIO()
        with self.temporary_migration_module(module="migrations.test_migrations"):
            call_command(
                "squashmigrations",
                "migrations",
                "0002",
                interactive=False,
                verbosity=1,
                stdout=out,
            )
        self.assertIn("Optimized from 8 operations to 2 operations.", out.getvalue())

    def test_ticket_23799_squashmigrations_no_optimize(self):
        """
        squashmigrations --no-optimize doesn't optimize operations.
        """
        out = io.StringIO()
        with self.temporary_migration_module(module="migrations.test_migrations"):
            call_command(
                "squashmigrations",
                "migrations",
                "0002",
                interactive=False,
                verbosity=1,
                no_optimize=True,
                stdout=out,
            )
        self.assertIn("Skipping optimization", out.getvalue())

    def test_squashmigrations_valid_start(self):
        """
        squashmigrations accepts a starting migration.
        """
        out = io.StringIO()
        with self.temporary_migration_module(
            module="migrations.test_migrations_no_changes"
        ) as migration_dir:
            call_command(
                "squashmigrations",
                "migrations",
                "0002",
                "0003",
                interactive=False,
                verbosity=1,
                stdout=out,
            )
            squashed_migration_file = os.path.join(
                migration_dir, "0002_second_squashed_0003_third.py"
            )
            with open(squashed_migration_file, encoding="utf-8") as fp:
                content = fp.read()
            if HAS_BLACK:
                test_str = ' ("migrations", "0001_initial")'
            else:
                test_str = " ('migrations', '0001_initial')"
            # The squash depends on 0001 but, starting mid-history, is not
            # itself an initial migration.
            self.assertIn(test_str, content)
            self.assertNotIn("initial = True", content)
        out = out.getvalue()
        self.assertNotIn(" - 0001_initial", out)
        self.assertIn(" - 0002_second", out)
        self.assertIn(" - 0003_third", out)

    def test_squashmigrations_invalid_start(self):
        """
        squashmigrations doesn't accept a starting migration after the ending migration.
        """
        with self.temporary_migration_module(
            module="migrations.test_migrations_no_changes"
        ):
            msg = (
                "The migration 'migrations.0003_third' cannot be found. Maybe "
                "it comes after the migration 'migrations.0002_second'"
            )
            with self.assertRaisesMessage(CommandError, msg):
                call_command(
                    "squashmigrations",
                    "migrations",
                    "0003",
                    "0002",
                    interactive=False,
                    verbosity=0,
                )

    def test_squashed_name_with_start_migration_name(self):
        """--squashed-name specifies the new migration's name."""
        squashed_name = "squashed_name"
        with self.temporary_migration_module(
            module="migrations.test_migrations"
        ) as migration_dir:
            call_command(
                "squashmigrations",
                "migrations",
                "0001",
                "0002",
                squashed_name=squashed_name,
                interactive=False,
                verbosity=0,
            )
            squashed_migration_file = os.path.join(
                migration_dir, "0001_%s.py" % squashed_name
            )
            self.assertTrue(os.path.exists(squashed_migration_file))

    def test_squashed_name_without_start_migration_name(self):
        """--squashed-name also works if a start migration is omitted."""
        squashed_name = "squashed_name"
        with self.temporary_migration_module(
            module="migrations.test_migrations"
        ) as migration_dir:
            call_command(
                "squashmigrations",
                "migrations",
                "0001",
                squashed_name=squashed_name,
                interactive=False,
                verbosity=0,
            )
            squashed_migration_file = os.path.join(
                migration_dir, "0001_%s.py" % squashed_name
            )
            self.assertTrue(os.path.exists(squashed_migration_file))

    def test_squashed_name_exists(self):
        """--squashed-name must not clash with an existing migration name."""
        msg = "Migration 0001_initial already exists. Use a different name."
        with self.temporary_migration_module(module="migrations.test_migrations"):
            with self.assertRaisesMessage(CommandError, msg):
                call_command(
                    "squashmigrations",
                    "migrations",
                    "0001",
                    "0002",
                    squashed_name="initial",
                    interactive=False,
                    verbosity=0,
                )

    def test_squashmigrations_manual_porting(self):
        """
        Functions that can't be copied verbatim produce a manual-porting
        notice in the command output.
        """
        out = io.StringIO()
        with self.temporary_migration_module(
            module="migrations.test_migrations_manual_porting",
        ) as migration_dir:
            call_command(
                "squashmigrations",
                "migrations",
                "0002",
                interactive=False,
                stdout=out,
                no_color=True,
            )
            squashed_migration_file = os.path.join(
                migration_dir,
                "0001_squashed_0002_second.py",
            )
            self.assertTrue(os.path.exists(squashed_migration_file))
        black_warning = ""
        if HAS_BLACK:
            black_warning = (
                "Squashed migration couldn't be formatted using the "
                '"black" command. You can call it manually.\n'
            )
        self.assertEqual(
            out.getvalue(),
            f"Will squash the following migrations:\n"
            f" - 0001_initial\n"
            f" - 0002_second\n"
            f"Optimizing...\n"
            f" No optimizations possible.\n"
            f"Created new squashed migration {squashed_migration_file}\n"
            f" You should commit this migration but leave the old ones in place;\n"
            f" the new migration will be used for new installs. Once you are sure\n"
            f" all instances of the codebase have applied the migrations you "
            f"squashed,\n"
            f" you can delete them.\n"
            f"Manual porting required\n"
            f" Your migrations contained functions that must be manually copied "
            f"over,\n"
            f" as we could not safely copy their implementation.\n"
            f" See the comment at the top of the squashed migration for details.\n"
            + black_warning,
        )
class AppLabelErrorTests(TestCase):
    """
    This class inherits TestCase because MigrationTestBase uses
    `available_apps = ['migrations']` which means that it's the only installed
    app. 'django.contrib.auth' must be in INSTALLED_APPS for some of these
    tests.
    """

    # Expected error for an app label that doesn't exist, and for passing a
    # dotted app name where an app label is required.
    nonexistent_app_error = "No installed app with label 'nonexistent_app'."
    did_you_mean_auth_error = (
        "No installed app with label 'django.contrib.auth'. Did you mean 'auth'?"
    )

    def test_makemigrations_nonexistent_app_label(self):
        err = io.StringIO()
        with self.assertRaises(SystemExit):
            call_command("makemigrations", "nonexistent_app", stderr=err)
        self.assertIn(self.nonexistent_app_error, err.getvalue())

    def test_makemigrations_app_name_specified_as_label(self):
        err = io.StringIO()
        with self.assertRaises(SystemExit):
            call_command("makemigrations", "django.contrib.auth", stderr=err)
        self.assertIn(self.did_you_mean_auth_error, err.getvalue())

    def test_migrate_nonexistent_app_label(self):
        with self.assertRaisesMessage(CommandError, self.nonexistent_app_error):
            call_command("migrate", "nonexistent_app")

    def test_migrate_app_name_specified_as_label(self):
        with self.assertRaisesMessage(CommandError, self.did_you_mean_auth_error):
            call_command("migrate", "django.contrib.auth")

    def test_showmigrations_nonexistent_app_label(self):
        err = io.StringIO()
        with self.assertRaises(SystemExit):
            call_command("showmigrations", "nonexistent_app", stderr=err)
        self.assertIn(self.nonexistent_app_error, err.getvalue())

    def test_showmigrations_app_name_specified_as_label(self):
        err = io.StringIO()
        with self.assertRaises(SystemExit):
            call_command("showmigrations", "django.contrib.auth", stderr=err)
        self.assertIn(self.did_you_mean_auth_error, err.getvalue())

    def test_sqlmigrate_nonexistent_app_label(self):
        with self.assertRaisesMessage(CommandError, self.nonexistent_app_error):
            call_command("sqlmigrate", "nonexistent_app", "0002")

    def test_sqlmigrate_app_name_specified_as_label(self):
        with self.assertRaisesMessage(CommandError, self.did_you_mean_auth_error):
            call_command("sqlmigrate", "django.contrib.auth", "0002")

    def test_squashmigrations_nonexistent_app_label(self):
        with self.assertRaisesMessage(CommandError, self.nonexistent_app_error):
            call_command("squashmigrations", "nonexistent_app", "0002")

    def test_squashmigrations_app_name_specified_as_label(self):
        with self.assertRaisesMessage(CommandError, self.did_you_mean_auth_error):
            call_command("squashmigrations", "django.contrib.auth", "0002")

    def test_optimizemigration_nonexistent_app_label(self):
        with self.assertRaisesMessage(CommandError, self.nonexistent_app_error):
            call_command("optimizemigration", "nonexistent_app", "0002")

    def test_optimizemigration_app_name_specified_as_label(self):
        with self.assertRaisesMessage(CommandError, self.did_you_mean_auth_error):
            call_command("optimizemigration", "django.contrib.auth", "0002")
class OptimizeMigrationTests(MigrationTestBase):
    """
    Tests running the optimizemigration command.
    """

    def test_no_optimization_possible(self):
        """optimizemigration reports when a migration can't be optimized."""
        out = io.StringIO()
        with self.temporary_migration_module(
            module="migrations.test_migrations"
        ) as migration_dir:
            call_command(
                "optimizemigration", "migrations", "0002", stdout=out, no_color=True
            )
            migration_file = os.path.join(migration_dir, "0002_second.py")
            self.assertTrue(os.path.exists(migration_file))
            # A second, silent run must add nothing to the output.
            call_command(
                "optimizemigration",
                "migrations",
                "0002",
                stdout=out,
                no_color=True,
                verbosity=0,
            )
        self.assertEqual(out.getvalue(), "No optimizations possible.\n")

    def test_optimization(self):
        """optimizemigration rewrites a migration with fewer operations."""
        out = io.StringIO()
        with self.temporary_migration_module(
            module="migrations.test_migrations"
        ) as migration_dir:
            call_command(
                "optimizemigration", "migrations", "0001", stdout=out, no_color=True
            )
            initial_migration_file = os.path.join(migration_dir, "0001_initial.py")
            self.assertTrue(os.path.exists(initial_migration_file))
            # Pass the encoding explicitly, consistent with the other
            # migration-file reads in this module (avoids locale-dependent
            # decoding; see PEP 597).
            with open(initial_migration_file, encoding="utf-8") as fp:
                content = fp.read()
            self.assertIn(
                '("bool", models.BooleanField'
                if HAS_BLACK
                else "('bool', models.BooleanField",
                content,
            )
        self.assertEqual(
            out.getvalue(),
            f"Optimizing from 4 operations to 2 operations.\n"
            f"Optimized migration {initial_migration_file}\n",
        )

    def test_optimization_no_verbosity(self):
        """With verbosity=0, optimizemigration optimizes silently."""
        out = io.StringIO()
        with self.temporary_migration_module(
            module="migrations.test_migrations"
        ) as migration_dir:
            call_command(
                "optimizemigration",
                "migrations",
                "0001",
                stdout=out,
                no_color=True,
                verbosity=0,
            )
            initial_migration_file = os.path.join(migration_dir, "0001_initial.py")
            self.assertTrue(os.path.exists(initial_migration_file))
            # Explicit encoding, consistent with the rest of the module.
            with open(initial_migration_file, encoding="utf-8") as fp:
                content = fp.read()
            self.assertIn(
                '("bool", models.BooleanField'
                if HAS_BLACK
                else "('bool', models.BooleanField",
                content,
            )
        self.assertEqual(out.getvalue(), "")

    def test_creates_replace_migration_manual_porting(self):
        """
        When manual porting is required, optimizemigration writes a new
        migration with a `replaces` list instead of overwriting the
        original file.
        """
        out = io.StringIO()
        with self.temporary_migration_module(
            module="migrations.test_migrations_manual_porting"
        ) as migration_dir:
            call_command(
                "optimizemigration", "migrations", "0003", stdout=out, no_color=True
            )
            optimized_migration_file = os.path.join(
                migration_dir, "0003_third_optimized.py"
            )
            self.assertTrue(os.path.exists(optimized_migration_file))
            # Explicit encoding, consistent with the rest of the module.
            with open(optimized_migration_file, encoding="utf-8") as fp:
                content = fp.read()
            self.assertIn("replaces = [", content)
        black_warning = ""
        if HAS_BLACK:
            black_warning = (
                "Optimized migration couldn't be formatted using the "
                '"black" command. You can call it manually.\n'
            )
        self.assertEqual(
            out.getvalue(),
            "Optimizing from 3 operations to 2 operations.\n"
            "Manual porting required\n"
            " Your migrations contained functions that must be manually copied over,\n"
            " as we could not safely copy their implementation.\n"
            " See the comment at the top of the optimized migration for details.\n"
            + black_warning
            + f"Optimized migration {optimized_migration_file}\n",
        )

    def test_fails_squash_migration_manual_porting(self):
        """
        optimizemigration refuses manual porting of a migration that is
        already a squashed migration.
        """
        out = io.StringIO()
        with self.temporary_migration_module(
            module="migrations.test_migrations_manual_porting"
        ) as migration_dir:
            msg = (
                "Migration will require manual porting but is already a squashed "
                "migration.\nTransition to a normal migration first: "
                "https://docs.djangoproject.com/en/dev/topics/migrations/"
                "#squashing-migrations"
            )
            with self.assertRaisesMessage(CommandError, msg):
                call_command("optimizemigration", "migrations", "0004", stdout=out)
            optimized_migration_file = os.path.join(
                migration_dir, "0004_fourth_optimized.py"
            )
            self.assertFalse(os.path.exists(optimized_migration_file))
        self.assertEqual(
            out.getvalue(), "Optimizing from 3 operations to 2 operations.\n"
        )

    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
    def test_optimizemigration_check(self):
        """--check exits non-zero when the migration could be optimized."""
        with self.assertRaises(SystemExit):
            call_command(
                "optimizemigration", "--check", "migrations", "0001", verbosity=0
            )
        # An already-optimal migration passes the check.
        call_command("optimizemigration", "--check", "migrations", "0002", verbosity=0)

    @override_settings(
        INSTALLED_APPS=["migrations.migrations_test_apps.unmigrated_app_simple"],
    )
    def test_app_without_migrations(self):
        msg = "App 'unmigrated_app_simple' does not have migrations."
        with self.assertRaisesMessage(CommandError, msg):
            call_command("optimizemigration", "unmigrated_app_simple", "0001")

    @override_settings(
        MIGRATION_MODULES={"migrations": "migrations.test_migrations_clashing_prefix"},
    )
    def test_ambigious_prefix(self):
        msg = (
            "More than one migration matches 'a' in app 'migrations'. Please "
            "be more specific."
        )
        with self.assertRaisesMessage(CommandError, msg):
            call_command("optimizemigration", "migrations", "a")

    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
    def test_unknown_prefix(self):
        msg = "Cannot find a migration matching 'nonexistent' from app 'migrations'."
        with self.assertRaisesMessage(CommandError, msg):
            call_command("optimizemigration", "migrations", "nonexistent")
|
c7c115b69d0cdf41fbbdfe3a108f79b808fbe918afa0d86879a3898f09ff6526 | import datetime
import decimal
import enum
import functools
import math
import os
import pathlib
import re
import sys
import uuid
from unittest import mock
try:
import zoneinfo
except ImportError:
from backports import zoneinfo
try:
import pytz
except ImportError:
pytz = None
import custom_migration_operations.more_operations
import custom_migration_operations.operations
from django import get_version
from django.conf import SettingsReference, settings
from django.core.validators import EmailValidator, RegexValidator
from django.db import migrations, models
from django.db.migrations.serializer import BaseSerializer
from django.db.migrations.writer import MigrationWriter, OperationWriter
from django.test import SimpleTestCase
from django.utils.deconstruct import deconstructible
from django.utils.functional import SimpleLazyObject
from django.utils.timezone import get_default_timezone, get_fixed_timezone, utc
from django.utils.translation import gettext_lazy as _
from .models import FoodManager, FoodQuerySet
class DeconstructibleInstances:
    """Minimal deconstructible object: serializes to its bare class name
    with no constructor arguments."""

    def deconstruct(self):
        path, args, kwargs = "DeconstructibleInstances", [], {}
        return path, args, kwargs
class Money(decimal.Decimal):
    """Decimal subclass that knows how to deconstruct itself.

    deconstruct() reports the dotted import path of the class and the
    string form of the value as the single positional argument, so the
    migration writer can recreate the instance.
    """

    def deconstruct(self):
        klass = type(self)
        dotted_path = f"{klass.__module__}.{klass.__name__}"
        return (dotted_path, [str(self)], {})
class TestModel1:
    """Hosts a FileField whose upload_to is a method defined on the class."""

    def upload_to(self):
        # Referenced by the field below; serializer tests must handle a
        # plain function used as upload_to.
        return "/somewhere/dynamic/"

    thing = models.FileField(upload_to=upload_to)
class TextEnum(enum.Enum):
    """Enum with plain-string values, used as serializer test input."""

    A = "a-value"
    B = "value-b"
class TextTranslatedEnum(enum.Enum):
    """Enum whose values are lazy translation objects (gettext_lazy)."""

    A = _("a-value")
    B = _("value-b")
class BinaryEnum(enum.Enum):
    """Enum with bytes values, used as serializer test input."""

    A = b"a-value"
    B = b"value-b"
class IntEnum(enum.IntEnum):
    """IntEnum with integer values, used as serializer test input."""

    A = 1
    B = 2
class OperationWriterTests(SimpleTestCase):
    """
    OperationWriter serializes custom migration operations to source text,
    emitting the import set needed to recreate them.
    """

    def test_empty_signature(self):
        operation = custom_migration_operations.operations.TestOperation()
        buff, imports = OperationWriter(operation, indentation=0).serialize()
        self.assertEqual(imports, {"import custom_migration_operations.operations"})
        self.assertEqual(
            buff,
            "custom_migration_operations.operations.TestOperation(\n),",
        )

    def test_args_signature(self):
        operation = custom_migration_operations.operations.ArgsOperation(1, 2)
        buff, imports = OperationWriter(operation, indentation=0).serialize()
        self.assertEqual(imports, {"import custom_migration_operations.operations"})
        self.assertEqual(
            buff,
            "custom_migration_operations.operations.ArgsOperation(\n"
            " arg1=1,\n"
            " arg2=2,\n"
            "),",
        )

    def test_kwargs_signature(self):
        operation = custom_migration_operations.operations.KwargsOperation(kwarg1=1)
        buff, imports = OperationWriter(operation, indentation=0).serialize()
        self.assertEqual(imports, {"import custom_migration_operations.operations"})
        self.assertEqual(
            buff,
            "custom_migration_operations.operations.KwargsOperation(\n"
            " kwarg1=1,\n"
            "),",
        )

    def test_args_kwargs_signature(self):
        operation = custom_migration_operations.operations.ArgsKwargsOperation(
            1, 2, kwarg2=4
        )
        buff, imports = OperationWriter(operation, indentation=0).serialize()
        self.assertEqual(imports, {"import custom_migration_operations.operations"})
        self.assertEqual(
            buff,
            "custom_migration_operations.operations.ArgsKwargsOperation(\n"
            " arg1=1,\n"
            " arg2=2,\n"
            " kwarg2=4,\n"
            "),",
        )

    def test_nested_args_signature(self):
        # Operations passed as arguments are serialized recursively.
        operation = custom_migration_operations.operations.ArgsOperation(
            custom_migration_operations.operations.ArgsOperation(1, 2),
            custom_migration_operations.operations.KwargsOperation(kwarg1=3, kwarg2=4),
        )
        buff, imports = OperationWriter(operation, indentation=0).serialize()
        self.assertEqual(imports, {"import custom_migration_operations.operations"})
        self.assertEqual(
            buff,
            "custom_migration_operations.operations.ArgsOperation(\n"
            " arg1=custom_migration_operations.operations.ArgsOperation(\n"
            " arg1=1,\n"
            " arg2=2,\n"
            " ),\n"
            " arg2=custom_migration_operations.operations.KwargsOperation(\n"
            " kwarg1=3,\n"
            " kwarg2=4,\n"
            " ),\n"
            "),",
        )

    def test_multiline_args_signature(self):
        # Newlines inside string arguments are escaped, not emitted raw.
        operation = custom_migration_operations.operations.ArgsOperation(
            "test\n arg1", "test\narg2"
        )
        buff, imports = OperationWriter(operation, indentation=0).serialize()
        self.assertEqual(imports, {"import custom_migration_operations.operations"})
        self.assertEqual(
            buff,
            "custom_migration_operations.operations.ArgsOperation(\n"
            " arg1='test\\n arg1',\n"
            " arg2='test\\narg2',\n"
            "),",
        )

    def test_expand_args_signature(self):
        operation = custom_migration_operations.operations.ExpandArgsOperation([1, 2])
        buff, imports = OperationWriter(operation, indentation=0).serialize()
        self.assertEqual(imports, {"import custom_migration_operations.operations"})
        self.assertEqual(
            buff,
            "custom_migration_operations.operations.ExpandArgsOperation(\n"
            " arg=[\n"
            " 1,\n"
            " 2,\n"
            " ],\n"
            "),",
        )

    def test_nested_operation_expand_args_signature(self):
        operation = custom_migration_operations.operations.ExpandArgsOperation(
            arg=[
                custom_migration_operations.operations.KwargsOperation(
                    kwarg1=1,
                    kwarg2=2,
                ),
            ]
        )
        buff, imports = OperationWriter(operation, indentation=0).serialize()
        self.assertEqual(imports, {"import custom_migration_operations.operations"})
        self.assertEqual(
            buff,
            "custom_migration_operations.operations.ExpandArgsOperation(\n"
            " arg=[\n"
            " custom_migration_operations.operations.KwargsOperation(\n"
            " kwarg1=1,\n"
            " kwarg2=2,\n"
            " ),\n"
            " ],\n"
            "),",
        )
class WriterTests(SimpleTestCase):
    """
    Tests the migration writer (makes migration files from Migration instances)
    """
    # Nested classes let the tests check that serialization emits dotted
    # paths that traverse the outer class (WriterTests.NestedEnum, ...).
    class NestedEnum(enum.IntEnum):
        A = 1
        B = 2
    class NestedChoices(models.TextChoices):
        X = "X", "X value"
        Y = "Y", "Y value"
    def safe_exec(self, string, value=None):
        """exec() the given source and return its local namespace.

        Fails the current test (rather than erroring) if the source does not
        execute; *value* is only used to enrich the failure message.
        """
        d = {}
        try:
            exec(string, globals(), d)
        except Exception as e:
            if value:
                self.fail(
                    "Could not exec %r (from value %r): %s" % (string.strip(), value, e)
                )
            else:
                self.fail("Could not exec %r: %s" % (string.strip(), e))
        return d
    def serialize_round_trip(self, value):
        """Serialize *value*, exec the generated code, and return the rebuilt value."""
        string, imports = MigrationWriter.serialize(value)
        return self.safe_exec(
            "%s\ntest_value_result = %s" % ("\n".join(imports), string), value
        )["test_value_result"]
    def assertSerializedEqual(self, value):
        """Assert that *value* survives a serialize/exec round trip unchanged."""
        self.assertEqual(self.serialize_round_trip(value), value)
    def assertSerializedResultEqual(self, value, target):
        """Assert serialize(value) produces exactly the (string, imports) pair."""
        self.assertEqual(MigrationWriter.serialize(value), target)
    def assertSerializedFieldEqual(self, value):
        """Assert a round-tripped field keeps its class and core attributes."""
        new_value = self.serialize_round_trip(value)
        self.assertEqual(value.__class__, new_value.__class__)
        self.assertEqual(value.max_length, new_value.max_length)
        self.assertEqual(value.null, new_value.null)
        self.assertEqual(value.unique, new_value.unique)
    def test_serialize_numbers(self):
        """Ints, floats (incl. inf/nan), Decimal, and custom numerics serialize."""
        self.assertSerializedEqual(1)
        self.assertSerializedEqual(1.2)
        # inf/nan cannot be compared with ==, so check the predicates instead.
        self.assertTrue(math.isinf(self.serialize_round_trip(float("inf"))))
        self.assertTrue(math.isinf(self.serialize_round_trip(float("-inf"))))
        self.assertTrue(math.isnan(self.serialize_round_trip(float("nan"))))
        self.assertSerializedEqual(decimal.Decimal("1.3"))
        self.assertSerializedResultEqual(
            decimal.Decimal("1.3"), ("Decimal('1.3')", {"from decimal import Decimal"})
        )
        self.assertSerializedEqual(Money("1.3"))
        self.assertSerializedResultEqual(
            Money("1.3"),
            ("migrations.test_writer.Money('1.3')", {"import migrations.test_writer"}),
        )
    def test_serialize_constants(self):
        """None, True, and False serialize as themselves."""
        self.assertSerializedEqual(None)
        self.assertSerializedEqual(True)
        self.assertSerializedEqual(False)
    def test_serialize_strings(self):
        """str and bytes serialize, including non-ASCII text."""
        self.assertSerializedEqual(b"foobar")
        string, imports = MigrationWriter.serialize(b"foobar")
        self.assertEqual(string, "b'foobar'")
        self.assertSerializedEqual("föobár")
        string, imports = MigrationWriter.serialize("foobar")
        self.assertEqual(string, "'foobar'")
    def test_serialize_multiline_strings(self):
        """Embedded newlines are escaped, not emitted literally."""
        self.assertSerializedEqual(b"foo\nbar")
        string, imports = MigrationWriter.serialize(b"foo\nbar")
        self.assertEqual(string, "b'foo\\nbar'")
        self.assertSerializedEqual("föo\nbár")
        string, imports = MigrationWriter.serialize("foo\nbar")
        self.assertEqual(string, "'foo\\nbar'")
    def test_serialize_collections(self):
        """dicts, lists, sets, nested containers, and lazy strings serialize."""
        self.assertSerializedEqual({1: 2})
        self.assertSerializedEqual(["a", 2, True, None])
        self.assertSerializedEqual({2, 3, "eighty"})
        self.assertSerializedEqual({"lalalala": ["yeah", "no", "maybe"]})
        self.assertSerializedEqual(_("Hello"))
    def test_serialize_builtin_types(self):
        """Builtin type objects serialize by bare name, with no imports."""
        self.assertSerializedEqual([list, tuple, dict, set, frozenset])
        self.assertSerializedResultEqual(
            [list, tuple, dict, set, frozenset],
            ("[list, tuple, dict, set, frozenset]", set()),
        )
    def test_serialize_lazy_objects(self):
        """A SimpleLazyObject serializes as its wrapped value."""
        pattern = re.compile(r"^foo$")
        lazy_pattern = SimpleLazyObject(lambda: pattern)
        self.assertEqual(self.serialize_round_trip(lazy_pattern), pattern)
    def test_serialize_enums(self):
        """Enum members serialize as DottedPath['MEMBER'] lookups."""
        self.assertSerializedResultEqual(
            TextEnum.A,
            ("migrations.test_writer.TextEnum['A']", {"import migrations.test_writer"}),
        )
        self.assertSerializedResultEqual(
            TextTranslatedEnum.A,
            (
                "migrations.test_writer.TextTranslatedEnum['A']",
                {"import migrations.test_writer"},
            ),
        )
        self.assertSerializedResultEqual(
            BinaryEnum.A,
            (
                "migrations.test_writer.BinaryEnum['A']",
                {"import migrations.test_writer"},
            ),
        )
        self.assertSerializedResultEqual(
            IntEnum.B,
            ("migrations.test_writer.IntEnum['B']", {"import migrations.test_writer"}),
        )
        self.assertSerializedResultEqual(
            self.NestedEnum.A,
            (
                "migrations.test_writer.WriterTests.NestedEnum['A']",
                {"import migrations.test_writer"},
            ),
        )
        self.assertSerializedEqual(self.NestedEnum.A)
        # Enum members used as field defaults/choices serialize the same way.
        field = models.CharField(
            default=TextEnum.B, choices=[(m.value, m) for m in TextEnum]
        )
        string = MigrationWriter.serialize(field)[0]
        self.assertEqual(
            string,
            "models.CharField(choices=["
            "('a-value', migrations.test_writer.TextEnum['A']), "
            "('value-b', migrations.test_writer.TextEnum['B'])], "
            "default=migrations.test_writer.TextEnum['B'])",
        )
        field = models.CharField(
            default=TextTranslatedEnum.A,
            choices=[(m.value, m) for m in TextTranslatedEnum],
        )
        string = MigrationWriter.serialize(field)[0]
        self.assertEqual(
            string,
            "models.CharField(choices=["
            "('a-value', migrations.test_writer.TextTranslatedEnum['A']), "
            "('value-b', migrations.test_writer.TextTranslatedEnum['B'])], "
            "default=migrations.test_writer.TextTranslatedEnum['A'])",
        )
        field = models.CharField(
            default=BinaryEnum.B, choices=[(m.value, m) for m in BinaryEnum]
        )
        string = MigrationWriter.serialize(field)[0]
        self.assertEqual(
            string,
            "models.CharField(choices=["
            "(b'a-value', migrations.test_writer.BinaryEnum['A']), "
            "(b'value-b', migrations.test_writer.BinaryEnum['B'])], "
            "default=migrations.test_writer.BinaryEnum['B'])",
        )
        field = models.IntegerField(
            default=IntEnum.A, choices=[(m.value, m) for m in IntEnum]
        )
        string = MigrationWriter.serialize(field)[0]
        self.assertEqual(
            string,
            "models.IntegerField(choices=["
            "(1, migrations.test_writer.IntEnum['A']), "
            "(2, migrations.test_writer.IntEnum['B'])], "
            "default=migrations.test_writer.IntEnum['A'])",
        )
    def test_serialize_choices(self):
        """models.Choices members serialize as their plain value, not the member."""
        class TextChoices(models.TextChoices):
            A = "A", "A value"
            B = "B", "B value"
        class IntegerChoices(models.IntegerChoices):
            A = 1, "One"
            B = 2, "Two"
        class DateChoices(datetime.date, models.Choices):
            DATE_1 = 1969, 7, 20, "First date"
            DATE_2 = 1969, 11, 19, "Second date"
        self.assertSerializedResultEqual(TextChoices.A, ("'A'", set()))
        self.assertSerializedResultEqual(IntegerChoices.A, ("1", set()))
        self.assertSerializedResultEqual(
            DateChoices.DATE_1,
            ("datetime.date(1969, 7, 20)", {"import datetime"}),
        )
        field = models.CharField(default=TextChoices.B, choices=TextChoices.choices)
        string = MigrationWriter.serialize(field)[0]
        self.assertEqual(
            string,
            "models.CharField(choices=[('A', 'A value'), ('B', 'B value')], "
            "default='B')",
        )
        field = models.IntegerField(
            default=IntegerChoices.B, choices=IntegerChoices.choices
        )
        string = MigrationWriter.serialize(field)[0]
        self.assertEqual(
            string,
            "models.IntegerField(choices=[(1, 'One'), (2, 'Two')], default=2)",
        )
        field = models.DateField(
            default=DateChoices.DATE_2, choices=DateChoices.choices
        )
        string = MigrationWriter.serialize(field)[0]
        self.assertEqual(
            string,
            "models.DateField(choices=["
            "(datetime.date(1969, 7, 20), 'First date'), "
            "(datetime.date(1969, 11, 19), 'Second date')], "
            "default=datetime.date(1969, 11, 19))",
        )
    def test_serialize_nested_class(self):
        """Classes nested inside another class serialize with the full dotted path."""
        for nested_cls in [self.NestedEnum, self.NestedChoices]:
            cls_name = nested_cls.__name__
            with self.subTest(cls_name):
                self.assertSerializedResultEqual(
                    nested_cls,
                    (
                        "migrations.test_writer.WriterTests.%s" % cls_name,
                        {"import migrations.test_writer"},
                    ),
                )
    def test_serialize_uuid(self):
        """UUIDs serialize as uuid.UUID('...') constructor calls."""
        self.assertSerializedEqual(uuid.uuid1())
        self.assertSerializedEqual(uuid.uuid4())
        uuid_a = uuid.UUID("5c859437-d061-4847-b3f7-e6b78852f8c8")
        uuid_b = uuid.UUID("c7853ec1-2ea3-4359-b02d-b54e8f1bcee2")
        self.assertSerializedResultEqual(
            uuid_a,
            ("uuid.UUID('5c859437-d061-4847-b3f7-e6b78852f8c8')", {"import uuid"}),
        )
        self.assertSerializedResultEqual(
            uuid_b,
            ("uuid.UUID('c7853ec1-2ea3-4359-b02d-b54e8f1bcee2')", {"import uuid"}),
        )
        field = models.UUIDField(
            choices=((uuid_a, "UUID A"), (uuid_b, "UUID B")), default=uuid_a
        )
        string = MigrationWriter.serialize(field)[0]
        self.assertEqual(
            string,
            "models.UUIDField(choices=["
            "(uuid.UUID('5c859437-d061-4847-b3f7-e6b78852f8c8'), 'UUID A'), "
            "(uuid.UUID('c7853ec1-2ea3-4359-b02d-b54e8f1bcee2'), 'UUID B')], "
            "default=uuid.UUID('5c859437-d061-4847-b3f7-e6b78852f8c8'))",
        )
    def test_serialize_pathlib(self):
        """Concrete Path objects serialize as their Pure* equivalents."""
        # Pure path objects work in all platforms.
        self.assertSerializedEqual(pathlib.PurePosixPath())
        self.assertSerializedEqual(pathlib.PureWindowsPath())
        path = pathlib.PurePosixPath("/path/file.txt")
        expected = ("pathlib.PurePosixPath('/path/file.txt')", {"import pathlib"})
        self.assertSerializedResultEqual(path, expected)
        path = pathlib.PureWindowsPath("A:\\File.txt")
        expected = ("pathlib.PureWindowsPath('A:/File.txt')", {"import pathlib"})
        self.assertSerializedResultEqual(path, expected)
        # Concrete path objects work on supported platforms.
        if sys.platform == "win32":
            self.assertSerializedEqual(pathlib.WindowsPath.cwd())
            path = pathlib.WindowsPath("A:\\File.txt")
            expected = ("pathlib.PureWindowsPath('A:/File.txt')", {"import pathlib"})
            self.assertSerializedResultEqual(path, expected)
        else:
            self.assertSerializedEqual(pathlib.PosixPath.cwd())
            path = pathlib.PosixPath("/path/file.txt")
            expected = ("pathlib.PurePosixPath('/path/file.txt')", {"import pathlib"})
            self.assertSerializedResultEqual(path, expected)
        field = models.FilePathField(path=pathlib.PurePosixPath("/home/user"))
        string, imports = MigrationWriter.serialize(field)
        self.assertEqual(
            string,
            "models.FilePathField(path=pathlib.PurePosixPath('/home/user'))",
        )
        self.assertIn("import pathlib", imports)
    def test_serialize_path_like(self):
        """os.PathLike objects (e.g. DirEntry) serialize as their repr'd path."""
        with os.scandir(os.path.dirname(__file__)) as entries:
            path_like = list(entries)[0]
        # The serializer returns an empty dict (not set) for the imports here.
        expected = (repr(path_like.path), {})
        self.assertSerializedResultEqual(path_like, expected)
        field = models.FilePathField(path=path_like)
        string = MigrationWriter.serialize(field)[0]
        self.assertEqual(string, "models.FilePathField(path=%r)" % path_like.path)
    def test_serialize_functions(self):
        """Named functions serialize; anonymous lambdas are rejected."""
        with self.assertRaisesMessage(ValueError, "Cannot serialize function: lambda"):
            self.assertSerializedEqual(lambda x: 42)
        self.assertSerializedEqual(models.SET_NULL)
        string, imports = MigrationWriter.serialize(models.SET(42))
        self.assertEqual(string, "models.SET(42)")
        self.serialize_round_trip(models.SET(42))
    def test_serialize_datetime(self):
        """Naive and aware datetimes serialize; aware values are rendered in UTC."""
        self.assertSerializedEqual(datetime.datetime.now())
        self.assertSerializedEqual(datetime.datetime.now)
        self.assertSerializedEqual(datetime.datetime.today())
        self.assertSerializedEqual(datetime.datetime.today)
        self.assertSerializedEqual(datetime.date.today())
        self.assertSerializedEqual(datetime.date.today)
        self.assertSerializedEqual(datetime.datetime.now().time())
        self.assertSerializedEqual(
            datetime.datetime(2014, 1, 1, 1, 1, tzinfo=get_default_timezone())
        )
        self.assertSerializedEqual(
            datetime.datetime(2013, 12, 31, 22, 1, tzinfo=get_fixed_timezone(180))
        )
        self.assertSerializedResultEqual(
            datetime.datetime(2014, 1, 1, 1, 1),
            ("datetime.datetime(2014, 1, 1, 1, 1)", {"import datetime"}),
        )
        for tzinfo in (utc, datetime.timezone.utc):
            with self.subTest(tzinfo=tzinfo):
                self.assertSerializedResultEqual(
                    datetime.datetime(2012, 1, 1, 1, 1, tzinfo=tzinfo),
                    (
                        "datetime.datetime(2012, 1, 1, 1, 1, tzinfo=utc)",
                        {"import datetime", "from django.utils.timezone import utc"},
                    ),
                )
        # Non-UTC zones (Paris, +01:00 in winter) are converted to UTC.
        self.assertSerializedResultEqual(
            datetime.datetime(
                2012, 1, 1, 2, 1, tzinfo=zoneinfo.ZoneInfo("Europe/Paris")
            ),
            (
                "datetime.datetime(2012, 1, 1, 1, 1, tzinfo=utc)",
                {"import datetime", "from django.utils.timezone import utc"},
            ),
        )
        if pytz:
            self.assertSerializedResultEqual(
                pytz.timezone("Europe/Paris").localize(
                    datetime.datetime(2012, 1, 1, 2, 1)
                ),
                (
                    "datetime.datetime(2012, 1, 1, 1, 1, tzinfo=utc)",
                    {"import datetime", "from django.utils.timezone import utc"},
                ),
            )
    def test_serialize_fields(self):
        """Model fields serialize with sorted keyword arguments."""
        self.assertSerializedFieldEqual(models.CharField(max_length=255))
        self.assertSerializedResultEqual(
            models.CharField(max_length=255),
            ("models.CharField(max_length=255)", {"from django.db import models"}),
        )
        self.assertSerializedFieldEqual(models.TextField(null=True, blank=True))
        self.assertSerializedResultEqual(
            models.TextField(null=True, blank=True),
            (
                "models.TextField(blank=True, null=True)",
                {"from django.db import models"},
            ),
        )
    def test_serialize_settings(self):
        """SettingsReference serializes as settings.<NAME>, not the value."""
        self.assertSerializedEqual(
            SettingsReference(settings.AUTH_USER_MODEL, "AUTH_USER_MODEL")
        )
        self.assertSerializedResultEqual(
            SettingsReference("someapp.model", "AUTH_USER_MODEL"),
            ("settings.AUTH_USER_MODEL", {"from django.conf import settings"}),
        )
    def test_serialize_iterators(self):
        """Generators are consumed and serialized as a tuple literal."""
        self.assertSerializedResultEqual(
            ((x, x * x) for x in range(3)), ("((0, 0), (1, 1), (2, 4))", set())
        )
    def test_serialize_compiled_regex(self):
        """
        Make sure compiled regex can be serialized.
        """
        regex = re.compile(r"^\w+$")
        self.assertSerializedEqual(regex)
    def test_serialize_class_based_validators(self):
        """
        Ticket #22943: Test serialization of class-based validators, including
        compiled regexes.
        """
        validator = RegexValidator(message="hello")
        string = MigrationWriter.serialize(validator)[0]
        self.assertEqual(
            string, "django.core.validators.RegexValidator(message='hello')"
        )
        self.serialize_round_trip(validator)
        # Test with a compiled regex.
        validator = RegexValidator(regex=re.compile(r"^\w+$"))
        string = MigrationWriter.serialize(validator)[0]
        self.assertEqual(
            string,
            "django.core.validators.RegexValidator(regex=re.compile('^\\\\w+$'))",
        )
        self.serialize_round_trip(validator)
        # Test a string regex with flag
        validator = RegexValidator(r"^[0-9]+$", flags=re.S)
        string = MigrationWriter.serialize(validator)[0]
        self.assertEqual(
            string,
            "django.core.validators.RegexValidator('^[0-9]+$', "
            "flags=re.RegexFlag['DOTALL'])",
        )
        self.serialize_round_trip(validator)
        # Test message and code
        validator = RegexValidator("^[-a-zA-Z0-9_]+$", "Invalid", "invalid")
        string = MigrationWriter.serialize(validator)[0]
        self.assertEqual(
            string,
            "django.core.validators.RegexValidator('^[-a-zA-Z0-9_]+$', 'Invalid', "
            "'invalid')",
        )
        self.serialize_round_trip(validator)
        # Test with a subclass.
        validator = EmailValidator(message="hello")
        string = MigrationWriter.serialize(validator)[0]
        self.assertEqual(
            string, "django.core.validators.EmailValidator(message='hello')"
        )
        self.serialize_round_trip(validator)
        validator = deconstructible(path="migrations.test_writer.EmailValidator")(
            EmailValidator
        )(message="hello")
        string = MigrationWriter.serialize(validator)[0]
        self.assertEqual(
            string, "migrations.test_writer.EmailValidator(message='hello')"
        )
        # A deconstruct path pointing at a missing module raises ImportError.
        validator = deconstructible(path="custom.EmailValidator")(EmailValidator)(
            message="hello"
        )
        with self.assertRaisesMessage(ImportError, "No module named 'custom'"):
            MigrationWriter.serialize(validator)
        # A deconstruct path pointing at a missing attribute raises ValueError.
        validator = deconstructible(path="django.core.validators.EmailValidator2")(
            EmailValidator
        )(message="hello")
        with self.assertRaisesMessage(
            ValueError,
            "Could not find object EmailValidator2 in django.core.validators.",
        ):
            MigrationWriter.serialize(validator)
    def test_serialize_complex_func_index(self):
        """A functional index mixing Func/Case/ExpressionWrapper/OrderBy serializes."""
        index = models.Index(
            models.Func("rating", function="ABS"),
            models.Case(
                models.When(name="special", then=models.Value("X")),
                default=models.Value("other"),
            ),
            models.ExpressionWrapper(
                models.F("pages"),
                output_field=models.IntegerField(),
            ),
            models.OrderBy(models.F("name").desc()),
            name="complex_func_index",
        )
        string, imports = MigrationWriter.serialize(index)
        self.assertEqual(
            string,
            "models.Index(models.Func('rating', function='ABS'), "
            "models.Case(models.When(name='special', then=models.Value('X')), "
            "default=models.Value('other')), "
            "models.ExpressionWrapper("
            "models.F('pages'), output_field=models.IntegerField()), "
            "models.OrderBy(models.OrderBy(models.F('name'), descending=True)), "
            "name='complex_func_index')",
        )
        self.assertEqual(imports, {"from django.db import models"})
    def test_serialize_empty_nonempty_tuple(self):
        """
        Ticket #22679: makemigrations generates invalid code for (an empty
        tuple) default_permissions = ()
        """
        empty_tuple = ()
        one_item_tuple = ("a",)
        many_items_tuple = ("a", "b", "c")
        self.assertSerializedEqual(empty_tuple)
        self.assertSerializedEqual(one_item_tuple)
        self.assertSerializedEqual(many_items_tuple)
    def test_serialize_range(self):
        """range objects serialize as range(...) calls with no imports."""
        string, imports = MigrationWriter.serialize(range(1, 5))
        self.assertEqual(string, "range(1, 5)")
        self.assertEqual(imports, set())
    def test_serialize_builtins(self):
        """Builtin callables serialize by bare name with no imports."""
        string, imports = MigrationWriter.serialize(range)
        self.assertEqual(string, "range")
        self.assertEqual(imports, set())
    def test_serialize_unbound_method_reference(self):
        """An unbound method used within a class body can be serialized."""
        self.serialize_round_trip(TestModel1.thing)
    def test_serialize_local_function_reference(self):
        """A reference in a local scope can't be serialized."""
        class TestModel2:
            def upload_to(self):
                return "somewhere dynamic"
            thing = models.FileField(upload_to=upload_to)
        with self.assertRaisesMessage(
            ValueError, "Could not find function upload_to in migrations.test_writer"
        ):
            self.serialize_round_trip(TestModel2.thing)
    def test_serialize_managers(self):
        """Default, queryset-derived, and custom managers serialize."""
        self.assertSerializedEqual(models.Manager())
        self.assertSerializedResultEqual(
            FoodQuerySet.as_manager(),
            (
                "migrations.models.FoodQuerySet.as_manager()",
                {"import migrations.models"},
            ),
        )
        self.assertSerializedEqual(FoodManager("a", "b"))
        self.assertSerializedEqual(FoodManager("x", "y", c=3, d=4))
    def test_serialize_frozensets(self):
        """frozensets round-trip."""
        self.assertSerializedEqual(frozenset())
        self.assertSerializedEqual(frozenset("let it go"))
    def test_serialize_set(self):
        """Empty sets serialize as set(); non-empty as a set literal."""
        self.assertSerializedEqual(set())
        self.assertSerializedResultEqual(set(), ("set()", set()))
        self.assertSerializedEqual({"a"})
        self.assertSerializedResultEqual({"a"}, ("{'a'}", set()))
    def test_serialize_timedelta(self):
        """timedeltas round-trip."""
        self.assertSerializedEqual(datetime.timedelta())
        self.assertSerializedEqual(datetime.timedelta(minutes=42))
    def test_serialize_functools_partial(self):
        """functools.partial keeps its func, args, and keywords."""
        value = functools.partial(datetime.timedelta, 1, seconds=2)
        result = self.serialize_round_trip(value)
        self.assertEqual(result.func, value.func)
        self.assertEqual(result.args, value.args)
        self.assertEqual(result.keywords, value.keywords)
    def test_serialize_functools_partialmethod(self):
        """functools.partialmethod keeps its type, func, args, and keywords."""
        value = functools.partialmethod(datetime.timedelta, 1, seconds=2)
        result = self.serialize_round_trip(value)
        self.assertIsInstance(result, functools.partialmethod)
        self.assertEqual(result.func, value.func)
        self.assertEqual(result.args, value.args)
        self.assertEqual(result.keywords, value.keywords)
    def test_serialize_type_none(self):
        """type(None) round-trips."""
        self.assertSerializedEqual(type(None))
    def test_serialize_type_model(self):
        """models.Model serializes as the string 'models.Model'."""
        self.assertSerializedEqual(models.Model)
        # Deliberately serializes the (string, imports) tuple itself to pin
        # its exact repr.
        self.assertSerializedResultEqual(
            MigrationWriter.serialize(models.Model),
            ("('models.Model', {'from django.db import models'})", set()),
        )
    def test_simple_migration(self):
        """
        Tests serializing a simple migration.
        """
        fields = {
            "charfield": models.DateTimeField(default=datetime.datetime.now),
            "datetimefield": models.DateTimeField(default=datetime.datetime.now),
        }
        options = {
            "verbose_name": "My model",
            "verbose_name_plural": "My models",
        }
        migration = type(
            "Migration",
            (migrations.Migration,),
            {
                "operations": [
                    migrations.CreateModel(
                        "MyModel", tuple(fields.items()), options, (models.Model,)
                    ),
                    migrations.CreateModel(
                        "MyModel2", tuple(fields.items()), bases=(models.Model,)
                    ),
                    migrations.CreateModel(
                        name="MyModel3",
                        fields=tuple(fields.items()),
                        options=options,
                        bases=(models.Model,),
                    ),
                    migrations.DeleteModel("MyModel"),
                    migrations.AddField(
                        "OtherModel", "datetimefield", fields["datetimefield"]
                    ),
                ],
                "dependencies": [("testapp", "some_other_one")],
            },
        )
        writer = MigrationWriter(migration)
        output = writer.as_string()
        # We don't test the output formatting - that's too fragile.
        # Just make sure it runs for now, and that things look alright.
        result = self.safe_exec(output)
        self.assertIn("Migration", result)
    def test_migration_path(self):
        """Writer paths land in each app's migrations/ directory on disk."""
        test_apps = [
            "migrations.migrations_test_apps.normal",
            "migrations.migrations_test_apps.with_package_model",
            "migrations.migrations_test_apps.without_init_file",
        ]
        base_dir = os.path.dirname(os.path.dirname(__file__))
        for app in test_apps:
            with self.modify_settings(INSTALLED_APPS={"append": app}):
                migration = migrations.Migration("0001_initial", app.split(".")[-1])
                expected_path = os.path.join(
                    base_dir, *(app.split(".") + ["migrations", "0001_initial.py"])
                )
                writer = MigrationWriter(migration)
                self.assertEqual(writer.path, expected_path)
    def test_custom_operation(self):
        """Custom (non-django.db.migrations) operations serialize and re-import."""
        migration = type(
            "Migration",
            (migrations.Migration,),
            {
                "operations": [
                    custom_migration_operations.operations.TestOperation(),
                    custom_migration_operations.operations.CreateModel(),
                    migrations.CreateModel("MyModel", (), {}, (models.Model,)),
                    custom_migration_operations.more_operations.TestOperation(),
                ],
                "dependencies": [],
            },
        )
        writer = MigrationWriter(migration)
        output = writer.as_string()
        result = self.safe_exec(output)
        self.assertIn("custom_migration_operations", result)
        # Same class name from two submodules must stay distinct after exec.
        self.assertNotEqual(
            result["custom_migration_operations"].operations.TestOperation,
            result["custom_migration_operations"].more_operations.TestOperation,
        )
    def test_sorted_imports(self):
        """
        #24155 - Tests ordering of imports.
        """
        migration = type(
            "Migration",
            (migrations.Migration,),
            {
                "operations": [
                    migrations.AddField(
                        "mymodel",
                        "myfield",
                        models.DateTimeField(
                            default=datetime.datetime(2012, 1, 1, 1, 1, tzinfo=utc),
                        ),
                    ),
                ]
            },
        )
        writer = MigrationWriter(migration)
        output = writer.as_string()
        self.assertIn(
            "import datetime\n"
            "from django.db import migrations, models\n"
            "from django.utils.timezone import utc\n",
            output,
        )
    def test_migration_file_header_comments(self):
        """
        Test comments at top of file.
        """
        migration = type("Migration", (migrations.Migration,), {"operations": []})
        dt = datetime.datetime(2015, 7, 31, 4, 40, 0, 0, tzinfo=utc)
        with mock.patch("django.db.migrations.writer.now", lambda: dt):
            for include_header in (True, False):
                with self.subTest(include_header=include_header):
                    writer = MigrationWriter(migration, include_header)
                    output = writer.as_string()
                    self.assertEqual(
                        include_header,
                        output.startswith(
                            "# Generated by Django %s on 2015-07-31 04:40\n\n"
                            % get_version()
                        ),
                    )
                    if not include_header:
                        # Make sure the output starts with something that's not
                        # a comment or indentation or blank line
                        self.assertRegex(
                            output.splitlines(keepends=True)[0], r"^[^#\s]+"
                        )
    def test_models_import_omitted(self):
        """
        django.db.models shouldn't be imported if unused.
        """
        migration = type(
            "Migration",
            (migrations.Migration,),
            {
                "operations": [
                    migrations.AlterModelOptions(
                        name="model",
                        options={
                            "verbose_name": "model",
                            "verbose_name_plural": "models",
                        },
                    ),
                ]
            },
        )
        writer = MigrationWriter(migration)
        output = writer.as_string()
        self.assertIn("from django.db import migrations\n", output)
    def test_deconstruct_class_arguments(self):
        # Yes, it doesn't make sense to use a class as a default for a
        # CharField. It does make sense for custom fields though, for example
        # an enumfield that takes the enum class as an argument.
        string = MigrationWriter.serialize(
            models.CharField(default=DeconstructibleInstances)
        )[0]
        self.assertEqual(
            string,
            "models.CharField(default=migrations.test_writer.DeconstructibleInstances)",
        )
    def test_register_serializer(self):
        """Registered custom serializers apply and can be unregistered again."""
        class ComplexSerializer(BaseSerializer):
            def serialize(self):
                return "complex(%r)" % self.value, {}
        MigrationWriter.register_serializer(complex, ComplexSerializer)
        self.assertSerializedEqual(complex(1, 2))
        MigrationWriter.unregister_serializer(complex)
        with self.assertRaisesMessage(ValueError, "Cannot serialize: (1+2j)"):
            self.assertSerializedEqual(complex(1, 2))
    def test_register_non_serializer(self):
        """Registering a non-BaseSerializer class is rejected."""
        with self.assertRaisesMessage(
            ValueError, "'TestModel1' must inherit from 'BaseSerializer'."
        ):
            MigrationWriter.register_serializer(complex, TestModel1)
|
423e17028c33044b08ca21fc0fee33d2888e6da528adea8ad241925b852df6f4 | import compileall
import os
from importlib import import_module
from django.db import connection, connections
from django.db.migrations.exceptions import (
AmbiguityError,
InconsistentMigrationHistory,
NodeNotFoundError,
)
from django.db.migrations.loader import MigrationLoader
from django.db.migrations.recorder import MigrationRecorder
from django.test import TestCase, modify_settings, override_settings
from .test_base import MigrationTestBase
class RecorderTests(TestCase):
    """
    Tests recording migrations as applied or not.
    """
    # Records live per-connection, so exercise two databases.
    databases = {"default", "other"}
    def test_apply(self):
        """
        Tests marking migrations as applied/unapplied.
        """
        def myapp_records(rec):
            # Restrict to the app under test; other apps' records are noise.
            return {(x, y) for (x, y) in rec.applied_migrations() if x == "myapp"}
        recorder = MigrationRecorder(connection)
        self.assertEqual(myapp_records(recorder), set())
        recorder.record_applied("myapp", "0432_ponies")
        self.assertEqual(myapp_records(recorder), {("myapp", "0432_ponies")})
        # That should not affect records of another database
        recorder_other = MigrationRecorder(connections["other"])
        self.assertEqual(myapp_records(recorder_other), set())
        recorder.record_unapplied("myapp", "0432_ponies")
        self.assertEqual(myapp_records(recorder), set())
class LoaderTests(TestCase):
"""
Tests the disk and database loader, and running through migrations
in memory.
"""
    def setUp(self):
        # Migrations fake-applied during a test; tearDown unapplies them on
        # backends that don't roll back between test methods.
        self.applied_records = []
    def tearDown(self):
        """Undo any record_applied() calls made via self.record_applied()."""
        # Unapply records on databases that don't roll back changes after each
        # test method.
        if not connection.features.supports_transactions:
            for recorder, app, name in self.applied_records:
                recorder.record_unapplied(app, name)
def record_applied(self, recorder, app, name):
recorder.record_applied(app, name)
self.applied_records.append((recorder, app, name))
    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
    @modify_settings(INSTALLED_APPS={"append": "basic"})
    def test_load(self):
        """
        Makes sure the loader can load the migrations for the test apps,
        and then render them out to a new Apps.
        """
        # Load and test the plan
        migration_loader = MigrationLoader(connection)
        self.assertEqual(
            migration_loader.graph.forwards_plan(("migrations", "0002_second")),
            [
                ("migrations", "0001_initial"),
                ("migrations", "0002_second"),
            ],
        )
        # Now render it out!
        project_state = migration_loader.project_state(("migrations", "0002_second"))
        self.assertEqual(len(project_state.models), 2)
        author_state = project_state.models["migrations", "author"]
        self.assertEqual(
            list(author_state.fields), ["id", "name", "slug", "age", "rating"]
        )
        book_state = project_state.models["migrations", "book"]
        self.assertEqual(list(book_state.fields), ["id", "author"])
        # Ensure we've included unmigrated apps in there too
        self.assertIn("basic", project_state.real_apps)
    @override_settings(
        MIGRATION_MODULES={
            "migrations": "migrations.test_migrations",
            "migrations2": "migrations2.test_migrations_2",
        }
    )
    @modify_settings(INSTALLED_APPS={"append": "migrations2"})
    def test_plan_handles_repeated_migrations(self):
        """
        _generate_plan() doesn't readd migrations already in the plan (#29180).
        """
        migration_loader = MigrationLoader(connection)
        nodes = [("migrations", "0002_second"), ("migrations2", "0001_initial")]
        # Both target nodes depend on migrations.0001_initial; it must appear
        # exactly once in the combined plan.
        self.assertEqual(
            migration_loader.graph._generate_plan(nodes, at_end=True),
            [
                ("migrations", "0001_initial"),
                ("migrations", "0002_second"),
                ("migrations2", "0001_initial"),
            ],
        )
    @override_settings(
        MIGRATION_MODULES={"migrations": "migrations.test_migrations_unmigdep"}
    )
    def test_load_unmigrated_dependency(self):
        """
        The loader can load migrations with a dependency on an unmigrated app.
        """
        # Load and test the plan
        migration_loader = MigrationLoader(connection)
        self.assertEqual(
            migration_loader.graph.forwards_plan(("migrations", "0001_initial")),
            [
                ("contenttypes", "0001_initial"),
                ("auth", "0001_initial"),
                ("migrations", "0001_initial"),
            ],
        )
        # Now render it out!
        project_state = migration_loader.project_state(("migrations", "0001_initial"))
        # Only one model should come from the "migrations" app itself.
        self.assertEqual(
            len([m for a, m in project_state.models if a == "migrations"]), 1
        )
        book_state = project_state.models["migrations", "book"]
        self.assertEqual(list(book_state.fields), ["id", "user"])
    @override_settings(
        MIGRATION_MODULES={"migrations": "migrations.test_migrations_run_before"}
    )
    def test_run_before(self):
        """
        Makes sure the loader uses Migration.run_before.
        """
        # Load and test the plan
        migration_loader = MigrationLoader(connection)
        # 0003_third declares run_before, so it's ordered ahead of 0002_second.
        self.assertEqual(
            migration_loader.graph.forwards_plan(("migrations", "0002_second")),
            [
                ("migrations", "0001_initial"),
                ("migrations", "0003_third"),
                ("migrations", "0002_second"),
            ],
        )
    @override_settings(
        MIGRATION_MODULES={
            "migrations": "migrations.test_migrations_first",
            "migrations2": "migrations2.test_migrations_2_first",
        }
    )
    @modify_settings(INSTALLED_APPS={"append": "migrations2"})
    def test_first(self):
        """
        Makes sure the '__first__' migrations build correctly.
        """
        migration_loader = MigrationLoader(connection)
        # "__first__" dependencies resolve to each app's actual first node.
        self.assertEqual(
            migration_loader.graph.forwards_plan(("migrations", "second")),
            [
                ("migrations", "thefirst"),
                ("migrations2", "0001_initial"),
                ("migrations2", "0002_second"),
                ("migrations", "second"),
            ],
        )
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_name_match(self):
"Tests prefix name matching"
migration_loader = MigrationLoader(connection)
self.assertEqual(
migration_loader.get_migration_by_prefix("migrations", "0001").name,
"0001_initial",
)
msg = "There is more than one migration for 'migrations' with the prefix '0'"
with self.assertRaisesMessage(AmbiguityError, msg):
migration_loader.get_migration_by_prefix("migrations", "0")
msg = "There is no migration for 'migrations' with the prefix 'blarg'"
with self.assertRaisesMessage(KeyError, msg):
migration_loader.get_migration_by_prefix("migrations", "blarg")
def test_load_import_error(self):
with override_settings(
MIGRATION_MODULES={"migrations": "import_error_package"}
):
with self.assertRaises(ImportError):
MigrationLoader(connection)
def test_load_module_file(self):
with override_settings(
MIGRATION_MODULES={"migrations": "migrations.faulty_migrations.file"}
):
loader = MigrationLoader(connection)
self.assertIn(
"migrations",
loader.unmigrated_apps,
"App with migrations module file not in unmigrated apps.",
)
def test_load_empty_dir(self):
with override_settings(
MIGRATION_MODULES={"migrations": "migrations.faulty_migrations.namespace"}
):
loader = MigrationLoader(connection)
self.assertIn(
"migrations",
loader.unmigrated_apps,
"App missing __init__.py in migrations module not in unmigrated apps.",
)
@override_settings(
INSTALLED_APPS=["migrations.migrations_test_apps.migrated_app"],
)
def test_marked_as_migrated(self):
"""
Undefined MIGRATION_MODULES implies default migration module.
"""
migration_loader = MigrationLoader(connection)
self.assertEqual(migration_loader.migrated_apps, {"migrated_app"})
self.assertEqual(migration_loader.unmigrated_apps, set())
@override_settings(
INSTALLED_APPS=["migrations.migrations_test_apps.migrated_app"],
MIGRATION_MODULES={"migrated_app": None},
)
def test_marked_as_unmigrated(self):
"""
MIGRATION_MODULES allows disabling of migrations for a particular app.
"""
migration_loader = MigrationLoader(connection)
self.assertEqual(migration_loader.migrated_apps, set())
self.assertEqual(migration_loader.unmigrated_apps, {"migrated_app"})
    @override_settings(
        INSTALLED_APPS=["migrations.migrations_test_apps.migrated_app"],
        MIGRATION_MODULES={"migrated_app": "missing-module"},
    )
    def test_explicit_missing_module(self):
        """
        If a MIGRATION_MODULES override points to a missing module, the error
        raised during the importation attempt should be propagated unless
        `ignore_no_migrations=True`.
        """
        # Default behavior: the ImportError escapes to the caller.
        with self.assertRaisesMessage(ImportError, "missing-module"):
            migration_loader = MigrationLoader(connection)
        # With ignore_no_migrations=True the app is treated as unmigrated.
        migration_loader = MigrationLoader(connection, ignore_no_migrations=True)
        self.assertEqual(migration_loader.migrated_apps, set())
        self.assertEqual(migration_loader.unmigrated_apps, {"migrated_app"})
    @override_settings(
        MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"}
    )
    def test_loading_squashed(self):
        "Tests loading a squashed migration"
        migration_loader = MigrationLoader(connection)
        recorder = MigrationRecorder(connection)
        self.addCleanup(recorder.flush)
        # Loading with nothing applied should just give us the one node
        self.assertEqual(
            len([x for x in migration_loader.graph.nodes if x[0] == "migrations"]),
            1,
        )
        # However, fake-apply one migration and it should now use the old two
        self.record_applied(recorder, "migrations", "0001_initial")
        migration_loader.build_graph()
        self.assertEqual(
            len([x for x in migration_loader.graph.nodes if x[0] == "migrations"]),
            2,
        )
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed_complex"}
)
def test_loading_squashed_complex(self):
"Tests loading a complex set of squashed migrations"
loader = MigrationLoader(connection)
recorder = MigrationRecorder(connection)
self.addCleanup(recorder.flush)
def num_nodes():
plan = set(loader.graph.forwards_plan(("migrations", "7_auto")))
return len(plan - loader.applied_migrations.keys())
# Empty database: use squashed migration
loader.build_graph()
self.assertEqual(num_nodes(), 5)
# Starting at 1 or 2 should use the squashed migration too
self.record_applied(recorder, "migrations", "1_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 4)
self.record_applied(recorder, "migrations", "2_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 3)
# However, starting at 3 to 5 cannot use the squashed migration
self.record_applied(recorder, "migrations", "3_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 4)
self.record_applied(recorder, "migrations", "4_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 3)
# Starting at 5 to 7 we are past the squashed migrations.
self.record_applied(recorder, "migrations", "5_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 2)
self.record_applied(recorder, "migrations", "6_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 1)
self.record_applied(recorder, "migrations", "7_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 0)
@override_settings(
MIGRATION_MODULES={
"app1": "migrations.test_migrations_squashed_complex_multi_apps.app1",
"app2": "migrations.test_migrations_squashed_complex_multi_apps.app2",
}
)
@modify_settings(
INSTALLED_APPS={
"append": [
"migrations.test_migrations_squashed_complex_multi_apps.app1",
"migrations.test_migrations_squashed_complex_multi_apps.app2",
]
}
)
def test_loading_squashed_complex_multi_apps(self):
loader = MigrationLoader(connection)
loader.build_graph()
plan = set(loader.graph.forwards_plan(("app1", "4_auto")))
expected_plan = {
("app1", "1_auto"),
("app2", "1_squashed_2"),
("app1", "2_squashed_3"),
("app1", "4_auto"),
}
self.assertEqual(plan, expected_plan)
@override_settings(
MIGRATION_MODULES={
"app1": "migrations.test_migrations_squashed_complex_multi_apps.app1",
"app2": "migrations.test_migrations_squashed_complex_multi_apps.app2",
}
)
@modify_settings(
INSTALLED_APPS={
"append": [
"migrations.test_migrations_squashed_complex_multi_apps.app1",
"migrations.test_migrations_squashed_complex_multi_apps.app2",
]
}
)
def test_loading_squashed_complex_multi_apps_partially_applied(self):
loader = MigrationLoader(connection)
recorder = MigrationRecorder(connection)
self.record_applied(recorder, "app1", "1_auto")
self.record_applied(recorder, "app1", "2_auto")
loader.build_graph()
plan = set(loader.graph.forwards_plan(("app1", "4_auto")))
plan = plan - loader.applied_migrations.keys()
expected_plan = {
("app2", "1_squashed_2"),
("app1", "3_auto"),
("app1", "4_auto"),
}
self.assertEqual(plan, expected_plan)
    @override_settings(
        MIGRATION_MODULES={
            "migrations": "migrations.test_migrations_squashed_erroneous"
        }
    )
    def test_loading_squashed_erroneous(self):
        "Tests loading a complex but erroneous set of squashed migrations"
        loader = MigrationLoader(connection)
        recorder = MigrationRecorder(connection)
        self.addCleanup(recorder.flush)
        def num_nodes():
            # Migrations still to run to reach 7_auto, given what's applied.
            plan = set(loader.graph.forwards_plan(("migrations", "7_auto")))
            return len(plan - loader.applied_migrations.keys())
        # Empty database: use squashed migration
        loader.build_graph()
        self.assertEqual(num_nodes(), 5)
        # Starting at 1 or 2 should use the squashed migration too
        self.record_applied(recorder, "migrations", "1_auto")
        loader.build_graph()
        self.assertEqual(num_nodes(), 4)
        self.record_applied(recorder, "migrations", "2_auto")
        loader.build_graph()
        self.assertEqual(num_nodes(), 3)
        # However, starting at 3 or 4, nonexistent migrations would be needed.
        msg = (
            "Migration migrations.6_auto depends on nonexistent node "
            "('migrations', '5_auto'). Django tried to replace migration "
            "migrations.5_auto with any of [migrations.3_squashed_5] but wasn't able "
            "to because some of the replaced migrations are already applied."
        )
        self.record_applied(recorder, "migrations", "3_auto")
        with self.assertRaisesMessage(NodeNotFoundError, msg):
            loader.build_graph()
        self.record_applied(recorder, "migrations", "4_auto")
        with self.assertRaisesMessage(NodeNotFoundError, msg):
            loader.build_graph()
        # Starting at 5 to 7 we are past the squashed migrations
        self.record_applied(recorder, "migrations", "5_auto")
        loader.build_graph()
        self.assertEqual(num_nodes(), 2)
        self.record_applied(recorder, "migrations", "6_auto")
        loader.build_graph()
        self.assertEqual(num_nodes(), 1)
        self.record_applied(recorder, "migrations", "7_auto")
        loader.build_graph()
        self.assertEqual(num_nodes(), 0)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations"},
INSTALLED_APPS=["migrations"],
)
def test_check_consistent_history(self):
loader = MigrationLoader(connection=None)
loader.check_consistent_history(connection)
recorder = MigrationRecorder(connection)
self.record_applied(recorder, "migrations", "0002_second")
msg = (
"Migration migrations.0002_second is applied before its dependency "
"migrations.0001_initial on database 'default'."
)
with self.assertRaisesMessage(InconsistentMigrationHistory, msg):
loader.check_consistent_history(connection)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed_extra"},
INSTALLED_APPS=["migrations"],
)
def test_check_consistent_history_squashed(self):
"""
MigrationLoader.check_consistent_history() should ignore unapplied
squashed migrations that have all of their `replaces` applied.
"""
loader = MigrationLoader(connection=None)
recorder = MigrationRecorder(connection)
self.record_applied(recorder, "migrations", "0001_initial")
self.record_applied(recorder, "migrations", "0002_second")
loader.check_consistent_history(connection)
self.record_applied(recorder, "migrations", "0003_third")
loader.check_consistent_history(connection)
    @override_settings(
        MIGRATION_MODULES={
            "app1": "migrations.test_migrations_squashed_ref_squashed.app1",
            "app2": "migrations.test_migrations_squashed_ref_squashed.app2",
        }
    )
    @modify_settings(
        INSTALLED_APPS={
            "append": [
                "migrations.test_migrations_squashed_ref_squashed.app1",
                "migrations.test_migrations_squashed_ref_squashed.app2",
            ]
        }
    )
    def test_loading_squashed_ref_squashed(self):
        "Tests loading a squashed migration with a new migration referencing it"
        r"""
        The sample migrations are structured like this:
        app_1 1 --> 2 ---------------------*--> 3 *--> 4
        \ / /
        *-------------------*----/--> 2_sq_3 --*
        \ / /
        =============== \ ============= / == / ======================
        app_2 *--> 1_sq_2 --* /
        \ /
        *--> 1 --> 2 --*
        Where 2_sq_3 is a replacing migration for 2 and 3 in app_1,
        as 1_sq_2 is a replacing migration for 1 and 2 in app_2.
        """
        loader = MigrationLoader(connection)
        recorder = MigrationRecorder(connection)
        self.addCleanup(recorder.flush)
        # Load with nothing applied: both migrations squashed.
        loader.build_graph()
        plan = set(loader.graph.forwards_plan(("app1", "4_auto")))
        plan = plan - loader.applied_migrations.keys()
        expected_plan = {
            ("app1", "1_auto"),
            ("app2", "1_squashed_2"),
            ("app1", "2_squashed_3"),
            ("app1", "4_auto"),
        }
        self.assertEqual(plan, expected_plan)
        # Load with nothing applied and migrate to a replaced migration.
        # Not possible if loader.replace_migrations is True (default).
        loader.build_graph()
        msg = "Node ('app1', '3_auto') not a valid node"
        with self.assertRaisesMessage(NodeNotFoundError, msg):
            loader.graph.forwards_plan(("app1", "3_auto"))
        # Possible if loader.replace_migrations is False.
        loader.replace_migrations = False
        loader.build_graph()
        plan = set(loader.graph.forwards_plan(("app1", "3_auto")))
        plan = plan - loader.applied_migrations.keys()
        # With replacements disabled, the plan consists of originals only.
        expected_plan = {
            ("app1", "1_auto"),
            ("app2", "1_auto"),
            ("app2", "2_auto"),
            ("app1", "2_auto"),
            ("app1", "3_auto"),
        }
        self.assertEqual(plan, expected_plan)
        loader.replace_migrations = True
        # Fake-apply a few from app1: unsquashes migration in app1.
        self.record_applied(recorder, "app1", "1_auto")
        self.record_applied(recorder, "app1", "2_auto")
        loader.build_graph()
        plan = set(loader.graph.forwards_plan(("app1", "4_auto")))
        plan = plan - loader.applied_migrations.keys()
        expected_plan = {
            ("app2", "1_squashed_2"),
            ("app1", "3_auto"),
            ("app1", "4_auto"),
        }
        self.assertEqual(plan, expected_plan)
        # Fake-apply one from app2: unsquashes migration in app2 too.
        self.record_applied(recorder, "app2", "1_auto")
        loader.build_graph()
        plan = set(loader.graph.forwards_plan(("app1", "4_auto")))
        plan = plan - loader.applied_migrations.keys()
        expected_plan = {
            ("app2", "2_auto"),
            ("app1", "3_auto"),
            ("app1", "4_auto"),
        }
        self.assertEqual(plan, expected_plan)
@override_settings(
MIGRATION_MODULES={"migrations": "migrations.test_migrations_private"}
)
def test_ignore_files(self):
"""Files prefixed with underscore, tilde, or dot aren't loaded."""
loader = MigrationLoader(connection)
loader.load_disk()
migrations = [
name for app, name in loader.disk_migrations if app == "migrations"
]
self.assertEqual(migrations, ["0001_initial"])
@override_settings(
MIGRATION_MODULES={
"migrations": "migrations.test_migrations_namespace_package"
},
)
def test_loading_namespace_package(self):
"""Migration directories without an __init__.py file are ignored."""
loader = MigrationLoader(connection)
loader.load_disk()
migrations = [
name for app, name in loader.disk_migrations if app == "migrations"
]
self.assertEqual(migrations, [])
    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
    def test_loading_package_without__file__(self):
        """
        To support frozen environments, MigrationLoader loads migrations from
        regular packages with no __file__ attribute.
        """
        test_module = import_module("migrations.test_migrations")
        loader = MigrationLoader(connection)
        # __file__ == __spec__.origin or the latter is None and former is
        # undefined.
        module_file = test_module.__file__
        module_origin = test_module.__spec__.origin
        module_has_location = test_module.__spec__.has_location
        try:
            # Simulate a frozen package: remove __file__ and blank the spec
            # location so the loader cannot rely on filesystem paths.
            del test_module.__file__
            test_module.__spec__.origin = None
            test_module.__spec__.has_location = False
            loader.load_disk()
            migrations = [
                name for app, name in loader.disk_migrations if app == "migrations"
            ]
            self.assertCountEqual(migrations, ["0001_initial", "0002_second"])
        finally:
            # Restore module attributes so other tests see the real module.
            test_module.__file__ = module_file
            test_module.__spec__.origin = module_origin
            test_module.__spec__.has_location = module_has_location
class PycLoaderTests(MigrationTestBase):
    # Tests loading migrations from compiled .pyc files (frozen environments).
    def test_valid(self):
        """
        To support frozen environments, MigrationLoader loads .pyc migrations.
        """
        with self.temporary_migration_module(
            module="migrations.test_migrations"
        ) as migration_dir:
            # Compile .py files to .pyc files and delete .py files.
            compileall.compile_dir(migration_dir, force=True, quiet=1, legacy=True)
            for name in os.listdir(migration_dir):
                if name.endswith(".py"):
                    os.remove(os.path.join(migration_dir, name))
            loader = MigrationLoader(connection)
            self.assertIn(("migrations", "0001_initial"), loader.disk_migrations)
    def test_invalid(self):
        """
        MigrationLoader reraises ImportErrors caused by "bad magic number" pyc
        files with a more helpful message.
        """
        with self.temporary_migration_module(
            module="migrations.test_migrations_bad_pyc"
        ) as migration_dir:
            # The -tpl suffix is to avoid the pyc exclusion in MANIFEST.in.
            os.rename(
                os.path.join(migration_dir, "0001_initial.pyc-tpl"),
                os.path.join(migration_dir, "0001_initial.pyc"),
            )
            # The regex allows for any app label in the error message.
            msg = (
                r"Couldn't import '\w+.migrations.0001_initial' as it appears "
                "to be a stale .pyc file."
            )
            with self.assertRaisesRegex(ImportError, msg):
                MigrationLoader(connection)
|
4fa1fe7acf4cc8c7a999afe270b4f7c93cdf2017938ded10de04cc9e425d6af4 | from django.db.migrations.exceptions import CircularDependencyError, NodeNotFoundError
from django.db.migrations.graph import DummyNode, MigrationGraph, Node
from django.test import SimpleTestCase
class GraphTests(SimpleTestCase):
    """
    Tests the digraph structure.
    """
    def test_simple_graph(self):
        """
        Tests a basic dependency graph:
        app_a: 0001 <-- 0002 <--- 0003 <-- 0004
        /
        app_b: 0001 <-- 0002 <-/
        """
        # Build graph
        graph = MigrationGraph()
        graph.add_node(("app_a", "0001"), None)
        graph.add_node(("app_a", "0002"), None)
        graph.add_node(("app_a", "0003"), None)
        graph.add_node(("app_a", "0004"), None)
        graph.add_node(("app_b", "0001"), None)
        graph.add_node(("app_b", "0002"), None)
        graph.add_dependency("app_a.0004", ("app_a", "0004"), ("app_a", "0003"))
        graph.add_dependency("app_a.0003", ("app_a", "0003"), ("app_a", "0002"))
        graph.add_dependency("app_a.0002", ("app_a", "0002"), ("app_a", "0001"))
        graph.add_dependency("app_a.0003", ("app_a", "0003"), ("app_b", "0002"))
        graph.add_dependency("app_b.0002", ("app_b", "0002"), ("app_b", "0001"))
        # Test root migration case
        self.assertEqual(
            graph.forwards_plan(("app_a", "0001")),
            [("app_a", "0001")],
        )
        # Test branch B only
        self.assertEqual(
            graph.forwards_plan(("app_b", "0002")),
            [("app_b", "0001"), ("app_b", "0002")],
        )
        # Test whole graph
        self.assertEqual(
            graph.forwards_plan(("app_a", "0004")),
            [
                ("app_b", "0001"),
                ("app_b", "0002"),
                ("app_a", "0001"),
                ("app_a", "0002"),
                ("app_a", "0003"),
                ("app_a", "0004"),
            ],
        )
        # Test reverse to b:0002
        self.assertEqual(
            graph.backwards_plan(("app_b", "0002")),
            [("app_a", "0004"), ("app_a", "0003"), ("app_b", "0002")],
        )
        # Test roots and leaves
        self.assertEqual(
            graph.root_nodes(),
            [("app_a", "0001"), ("app_b", "0001")],
        )
        self.assertEqual(
            graph.leaf_nodes(),
            [("app_a", "0004"), ("app_b", "0002")],
        )
    def test_complex_graph(self):
        r"""
        Tests a complex dependency graph:
        app_a: 0001 <-- 0002 <--- 0003 <-- 0004
        \ \ / /
        app_b: 0001 <-\ 0002 <-X /
        \ \ /
        app_c: \ 0001 <-- 0002 <-
        """
        # Build graph
        graph = MigrationGraph()
        graph.add_node(("app_a", "0001"), None)
        graph.add_node(("app_a", "0002"), None)
        graph.add_node(("app_a", "0003"), None)
        graph.add_node(("app_a", "0004"), None)
        graph.add_node(("app_b", "0001"), None)
        graph.add_node(("app_b", "0002"), None)
        graph.add_node(("app_c", "0001"), None)
        graph.add_node(("app_c", "0002"), None)
        graph.add_dependency("app_a.0004", ("app_a", "0004"), ("app_a", "0003"))
        graph.add_dependency("app_a.0003", ("app_a", "0003"), ("app_a", "0002"))
        graph.add_dependency("app_a.0002", ("app_a", "0002"), ("app_a", "0001"))
        graph.add_dependency("app_a.0003", ("app_a", "0003"), ("app_b", "0002"))
        graph.add_dependency("app_b.0002", ("app_b", "0002"), ("app_b", "0001"))
        graph.add_dependency("app_a.0004", ("app_a", "0004"), ("app_c", "0002"))
        graph.add_dependency("app_c.0002", ("app_c", "0002"), ("app_c", "0001"))
        graph.add_dependency("app_c.0001", ("app_c", "0001"), ("app_b", "0001"))
        graph.add_dependency("app_c.0002", ("app_c", "0002"), ("app_a", "0002"))
        # Test branch C only
        self.assertEqual(
            graph.forwards_plan(("app_c", "0002")),
            [
                ("app_b", "0001"),
                ("app_c", "0001"),
                ("app_a", "0001"),
                ("app_a", "0002"),
                ("app_c", "0002"),
            ],
        )
        # Test whole graph
        self.assertEqual(
            graph.forwards_plan(("app_a", "0004")),
            [
                ("app_b", "0001"),
                ("app_c", "0001"),
                ("app_a", "0001"),
                ("app_a", "0002"),
                ("app_c", "0002"),
                ("app_b", "0002"),
                ("app_a", "0003"),
                ("app_a", "0004"),
            ],
        )
        # Test reverse to b:0001
        self.assertEqual(
            graph.backwards_plan(("app_b", "0001")),
            [
                ("app_a", "0004"),
                ("app_c", "0002"),
                ("app_c", "0001"),
                ("app_a", "0003"),
                ("app_b", "0002"),
                ("app_b", "0001"),
            ],
        )
        # Test roots and leaves
        self.assertEqual(
            graph.root_nodes(),
            [("app_a", "0001"), ("app_b", "0001"), ("app_c", "0001")],
        )
        self.assertEqual(
            graph.leaf_nodes(),
            [("app_a", "0004"), ("app_b", "0002"), ("app_c", "0002")],
        )
    def test_circular_graph(self):
        """
        Tests a circular dependency graph.
        """
        # Build graph
        graph = MigrationGraph()
        graph.add_node(("app_a", "0001"), None)
        graph.add_node(("app_a", "0002"), None)
        graph.add_node(("app_a", "0003"), None)
        graph.add_node(("app_b", "0001"), None)
        graph.add_node(("app_b", "0002"), None)
        graph.add_dependency("app_a.0003", ("app_a", "0003"), ("app_a", "0002"))
        graph.add_dependency("app_a.0002", ("app_a", "0002"), ("app_a", "0001"))
        # The cross-app dependencies below close the cycle.
        graph.add_dependency("app_a.0001", ("app_a", "0001"), ("app_b", "0002"))
        graph.add_dependency("app_b.0002", ("app_b", "0002"), ("app_b", "0001"))
        graph.add_dependency("app_b.0001", ("app_b", "0001"), ("app_a", "0003"))
        # Test whole graph
        with self.assertRaises(CircularDependencyError):
            graph.ensure_not_cyclic()
    def test_circular_graph_2(self):
        # A two-node cycle (A.0001 <-> B.0001) with an extra dependent node.
        graph = MigrationGraph()
        graph.add_node(("A", "0001"), None)
        graph.add_node(("C", "0001"), None)
        graph.add_node(("B", "0001"), None)
        graph.add_dependency("A.0001", ("A", "0001"), ("B", "0001"))
        graph.add_dependency("B.0001", ("B", "0001"), ("A", "0001"))
        graph.add_dependency("C.0001", ("C", "0001"), ("B", "0001"))
        with self.assertRaises(CircularDependencyError):
            graph.ensure_not_cyclic()
    def test_iterative_dfs(self):
        # A 749-node linear chain: must not hit the recursion limit, so the
        # DFS has to be iterative.
        graph = MigrationGraph()
        root = ("app_a", "1")
        graph.add_node(root, None)
        expected = [root]
        for i in range(2, 750):
            parent = ("app_a", str(i - 1))
            child = ("app_a", str(i))
            graph.add_node(child, None)
            graph.add_dependency(str(i), child, parent)
            expected.append(child)
        leaf = expected[-1]
        forwards_plan = graph.forwards_plan(leaf)
        self.assertEqual(expected, forwards_plan)
        backwards_plan = graph.backwards_plan(root)
        self.assertEqual(expected[::-1], backwards_plan)
    def test_iterative_dfs_complexity(self):
        """
        In a graph with merge migrations, iterative_dfs() traverses each node
        only once even if there are multiple paths leading to it.
        """
        n = 50
        graph = MigrationGraph()
        for i in range(1, n + 1):
            graph.add_node(("app_a", str(i)), None)
            graph.add_node(("app_b", str(i)), None)
            graph.add_node(("app_c", str(i)), None)
        # Diamond at every level: a(i) fans out to b(i) and c(i), which both
        # feed a(i + 1).
        for i in range(1, n):
            graph.add_dependency(None, ("app_b", str(i)), ("app_a", str(i)))
            graph.add_dependency(None, ("app_c", str(i)), ("app_a", str(i)))
            graph.add_dependency(None, ("app_a", str(i + 1)), ("app_b", str(i)))
            graph.add_dependency(None, ("app_a", str(i + 1)), ("app_c", str(i)))
        plan = graph.forwards_plan(("app_a", str(n)))
        expected = [
            (app, str(i)) for i in range(1, n) for app in ["app_a", "app_c", "app_b"]
        ] + [("app_a", str(n))]
        self.assertEqual(plan, expected)
    def test_plan_invalid_node(self):
        """
        Tests for forwards/backwards_plan of nonexistent node.
        """
        graph = MigrationGraph()
        message = "Node ('app_b', '0001') not a valid node"
        with self.assertRaisesMessage(NodeNotFoundError, message):
            graph.forwards_plan(("app_b", "0001"))
        with self.assertRaisesMessage(NodeNotFoundError, message):
            graph.backwards_plan(("app_b", "0001"))
    def test_missing_parent_nodes(self):
        """
        Tests for missing parent nodes.
        """
        # Build graph
        graph = MigrationGraph()
        graph.add_node(("app_a", "0001"), None)
        graph.add_node(("app_a", "0002"), None)
        graph.add_node(("app_a", "0003"), None)
        graph.add_node(("app_b", "0001"), None)
        graph.add_dependency("app_a.0003", ("app_a", "0003"), ("app_a", "0002"))
        graph.add_dependency("app_a.0002", ("app_a", "0002"), ("app_a", "0001"))
        msg = (
            "Migration app_a.0001 dependencies reference nonexistent parent node "
            "('app_b', '0002')"
        )
        with self.assertRaisesMessage(NodeNotFoundError, msg):
            graph.add_dependency("app_a.0001", ("app_a", "0001"), ("app_b", "0002"))
    def test_missing_child_nodes(self):
        """
        Tests for missing child nodes.
        """
        # Build graph
        graph = MigrationGraph()
        graph.add_node(("app_a", "0001"), None)
        msg = (
            "Migration app_a.0002 dependencies reference nonexistent child node "
            "('app_a', '0002')"
        )
        with self.assertRaisesMessage(NodeNotFoundError, msg):
            graph.add_dependency("app_a.0002", ("app_a", "0002"), ("app_a", "0001"))
    def test_validate_consistency_missing_parent(self):
        # skip_validation defers the check to validate_consistency().
        graph = MigrationGraph()
        graph.add_node(("app_a", "0001"), None)
        graph.add_dependency(
            "app_a.0001", ("app_a", "0001"), ("app_b", "0002"), skip_validation=True
        )
        msg = (
            "Migration app_a.0001 dependencies reference nonexistent parent node "
            "('app_b', '0002')"
        )
        with self.assertRaisesMessage(NodeNotFoundError, msg):
            graph.validate_consistency()
    def test_validate_consistency_missing_child(self):
        # skip_validation defers the check to validate_consistency().
        graph = MigrationGraph()
        graph.add_node(("app_b", "0002"), None)
        graph.add_dependency(
            "app_b.0002", ("app_a", "0001"), ("app_b", "0002"), skip_validation=True
        )
        msg = (
            "Migration app_b.0002 dependencies reference nonexistent child node "
            "('app_a', '0001')"
        )
        with self.assertRaisesMessage(NodeNotFoundError, msg):
            graph.validate_consistency()
    def test_validate_consistency_no_error(self):
        # Both endpoints exist, so the deferred validation passes.
        graph = MigrationGraph()
        graph.add_node(("app_a", "0001"), None)
        graph.add_node(("app_b", "0002"), None)
        graph.add_dependency(
            "app_a.0001", ("app_a", "0001"), ("app_b", "0002"), skip_validation=True
        )
        graph.validate_consistency()
    def test_validate_consistency_dummy(self):
        """
        validate_consistency() raises an error if there's an isolated dummy
        node.
        """
        msg = "app_a.0001 (req'd by app_b.0002) is missing!"
        graph = MigrationGraph()
        graph.add_dummy_node(
            key=("app_a", "0001"), origin="app_b.0002", error_message=msg
        )
        with self.assertRaisesMessage(NodeNotFoundError, msg):
            graph.validate_consistency()
    def test_remove_replaced_nodes(self):
        """
        Replaced nodes are properly removed and dependencies remapped.
        """
        # Add some dummy nodes to be replaced.
        graph = MigrationGraph()
        graph.add_dummy_node(
            key=("app_a", "0001"), origin="app_a.0002", error_message="BAD!"
        )
        graph.add_dummy_node(
            key=("app_a", "0002"), origin="app_b.0001", error_message="BAD!"
        )
        graph.add_dependency(
            "app_a.0002", ("app_a", "0002"), ("app_a", "0001"), skip_validation=True
        )
        # Add some normal parent and child nodes to test dependency remapping.
        graph.add_node(("app_c", "0001"), None)
        graph.add_node(("app_b", "0001"), None)
        graph.add_dependency(
            "app_a.0001", ("app_a", "0001"), ("app_c", "0001"), skip_validation=True
        )
        graph.add_dependency(
            "app_b.0001", ("app_b", "0001"), ("app_a", "0002"), skip_validation=True
        )
        # Try replacing before replacement node exists.
        msg = (
            "Unable to find replacement node ('app_a', '0001_squashed_0002'). It was "
            "either never added to the migration graph, or has been removed."
        )
        with self.assertRaisesMessage(NodeNotFoundError, msg):
            graph.remove_replaced_nodes(
                replacement=("app_a", "0001_squashed_0002"),
                replaced=[("app_a", "0001"), ("app_a", "0002")],
            )
        graph.add_node(("app_a", "0001_squashed_0002"), None)
        # Ensure `validate_consistency()` still raises an error at this stage.
        with self.assertRaisesMessage(NodeNotFoundError, "BAD!"):
            graph.validate_consistency()
        # Remove the dummy nodes.
        graph.remove_replaced_nodes(
            replacement=("app_a", "0001_squashed_0002"),
            replaced=[("app_a", "0001"), ("app_a", "0002")],
        )
        # Ensure graph is now consistent and dependencies have been remapped
        graph.validate_consistency()
        parent_node = graph.node_map[("app_c", "0001")]
        replacement_node = graph.node_map[("app_a", "0001_squashed_0002")]
        child_node = graph.node_map[("app_b", "0001")]
        self.assertIn(parent_node, replacement_node.parents)
        self.assertIn(replacement_node, parent_node.children)
        self.assertIn(child_node, replacement_node.children)
        self.assertIn(replacement_node, child_node.parents)
    def test_remove_replacement_node(self):
        """
        A replacement node is properly removed and child dependencies remapped.
        We assume parent dependencies are already correct.
        """
        # Add some dummy nodes to be replaced.
        graph = MigrationGraph()
        graph.add_node(("app_a", "0001"), None)
        graph.add_node(("app_a", "0002"), None)
        graph.add_dependency("app_a.0002", ("app_a", "0002"), ("app_a", "0001"))
        # Try removing replacement node before replacement node exists.
        msg = (
            "Unable to remove replacement node ('app_a', '0001_squashed_0002'). It was"
            " either never added to the migration graph, or has been removed already."
        )
        with self.assertRaisesMessage(NodeNotFoundError, msg):
            graph.remove_replacement_node(
                replacement=("app_a", "0001_squashed_0002"),
                replaced=[("app_a", "0001"), ("app_a", "0002")],
            )
        graph.add_node(("app_a", "0001_squashed_0002"), None)
        # Add a child node to test dependency remapping.
        graph.add_node(("app_b", "0001"), None)
        graph.add_dependency(
            "app_b.0001", ("app_b", "0001"), ("app_a", "0001_squashed_0002")
        )
        # Remove the replacement node.
        graph.remove_replacement_node(
            replacement=("app_a", "0001_squashed_0002"),
            replaced=[("app_a", "0001"), ("app_a", "0002")],
        )
        # Ensure graph is consistent and child dependency has been remapped
        graph.validate_consistency()
        replaced_node = graph.node_map[("app_a", "0002")]
        child_node = graph.node_map[("app_b", "0001")]
        self.assertIn(child_node, replaced_node.children)
        self.assertIn(replaced_node, child_node.parents)
        # Child dependency hasn't also gotten remapped to the other replaced
        # node.
        other_replaced_node = graph.node_map[("app_a", "0001")]
        self.assertNotIn(child_node, other_replaced_node.children)
        self.assertNotIn(other_replaced_node, child_node.parents)
    def test_infinite_loop(self):
        """
        Tests a complex dependency graph:
        app_a: 0001 <-
        \
        app_b: 0001 <- x 0002 <-
        / \
        app_c: 0001<- <------------- x 0002
        And apply squashing on app_c.
        """
        graph = MigrationGraph()
        graph.add_node(("app_a", "0001"), None)
        graph.add_node(("app_b", "0001"), None)
        graph.add_node(("app_b", "0002"), None)
        graph.add_node(("app_c", "0001_squashed_0002"), None)
        graph.add_dependency(
            "app_b.0001", ("app_b", "0001"), ("app_c", "0001_squashed_0002")
        )
        graph.add_dependency("app_b.0002", ("app_b", "0002"), ("app_a", "0001"))
        graph.add_dependency("app_b.0002", ("app_b", "0002"), ("app_b", "0001"))
        graph.add_dependency(
            "app_c.0001_squashed_0002",
            ("app_c", "0001_squashed_0002"),
            ("app_b", "0002"),
        )
        # The squash introduces a cycle, which must be detected rather than
        # looping forever.
        with self.assertRaises(CircularDependencyError):
            graph.ensure_not_cyclic()
    def test_stringify(self):
        # str() reports node/edge counts; repr() uses the class name form.
        graph = MigrationGraph()
        self.assertEqual(str(graph), "Graph: 0 nodes, 0 edges")
        graph.add_node(("app_a", "0001"), None)
        graph.add_node(("app_a", "0002"), None)
        graph.add_node(("app_a", "0003"), None)
        graph.add_node(("app_b", "0001"), None)
        graph.add_node(("app_b", "0002"), None)
        graph.add_dependency("app_a.0002", ("app_a", "0002"), ("app_a", "0001"))
        graph.add_dependency("app_a.0003", ("app_a", "0003"), ("app_a", "0002"))
        graph.add_dependency("app_a.0003", ("app_a", "0003"), ("app_b", "0002"))
        self.assertEqual(str(graph), "Graph: 5 nodes, 3 edges")
        self.assertEqual(repr(graph), "<MigrationGraph: nodes=5, edges=3>")
class NodeTests(SimpleTestCase):
    """Tests for the repr() of graph node classes."""

    def test_node_repr(self):
        self.assertEqual(
            repr(Node(("app_a", "0001"))),
            "<Node: ('app_a', '0001')>",
        )

    def test_dummynode_repr(self):
        dummy = DummyNode(
            key=("app_a", "0001"),
            origin="app_a.0001",
            error_message="x is missing",
        )
        self.assertEqual(repr(dummy), "<DummyNode: ('app_a', '0001')>")
|
2768bf32f7baf421d14aab67af706e8669cc5d0f9e7baacb1130c72e2e8c7e07 | from django.apps.registry import Apps
from django.db import models
class CustomModelBase(models.base.ModelBase):
    # Custom metaclass; ModelWithCustomBase below is built with it.
    pass
class ModelWithCustomBase(models.Model, metaclass=CustomModelBase):
    # Model whose metaclass is CustomModelBase instead of the default.
    pass
class UnicodeModel(models.Model):
    # Verbose names and the field default deliberately use non-ASCII text.
    title = models.CharField("ÚÑÍ¢ÓÐÉ", max_length=20, default="“Ðjáñgó”")
    class Meta:
        # Disable auto loading of this model as we load it on our own
        apps = Apps()
        verbose_name = "úñí©óðé µóðéø"
        verbose_name_plural = "úñí©óðé µóðéøß"
    def __str__(self):
        return self.title
class Unserializable:
    """
    An object that migration doesn't know how to serialize.
    Used as a field default on UnserializableModel below.
    """
    pass
class UnserializableModel(models.Model):
    # The default is an Unserializable instance (see its docstring).
    title = models.CharField(max_length=20, default=Unserializable())
    class Meta:
        # Disable auto loading of this model as we load it on our own
        apps = Apps()
class UnmigratedModel(models.Model):
    """
    A model that is in a migration-less app (which this app is
    if its migrations directory has not been repointed).
    """
    pass
class EmptyManager(models.Manager):
    # Opt this manager into migration serialization.
    use_in_migrations = True
class FoodQuerySet(models.query.QuerySet):
    # Custom QuerySet used to build managers via from_queryset() below.
    pass
class BaseFoodManager(models.Manager):
    def __init__(self, a, b, c=1, d=2):
        super().__init__()
        # Record the constructor arguments on the instance.
        self.args = (a, b, c, d)
class FoodManager(BaseFoodManager.from_queryset(FoodQuerySet)):
    # from_queryset()-built manager that opts into migration serialization.
    use_in_migrations = True
class NoMigrationFoodManager(BaseFoodManager.from_queryset(FoodQuerySet)):
    # Same construction as FoodManager but without use_in_migrations.
    pass
|
e912ab6455e7a0321c85b10ce243aeb9d2cc860d0ac2aa640f16333b324762ca | from django.core.exceptions import FieldDoesNotExist
from django.db import IntegrityError, connection, migrations, models, transaction
from django.db.migrations.migration import Migration
from django.db.migrations.operations.fields import FieldOperation
from django.db.migrations.state import ModelState, ProjectState
from django.db.models.functions import Abs
from django.db.transaction import atomic
from django.test import SimpleTestCase, override_settings, skipUnlessDBFeature
from django.test.utils import CaptureQueriesContext
from .models import FoodManager, FoodQuerySet, UnicodeModel
from .test_base import OperationTestBase
class Mixin:
    # Plain class used as a duplicate base in CreateModel bases tests below.
    pass
class OperationTests(OperationTestBase):
"""
Tests running the operations and making sure they do what they say they do.
Each test looks at their state changing, and then their database operation -
both forwards and backwards.
"""
    def test_create_model(self):
        """
        Tests the CreateModel operation.
        Most other tests use this operation as part of setup, so check failures
        here first.
        """
        operation = migrations.CreateModel(
            "Pony",
            [
                ("id", models.AutoField(primary_key=True)),
                ("pink", models.IntegerField(default=1)),
            ],
        )
        self.assertEqual(operation.describe(), "Create model Pony")
        self.assertEqual(operation.migration_name_fragment, "pony")
        # Test the state alteration
        project_state = ProjectState()
        new_state = project_state.clone()
        operation.state_forwards("test_crmo", new_state)
        self.assertEqual(new_state.models["test_crmo", "pony"].name, "Pony")
        self.assertEqual(len(new_state.models["test_crmo", "pony"].fields), 2)
        # Test the database alteration
        self.assertTableNotExists("test_crmo_pony")
        with connection.schema_editor() as editor:
            operation.database_forwards("test_crmo", editor, project_state, new_state)
        self.assertTableExists("test_crmo_pony")
        # And test reversal
        with connection.schema_editor() as editor:
            operation.database_backwards("test_crmo", editor, new_state, project_state)
        self.assertTableNotExists("test_crmo_pony")
        # And deconstruction
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "CreateModel")
        self.assertEqual(definition[1], [])
        self.assertEqual(sorted(definition[2]), ["fields", "name"])
        # And default manager not in set: a plain Manager named "objects"
        # is not serialized into the deconstructed kwargs.
        operation = migrations.CreateModel(
            "Foo", fields=[], managers=[("objects", models.Manager())]
        )
        definition = operation.deconstruct()
        self.assertNotIn("managers", definition[2])
def test_create_model_with_duplicate_field_name(self):
with self.assertRaisesMessage(
ValueError, "Found duplicate value pink in CreateModel fields argument."
):
migrations.CreateModel(
"Pony",
[
("id", models.AutoField(primary_key=True)),
("pink", models.TextField()),
("pink", models.IntegerField(default=1)),
],
)
    def test_create_model_with_duplicate_base(self):
        # Duplicate bases are rejected whether given as strings (in either
        # case), as model classes, or as a mix of both.
        message = "Found duplicate value test_crmo.pony in CreateModel bases argument."
        with self.assertRaisesMessage(ValueError, message):
            migrations.CreateModel(
                "Pony",
                fields=[],
                bases=(
                    "test_crmo.Pony",
                    "test_crmo.Pony",
                ),
            )
        with self.assertRaisesMessage(ValueError, message):
            migrations.CreateModel(
                "Pony",
                fields=[],
                bases=(
                    "test_crmo.Pony",
                    "test_crmo.pony",
                ),
            )
        message = (
            "Found duplicate value migrations.unicodemodel in CreateModel bases "
            "argument."
        )
        with self.assertRaisesMessage(ValueError, message):
            migrations.CreateModel(
                "Pony",
                fields=[],
                bases=(
                    UnicodeModel,
                    UnicodeModel,
                ),
            )
        with self.assertRaisesMessage(ValueError, message):
            migrations.CreateModel(
                "Pony",
                fields=[],
                bases=(
                    UnicodeModel,
                    "migrations.unicodemodel",
                ),
            )
        with self.assertRaisesMessage(ValueError, message):
            migrations.CreateModel(
                "Pony",
                fields=[],
                bases=(
                    UnicodeModel,
                    "migrations.UnicodeModel",
                ),
            )
        # Duplicate non-model classes are also detected.
        message = (
            "Found duplicate value <class 'django.db.models.base.Model'> in "
            "CreateModel bases argument."
        )
        with self.assertRaisesMessage(ValueError, message):
            migrations.CreateModel(
                "Pony",
                fields=[],
                bases=(
                    models.Model,
                    models.Model,
                ),
            )
        message = (
            "Found duplicate value <class 'migrations.test_operations.Mixin'> in "
            "CreateModel bases argument."
        )
        with self.assertRaisesMessage(ValueError, message):
            migrations.CreateModel(
                "Pony",
                fields=[],
                bases=(
                    Mixin,
                    Mixin,
                ),
            )
def test_create_model_with_duplicate_manager_name(self):
with self.assertRaisesMessage(
ValueError,
"Found duplicate value objects in CreateModel managers argument.",
):
migrations.CreateModel(
"Pony",
fields=[],
managers=[
("objects", models.Manager()),
("objects", models.Manager()),
],
)
    def test_create_model_with_unique_after(self):
        """
        Tests the CreateModel operation directly followed by an
        AlterUniqueTogether (bug #22844 - sqlite remake issues)
        """
        # Parent model.
        operation1 = migrations.CreateModel(
            "Pony",
            [
                ("id", models.AutoField(primary_key=True)),
                ("pink", models.IntegerField(default=1)),
            ],
        )
        # Related model with an FK back to Pony.
        operation2 = migrations.CreateModel(
            "Rider",
            [
                ("id", models.AutoField(primary_key=True)),
                ("number", models.IntegerField(default=1)),
                ("pony", models.ForeignKey("test_crmoua.Pony", models.CASCADE)),
            ],
        )
        # unique_together applied immediately after the CREATE TABLE; on
        # SQLite this exercises the table-remake path right after creation.
        operation3 = migrations.AlterUniqueTogether(
            "Rider",
            [
                ("number", "pony"),
            ],
        )
        # Test the database alteration
        project_state = ProjectState()
        self.assertTableNotExists("test_crmoua_pony")
        self.assertTableNotExists("test_crmoua_rider")
        # Apply all three operations inside one schema editor, advancing the
        # (from_state, to_state) pair one operation at a time.
        with connection.schema_editor() as editor:
            new_state = project_state.clone()
            operation1.state_forwards("test_crmoua", new_state)
            operation1.database_forwards(
                "test_crmoua", editor, project_state, new_state
            )
            project_state, new_state = new_state, new_state.clone()
            operation2.state_forwards("test_crmoua", new_state)
            operation2.database_forwards(
                "test_crmoua", editor, project_state, new_state
            )
            project_state, new_state = new_state, new_state.clone()
            operation3.state_forwards("test_crmoua", new_state)
            operation3.database_forwards(
                "test_crmoua", editor, project_state, new_state
            )
        self.assertTableExists("test_crmoua_pony")
        self.assertTableExists("test_crmoua_rider")
    def test_create_model_m2m(self):
        """
        Test the creation of a model with a ManyToMany field and the
        auto-created "through" model.
        """
        project_state = self.set_up_test_model("test_crmomm")
        operation = migrations.CreateModel(
            "Stable",
            [
                ("id", models.AutoField(primary_key=True)),
                ("ponies", models.ManyToManyField("Pony", related_name="stables")),
            ],
        )
        # Test the state alteration
        new_state = project_state.clone()
        operation.state_forwards("test_crmomm", new_state)
        # Test the database alteration
        self.assertTableNotExists("test_crmomm_stable_ponies")
        with connection.schema_editor() as editor:
            operation.database_forwards("test_crmomm", editor, project_state, new_state)
        self.assertTableExists("test_crmomm_stable")
        self.assertTableExists("test_crmomm_stable_ponies")
        # The M2M lives in the auto-created through table, not as a column.
        self.assertColumnNotExists("test_crmomm_stable", "ponies")
        # Make sure the M2M field actually works
        with atomic():
            Pony = new_state.apps.get_model("test_crmomm", "Pony")
            Stable = new_state.apps.get_model("test_crmomm", "Stable")
            stable = Stable.objects.create()
            p1 = Pony.objects.create(pink=False, weight=4.55)
            p2 = Pony.objects.create(pink=True, weight=5.43)
            stable.ponies.add(p1, p2)
            self.assertEqual(stable.ponies.count(), 2)
            stable.ponies.all().delete()
        # And test reversal: both the model table and the through table go.
        with connection.schema_editor() as editor:
            operation.database_backwards(
                "test_crmomm", editor, new_state, project_state
            )
        self.assertTableNotExists("test_crmomm_stable")
        self.assertTableNotExists("test_crmomm_stable_ponies")
    def test_create_model_inheritance(self):
        """
        Tests the CreateModel operation on a multi-table inheritance setup.
        """
        project_state = self.set_up_test_model("test_crmoih")
        # Test the state alteration
        operation = migrations.CreateModel(
            "ShetlandPony",
            [
                (
                    # Explicit parent-link field for the MTI relationship.
                    "pony_ptr",
                    models.OneToOneField(
                        "test_crmoih.Pony",
                        models.CASCADE,
                        auto_created=True,
                        primary_key=True,
                        to_field="id",
                        serialize=False,
                    ),
                ),
                ("cuteness", models.IntegerField(default=1)),
            ],
        )
        new_state = project_state.clone()
        operation.state_forwards("test_crmoih", new_state)
        self.assertIn(("test_crmoih", "shetlandpony"), new_state.models)
        # Test the database alteration: the child gets its own table.
        self.assertTableNotExists("test_crmoih_shetlandpony")
        with connection.schema_editor() as editor:
            operation.database_forwards("test_crmoih", editor, project_state, new_state)
        self.assertTableExists("test_crmoih_shetlandpony")
        # And test reversal
        with connection.schema_editor() as editor:
            operation.database_backwards(
                "test_crmoih", editor, new_state, project_state
            )
        self.assertTableNotExists("test_crmoih_shetlandpony")
    def test_create_proxy_model(self):
        """
        CreateModel ignores proxy models.
        """
        project_state = self.set_up_test_model("test_crprmo")
        # Test the state alteration
        operation = migrations.CreateModel(
            "ProxyPony",
            [],
            options={"proxy": True},
            bases=("test_crprmo.Pony",),
        )
        self.assertEqual(operation.describe(), "Create proxy model ProxyPony")
        new_state = project_state.clone()
        operation.state_forwards("test_crprmo", new_state)
        self.assertIn(("test_crprmo", "proxypony"), new_state.models)
        # Test the database alteration: a proxy creates no table of its own,
        # and the concrete parent's table is untouched.
        self.assertTableNotExists("test_crprmo_proxypony")
        self.assertTableExists("test_crprmo_pony")
        with connection.schema_editor() as editor:
            operation.database_forwards("test_crprmo", editor, project_state, new_state)
        self.assertTableNotExists("test_crprmo_proxypony")
        self.assertTableExists("test_crprmo_pony")
        # And test reversal: still a no-op at the database level.
        with connection.schema_editor() as editor:
            operation.database_backwards(
                "test_crprmo", editor, new_state, project_state
            )
        self.assertTableNotExists("test_crprmo_proxypony")
        self.assertTableExists("test_crprmo_pony")
        # And deconstruction
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "CreateModel")
        self.assertEqual(definition[1], [])
        self.assertEqual(sorted(definition[2]), ["bases", "fields", "name", "options"])
    def test_create_unmanaged_model(self):
        """
        CreateModel ignores unmanaged models.
        """
        # NOTE(review): despite the name/docstring, this operation uses
        # options={"proxy": True} rather than {"managed": False}, so it
        # exercises the proxy path — confirm whether it should use managed.
        project_state = self.set_up_test_model("test_crummo")
        # Test the state alteration
        operation = migrations.CreateModel(
            "UnmanagedPony",
            [],
            options={"proxy": True},
            bases=("test_crummo.Pony",),
        )
        self.assertEqual(operation.describe(), "Create proxy model UnmanagedPony")
        new_state = project_state.clone()
        operation.state_forwards("test_crummo", new_state)
        self.assertIn(("test_crummo", "unmanagedpony"), new_state.models)
        # Test the database alteration: no table is created for the model.
        self.assertTableNotExists("test_crummo_unmanagedpony")
        self.assertTableExists("test_crummo_pony")
        with connection.schema_editor() as editor:
            operation.database_forwards("test_crummo", editor, project_state, new_state)
        self.assertTableNotExists("test_crummo_unmanagedpony")
        self.assertTableExists("test_crummo_pony")
        # And test reversal: still no database changes.
        with connection.schema_editor() as editor:
            operation.database_backwards(
                "test_crummo", editor, new_state, project_state
            )
        self.assertTableNotExists("test_crummo_unmanagedpony")
        self.assertTableExists("test_crummo_pony")
    @skipUnlessDBFeature("supports_table_check_constraints")
    def test_create_model_with_constraint(self):
        """
        CreateModel with a CheckConstraint in options creates the constraint
        together with the table, and the database enforces it.
        """
        where = models.Q(pink__gt=2)
        check_constraint = models.CheckConstraint(
            check=where, name="test_constraint_pony_pink_gt_2"
        )
        operation = migrations.CreateModel(
            "Pony",
            [
                ("id", models.AutoField(primary_key=True)),
                ("pink", models.IntegerField(default=3)),
            ],
            options={"constraints": [check_constraint]},
        )
        # Test the state alteration
        project_state = ProjectState()
        new_state = project_state.clone()
        operation.state_forwards("test_crmo", new_state)
        self.assertEqual(
            len(new_state.models["test_crmo", "pony"].options["constraints"]), 1
        )
        # Test database alteration
        self.assertTableNotExists("test_crmo_pony")
        with connection.schema_editor() as editor:
            operation.database_forwards("test_crmo", editor, project_state, new_state)
        self.assertTableExists("test_crmo_pony")
        # A row violating pink > 2 must be rejected by the database.
        with connection.cursor() as cursor:
            with self.assertRaises(IntegrityError):
                cursor.execute("INSERT INTO test_crmo_pony (id, pink) VALUES (1, 1)")
        # Test reversal
        with connection.schema_editor() as editor:
            operation.database_backwards("test_crmo", editor, new_state, project_state)
        self.assertTableNotExists("test_crmo_pony")
        # Test deconstruction
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "CreateModel")
        self.assertEqual(definition[1], [])
        self.assertEqual(definition[2]["options"]["constraints"], [check_constraint])
    def test_create_model_with_partial_unique_constraint(self):
        """
        CreateModel with a conditional (partial) UniqueConstraint creates it
        with the table; uniqueness is only enforced on backends that support
        partial indexes.
        """
        partial_unique_constraint = models.UniqueConstraint(
            fields=["pink"],
            condition=models.Q(weight__gt=5),
            name="test_constraint_pony_pink_for_weight_gt_5_uniq",
        )
        operation = migrations.CreateModel(
            "Pony",
            [
                ("id", models.AutoField(primary_key=True)),
                ("pink", models.IntegerField(default=3)),
                ("weight", models.FloatField()),
            ],
            options={"constraints": [partial_unique_constraint]},
        )
        # Test the state alteration
        project_state = ProjectState()
        new_state = project_state.clone()
        operation.state_forwards("test_crmo", new_state)
        self.assertEqual(
            len(new_state.models["test_crmo", "pony"].options["constraints"]), 1
        )
        # Test database alteration
        self.assertTableNotExists("test_crmo_pony")
        with connection.schema_editor() as editor:
            operation.database_forwards("test_crmo", editor, project_state, new_state)
        self.assertTableExists("test_crmo_pony")
        # Test constraint works: rows under the weight threshold may repeat
        # pink; above it, duplicates are rejected (if supported).
        Pony = new_state.apps.get_model("test_crmo", "Pony")
        Pony.objects.create(pink=1, weight=4.0)
        Pony.objects.create(pink=1, weight=4.0)
        Pony.objects.create(pink=1, weight=6.0)
        if connection.features.supports_partial_indexes:
            with self.assertRaises(IntegrityError):
                Pony.objects.create(pink=1, weight=7.0)
        else:
            Pony.objects.create(pink=1, weight=7.0)
        # Test reversal
        with connection.schema_editor() as editor:
            operation.database_backwards("test_crmo", editor, new_state, project_state)
        self.assertTableNotExists("test_crmo_pony")
        # Test deconstruction
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "CreateModel")
        self.assertEqual(definition[1], [])
        self.assertEqual(
            definition[2]["options"]["constraints"], [partial_unique_constraint]
        )
    def test_create_model_with_deferred_unique_constraint(self):
        """
        CreateModel with a deferrable UniqueConstraint creates it with the
        table; on supporting backends enforcement is deferred to transaction
        commit and can be toggled with SET CONSTRAINTS.
        """
        deferred_unique_constraint = models.UniqueConstraint(
            fields=["pink"],
            name="deferrable_pink_constraint",
            deferrable=models.Deferrable.DEFERRED,
        )
        operation = migrations.CreateModel(
            "Pony",
            [
                ("id", models.AutoField(primary_key=True)),
                ("pink", models.IntegerField(default=3)),
            ],
            options={"constraints": [deferred_unique_constraint]},
        )
        project_state = ProjectState()
        new_state = project_state.clone()
        operation.state_forwards("test_crmo", new_state)
        self.assertEqual(
            len(new_state.models["test_crmo", "pony"].options["constraints"]), 1
        )
        self.assertTableNotExists("test_crmo_pony")
        # Create table.
        with connection.schema_editor() as editor:
            operation.database_forwards("test_crmo", editor, project_state, new_state)
        self.assertTableExists("test_crmo_pony")
        Pony = new_state.apps.get_model("test_crmo", "Pony")
        Pony.objects.create(pink=1)
        if connection.features.supports_deferrable_unique_constraints:
            # Unique constraint is deferred: the duplicate is fixed before
            # commit, so the transaction succeeds.
            with transaction.atomic():
                obj = Pony.objects.create(pink=1)
                obj.pink = 2
                obj.save()
            # Constraint behavior can be changed with SET CONSTRAINTS.
            with self.assertRaises(IntegrityError):
                with transaction.atomic(), connection.cursor() as cursor:
                    quoted_name = connection.ops.quote_name(
                        deferred_unique_constraint.name
                    )
                    cursor.execute("SET CONSTRAINTS %s IMMEDIATE" % quoted_name)
                    obj = Pony.objects.create(pink=1)
                    obj.pink = 3
                    obj.save()
        else:
            Pony.objects.create(pink=1)
        # Reversal.
        with connection.schema_editor() as editor:
            operation.database_backwards("test_crmo", editor, new_state, project_state)
        self.assertTableNotExists("test_crmo_pony")
        # Deconstruction.
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "CreateModel")
        self.assertEqual(definition[1], [])
        self.assertEqual(
            definition[2]["options"]["constraints"],
            [deferred_unique_constraint],
        )
    @skipUnlessDBFeature("supports_covering_indexes")
    def test_create_model_with_covering_unique_constraint(self):
        """
        CreateModel with a covering UniqueConstraint (include=...) creates the
        constraint with the table and enforces uniqueness on the key fields.
        """
        covering_unique_constraint = models.UniqueConstraint(
            fields=["pink"],
            include=["weight"],
            name="test_constraint_pony_pink_covering_weight",
        )
        operation = migrations.CreateModel(
            "Pony",
            [
                ("id", models.AutoField(primary_key=True)),
                ("pink", models.IntegerField(default=3)),
                ("weight", models.FloatField()),
            ],
            options={"constraints": [covering_unique_constraint]},
        )
        project_state = ProjectState()
        new_state = project_state.clone()
        operation.state_forwards("test_crmo", new_state)
        self.assertEqual(
            len(new_state.models["test_crmo", "pony"].options["constraints"]), 1
        )
        self.assertTableNotExists("test_crmo_pony")
        # Create table.
        with connection.schema_editor() as editor:
            operation.database_forwards("test_crmo", editor, project_state, new_state)
        self.assertTableExists("test_crmo_pony")
        # Duplicate pink must be rejected even though weight differs.
        Pony = new_state.apps.get_model("test_crmo", "Pony")
        Pony.objects.create(pink=1, weight=4.0)
        with self.assertRaises(IntegrityError):
            Pony.objects.create(pink=1, weight=7.0)
        # Reversal.
        with connection.schema_editor() as editor:
            operation.database_backwards("test_crmo", editor, new_state, project_state)
        self.assertTableNotExists("test_crmo_pony")
        # Deconstruction.
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "CreateModel")
        self.assertEqual(definition[1], [])
        self.assertEqual(
            definition[2]["options"]["constraints"],
            [covering_unique_constraint],
        )
def test_create_model_managers(self):
"""
The managers on a model are set.
"""
project_state = self.set_up_test_model("test_cmoma")
# Test the state alteration
operation = migrations.CreateModel(
"Food",
fields=[
("id", models.AutoField(primary_key=True)),
],
managers=[
("food_qs", FoodQuerySet.as_manager()),
("food_mgr", FoodManager("a", "b")),
("food_mgr_kwargs", FoodManager("x", "y", 3, 4)),
],
)
self.assertEqual(operation.describe(), "Create model Food")
new_state = project_state.clone()
operation.state_forwards("test_cmoma", new_state)
self.assertIn(("test_cmoma", "food"), new_state.models)
managers = new_state.models["test_cmoma", "food"].managers
self.assertEqual(managers[0][0], "food_qs")
self.assertIsInstance(managers[0][1], models.Manager)
self.assertEqual(managers[1][0], "food_mgr")
self.assertIsInstance(managers[1][1], FoodManager)
self.assertEqual(managers[1][1].args, ("a", "b", 1, 2))
self.assertEqual(managers[2][0], "food_mgr_kwargs")
self.assertIsInstance(managers[2][1], FoodManager)
self.assertEqual(managers[2][1].args, ("x", "y", 3, 4))
def test_delete_model(self):
"""
Tests the DeleteModel operation.
"""
project_state = self.set_up_test_model("test_dlmo")
# Test the state alteration
operation = migrations.DeleteModel("Pony")
self.assertEqual(operation.describe(), "Delete model Pony")
self.assertEqual(operation.migration_name_fragment, "delete_pony")
new_state = project_state.clone()
operation.state_forwards("test_dlmo", new_state)
self.assertNotIn(("test_dlmo", "pony"), new_state.models)
# Test the database alteration
self.assertTableExists("test_dlmo_pony")
with connection.schema_editor() as editor:
operation.database_forwards("test_dlmo", editor, project_state, new_state)
self.assertTableNotExists("test_dlmo_pony")
# And test reversal
with connection.schema_editor() as editor:
operation.database_backwards("test_dlmo", editor, new_state, project_state)
self.assertTableExists("test_dlmo_pony")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "DeleteModel")
self.assertEqual(definition[1], [])
self.assertEqual(list(definition[2]), ["name"])
    def test_delete_proxy_model(self):
        """
        Tests the DeleteModel operation ignores proxy models.
        """
        project_state = self.set_up_test_model("test_dlprmo", proxy_model=True)
        # Test the state alteration
        operation = migrations.DeleteModel("ProxyPony")
        new_state = project_state.clone()
        operation.state_forwards("test_dlprmo", new_state)
        self.assertIn(("test_dlprmo", "proxypony"), project_state.models)
        self.assertNotIn(("test_dlprmo", "proxypony"), new_state.models)
        # Test the database alteration: deleting a proxy touches no tables.
        self.assertTableExists("test_dlprmo_pony")
        self.assertTableNotExists("test_dlprmo_proxypony")
        with connection.schema_editor() as editor:
            operation.database_forwards("test_dlprmo", editor, project_state, new_state)
        self.assertTableExists("test_dlprmo_pony")
        self.assertTableNotExists("test_dlprmo_proxypony")
        # And test reversal: still a database no-op.
        with connection.schema_editor() as editor:
            operation.database_backwards(
                "test_dlprmo", editor, new_state, project_state
            )
        self.assertTableExists("test_dlprmo_pony")
        self.assertTableNotExists("test_dlprmo_proxypony")
    def test_delete_mti_model(self):
        """
        DeleteModel on a multi-table-inheritance child drops only the child's
        table; the parent's table is untouched, and reversal restores the
        child table including its parent-link column.
        """
        project_state = self.set_up_test_model("test_dlmtimo", mti_model=True)
        # Test the state alteration
        operation = migrations.DeleteModel("ShetlandPony")
        new_state = project_state.clone()
        operation.state_forwards("test_dlmtimo", new_state)
        self.assertIn(("test_dlmtimo", "shetlandpony"), project_state.models)
        self.assertNotIn(("test_dlmtimo", "shetlandpony"), new_state.models)
        # Test the database alteration
        self.assertTableExists("test_dlmtimo_pony")
        self.assertTableExists("test_dlmtimo_shetlandpony")
        self.assertColumnExists("test_dlmtimo_shetlandpony", "pony_ptr_id")
        with connection.schema_editor() as editor:
            operation.database_forwards(
                "test_dlmtimo", editor, project_state, new_state
            )
        self.assertTableExists("test_dlmtimo_pony")
        self.assertTableNotExists("test_dlmtimo_shetlandpony")
        # And test reversal
        with connection.schema_editor() as editor:
            operation.database_backwards(
                "test_dlmtimo", editor, new_state, project_state
            )
        self.assertTableExists("test_dlmtimo_pony")
        self.assertTableExists("test_dlmtimo_shetlandpony")
        self.assertColumnExists("test_dlmtimo_shetlandpony", "pony_ptr_id")
    def test_rename_model(self):
        """
        Tests the RenameModel operation.
        """
        project_state = self.set_up_test_model("test_rnmo", related_model=True)
        # Test the state alteration
        operation = migrations.RenameModel("Pony", "Horse")
        self.assertEqual(operation.describe(), "Rename model Pony to Horse")
        self.assertEqual(operation.migration_name_fragment, "rename_pony_horse")
        # Test initial state and database
        self.assertIn(("test_rnmo", "pony"), project_state.models)
        self.assertNotIn(("test_rnmo", "horse"), project_state.models)
        self.assertTableExists("test_rnmo_pony")
        self.assertTableNotExists("test_rnmo_horse")
        if connection.features.supports_foreign_keys:
            self.assertFKExists(
                "test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id")
            )
            self.assertFKNotExists(
                "test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id")
            )
        # Migrate forwards
        new_state = project_state.clone()
        # Some backends (e.g. SQLite) can't rename references atomically.
        atomic_rename = connection.features.supports_atomic_references_rename
        new_state = self.apply_operations(
            "test_rnmo", new_state, [operation], atomic=atomic_rename
        )
        # Test new state and database
        self.assertNotIn(("test_rnmo", "pony"), new_state.models)
        self.assertIn(("test_rnmo", "horse"), new_state.models)
        # RenameModel also repoints all incoming FKs and M2Ms
        self.assertEqual(
            new_state.models["test_rnmo", "rider"].fields["pony"].remote_field.model,
            "test_rnmo.Horse",
        )
        self.assertTableNotExists("test_rnmo_pony")
        self.assertTableExists("test_rnmo_horse")
        if connection.features.supports_foreign_keys:
            self.assertFKNotExists(
                "test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id")
            )
            self.assertFKExists(
                "test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id")
            )
        # Migrate backwards
        original_state = self.unapply_operations(
            "test_rnmo", project_state, [operation], atomic=atomic_rename
        )
        # Test original state and database
        self.assertIn(("test_rnmo", "pony"), original_state.models)
        self.assertNotIn(("test_rnmo", "horse"), original_state.models)
        self.assertEqual(
            original_state.models["test_rnmo", "rider"]
            .fields["pony"]
            .remote_field.model,
            "Pony",
        )
        self.assertTableExists("test_rnmo_pony")
        self.assertTableNotExists("test_rnmo_horse")
        if connection.features.supports_foreign_keys:
            self.assertFKExists(
                "test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id")
            )
            self.assertFKNotExists(
                "test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id")
            )
        # And deconstruction
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "RenameModel")
        self.assertEqual(definition[1], [])
        self.assertEqual(definition[2], {"old_name": "Pony", "new_name": "Horse"})
    def test_rename_model_state_forwards(self):
        """
        RenameModel operations shouldn't trigger the caching of rendered apps
        on state without prior apps.
        """
        state = ProjectState()
        state.add_model(ModelState("migrations", "Foo", []))
        operation = migrations.RenameModel("Foo", "Bar")
        operation.state_forwards("migrations", state)
        # "apps" only shows up in __dict__ once the cached property has been
        # evaluated, so its absence means no apps were rendered.
        self.assertNotIn("apps", state.__dict__)
        self.assertNotIn(("migrations", "foo"), state.models)
        self.assertIn(("migrations", "bar"), state.models)
        # Now with apps cached: the same apps object must be reused.
        apps = state.apps
        operation = migrations.RenameModel("Bar", "Foo")
        operation.state_forwards("migrations", state)
        self.assertIs(state.apps, apps)
        self.assertNotIn(("migrations", "bar"), state.models)
        self.assertIn(("migrations", "foo"), state.models)
    def test_rename_model_with_self_referential_fk(self):
        """
        Tests the RenameModel operation on model with self referential FK.
        """
        project_state = self.set_up_test_model("test_rmwsrf", related_model=True)
        # Test the state alteration
        operation = migrations.RenameModel("Rider", "HorseRider")
        self.assertEqual(operation.describe(), "Rename model Rider to HorseRider")
        new_state = project_state.clone()
        operation.state_forwards("test_rmwsrf", new_state)
        self.assertNotIn(("test_rmwsrf", "rider"), new_state.models)
        self.assertIn(("test_rmwsrf", "horserider"), new_state.models)
        # Remember, RenameModel also repoints all incoming FKs and M2Ms
        self.assertEqual(
            "self",
            new_state.models["test_rmwsrf", "horserider"]
            .fields["friend"]
            .remote_field.model,
        )
        # The implicit reverse relation follows the rename (presumably named
        # after the lowercased new model name — confirm against model _meta).
        HorseRider = new_state.apps.get_model("test_rmwsrf", "horserider")
        self.assertIs(
            HorseRider._meta.get_field("horserider").remote_field.model, HorseRider
        )
        # Test the database alteration
        self.assertTableExists("test_rmwsrf_rider")
        self.assertTableNotExists("test_rmwsrf_horserider")
        if connection.features.supports_foreign_keys:
            self.assertFKExists(
                "test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_rider", "id")
            )
            self.assertFKNotExists(
                "test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_horserider", "id")
            )
        # Some backends (e.g. SQLite) can't rename references atomically.
        atomic_rename = connection.features.supports_atomic_references_rename
        with connection.schema_editor(atomic=atomic_rename) as editor:
            operation.database_forwards("test_rmwsrf", editor, project_state, new_state)
        self.assertTableNotExists("test_rmwsrf_rider")
        self.assertTableExists("test_rmwsrf_horserider")
        if connection.features.supports_foreign_keys:
            self.assertFKNotExists(
                "test_rmwsrf_horserider", ["friend_id"], ("test_rmwsrf_rider", "id")
            )
            self.assertFKExists(
                "test_rmwsrf_horserider",
                ["friend_id"],
                ("test_rmwsrf_horserider", "id"),
            )
        # And test reversal
        with connection.schema_editor(atomic=atomic_rename) as editor:
            operation.database_backwards(
                "test_rmwsrf", editor, new_state, project_state
            )
        self.assertTableExists("test_rmwsrf_rider")
        self.assertTableNotExists("test_rmwsrf_horserider")
        if connection.features.supports_foreign_keys:
            self.assertFKExists(
                "test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_rider", "id")
            )
            self.assertFKNotExists(
                "test_rmwsrf_rider", ["friend_id"], ("test_rmwsrf_horserider", "id")
            )
    def test_rename_model_with_superclass_fk(self):
        """
        Tests the RenameModel operation on a model which has a superclass that
        has a foreign key.
        """
        project_state = self.set_up_test_model(
            "test_rmwsc", related_model=True, mti_model=True
        )
        # Test the state alteration
        operation = migrations.RenameModel("ShetlandPony", "LittleHorse")
        self.assertEqual(
            operation.describe(), "Rename model ShetlandPony to LittleHorse"
        )
        new_state = project_state.clone()
        operation.state_forwards("test_rmwsc", new_state)
        self.assertNotIn(("test_rmwsc", "shetlandpony"), new_state.models)
        self.assertIn(("test_rmwsc", "littlehorse"), new_state.models)
        # RenameModel shouldn't repoint the superclass's relations, only local ones
        self.assertEqual(
            project_state.models["test_rmwsc", "rider"]
            .fields["pony"]
            .remote_field.model,
            new_state.models["test_rmwsc", "rider"].fields["pony"].remote_field.model,
        )
        # Before running the migration we have a table for Shetland Pony, not
        # Little Horse.
        self.assertTableExists("test_rmwsc_shetlandpony")
        self.assertTableNotExists("test_rmwsc_littlehorse")
        if connection.features.supports_foreign_keys:
            # and the foreign key on rider points to pony, not shetland pony
            self.assertFKExists(
                "test_rmwsc_rider", ["pony_id"], ("test_rmwsc_pony", "id")
            )
            self.assertFKNotExists(
                "test_rmwsc_rider", ["pony_id"], ("test_rmwsc_shetlandpony", "id")
            )
        # Non-atomic schema editor where the backend can't rename references
        # atomically (e.g. SQLite).
        with connection.schema_editor(
            atomic=connection.features.supports_atomic_references_rename
        ) as editor:
            operation.database_forwards("test_rmwsc", editor, project_state, new_state)
        # Now we have a little horse table, not shetland pony
        self.assertTableNotExists("test_rmwsc_shetlandpony")
        self.assertTableExists("test_rmwsc_littlehorse")
        if connection.features.supports_foreign_keys:
            # but the Foreign keys still point at pony, not little horse
            self.assertFKExists(
                "test_rmwsc_rider", ["pony_id"], ("test_rmwsc_pony", "id")
            )
            self.assertFKNotExists(
                "test_rmwsc_rider", ["pony_id"], ("test_rmwsc_littlehorse", "id")
            )
    def test_rename_model_with_self_referential_m2m(self):
        """
        RenameModel works on a model with a self-referential ManyToManyField;
        the M2M still works after the rename.
        """
        app_label = "test_rename_model_with_self_referential_m2m"
        project_state = self.apply_operations(
            app_label,
            ProjectState(),
            operations=[
                migrations.CreateModel(
                    "ReflexivePony",
                    fields=[
                        ("id", models.AutoField(primary_key=True)),
                        ("ponies", models.ManyToManyField("self")),
                    ],
                ),
            ],
        )
        project_state = self.apply_operations(
            app_label,
            project_state,
            operations=[
                migrations.RenameModel("ReflexivePony", "ReflexivePony2"),
            ],
            atomic=connection.features.supports_atomic_references_rename,
        )
        # Adding a row through the renamed model proves the M2M survived.
        Pony = project_state.apps.get_model(app_label, "ReflexivePony2")
        pony = Pony.objects.create()
        pony.ponies.add(pony)
    def test_rename_model_with_m2m(self):
        """
        RenameModel on a model with a ManyToManyField renames its auto-created
        through table while preserving the rows already in it.
        """
        app_label = "test_rename_model_with_m2m"
        project_state = self.apply_operations(
            app_label,
            ProjectState(),
            operations=[
                migrations.CreateModel(
                    "Rider",
                    fields=[
                        ("id", models.AutoField(primary_key=True)),
                    ],
                ),
                migrations.CreateModel(
                    "Pony",
                    fields=[
                        ("id", models.AutoField(primary_key=True)),
                        ("riders", models.ManyToManyField("Rider")),
                    ],
                ),
            ],
        )
        # Seed one relation before the rename.
        Pony = project_state.apps.get_model(app_label, "Pony")
        Rider = project_state.apps.get_model(app_label, "Rider")
        pony = Pony.objects.create()
        rider = Rider.objects.create()
        pony.riders.add(rider)
        project_state = self.apply_operations(
            app_label,
            project_state,
            operations=[
                migrations.RenameModel("Pony", "Pony2"),
            ],
            atomic=connection.features.supports_atomic_references_rename,
        )
        # Add a second relation after the rename; both must be present.
        Pony = project_state.apps.get_model(app_label, "Pony2")
        Rider = project_state.apps.get_model(app_label, "Rider")
        pony = Pony.objects.create()
        rider = Rider.objects.create()
        pony.riders.add(rider)
        self.assertEqual(Pony.objects.count(), 2)
        self.assertEqual(Rider.objects.count(), 2)
        self.assertEqual(
            Pony._meta.get_field("riders").remote_field.through.objects.count(), 2
        )
    def test_rename_model_with_db_table_noop(self):
        """
        RenameModel runs zero queries, in both directions, when the renamed
        model has an explicit db_table (the table name doesn't change).
        """
        app_label = "test_rmwdbtn"
        project_state = self.apply_operations(
            app_label,
            ProjectState(),
            operations=[
                migrations.CreateModel(
                    "Rider",
                    fields=[
                        ("id", models.AutoField(primary_key=True)),
                    ],
                    options={"db_table": "rider"},
                ),
                migrations.CreateModel(
                    "Pony",
                    fields=[
                        ("id", models.AutoField(primary_key=True)),
                        (
                            "rider",
                            models.ForeignKey("%s.Rider" % app_label, models.CASCADE),
                        ),
                    ],
                ),
            ],
        )
        new_state = project_state.clone()
        operation = migrations.RenameModel("Rider", "Runner")
        operation.state_forwards(app_label, new_state)
        # Forwards and backwards must both be database no-ops.
        with connection.schema_editor() as editor:
            with self.assertNumQueries(0):
                operation.database_forwards(app_label, editor, project_state, new_state)
        with connection.schema_editor() as editor:
            with self.assertNumQueries(0):
                operation.database_backwards(
                    app_label, editor, new_state, project_state
                )
    def test_rename_m2m_target_model(self):
        """
        Renaming the target model of a ManyToManyField keeps the relation
        working and preserves rows already in the through table.
        """
        app_label = "test_rename_m2m_target_model"
        project_state = self.apply_operations(
            app_label,
            ProjectState(),
            operations=[
                migrations.CreateModel(
                    "Rider",
                    fields=[
                        ("id", models.AutoField(primary_key=True)),
                    ],
                ),
                migrations.CreateModel(
                    "Pony",
                    fields=[
                        ("id", models.AutoField(primary_key=True)),
                        ("riders", models.ManyToManyField("Rider")),
                    ],
                ),
            ],
        )
        # Seed one relation before the rename.
        Pony = project_state.apps.get_model(app_label, "Pony")
        Rider = project_state.apps.get_model(app_label, "Rider")
        pony = Pony.objects.create()
        rider = Rider.objects.create()
        pony.riders.add(rider)
        project_state = self.apply_operations(
            app_label,
            project_state,
            operations=[
                migrations.RenameModel("Rider", "Rider2"),
            ],
            atomic=connection.features.supports_atomic_references_rename,
        )
        # Add a second relation after the rename; both must be present.
        Pony = project_state.apps.get_model(app_label, "Pony")
        Rider = project_state.apps.get_model(app_label, "Rider2")
        pony = Pony.objects.create()
        rider = Rider.objects.create()
        pony.riders.add(rider)
        self.assertEqual(Pony.objects.count(), 2)
        self.assertEqual(Rider.objects.count(), 2)
        self.assertEqual(
            Pony._meta.get_field("riders").remote_field.through.objects.count(), 2
        )
    def test_rename_m2m_through_model(self):
        """
        Renaming an explicit M2M through model keeps the relationship intact
        and preserves rows already in the through table.
        """
        app_label = "test_rename_through"
        project_state = self.apply_operations(
            app_label,
            ProjectState(),
            operations=[
                migrations.CreateModel(
                    "Rider",
                    fields=[
                        ("id", models.AutoField(primary_key=True)),
                    ],
                ),
                migrations.CreateModel(
                    "Pony",
                    fields=[
                        ("id", models.AutoField(primary_key=True)),
                    ],
                ),
                # Explicit through model with FKs to both sides.
                migrations.CreateModel(
                    "PonyRider",
                    fields=[
                        ("id", models.AutoField(primary_key=True)),
                        (
                            "rider",
                            models.ForeignKey(
                                "test_rename_through.Rider", models.CASCADE
                            ),
                        ),
                        (
                            "pony",
                            models.ForeignKey(
                                "test_rename_through.Pony", models.CASCADE
                            ),
                        ),
                    ],
                ),
                migrations.AddField(
                    "Pony",
                    "riders",
                    models.ManyToManyField(
                        "test_rename_through.Rider",
                        through="test_rename_through.PonyRider",
                    ),
                ),
            ],
        )
        # Seed one relation before the rename.
        Pony = project_state.apps.get_model(app_label, "Pony")
        Rider = project_state.apps.get_model(app_label, "Rider")
        PonyRider = project_state.apps.get_model(app_label, "PonyRider")
        pony = Pony.objects.create()
        rider = Rider.objects.create()
        PonyRider.objects.create(pony=pony, rider=rider)
        project_state = self.apply_operations(
            app_label,
            project_state,
            operations=[
                migrations.RenameModel("PonyRider", "PonyRider2"),
            ],
        )
        # Add a second relation after the rename; both must be visible via
        # the renamed through model and the pony's M2M accessor.
        Pony = project_state.apps.get_model(app_label, "Pony")
        Rider = project_state.apps.get_model(app_label, "Rider")
        PonyRider = project_state.apps.get_model(app_label, "PonyRider2")
        pony = Pony.objects.first()
        rider = Rider.objects.create()
        PonyRider.objects.create(pony=pony, rider=rider)
        self.assertEqual(Pony.objects.count(), 1)
        self.assertEqual(Rider.objects.count(), 2)
        self.assertEqual(PonyRider.objects.count(), 2)
        self.assertEqual(pony.riders.count(), 2)
    def test_rename_m2m_model_after_rename_field(self):
        """RenameModel renames a many-to-many column after a RenameField."""
        app_label = "test_rename_multiple"
        # RenameField then RenameModel in the same migration run — the model
        # rename must pick up the already-renamed field.
        project_state = self.apply_operations(
            app_label,
            ProjectState(),
            operations=[
                migrations.CreateModel(
                    "Pony",
                    fields=[
                        ("id", models.AutoField(primary_key=True)),
                        ("name", models.CharField(max_length=20)),
                    ],
                ),
                migrations.CreateModel(
                    "Rider",
                    fields=[
                        ("id", models.AutoField(primary_key=True)),
                        (
                            "pony",
                            models.ForeignKey(
                                "test_rename_multiple.Pony", models.CASCADE
                            ),
                        ),
                    ],
                ),
                migrations.CreateModel(
                    "PonyRider",
                    fields=[
                        ("id", models.AutoField(primary_key=True)),
                        ("riders", models.ManyToManyField("Rider")),
                    ],
                ),
                migrations.RenameField(
                    model_name="pony", old_name="name", new_name="fancy_name"
                ),
                migrations.RenameModel(old_name="Rider", new_name="Jockey"),
            ],
            atomic=connection.features.supports_atomic_references_rename,
        )
        Pony = project_state.apps.get_model(app_label, "Pony")
        Jockey = project_state.apps.get_model(app_label, "Jockey")
        PonyRider = project_state.apps.get_model(app_label, "PonyRider")
        # No "no such column" error means the column was renamed correctly.
        pony = Pony.objects.create(fancy_name="a good name")
        jockey = Jockey.objects.create(pony=pony)
        ponyrider = PonyRider.objects.create()
        ponyrider.riders.add(jockey)
    def test_add_field(self):
        """
        Tests the AddField operation: state alteration, database alteration,
        reversal, and deconstruction.
        """
        # Test the state alteration
        operation = migrations.AddField(
            "Pony",
            "height",
            models.FloatField(null=True, default=5),
        )
        self.assertEqual(operation.describe(), "Add field height to Pony")
        self.assertEqual(operation.migration_name_fragment, "pony_height")
        project_state, new_state = self.make_test_state("test_adfl", operation)
        self.assertEqual(len(new_state.models["test_adfl", "pony"].fields), 4)
        field = new_state.models["test_adfl", "pony"].fields["height"]
        self.assertEqual(field.default, 5)
        # Test the database alteration
        self.assertColumnNotExists("test_adfl_pony", "height")
        with connection.schema_editor() as editor:
            operation.database_forwards("test_adfl", editor, project_state, new_state)
        self.assertColumnExists("test_adfl_pony", "height")
        # And test reversal
        with connection.schema_editor() as editor:
            operation.database_backwards("test_adfl", editor, new_state, project_state)
        self.assertColumnNotExists("test_adfl_pony", "height")
        # And deconstruction
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "AddField")
        self.assertEqual(definition[1], [])
        self.assertEqual(sorted(definition[2]), ["field", "model_name", "name"])
    def test_add_charfield(self):
        """
        Tests the AddField operation on CharField, including defaults that
        need careful SQL quoting.
        """
        project_state = self.set_up_test_model("test_adchfl")
        Pony = project_state.apps.get_model("test_adchfl", "Pony")
        pony = Pony.objects.create(weight=42)
        new_state = self.apply_operations(
            "test_adchfl",
            project_state,
            [
                migrations.AddField(
                    "Pony",
                    "text",
                    models.CharField(max_length=10, default="some text"),
                ),
                migrations.AddField(
                    "Pony",
                    "empty",
                    models.CharField(max_length=10, default=""),
                ),
                # If not properly quoted digits would be interpreted as an int.
                migrations.AddField(
                    "Pony",
                    "digits",
                    models.CharField(max_length=10, default="42"),
                ),
                # Manual quoting is fragile and could trip on quotes.
                migrations.AddField(
                    "Pony",
                    "quotes",
                    models.CharField(max_length=10, default='"\'"'),
                ),
            ],
        )
        Pony = new_state.apps.get_model("test_adchfl", "Pony")
        pony = Pony.objects.get(pk=pony.pk)
        self.assertEqual(pony.text, "some text")
        self.assertEqual(pony.empty, "")
        self.assertEqual(pony.digits, "42")
        self.assertEqual(pony.quotes, '"\'"')
    def test_add_textfield(self):
        """
        Tests the AddField operation on TextField, including defaults that
        need careful SQL quoting.
        """
        project_state = self.set_up_test_model("test_adtxtfl")
        Pony = project_state.apps.get_model("test_adtxtfl", "Pony")
        pony = Pony.objects.create(weight=42)
        new_state = self.apply_operations(
            "test_adtxtfl",
            project_state,
            [
                migrations.AddField(
                    "Pony",
                    "text",
                    models.TextField(default="some text"),
                ),
                migrations.AddField(
                    "Pony",
                    "empty",
                    models.TextField(default=""),
                ),
                # If not properly quoted digits would be interpreted as an int.
                migrations.AddField(
                    "Pony",
                    "digits",
                    models.TextField(default="42"),
                ),
                # Manual quoting is fragile and could trip on quotes.
                migrations.AddField(
                    "Pony",
                    "quotes",
                    models.TextField(default='"\'"'),
                ),
            ],
        )
        Pony = new_state.apps.get_model("test_adtxtfl", "Pony")
        pony = Pony.objects.get(pk=pony.pk)
        self.assertEqual(pony.text, "some text")
        self.assertEqual(pony.empty, "")
        self.assertEqual(pony.digits, "42")
        self.assertEqual(pony.quotes, '"\'"')
    def test_add_binaryfield(self):
        """
        Tests the AddField operation on BinaryField, including defaults that
        need careful SQL quoting.
        """
        project_state = self.set_up_test_model("test_adbinfl")
        Pony = project_state.apps.get_model("test_adbinfl", "Pony")
        pony = Pony.objects.create(weight=42)
        new_state = self.apply_operations(
            "test_adbinfl",
            project_state,
            [
                migrations.AddField(
                    "Pony",
                    "blob",
                    models.BinaryField(default=b"some text"),
                ),
                migrations.AddField(
                    "Pony",
                    "empty",
                    models.BinaryField(default=b""),
                ),
                # If not properly quoted digits would be interpreted as an int.
                migrations.AddField(
                    "Pony",
                    "digits",
                    models.BinaryField(default=b"42"),
                ),
                # Manual quoting is fragile and could trip on quotes.
                migrations.AddField(
                    "Pony",
                    "quotes",
                    models.BinaryField(default=b'"\'"'),
                ),
            ],
        )
        Pony = new_state.apps.get_model("test_adbinfl", "Pony")
        pony = Pony.objects.get(pk=pony.pk)
        # SQLite returns buffer/memoryview, cast to bytes for checking.
        self.assertEqual(bytes(pony.blob), b"some text")
        self.assertEqual(bytes(pony.empty), b"")
        self.assertEqual(bytes(pony.digits), b"42")
        self.assertEqual(bytes(pony.quotes), b'"\'"')
    def test_column_name_quoting(self):
        """
        Column names that are SQL keywords shouldn't cause problems when used
        in migrations (#22168).
        """
        project_state = self.set_up_test_model("test_regr22168")
        # "order" is a reserved word in SQL and must be quoted by the backend.
        operation = migrations.AddField(
            "Pony",
            "order",
            models.IntegerField(default=0),
        )
        new_state = project_state.clone()
        operation.state_forwards("test_regr22168", new_state)
        with connection.schema_editor() as editor:
            operation.database_forwards(
                "test_regr22168", editor, project_state, new_state
            )
        self.assertColumnExists("test_regr22168_pony", "order")
    def test_add_field_preserve_default(self):
        """
        Tests the AddField operation's state alteration
        when preserve_default = False: the default is used to populate
        existing rows but is not kept in the field's state.
        """
        project_state = self.set_up_test_model("test_adflpd")
        # Test the state alteration
        operation = migrations.AddField(
            "Pony",
            "height",
            models.FloatField(null=True, default=4),
            preserve_default=False,
        )
        new_state = project_state.clone()
        operation.state_forwards("test_adflpd", new_state)
        self.assertEqual(len(new_state.models["test_adflpd", "pony"].fields), 4)
        field = new_state.models["test_adflpd", "pony"].fields["height"]
        # The default is dropped from the resulting state.
        self.assertEqual(field.default, models.NOT_PROVIDED)
        # Test the database alteration with a pre-existing row.
        project_state.apps.get_model("test_adflpd", "pony").objects.create(
            weight=4,
        )
        self.assertColumnNotExists("test_adflpd_pony", "height")
        with connection.schema_editor() as editor:
            operation.database_forwards("test_adflpd", editor, project_state, new_state)
        self.assertColumnExists("test_adflpd_pony", "height")
        # And deconstruction
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "AddField")
        self.assertEqual(definition[1], [])
        self.assertEqual(
            sorted(definition[2]), ["field", "model_name", "name", "preserve_default"]
        )
    def test_add_field_m2m(self):
        """
        Tests the AddField operation with a ManyToManyField: the through
        table is created (and dropped on reversal), no column is added.
        """
        project_state = self.set_up_test_model("test_adflmm", second_model=True)
        # Test the state alteration
        operation = migrations.AddField(
            "Pony", "stables", models.ManyToManyField("Stable", related_name="ponies")
        )
        new_state = project_state.clone()
        operation.state_forwards("test_adflmm", new_state)
        self.assertEqual(len(new_state.models["test_adflmm", "pony"].fields), 4)
        # Test the database alteration
        self.assertTableNotExists("test_adflmm_pony_stables")
        with connection.schema_editor() as editor:
            operation.database_forwards("test_adflmm", editor, project_state, new_state)
        self.assertTableExists("test_adflmm_pony_stables")
        self.assertColumnNotExists("test_adflmm_pony", "stables")
        # Make sure the M2M field actually works
        with atomic():
            Pony = new_state.apps.get_model("test_adflmm", "Pony")
            p = Pony.objects.create(pink=False, weight=4.55)
            p.stables.create()
            self.assertEqual(p.stables.count(), 1)
            p.stables.all().delete()
        # And test reversal
        with connection.schema_editor() as editor:
            operation.database_backwards(
                "test_adflmm", editor, new_state, project_state
            )
        self.assertTableNotExists("test_adflmm_pony_stables")
    def test_alter_field_m2m(self):
        """AlterField on a ManyToManyField updates non-schema options (blank)."""
        project_state = self.set_up_test_model("test_alflmm", second_model=True)
        project_state = self.apply_operations(
            "test_alflmm",
            project_state,
            operations=[
                migrations.AddField(
                    "Pony",
                    "stables",
                    models.ManyToManyField("Stable", related_name="ponies"),
                )
            ],
        )
        Pony = project_state.apps.get_model("test_alflmm", "Pony")
        self.assertFalse(Pony._meta.get_field("stables").blank)
        # Alter the M2M field to blank=True and verify it took effect.
        project_state = self.apply_operations(
            "test_alflmm",
            project_state,
            operations=[
                migrations.AlterField(
                    "Pony",
                    "stables",
                    models.ManyToManyField(
                        to="Stable", related_name="ponies", blank=True
                    ),
                )
            ],
        )
        Pony = project_state.apps.get_model("test_alflmm", "Pony")
        self.assertTrue(Pony._meta.get_field("stables").blank)
    def test_repoint_field_m2m(self):
        """AlterField can repoint a ManyToManyField at a different model."""
        project_state = self.set_up_test_model(
            "test_alflmm", second_model=True, third_model=True
        )
        project_state = self.apply_operations(
            "test_alflmm",
            project_state,
            operations=[
                migrations.AddField(
                    "Pony",
                    "places",
                    models.ManyToManyField("Stable", related_name="ponies"),
                )
            ],
        )
        Pony = project_state.apps.get_model("test_alflmm", "Pony")
        # Repoint the M2M from Stable to Van.
        project_state = self.apply_operations(
            "test_alflmm",
            project_state,
            operations=[
                migrations.AlterField(
                    "Pony",
                    "places",
                    models.ManyToManyField(to="Van", related_name="ponies"),
                )
            ],
        )
        # Ensure the new field actually works
        Pony = project_state.apps.get_model("test_alflmm", "Pony")
        p = Pony.objects.create(pink=False, weight=4.55)
        p.places.create()
        self.assertEqual(p.places.count(), 1)
        p.places.all().delete()
    def test_remove_field_m2m(self):
        """RemoveField on a ManyToManyField drops (and reversal recreates) the through table."""
        project_state = self.set_up_test_model("test_rmflmm", second_model=True)
        project_state = self.apply_operations(
            "test_rmflmm",
            project_state,
            operations=[
                migrations.AddField(
                    "Pony",
                    "stables",
                    models.ManyToManyField("Stable", related_name="ponies"),
                )
            ],
        )
        self.assertTableExists("test_rmflmm_pony_stables")
        # Keep a copy of the state with the field for the reversal check.
        with_field_state = project_state.clone()
        operations = [migrations.RemoveField("Pony", "stables")]
        project_state = self.apply_operations(
            "test_rmflmm", project_state, operations=operations
        )
        self.assertTableNotExists("test_rmflmm_pony_stables")
        # And test reversal
        self.unapply_operations("test_rmflmm", with_field_state, operations=operations)
        self.assertTableExists("test_rmflmm_pony_stables")
    def test_remove_field_m2m_with_through(self):
        """
        RemoveField on an M2M with an explicit through model leaves the
        through table to be dropped by DeleteModel, not by RemoveField.
        """
        project_state = self.set_up_test_model("test_rmflmmwt", second_model=True)
        self.assertTableNotExists("test_rmflmmwt_ponystables")
        project_state = self.apply_operations(
            "test_rmflmmwt",
            project_state,
            operations=[
                migrations.CreateModel(
                    "PonyStables",
                    fields=[
                        (
                            "pony",
                            models.ForeignKey("test_rmflmmwt.Pony", models.CASCADE),
                        ),
                        (
                            "stable",
                            models.ForeignKey("test_rmflmmwt.Stable", models.CASCADE),
                        ),
                    ],
                ),
                migrations.AddField(
                    "Pony",
                    "stables",
                    models.ManyToManyField(
                        "Stable",
                        related_name="ponies",
                        through="test_rmflmmwt.PonyStables",
                    ),
                ),
            ],
        )
        self.assertTableExists("test_rmflmmwt_ponystables")
        operations = [
            migrations.RemoveField("Pony", "stables"),
            migrations.DeleteModel("PonyStables"),
        ]
        self.apply_operations("test_rmflmmwt", project_state, operations=operations)
    def test_remove_field(self):
        """
        Tests the RemoveField operation: state alteration, database
        alteration, reversal, and deconstruction.
        """
        project_state = self.set_up_test_model("test_rmfl")
        # Test the state alteration
        operation = migrations.RemoveField("Pony", "pink")
        self.assertEqual(operation.describe(), "Remove field pink from Pony")
        self.assertEqual(operation.migration_name_fragment, "remove_pony_pink")
        new_state = project_state.clone()
        operation.state_forwards("test_rmfl", new_state)
        self.assertEqual(len(new_state.models["test_rmfl", "pony"].fields), 2)
        # Test the database alteration
        self.assertColumnExists("test_rmfl_pony", "pink")
        with connection.schema_editor() as editor:
            operation.database_forwards("test_rmfl", editor, project_state, new_state)
        self.assertColumnNotExists("test_rmfl_pony", "pink")
        # And test reversal
        with connection.schema_editor() as editor:
            operation.database_backwards("test_rmfl", editor, new_state, project_state)
        self.assertColumnExists("test_rmfl_pony", "pink")
        # And deconstruction
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "RemoveField")
        self.assertEqual(definition[1], [])
        self.assertEqual(definition[2], {"model_name": "Pony", "name": "pink"})
    def test_remove_fk(self):
        """
        Tests the RemoveField operation on a foreign key: the column is
        dropped and restored on reversal.
        """
        project_state = self.set_up_test_model("test_rfk", related_model=True)
        self.assertColumnExists("test_rfk_rider", "pony_id")
        operation = migrations.RemoveField("Rider", "pony")
        new_state = project_state.clone()
        operation.state_forwards("test_rfk", new_state)
        with connection.schema_editor() as editor:
            operation.database_forwards("test_rfk", editor, project_state, new_state)
        self.assertColumnNotExists("test_rfk_rider", "pony_id")
        with connection.schema_editor() as editor:
            operation.database_backwards("test_rfk", editor, new_state, project_state)
        self.assertColumnExists("test_rfk_rider", "pony_id")
    def test_alter_model_table(self):
        """
        Tests the AlterModelTable operation: state alteration, table rename,
        reversal, and deconstruction.
        """
        project_state = self.set_up_test_model("test_almota")
        # Test the state alteration
        operation = migrations.AlterModelTable("Pony", "test_almota_pony_2")
        self.assertEqual(
            operation.describe(), "Rename table for Pony to test_almota_pony_2"
        )
        self.assertEqual(operation.migration_name_fragment, "alter_pony_table")
        new_state = project_state.clone()
        operation.state_forwards("test_almota", new_state)
        self.assertEqual(
            new_state.models["test_almota", "pony"].options["db_table"],
            "test_almota_pony_2",
        )
        # Test the database alteration
        self.assertTableExists("test_almota_pony")
        self.assertTableNotExists("test_almota_pony_2")
        with connection.schema_editor() as editor:
            operation.database_forwards("test_almota", editor, project_state, new_state)
        self.assertTableNotExists("test_almota_pony")
        self.assertTableExists("test_almota_pony_2")
        # And test reversal
        with connection.schema_editor() as editor:
            operation.database_backwards(
                "test_almota", editor, new_state, project_state
            )
        self.assertTableExists("test_almota_pony")
        self.assertTableNotExists("test_almota_pony_2")
        # And deconstruction
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "AlterModelTable")
        self.assertEqual(definition[1], [])
        self.assertEqual(definition[2], {"name": "Pony", "table": "test_almota_pony_2"})
    def test_alter_model_table_none(self):
        """
        Tests the AlterModelTable operation if the table name is set to None
        (reverts to the auto-generated default table name).
        """
        operation = migrations.AlterModelTable("Pony", None)
        self.assertEqual(operation.describe(), "Rename table for Pony to (default)")
    def test_alter_model_table_noop(self):
        """
        Tests the AlterModelTable operation if the table name is not changed:
        the table must survive both directions untouched.
        """
        project_state = self.set_up_test_model("test_almota")
        # Test the state alteration
        operation = migrations.AlterModelTable("Pony", "test_almota_pony")
        new_state = project_state.clone()
        operation.state_forwards("test_almota", new_state)
        self.assertEqual(
            new_state.models["test_almota", "pony"].options["db_table"],
            "test_almota_pony",
        )
        # Test the database alteration
        self.assertTableExists("test_almota_pony")
        with connection.schema_editor() as editor:
            operation.database_forwards("test_almota", editor, project_state, new_state)
        self.assertTableExists("test_almota_pony")
        # And test reversal
        with connection.schema_editor() as editor:
            operation.database_backwards(
                "test_almota", editor, new_state, project_state
            )
        self.assertTableExists("test_almota_pony")
    def test_alter_model_table_m2m(self):
        """
        AlterModelTable should rename auto-generated M2M tables.
        """
        app_label = "test_talflmltlm2m"
        pony_db_table = "pony_foo"
        project_state = self.set_up_test_model(
            app_label, second_model=True, db_table=pony_db_table
        )
        # Add the M2M field
        first_state = project_state.clone()
        operation = migrations.AddField(
            "Pony", "stables", models.ManyToManyField("Stable")
        )
        operation.state_forwards(app_label, first_state)
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, project_state, first_state)
        original_m2m_table = "%s_%s" % (pony_db_table, "stables")
        new_m2m_table = "%s_%s" % (app_label, "pony_stables")
        self.assertTableExists(original_m2m_table)
        self.assertTableNotExists(new_m2m_table)
        # Rename the Pony db_table which should also rename the m2m table.
        second_state = first_state.clone()
        operation = migrations.AlterModelTable(name="pony", table=None)
        operation.state_forwards(app_label, second_state)
        atomic_rename = connection.features.supports_atomic_references_rename
        with connection.schema_editor(atomic=atomic_rename) as editor:
            operation.database_forwards(app_label, editor, first_state, second_state)
        self.assertTableExists(new_m2m_table)
        self.assertTableNotExists(original_m2m_table)
        # And test reversal
        with connection.schema_editor(atomic=atomic_rename) as editor:
            operation.database_backwards(app_label, editor, second_state, first_state)
        self.assertTableExists(original_m2m_table)
        self.assertTableNotExists(new_m2m_table)
    def test_alter_field(self):
        """
        Tests the AlterField operation: state alteration, nullability change
        in the database, reversal, and deconstruction.
        """
        project_state = self.set_up_test_model("test_alfl")
        # Test the state alteration
        operation = migrations.AlterField(
            "Pony", "pink", models.IntegerField(null=True)
        )
        self.assertEqual(operation.describe(), "Alter field pink on Pony")
        self.assertEqual(operation.migration_name_fragment, "alter_pony_pink")
        new_state = project_state.clone()
        operation.state_forwards("test_alfl", new_state)
        self.assertIs(
            project_state.models["test_alfl", "pony"].fields["pink"].null, False
        )
        self.assertIs(new_state.models["test_alfl", "pony"].fields["pink"].null, True)
        # Test the database alteration
        self.assertColumnNotNull("test_alfl_pony", "pink")
        with connection.schema_editor() as editor:
            operation.database_forwards("test_alfl", editor, project_state, new_state)
        self.assertColumnNull("test_alfl_pony", "pink")
        # And test reversal
        with connection.schema_editor() as editor:
            operation.database_backwards("test_alfl", editor, new_state, project_state)
        self.assertColumnNotNull("test_alfl_pony", "pink")
        # And deconstruction
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "AlterField")
        self.assertEqual(definition[1], [])
        self.assertEqual(sorted(definition[2]), ["field", "model_name", "name"])
def test_alter_field_add_db_column_noop(self):
"""
AlterField operation is a noop when adding only a db_column and the
column name is not changed.
"""
app_label = "test_afadbn"
project_state = self.set_up_test_model(app_label, related_model=True)
pony_table = "%s_pony" % app_label
new_state = project_state.clone()
operation = migrations.AlterField(
"Pony", "weight", models.FloatField(db_column="weight")
)
operation.state_forwards(app_label, new_state)
self.assertIsNone(
project_state.models[app_label, "pony"].fields["weight"].db_column,
)
self.assertEqual(
new_state.models[app_label, "pony"].fields["weight"].db_column,
"weight",
)
self.assertColumnExists(pony_table, "weight")
with connection.schema_editor() as editor:
with self.assertNumQueries(0):
operation.database_forwards(app_label, editor, project_state, new_state)
self.assertColumnExists(pony_table, "weight")
with connection.schema_editor() as editor:
with self.assertNumQueries(0):
operation.database_backwards(
app_label, editor, new_state, project_state
)
self.assertColumnExists(pony_table, "weight")
rider_table = "%s_rider" % app_label
new_state = project_state.clone()
operation = migrations.AlterField(
"Rider",
"pony",
models.ForeignKey("Pony", models.CASCADE, db_column="pony_id"),
)
operation.state_forwards(app_label, new_state)
self.assertIsNone(
project_state.models[app_label, "rider"].fields["pony"].db_column,
)
self.assertIs(
new_state.models[app_label, "rider"].fields["pony"].db_column,
"pony_id",
)
self.assertColumnExists(rider_table, "pony_id")
with connection.schema_editor() as editor:
with self.assertNumQueries(0):
operation.database_forwards(app_label, editor, project_state, new_state)
self.assertColumnExists(rider_table, "pony_id")
with connection.schema_editor() as editor:
with self.assertNumQueries(0):
operation.database_forwards(app_label, editor, new_state, project_state)
self.assertColumnExists(rider_table, "pony_id")
    def test_alter_field_pk(self):
        """
        The AlterField operation on primary keys (things like PostgreSQL's
        SERIAL weirdness).
        """
        project_state = self.set_up_test_model("test_alflpk")
        # Test the state alteration: AutoField pk -> plain IntegerField pk.
        operation = migrations.AlterField(
            "Pony", "id", models.IntegerField(primary_key=True)
        )
        new_state = project_state.clone()
        operation.state_forwards("test_alflpk", new_state)
        self.assertIsInstance(
            project_state.models["test_alflpk", "pony"].fields["id"],
            models.AutoField,
        )
        self.assertIsInstance(
            new_state.models["test_alflpk", "pony"].fields["id"],
            models.IntegerField,
        )
        # Test the database alteration
        with connection.schema_editor() as editor:
            operation.database_forwards("test_alflpk", editor, project_state, new_state)
        # And test reversal
        with connection.schema_editor() as editor:
            operation.database_backwards(
                "test_alflpk", editor, new_state, project_state
            )
    @skipUnlessDBFeature("supports_foreign_keys")
    def test_alter_field_pk_fk(self):
        """
        Tests the AlterField operation on primary keys changes any FKs pointing to it.
        """
        project_state = self.set_up_test_model("test_alflpkfk", related_model=True)
        project_state = self.apply_operations(
            "test_alflpkfk",
            project_state,
            [
                migrations.CreateModel(
                    "Stable",
                    fields=[
                        ("ponies", models.ManyToManyField("Pony")),
                    ],
                ),
                migrations.AddField(
                    "Pony",
                    "stables",
                    models.ManyToManyField("Stable"),
                ),
            ],
        )
        # Test the state alteration
        operation = migrations.AlterField(
            "Pony", "id", models.FloatField(primary_key=True)
        )
        new_state = project_state.clone()
        operation.state_forwards("test_alflpkfk", new_state)
        self.assertIsInstance(
            project_state.models["test_alflpkfk", "pony"].fields["id"],
            models.AutoField,
        )
        self.assertIsInstance(
            new_state.models["test_alflpkfk", "pony"].fields["id"],
            models.FloatField,
        )

        def assertIdTypeEqualsFkType():
            # The pk column type must match every FK and M2M column pointing
            # at it, including nullability.
            with connection.cursor() as cursor:
                id_type, id_null = [
                    (c.type_code, c.null_ok)
                    for c in connection.introspection.get_table_description(
                        cursor, "test_alflpkfk_pony"
                    )
                    if c.name == "id"
                ][0]
                fk_type, fk_null = [
                    (c.type_code, c.null_ok)
                    for c in connection.introspection.get_table_description(
                        cursor, "test_alflpkfk_rider"
                    )
                    if c.name == "pony_id"
                ][0]
                m2m_fk_type, m2m_fk_null = [
                    (c.type_code, c.null_ok)
                    for c in connection.introspection.get_table_description(
                        cursor,
                        "test_alflpkfk_pony_stables",
                    )
                    if c.name == "pony_id"
                ][0]
                remote_m2m_fk_type, remote_m2m_fk_null = [
                    (c.type_code, c.null_ok)
                    for c in connection.introspection.get_table_description(
                        cursor,
                        "test_alflpkfk_stable_ponies",
                    )
                    if c.name == "pony_id"
                ][0]
                self.assertEqual(id_type, fk_type)
                self.assertEqual(id_type, m2m_fk_type)
                self.assertEqual(id_type, remote_m2m_fk_type)
                self.assertEqual(id_null, fk_null)
                self.assertEqual(id_null, m2m_fk_null)
                self.assertEqual(id_null, remote_m2m_fk_null)

        assertIdTypeEqualsFkType()
        # Test the database alteration
        with connection.schema_editor() as editor:
            operation.database_forwards(
                "test_alflpkfk", editor, project_state, new_state
            )
        assertIdTypeEqualsFkType()
        if connection.features.supports_foreign_keys:
            self.assertFKExists(
                "test_alflpkfk_pony_stables",
                ["pony_id"],
                ("test_alflpkfk_pony", "id"),
            )
            self.assertFKExists(
                "test_alflpkfk_stable_ponies",
                ["pony_id"],
                ("test_alflpkfk_pony", "id"),
            )
        # And test reversal
        with connection.schema_editor() as editor:
            operation.database_backwards(
                "test_alflpkfk", editor, new_state, project_state
            )
        assertIdTypeEqualsFkType()
        if connection.features.supports_foreign_keys:
            self.assertFKExists(
                "test_alflpkfk_pony_stables",
                ["pony_id"],
                ("test_alflpkfk_pony", "id"),
            )
            self.assertFKExists(
                "test_alflpkfk_stable_ponies",
                ["pony_id"],
                ("test_alflpkfk_pony", "id"),
            )
    def test_alter_field_pk_mti_fk(self):
        """
        AlterField on an MTI parent pk also alters the child's parent-link
        column and FKs pointing at the child.
        """
        app_label = "test_alflpkmtifk"
        project_state = self.set_up_test_model(app_label, mti_model=True)
        project_state = self.apply_operations(
            app_label,
            project_state,
            [
                migrations.CreateModel(
                    "ShetlandRider",
                    fields=[
                        (
                            "pony",
                            models.ForeignKey(
                                f"{app_label}.ShetlandPony", models.CASCADE
                            ),
                        ),
                    ],
                ),
            ],
        )
        operation = migrations.AlterField(
            "Pony",
            "id",
            models.BigAutoField(primary_key=True),
        )
        new_state = project_state.clone()
        operation.state_forwards(app_label, new_state)
        self.assertIsInstance(
            new_state.models[app_label, "pony"].fields["id"],
            models.BigAutoField,
        )

        def _get_column_id_type(cursor, table, column):
            # Introspect the database type of a single column.
            return [
                c.type_code
                for c in connection.introspection.get_table_description(
                    cursor,
                    f"{app_label}_{table}",
                )
                if c.name == column
            ][0]

        def assertIdTypeEqualsMTIFkType():
            # Parent pk, child parent-link, and FK-to-child columns must all
            # share the same database type.
            with connection.cursor() as cursor:
                parent_id_type = _get_column_id_type(cursor, "pony", "id")
                child_id_type = _get_column_id_type(
                    cursor, "shetlandpony", "pony_ptr_id"
                )
                mti_id_type = _get_column_id_type(cursor, "shetlandrider", "pony_id")
                self.assertEqual(parent_id_type, child_id_type)
                self.assertEqual(parent_id_type, mti_id_type)

        assertIdTypeEqualsMTIFkType()
        # Alter primary key.
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, project_state, new_state)
        assertIdTypeEqualsMTIFkType()
        if connection.features.supports_foreign_keys:
            self.assertFKExists(
                f"{app_label}_shetlandpony",
                ["pony_ptr_id"],
                (f"{app_label}_pony", "id"),
            )
            self.assertFKExists(
                f"{app_label}_shetlandrider",
                ["pony_id"],
                (f"{app_label}_shetlandpony", "pony_ptr_id"),
            )
        # Reversal.
        with connection.schema_editor() as editor:
            operation.database_backwards(app_label, editor, new_state, project_state)
        assertIdTypeEqualsMTIFkType()
        if connection.features.supports_foreign_keys:
            self.assertFKExists(
                f"{app_label}_shetlandpony",
                ["pony_ptr_id"],
                (f"{app_label}_pony", "id"),
            )
            self.assertFKExists(
                f"{app_label}_shetlandrider",
                ["pony_id"],
                (f"{app_label}_shetlandpony", "pony_ptr_id"),
            )
    def test_alter_field_pk_mti_and_fk_to_base(self):
        """
        AlterField on an MTI parent pk also alters the child's parent-link
        column and FKs pointing at the base model.
        """
        app_label = "test_alflpkmtiftb"
        project_state = self.set_up_test_model(
            app_label,
            mti_model=True,
            related_model=True,
        )
        operation = migrations.AlterField(
            "Pony",
            "id",
            models.BigAutoField(primary_key=True),
        )
        new_state = project_state.clone()
        operation.state_forwards(app_label, new_state)
        self.assertIsInstance(
            new_state.models[app_label, "pony"].fields["id"],
            models.BigAutoField,
        )

        def _get_column_id_type(cursor, table, column):
            # Introspect the database type of a single column.
            return [
                c.type_code
                for c in connection.introspection.get_table_description(
                    cursor,
                    f"{app_label}_{table}",
                )
                if c.name == column
            ][0]

        def assertIdTypeEqualsMTIFkType():
            # Parent pk, FK-to-base, and child parent-link columns must all
            # share the same database type.
            with connection.cursor() as cursor:
                parent_id_type = _get_column_id_type(cursor, "pony", "id")
                fk_id_type = _get_column_id_type(cursor, "rider", "pony_id")
                child_id_type = _get_column_id_type(
                    cursor, "shetlandpony", "pony_ptr_id"
                )
                self.assertEqual(parent_id_type, child_id_type)
                self.assertEqual(parent_id_type, fk_id_type)

        assertIdTypeEqualsMTIFkType()
        # Alter primary key.
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, project_state, new_state)
        assertIdTypeEqualsMTIFkType()
        if connection.features.supports_foreign_keys:
            self.assertFKExists(
                f"{app_label}_shetlandpony",
                ["pony_ptr_id"],
                (f"{app_label}_pony", "id"),
            )
            self.assertFKExists(
                f"{app_label}_rider",
                ["pony_id"],
                (f"{app_label}_pony", "id"),
            )
        # Reversal.
        with connection.schema_editor() as editor:
            operation.database_backwards(app_label, editor, new_state, project_state)
        assertIdTypeEqualsMTIFkType()
        if connection.features.supports_foreign_keys:
            self.assertFKExists(
                f"{app_label}_shetlandpony",
                ["pony_ptr_id"],
                (f"{app_label}_pony", "id"),
            )
            self.assertFKExists(
                f"{app_label}_rider",
                ["pony_id"],
                (f"{app_label}_pony", "id"),
            )
    @skipUnlessDBFeature("supports_foreign_keys")
    def test_alter_field_reloads_state_on_fk_with_to_field_target_type_change(self):
        """
        Altering the type of a to_field target column propagates the new type
        to the FK column referencing it.
        """
        app_label = "test_alflrsfkwtflttc"
        project_state = self.apply_operations(
            app_label,
            ProjectState(),
            operations=[
                migrations.CreateModel(
                    "Rider",
                    fields=[
                        ("id", models.AutoField(primary_key=True)),
                        ("code", models.IntegerField(unique=True)),
                    ],
                ),
                migrations.CreateModel(
                    "Pony",
                    fields=[
                        ("id", models.AutoField(primary_key=True)),
                        (
                            "rider",
                            models.ForeignKey(
                                "%s.Rider" % app_label, models.CASCADE, to_field="code"
                            ),
                        ),
                    ],
                ),
            ],
        )
        # Change the FK target column from integer to char.
        operation = migrations.AlterField(
            "Rider",
            "code",
            models.CharField(max_length=100, unique=True),
        )
        self.apply_operations(app_label, project_state, operations=[operation])
        id_type, id_null = [
            (c.type_code, c.null_ok)
            for c in self.get_table_description("%s_rider" % app_label)
            if c.name == "code"
        ][0]
        fk_type, fk_null = [
            (c.type_code, c.null_ok)
            for c in self.get_table_description("%s_pony" % app_label)
            if c.name == "rider_id"
        ][0]
        self.assertEqual(id_type, fk_type)
        self.assertEqual(id_null, fk_null)
    @skipUnlessDBFeature("supports_foreign_keys")
    def test_alter_field_reloads_state_fk_with_to_field_related_name_target_type_change(
        self,
    ):
        """
        Altering a to_field target's type works when the FK uses
        related_name="+" (no reverse accessor).
        """
        app_label = "test_alflrsfkwtflrnttc"
        project_state = self.apply_operations(
            app_label,
            ProjectState(),
            operations=[
                migrations.CreateModel(
                    "Rider",
                    fields=[
                        ("id", models.AutoField(primary_key=True)),
                        ("code", models.PositiveIntegerField(unique=True)),
                    ],
                ),
                migrations.CreateModel(
                    "Pony",
                    fields=[
                        ("id", models.AutoField(primary_key=True)),
                        (
                            "rider",
                            models.ForeignKey(
                                "%s.Rider" % app_label,
                                models.CASCADE,
                                to_field="code",
                                related_name="+",
                            ),
                        ),
                    ],
                ),
            ],
        )
        operation = migrations.AlterField(
            "Rider",
            "code",
            models.CharField(max_length=100, unique=True),
        )
        self.apply_operations(app_label, project_state, operations=[operation])
    def test_alter_field_reloads_state_on_fk_target_changes(self):
        """
        If AlterField doesn't reload state appropriately, the second AlterField
        crashes on MySQL due to not dropping the PonyRider.pony foreign key
        constraint before modifying the column.
        """
        app_label = "alter_alter_field_reloads_state_on_fk_target_changes"
        # Chain: PonyRider -> Pony -> Rider, all with CharField pks.
        project_state = self.apply_operations(
            app_label,
            ProjectState(),
            operations=[
                migrations.CreateModel(
                    "Rider",
                    fields=[
                        ("id", models.CharField(primary_key=True, max_length=100)),
                    ],
                ),
                migrations.CreateModel(
                    "Pony",
                    fields=[
                        ("id", models.CharField(primary_key=True, max_length=100)),
                        (
                            "rider",
                            models.ForeignKey("%s.Rider" % app_label, models.CASCADE),
                        ),
                    ],
                ),
                migrations.CreateModel(
                    "PonyRider",
                    fields=[
                        ("id", models.AutoField(primary_key=True)),
                        (
                            "pony",
                            models.ForeignKey("%s.Pony" % app_label, models.CASCADE),
                        ),
                    ],
                ),
            ],
        )
        # Shrink both pks; the second alteration must see the reloaded state.
        project_state = self.apply_operations(
            app_label,
            project_state,
            operations=[
                migrations.AlterField(
                    "Rider", "id", models.CharField(primary_key=True, max_length=99)
                ),
                migrations.AlterField(
                    "Pony", "id", models.CharField(primary_key=True, max_length=99)
                ),
            ],
        )
    def test_alter_field_reloads_state_on_fk_with_to_field_target_changes(self):
        """
        If AlterField doesn't reload state appropriately, the second AlterField
        crashes on MySQL due to not dropping the PonyRider.pony foreign key
        constraint before modifying the column.
        """
        app_label = "alter_alter_field_reloads_state_on_fk_with_to_field_target_changes"
        # Chain via to_field="slug": PonyRider -> Pony.slug, Pony -> Rider.slug.
        project_state = self.apply_operations(
            app_label,
            ProjectState(),
            operations=[
                migrations.CreateModel(
                    "Rider",
                    fields=[
                        ("id", models.CharField(primary_key=True, max_length=100)),
                        ("slug", models.CharField(unique=True, max_length=100)),
                    ],
                ),
                migrations.CreateModel(
                    "Pony",
                    fields=[
                        ("id", models.CharField(primary_key=True, max_length=100)),
                        (
                            "rider",
                            models.ForeignKey(
                                "%s.Rider" % app_label, models.CASCADE, to_field="slug"
                            ),
                        ),
                        ("slug", models.CharField(unique=True, max_length=100)),
                    ],
                ),
                migrations.CreateModel(
                    "PonyRider",
                    fields=[
                        ("id", models.AutoField(primary_key=True)),
                        (
                            "pony",
                            models.ForeignKey(
                                "%s.Pony" % app_label, models.CASCADE, to_field="slug"
                            ),
                        ),
                    ],
                ),
            ],
        )
        # Shrink both slug targets; the second alteration must see the
        # reloaded state.
        project_state = self.apply_operations(
            app_label,
            project_state,
            operations=[
                migrations.AlterField(
                    "Rider", "slug", models.CharField(unique=True, max_length=99)
                ),
                migrations.AlterField(
                    "Pony", "slug", models.CharField(unique=True, max_length=99)
                ),
            ],
        )
    def test_rename_field_reloads_state_on_fk_target_changes(self):
        """
        If RenameField doesn't reload state appropriately, the AlterField
        crashes on MySQL due to not dropping the PonyRider.pony foreign key
        constraint before modifying the column.
        """
        app_label = "alter_rename_field_reloads_state_on_fk_target_changes"
        # Chain: PonyRider -> Pony -> Rider, all with CharField pks.
        project_state = self.apply_operations(
            app_label,
            ProjectState(),
            operations=[
                migrations.CreateModel(
                    "Rider",
                    fields=[
                        ("id", models.CharField(primary_key=True, max_length=100)),
                    ],
                ),
                migrations.CreateModel(
                    "Pony",
                    fields=[
                        ("id", models.CharField(primary_key=True, max_length=100)),
                        (
                            "rider",
                            models.ForeignKey("%s.Rider" % app_label, models.CASCADE),
                        ),
                    ],
                ),
                migrations.CreateModel(
                    "PonyRider",
                    fields=[
                        ("id", models.AutoField(primary_key=True)),
                        (
                            "pony",
                            models.ForeignKey("%s.Pony" % app_label, models.CASCADE),
                        ),
                    ],
                ),
            ],
        )
        # Rename the Rider pk, then alter the Pony pk; the AlterField must
        # see the state reloaded by the rename.
        project_state = self.apply_operations(
            app_label,
            project_state,
            operations=[
                migrations.RenameField("Rider", "id", "id2"),
                migrations.AlterField(
                    "Pony", "id", models.CharField(primary_key=True, max_length=99)
                ),
            ],
            atomic=connection.features.supports_atomic_references_rename,
        )
    def test_rename_field(self):
        """
        Tests the RenameField operation.
        """
        # unique_together and index_together both include the column being
        # renamed, so the rename has to propagate into those options too.
        project_state = self.set_up_test_model(
            "test_rnfl", unique_together=True, index_together=True
        )
        # Test the state alteration
        operation = migrations.RenameField("Pony", "pink", "blue")
        self.assertEqual(operation.describe(), "Rename field pink on Pony to blue")
        self.assertEqual(operation.migration_name_fragment, "rename_pink_pony_blue")
        new_state = project_state.clone()
        operation.state_forwards("test_rnfl", new_state)
        self.assertIn("blue", new_state.models["test_rnfl", "pony"].fields)
        self.assertNotIn("pink", new_state.models["test_rnfl", "pony"].fields)
        # Make sure the unique_together has the renamed column too
        self.assertIn(
            "blue", new_state.models["test_rnfl", "pony"].options["unique_together"][0]
        )
        self.assertNotIn(
            "pink", new_state.models["test_rnfl", "pony"].options["unique_together"][0]
        )
        # Make sure the index_together has the renamed column too
        self.assertIn(
            "blue", new_state.models["test_rnfl", "pony"].options["index_together"][0]
        )
        self.assertNotIn(
            "pink", new_state.models["test_rnfl", "pony"].options["index_together"][0]
        )
        # Test the database alteration
        self.assertColumnExists("test_rnfl_pony", "pink")
        self.assertColumnNotExists("test_rnfl_pony", "blue")
        with connection.schema_editor() as editor:
            operation.database_forwards("test_rnfl", editor, project_state, new_state)
        self.assertColumnExists("test_rnfl_pony", "blue")
        self.assertColumnNotExists("test_rnfl_pony", "pink")
        # Ensure the unique constraint has been ported over
        with connection.cursor() as cursor:
            cursor.execute("INSERT INTO test_rnfl_pony (blue, weight) VALUES (1, 1)")
            # The second identical row must violate the (renamed) unique
            # constraint.
            with self.assertRaises(IntegrityError):
                with atomic():
                    cursor.execute(
                        "INSERT INTO test_rnfl_pony (blue, weight) VALUES (1, 1)"
                    )
            cursor.execute("DELETE FROM test_rnfl_pony")
        # Ensure the index constraint has been ported over
        self.assertIndexExists("test_rnfl_pony", ["weight", "blue"])
        # And test reversal
        with connection.schema_editor() as editor:
            operation.database_backwards("test_rnfl", editor, new_state, project_state)
        self.assertColumnExists("test_rnfl_pony", "pink")
        self.assertColumnNotExists("test_rnfl_pony", "blue")
        # Ensure the index constraint has been reset
        self.assertIndexExists("test_rnfl_pony", ["weight", "pink"])
        # And deconstruction
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "RenameField")
        self.assertEqual(definition[1], [])
        self.assertEqual(
            definition[2],
            {"model_name": "Pony", "old_name": "pink", "new_name": "blue"},
        )
    def test_rename_field_with_db_column(self):
        """
        RenameField on a field with an explicit db_column is a state-only
        change: no SQL runs (assertNumQueries(0)) and the column keeps its
        name, for both plain and foreign key fields.
        """
        project_state = self.apply_operations(
            "test_rfwdbc",
            ProjectState(),
            operations=[
                migrations.CreateModel(
                    "Pony",
                    fields=[
                        ("id", models.AutoField(primary_key=True)),
                        ("field", models.IntegerField(db_column="db_field")),
                        (
                            "fk_field",
                            models.ForeignKey(
                                "Pony",
                                models.CASCADE,
                                db_column="db_fk_field",
                            ),
                        ),
                    ],
                ),
            ],
        )
        new_state = project_state.clone()
        operation = migrations.RenameField("Pony", "field", "renamed_field")
        operation.state_forwards("test_rfwdbc", new_state)
        self.assertIn("renamed_field", new_state.models["test_rfwdbc", "pony"].fields)
        self.assertNotIn("field", new_state.models["test_rfwdbc", "pony"].fields)
        self.assertColumnExists("test_rfwdbc_pony", "db_field")
        # Renaming the plain field issues zero queries in both directions.
        with connection.schema_editor() as editor:
            with self.assertNumQueries(0):
                operation.database_forwards(
                    "test_rfwdbc", editor, project_state, new_state
                )
        self.assertColumnExists("test_rfwdbc_pony", "db_field")
        with connection.schema_editor() as editor:
            with self.assertNumQueries(0):
                operation.database_backwards(
                    "test_rfwdbc", editor, new_state, project_state
                )
        self.assertColumnExists("test_rfwdbc_pony", "db_field")
        # Same for the self-referential foreign key with db_column.
        new_state = project_state.clone()
        operation = migrations.RenameField("Pony", "fk_field", "renamed_fk_field")
        operation.state_forwards("test_rfwdbc", new_state)
        self.assertIn(
            "renamed_fk_field", new_state.models["test_rfwdbc", "pony"].fields
        )
        self.assertNotIn("fk_field", new_state.models["test_rfwdbc", "pony"].fields)
        self.assertColumnExists("test_rfwdbc_pony", "db_fk_field")
        with connection.schema_editor() as editor:
            with self.assertNumQueries(0):
                operation.database_forwards(
                    "test_rfwdbc", editor, project_state, new_state
                )
        self.assertColumnExists("test_rfwdbc_pony", "db_fk_field")
        with connection.schema_editor() as editor:
            with self.assertNumQueries(0):
                operation.database_backwards(
                    "test_rfwdbc", editor, new_state, project_state
                )
        self.assertColumnExists("test_rfwdbc_pony", "db_fk_field")
    def test_rename_field_case(self):
        """
        RenameField that changes only the letter case of a field still
        renames the column (through the backend's identifier converter).
        """
        project_state = self.apply_operations(
            "test_rfmx",
            ProjectState(),
            operations=[
                migrations.CreateModel(
                    "Pony",
                    fields=[
                        ("id", models.AutoField(primary_key=True)),
                        ("field", models.IntegerField()),
                    ],
                ),
            ],
        )
        new_state = project_state.clone()
        operation = migrations.RenameField("Pony", "field", "FiElD")
        operation.state_forwards("test_rfmx", new_state)
        self.assertIn("FiElD", new_state.models["test_rfmx", "pony"].fields)
        self.assertColumnExists("test_rfmx_pony", "field")
        with connection.schema_editor() as editor:
            operation.database_forwards("test_rfmx", editor, project_state, new_state)
        # Column name comparison goes through the backend's identifier
        # converter (e.g. case folding).
        self.assertColumnExists(
            "test_rfmx_pony",
            connection.introspection.identifier_converter("FiElD"),
        )
        with connection.schema_editor() as editor:
            operation.database_backwards("test_rfmx", editor, new_state, project_state)
        self.assertColumnExists("test_rfmx_pony", "field")
def test_rename_missing_field(self):
state = ProjectState()
state.add_model(ModelState("app", "model", []))
with self.assertRaisesMessage(
FieldDoesNotExist, "app.model has no field named 'field'"
):
migrations.RenameField("model", "field", "new_field").state_forwards(
"app", state
)
    def test_rename_referenced_field_state_forward(self):
        """
        RenameField rewrites references held by other fields: the
        remote_field.field_name, from_fields, and to_fields of ForeignKeys
        and ForeignObjects that point at (or route through) the renamed
        field.
        """
        state = ProjectState()
        state.add_model(
            ModelState(
                "app",
                "Model",
                [
                    ("id", models.AutoField(primary_key=True)),
                    ("field", models.IntegerField(unique=True)),
                ],
            )
        )
        state.add_model(
            ModelState(
                "app",
                "OtherModel",
                [
                    ("id", models.AutoField(primary_key=True)),
                    (
                        "fk",
                        models.ForeignKey("Model", models.CASCADE, to_field="field"),
                    ),
                    (
                        "fo",
                        models.ForeignObject(
                            "Model",
                            models.CASCADE,
                            from_fields=("fk",),
                            to_fields=("field",),
                        ),
                    ),
                ],
            )
        )
        # Renaming the *target* field updates every inbound reference.
        operation = migrations.RenameField("Model", "field", "renamed")
        new_state = state.clone()
        operation.state_forwards("app", new_state)
        self.assertEqual(
            new_state.models["app", "othermodel"].fields["fk"].remote_field.field_name,
            "renamed",
        )
        self.assertEqual(
            new_state.models["app", "othermodel"].fields["fk"].from_fields, ["self"]
        )
        self.assertEqual(
            new_state.models["app", "othermodel"].fields["fk"].to_fields, ("renamed",)
        )
        self.assertEqual(
            new_state.models["app", "othermodel"].fields["fo"].from_fields, ("fk",)
        )
        self.assertEqual(
            new_state.models["app", "othermodel"].fields["fo"].to_fields, ("renamed",)
        )
        # Renaming the *referencing* FK updates the ForeignObject that routes
        # through it. NOTE(review): this clone comes from the original
        # `state`, yet the assertions below still see "renamed" —
        # presumably field instances are shared between cloned states, so
        # the first rename is visible here too; verify before relying on it.
        operation = migrations.RenameField("OtherModel", "fk", "renamed_fk")
        new_state = state.clone()
        operation.state_forwards("app", new_state)
        self.assertEqual(
            new_state.models["app", "othermodel"]
            .fields["renamed_fk"]
            .remote_field.field_name,
            "renamed",
        )
        self.assertEqual(
            new_state.models["app", "othermodel"].fields["renamed_fk"].from_fields,
            ("self",),
        )
        self.assertEqual(
            new_state.models["app", "othermodel"].fields["renamed_fk"].to_fields,
            ("renamed",),
        )
        self.assertEqual(
            new_state.models["app", "othermodel"].fields["fo"].from_fields,
            ("renamed_fk",),
        )
        self.assertEqual(
            new_state.models["app", "othermodel"].fields["fo"].to_fields, ("renamed",)
        )
    def test_alter_unique_together(self):
        """
        Tests the AlterUniqueTogether operation.
        """
        project_state = self.set_up_test_model("test_alunto")
        # Test the state alteration
        operation = migrations.AlterUniqueTogether("Pony", [("pink", "weight")])
        self.assertEqual(
            operation.describe(), "Alter unique_together for Pony (1 constraint(s))"
        )
        self.assertEqual(
            operation.migration_name_fragment,
            "alter_pony_unique_together",
        )
        new_state = project_state.clone()
        operation.state_forwards("test_alunto", new_state)
        # Only the clone gains the option; the original state is untouched.
        self.assertEqual(
            len(
                project_state.models["test_alunto", "pony"].options.get(
                    "unique_together", set()
                )
            ),
            0,
        )
        self.assertEqual(
            len(
                new_state.models["test_alunto", "pony"].options.get(
                    "unique_together", set()
                )
            ),
            1,
        )
        # Make sure we can insert duplicate rows
        with connection.cursor() as cursor:
            cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
            cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
            cursor.execute("DELETE FROM test_alunto_pony")
            # Test the database alteration
            with connection.schema_editor() as editor:
                operation.database_forwards(
                    "test_alunto", editor, project_state, new_state
                )
            cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
            # Duplicates are now rejected by the new unique constraint.
            with self.assertRaises(IntegrityError):
                with atomic():
                    cursor.execute(
                        "INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)"
                    )
            cursor.execute("DELETE FROM test_alunto_pony")
            # And test reversal
            with connection.schema_editor() as editor:
                operation.database_backwards(
                    "test_alunto", editor, new_state, project_state
                )
            cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
            cursor.execute("INSERT INTO test_alunto_pony (pink, weight) VALUES (1, 1)")
            cursor.execute("DELETE FROM test_alunto_pony")
        # Test flat unique_together
        operation = migrations.AlterUniqueTogether("Pony", ("pink", "weight"))
        operation.state_forwards("test_alunto", new_state)
        self.assertEqual(
            len(
                new_state.models["test_alunto", "pony"].options.get(
                    "unique_together", set()
                )
            ),
            1,
        )
        # And deconstruction
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "AlterUniqueTogether")
        self.assertEqual(definition[1], [])
        self.assertEqual(
            definition[2], {"name": "Pony", "unique_together": {("pink", "weight")}}
        )
def test_alter_unique_together_remove(self):
operation = migrations.AlterUniqueTogether("Pony", None)
self.assertEqual(
operation.describe(), "Alter unique_together for Pony (0 constraint(s))"
)
    def test_add_index(self):
        """
        Test the AddIndex operation.
        """
        project_state = self.set_up_test_model("test_adin")
        # Unnamed indexes are rejected up front with a ValueError.
        msg = (
            "Indexes passed to AddIndex operations require a name argument. "
            "<Index: fields=['pink']> doesn't have one."
        )
        with self.assertRaisesMessage(ValueError, msg):
            migrations.AddIndex("Pony", models.Index(fields=["pink"]))
        index = models.Index(fields=["pink"], name="test_adin_pony_pink_idx")
        operation = migrations.AddIndex("Pony", index)
        self.assertEqual(
            operation.describe(),
            "Create index test_adin_pony_pink_idx on field(s) pink of model Pony",
        )
        self.assertEqual(
            operation.migration_name_fragment,
            "pony_test_adin_pony_pink_idx",
        )
        new_state = project_state.clone()
        operation.state_forwards("test_adin", new_state)
        # Test the database alteration
        self.assertEqual(
            len(new_state.models["test_adin", "pony"].options["indexes"]), 1
        )
        self.assertIndexNotExists("test_adin_pony", ["pink"])
        with connection.schema_editor() as editor:
            operation.database_forwards("test_adin", editor, project_state, new_state)
        self.assertIndexExists("test_adin_pony", ["pink"])
        # And test reversal
        with connection.schema_editor() as editor:
            operation.database_backwards("test_adin", editor, new_state, project_state)
        self.assertIndexNotExists("test_adin_pony", ["pink"])
        # And deconstruction
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "AddIndex")
        self.assertEqual(definition[1], [])
        self.assertEqual(definition[2], {"model_name": "Pony", "index": index})
    def test_remove_index(self):
        """
        Test the RemoveIndex operation.
        """
        project_state = self.set_up_test_model("test_rmin", multicol_index=True)
        self.assertTableExists("test_rmin_pony")
        self.assertIndexExists("test_rmin_pony", ["pink", "weight"])
        operation = migrations.RemoveIndex("Pony", "pony_test_idx")
        self.assertEqual(operation.describe(), "Remove index pony_test_idx from Pony")
        self.assertEqual(
            operation.migration_name_fragment,
            "remove_pony_pony_test_idx",
        )
        new_state = project_state.clone()
        operation.state_forwards("test_rmin", new_state)
        # Test the state alteration
        self.assertEqual(
            len(new_state.models["test_rmin", "pony"].options["indexes"]), 0
        )
        self.assertIndexExists("test_rmin_pony", ["pink", "weight"])
        # Test the database alteration
        with connection.schema_editor() as editor:
            operation.database_forwards("test_rmin", editor, project_state, new_state)
        self.assertIndexNotExists("test_rmin_pony", ["pink", "weight"])
        # And test reversal
        with connection.schema_editor() as editor:
            operation.database_backwards("test_rmin", editor, new_state, project_state)
        self.assertIndexExists("test_rmin_pony", ["pink", "weight"])
        # And deconstruction
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "RemoveIndex")
        self.assertEqual(definition[1], [])
        self.assertEqual(definition[2], {"model_name": "Pony", "name": "pony_test_idx"})
        # Also test a field dropped with index - sqlite remake issue
        # (removing the index and a covered field in the same migration).
        operations = [
            migrations.RemoveIndex("Pony", "pony_test_idx"),
            migrations.RemoveField("Pony", "pink"),
        ]
        self.assertColumnExists("test_rmin_pony", "pink")
        self.assertIndexExists("test_rmin_pony", ["pink", "weight"])
        # Test database alteration
        new_state = project_state.clone()
        self.apply_operations("test_rmin", new_state, operations=operations)
        self.assertColumnNotExists("test_rmin_pony", "pink")
        self.assertIndexNotExists("test_rmin_pony", ["pink", "weight"])
        # And test reversal
        self.unapply_operations("test_rmin", project_state, operations=operations)
        self.assertIndexExists("test_rmin_pony", ["pink", "weight"])
def test_add_index_state_forwards(self):
project_state = self.set_up_test_model("test_adinsf")
index = models.Index(fields=["pink"], name="test_adinsf_pony_pink_idx")
old_model = project_state.apps.get_model("test_adinsf", "Pony")
new_state = project_state.clone()
operation = migrations.AddIndex("Pony", index)
operation.state_forwards("test_adinsf", new_state)
new_model = new_state.apps.get_model("test_adinsf", "Pony")
self.assertIsNot(old_model, new_model)
def test_remove_index_state_forwards(self):
project_state = self.set_up_test_model("test_rminsf")
index = models.Index(fields=["pink"], name="test_rminsf_pony_pink_idx")
migrations.AddIndex("Pony", index).state_forwards("test_rminsf", project_state)
old_model = project_state.apps.get_model("test_rminsf", "Pony")
new_state = project_state.clone()
operation = migrations.RemoveIndex("Pony", "test_rminsf_pony_pink_idx")
operation.state_forwards("test_rminsf", new_state)
new_model = new_state.apps.get_model("test_rminsf", "Pony")
self.assertIsNot(old_model, new_model)
    @skipUnlessDBFeature("supports_expression_indexes")
    def test_add_func_index(self):
        """AddIndex with a functional (expression-based) index."""
        app_label = "test_addfuncin"
        index_name = f"{app_label}_pony_abs_idx"
        table_name = f"{app_label}_pony"
        project_state = self.set_up_test_model(app_label)
        index = models.Index(Abs("weight"), name=index_name)
        operation = migrations.AddIndex("Pony", index)
        self.assertEqual(
            operation.describe(),
            "Create index test_addfuncin_pony_abs_idx on Abs(F(weight)) on model Pony",
        )
        self.assertEqual(
            operation.migration_name_fragment,
            "pony_test_addfuncin_pony_abs_idx",
        )
        new_state = project_state.clone()
        operation.state_forwards(app_label, new_state)
        self.assertEqual(len(new_state.models[app_label, "pony"].options["indexes"]), 1)
        self.assertIndexNameNotExists(table_name, index_name)
        # Add index.
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, project_state, new_state)
        self.assertIndexNameExists(table_name, index_name)
        # Reversal.
        with connection.schema_editor() as editor:
            operation.database_backwards(app_label, editor, new_state, project_state)
        self.assertIndexNameNotExists(table_name, index_name)
        # Deconstruction.
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "AddIndex")
        self.assertEqual(definition[1], [])
        self.assertEqual(definition[2], {"model_name": "Pony", "index": index})
    @skipUnlessDBFeature("supports_expression_indexes")
    def test_remove_func_index(self):
        """RemoveIndex drops a functional index; reversal recreates it."""
        app_label = "test_rmfuncin"
        index_name = f"{app_label}_pony_abs_idx"
        table_name = f"{app_label}_pony"
        project_state = self.set_up_test_model(
            app_label,
            indexes=[
                models.Index(Abs("weight"), name=index_name),
            ],
        )
        self.assertTableExists(table_name)
        self.assertIndexNameExists(table_name, index_name)
        operation = migrations.RemoveIndex("Pony", index_name)
        self.assertEqual(
            operation.describe(),
            "Remove index test_rmfuncin_pony_abs_idx from Pony",
        )
        self.assertEqual(
            operation.migration_name_fragment,
            "remove_pony_test_rmfuncin_pony_abs_idx",
        )
        new_state = project_state.clone()
        operation.state_forwards(app_label, new_state)
        self.assertEqual(len(new_state.models[app_label, "pony"].options["indexes"]), 0)
        # Remove index.
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, project_state, new_state)
        self.assertIndexNameNotExists(table_name, index_name)
        # Reversal.
        with connection.schema_editor() as editor:
            operation.database_backwards(app_label, editor, new_state, project_state)
        self.assertIndexNameExists(table_name, index_name)
        # Deconstruction.
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "RemoveIndex")
        self.assertEqual(definition[1], [])
        self.assertEqual(definition[2], {"model_name": "Pony", "name": index_name})
    @skipUnlessDBFeature("supports_expression_indexes")
    def test_alter_field_with_func_index(self):
        """
        A functional index on a column survives AlterField on that column,
        in both directions.
        """
        app_label = "test_alfuncin"
        index_name = f"{app_label}_pony_idx"
        table_name = f"{app_label}_pony"
        project_state = self.set_up_test_model(
            app_label,
            indexes=[models.Index(Abs("pink"), name=index_name)],
        )
        operation = migrations.AlterField(
            "Pony", "pink", models.IntegerField(null=True)
        )
        new_state = project_state.clone()
        operation.state_forwards(app_label, new_state)
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, project_state, new_state)
        self.assertIndexNameExists(table_name, index_name)
        with connection.schema_editor() as editor:
            operation.database_backwards(app_label, editor, new_state, project_state)
        self.assertIndexNameExists(table_name, index_name)
    def test_alter_field_with_index(self):
        """
        Test AlterField operation with an index to ensure indexes created via
        Meta.indexes don't get dropped with sqlite3 remake.
        """
        # index=True makes set_up_test_model declare an index on "pink".
        project_state = self.set_up_test_model("test_alflin", index=True)
        operation = migrations.AlterField(
            "Pony", "pink", models.IntegerField(null=True)
        )
        new_state = project_state.clone()
        operation.state_forwards("test_alflin", new_state)
        # Test the database alteration
        self.assertColumnNotNull("test_alflin_pony", "pink")
        with connection.schema_editor() as editor:
            operation.database_forwards("test_alflin", editor, project_state, new_state)
        # Index hasn't been dropped
        self.assertIndexExists("test_alflin_pony", ["pink"])
        # And test reversal
        with connection.schema_editor() as editor:
            operation.database_backwards(
                "test_alflin", editor, new_state, project_state
            )
        # Ensure the index is still there
        self.assertIndexExists("test_alflin_pony", ["pink"])
    def test_alter_index_together(self):
        """
        Tests the AlterIndexTogether operation.
        """
        project_state = self.set_up_test_model("test_alinto")
        # Test the state alteration
        operation = migrations.AlterIndexTogether("Pony", [("pink", "weight")])
        self.assertEqual(
            operation.describe(), "Alter index_together for Pony (1 constraint(s))"
        )
        self.assertEqual(
            operation.migration_name_fragment,
            "alter_pony_index_together",
        )
        new_state = project_state.clone()
        operation.state_forwards("test_alinto", new_state)
        # Only the clone gains the option; the original state is untouched.
        self.assertEqual(
            len(
                project_state.models["test_alinto", "pony"].options.get(
                    "index_together", set()
                )
            ),
            0,
        )
        self.assertEqual(
            len(
                new_state.models["test_alinto", "pony"].options.get(
                    "index_together", set()
                )
            ),
            1,
        )
        # Make sure there's no matching index
        self.assertIndexNotExists("test_alinto_pony", ["pink", "weight"])
        # Test the database alteration
        with connection.schema_editor() as editor:
            operation.database_forwards("test_alinto", editor, project_state, new_state)
        self.assertIndexExists("test_alinto_pony", ["pink", "weight"])
        # And test reversal
        with connection.schema_editor() as editor:
            operation.database_backwards(
                "test_alinto", editor, new_state, project_state
            )
        self.assertIndexNotExists("test_alinto_pony", ["pink", "weight"])
        # And deconstruction
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "AlterIndexTogether")
        self.assertEqual(definition[1], [])
        self.assertEqual(
            definition[2], {"name": "Pony", "index_together": {("pink", "weight")}}
        )
def test_alter_index_together_remove(self):
operation = migrations.AlterIndexTogether("Pony", None)
self.assertEqual(
operation.describe(), "Alter index_together for Pony (0 constraint(s))"
)
    @skipUnlessDBFeature("allows_multiple_constraints_on_same_fields")
    def test_alter_index_together_remove_with_unique_together(self):
        """
        Clearing index_together drops only the index, not the unique
        constraint that covers the same fields.
        """
        app_label = "test_alintoremove_wunto"
        table_name = "%s_pony" % app_label
        project_state = self.set_up_test_model(app_label, unique_together=True)
        self.assertUniqueConstraintExists(table_name, ["pink", "weight"])
        # Add index together.
        new_state = project_state.clone()
        operation = migrations.AlterIndexTogether("Pony", [("pink", "weight")])
        operation.state_forwards(app_label, new_state)
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, project_state, new_state)
        self.assertIndexExists(table_name, ["pink", "weight"])
        # Remove index together.
        project_state = new_state
        new_state = project_state.clone()
        operation = migrations.AlterIndexTogether("Pony", set())
        operation.state_forwards(app_label, new_state)
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, project_state, new_state)
        self.assertIndexNotExists(table_name, ["pink", "weight"])
        # The unique constraint on the same fields must survive.
        self.assertUniqueConstraintExists(table_name, ["pink", "weight"])
    @skipUnlessDBFeature("supports_table_check_constraints")
    def test_add_constraint(self):
        """AddConstraint with CheckConstraints: state, database, reversal."""
        project_state = self.set_up_test_model("test_addconstraint")
        gt_check = models.Q(pink__gt=2)
        gt_constraint = models.CheckConstraint(
            check=gt_check, name="test_add_constraint_pony_pink_gt_2"
        )
        gt_operation = migrations.AddConstraint("Pony", gt_constraint)
        self.assertEqual(
            gt_operation.describe(),
            "Create constraint test_add_constraint_pony_pink_gt_2 on model Pony",
        )
        self.assertEqual(
            gt_operation.migration_name_fragment,
            "pony_test_add_constraint_pony_pink_gt_2",
        )
        # Test the state alteration
        new_state = project_state.clone()
        gt_operation.state_forwards("test_addconstraint", new_state)
        self.assertEqual(
            len(new_state.models["test_addconstraint", "pony"].options["constraints"]),
            1,
        )
        Pony = new_state.apps.get_model("test_addconstraint", "Pony")
        self.assertEqual(len(Pony._meta.constraints), 1)
        # Test the database alteration
        with connection.schema_editor() as editor:
            gt_operation.database_forwards(
                "test_addconstraint", editor, project_state, new_state
            )
        # pink=1 violates pink > 2.
        with self.assertRaises(IntegrityError), transaction.atomic():
            Pony.objects.create(pink=1, weight=1.0)
        # Add another one.
        lt_check = models.Q(pink__lt=100)
        lt_constraint = models.CheckConstraint(
            check=lt_check, name="test_add_constraint_pony_pink_lt_100"
        )
        lt_operation = migrations.AddConstraint("Pony", lt_constraint)
        lt_operation.state_forwards("test_addconstraint", new_state)
        self.assertEqual(
            len(new_state.models["test_addconstraint", "pony"].options["constraints"]),
            2,
        )
        Pony = new_state.apps.get_model("test_addconstraint", "Pony")
        self.assertEqual(len(Pony._meta.constraints), 2)
        with connection.schema_editor() as editor:
            lt_operation.database_forwards(
                "test_addconstraint", editor, project_state, new_state
            )
        # pink=100 violates pink < 100.
        with self.assertRaises(IntegrityError), transaction.atomic():
            Pony.objects.create(pink=100, weight=1.0)
        # Test reversal
        with connection.schema_editor() as editor:
            gt_operation.database_backwards(
                "test_addconstraint", editor, new_state, project_state
            )
        # With the gt constraint reverted, pink=1 is accepted again.
        Pony.objects.create(pink=1, weight=1.0)
        # Test deconstruction
        definition = gt_operation.deconstruct()
        self.assertEqual(definition[0], "AddConstraint")
        self.assertEqual(definition[1], [])
        self.assertEqual(
            definition[2], {"model_name": "Pony", "constraint": gt_constraint}
        )
@skipUnlessDBFeature("supports_table_check_constraints")
def test_add_constraint_percent_escaping(self):
app_label = "add_constraint_string_quoting"
operations = [
migrations.CreateModel(
"Author",
fields=[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=100)),
("surname", models.CharField(max_length=100, default="")),
("rebate", models.CharField(max_length=100)),
],
),
]
from_state = self.apply_operations(app_label, ProjectState(), operations)
# "%" generated in startswith lookup should be escaped in a way that is
# considered a leading wildcard.
check = models.Q(name__startswith="Albert")
constraint = models.CheckConstraint(check=check, name="name_constraint")
operation = migrations.AddConstraint("Author", constraint)
to_state = from_state.clone()
operation.state_forwards(app_label, to_state)
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, from_state, to_state)
Author = to_state.apps.get_model(app_label, "Author")
with self.assertRaises(IntegrityError), transaction.atomic():
Author.objects.create(name="Artur")
# Literal "%" should be escaped in a way that is not a considered a
# wildcard.
check = models.Q(rebate__endswith="%")
constraint = models.CheckConstraint(check=check, name="rebate_constraint")
operation = migrations.AddConstraint("Author", constraint)
from_state = to_state
to_state = from_state.clone()
operation.state_forwards(app_label, to_state)
Author = to_state.apps.get_model(app_label, "Author")
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, from_state, to_state)
Author = to_state.apps.get_model(app_label, "Author")
with self.assertRaises(IntegrityError), transaction.atomic():
Author.objects.create(name="Albert", rebate="10$")
author = Author.objects.create(name="Albert", rebate="10%")
self.assertEqual(Author.objects.get(), author)
# Right-hand-side baked "%" literals should not be used for parameters
# interpolation.
check = ~models.Q(surname__startswith=models.F("name"))
constraint = models.CheckConstraint(check=check, name="name_constraint_rhs")
operation = migrations.AddConstraint("Author", constraint)
from_state = to_state
to_state = from_state.clone()
operation.state_forwards(app_label, to_state)
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, from_state, to_state)
Author = to_state.apps.get_model(app_label, "Author")
with self.assertRaises(IntegrityError), transaction.atomic():
Author.objects.create(name="Albert", surname="Alberto")
    @skipUnlessDBFeature("supports_table_check_constraints")
    def test_add_or_constraint(self):
        """A CheckConstraint built from OR-ed Q objects is enforced correctly."""
        app_label = "test_addorconstraint"
        constraint_name = "add_constraint_or"
        from_state = self.set_up_test_model(app_label)
        check = models.Q(pink__gt=2, weight__gt=2) | models.Q(weight__lt=0)
        constraint = models.CheckConstraint(check=check, name=constraint_name)
        operation = migrations.AddConstraint("Pony", constraint)
        to_state = from_state.clone()
        operation.state_forwards(app_label, to_state)
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, from_state, to_state)
        Pony = to_state.apps.get_model(app_label, "Pony")
        # Rows satisfying neither branch are rejected...
        with self.assertRaises(IntegrityError), transaction.atomic():
            Pony.objects.create(pink=2, weight=3.0)
        with self.assertRaises(IntegrityError), transaction.atomic():
            Pony.objects.create(pink=3, weight=1.0)
        # ...while rows satisfying either branch are accepted.
        Pony.objects.bulk_create(
            [
                Pony(pink=3, weight=-1.0),
                Pony(pink=1, weight=-1.0),
                Pony(pink=3, weight=3.0),
            ]
        )
    @skipUnlessDBFeature("supports_table_check_constraints")
    def test_add_constraint_combinable(self):
        """
        A CheckConstraint whose check uses a combined expression
        (100 - F("unread")) is enforced correctly.
        """
        app_label = "test_addconstraint_combinable"
        operations = [
            migrations.CreateModel(
                "Book",
                fields=[
                    ("id", models.AutoField(primary_key=True)),
                    ("read", models.PositiveIntegerField()),
                    ("unread", models.PositiveIntegerField()),
                ],
            ),
        ]
        from_state = self.apply_operations(app_label, ProjectState(), operations)
        constraint = models.CheckConstraint(
            check=models.Q(read=(100 - models.F("unread"))),
            name="test_addconstraint_combinable_sum_100",
        )
        operation = migrations.AddConstraint("Book", constraint)
        to_state = from_state.clone()
        operation.state_forwards(app_label, to_state)
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, from_state, to_state)
        Book = to_state.apps.get_model(app_label, "Book")
        # 70 + 10 != 100 -> rejected; 70 + 30 == 100 -> accepted.
        with self.assertRaises(IntegrityError), transaction.atomic():
            Book.objects.create(read=70, unread=10)
        Book.objects.create(read=70, unread=30)
    @skipUnlessDBFeature("supports_table_check_constraints")
    def test_remove_constraint(self):
        """RemoveConstraint drops a check constraint; reversal re-adds it."""
        project_state = self.set_up_test_model(
            "test_removeconstraint",
            constraints=[
                models.CheckConstraint(
                    check=models.Q(pink__gt=2),
                    name="test_remove_constraint_pony_pink_gt_2",
                ),
                models.CheckConstraint(
                    check=models.Q(pink__lt=100),
                    name="test_remove_constraint_pony_pink_lt_100",
                ),
            ],
        )
        gt_operation = migrations.RemoveConstraint(
            "Pony", "test_remove_constraint_pony_pink_gt_2"
        )
        self.assertEqual(
            gt_operation.describe(),
            "Remove constraint test_remove_constraint_pony_pink_gt_2 from model Pony",
        )
        self.assertEqual(
            gt_operation.migration_name_fragment,
            "remove_pony_test_remove_constraint_pony_pink_gt_2",
        )
        # Test state alteration
        new_state = project_state.clone()
        gt_operation.state_forwards("test_removeconstraint", new_state)
        self.assertEqual(
            len(
                new_state.models["test_removeconstraint", "pony"].options["constraints"]
            ),
            1,
        )
        Pony = new_state.apps.get_model("test_removeconstraint", "Pony")
        self.assertEqual(len(Pony._meta.constraints), 1)
        # Test database alteration
        with connection.schema_editor() as editor:
            gt_operation.database_forwards(
                "test_removeconstraint", editor, project_state, new_state
            )
        # pink=1 no longer violates anything (gt constraint removed)...
        Pony.objects.create(pink=1, weight=1.0).delete()
        # ...but the lt constraint is still enforced.
        with self.assertRaises(IntegrityError), transaction.atomic():
            Pony.objects.create(pink=100, weight=1.0)
        # Remove the other one.
        lt_operation = migrations.RemoveConstraint(
            "Pony", "test_remove_constraint_pony_pink_lt_100"
        )
        lt_operation.state_forwards("test_removeconstraint", new_state)
        self.assertEqual(
            len(
                new_state.models["test_removeconstraint", "pony"].options["constraints"]
            ),
            0,
        )
        Pony = new_state.apps.get_model("test_removeconstraint", "Pony")
        self.assertEqual(len(Pony._meta.constraints), 0)
        with connection.schema_editor() as editor:
            lt_operation.database_forwards(
                "test_removeconstraint", editor, project_state, new_state
            )
        Pony.objects.create(pink=100, weight=1.0).delete()
        # Test reversal
        with connection.schema_editor() as editor:
            gt_operation.database_backwards(
                "test_removeconstraint", editor, new_state, project_state
            )
        # The re-added gt constraint rejects pink=1 again.
        with self.assertRaises(IntegrityError), transaction.atomic():
            Pony.objects.create(pink=1, weight=1.0)
        # Test deconstruction
        definition = gt_operation.deconstruct()
        self.assertEqual(definition[0], "RemoveConstraint")
        self.assertEqual(definition[1], [])
        self.assertEqual(
            definition[2],
            {"model_name": "Pony", "name": "test_remove_constraint_pony_pink_gt_2"},
        )
    def test_add_partial_unique_constraint(self):
        """
        AddConstraint with a conditional UniqueConstraint: enforced only on
        backends that support partial indexes.
        """
        project_state = self.set_up_test_model("test_addpartialuniqueconstraint")
        partial_unique_constraint = models.UniqueConstraint(
            fields=["pink"],
            condition=models.Q(weight__gt=5),
            name="test_constraint_pony_pink_for_weight_gt_5_uniq",
        )
        operation = migrations.AddConstraint("Pony", partial_unique_constraint)
        self.assertEqual(
            operation.describe(),
            "Create constraint test_constraint_pony_pink_for_weight_gt_5_uniq "
            "on model Pony",
        )
        # Test the state alteration
        new_state = project_state.clone()
        operation.state_forwards("test_addpartialuniqueconstraint", new_state)
        self.assertEqual(
            len(
                new_state.models["test_addpartialuniqueconstraint", "pony"].options[
                    "constraints"
                ]
            ),
            1,
        )
        Pony = new_state.apps.get_model("test_addpartialuniqueconstraint", "Pony")
        self.assertEqual(len(Pony._meta.constraints), 1)
        # Test the database alteration
        with connection.schema_editor() as editor:
            operation.database_forwards(
                "test_addpartialuniqueconstraint", editor, project_state, new_state
            )
        # Test constraint works
        # Duplicates below the weight threshold are always allowed.
        Pony.objects.create(pink=1, weight=4.0)
        Pony.objects.create(pink=1, weight=4.0)
        Pony.objects.create(pink=1, weight=6.0)
        if connection.features.supports_partial_indexes:
            with self.assertRaises(IntegrityError), transaction.atomic():
                Pony.objects.create(pink=1, weight=7.0)
        else:
            # Backends without partial indexes don't enforce the condition.
            Pony.objects.create(pink=1, weight=7.0)
        # Test reversal
        with connection.schema_editor() as editor:
            operation.database_backwards(
                "test_addpartialuniqueconstraint", editor, new_state, project_state
            )
        # Test constraint doesn't work
        Pony.objects.create(pink=1, weight=7.0)
        # Test deconstruction
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "AddConstraint")
        self.assertEqual(definition[1], [])
        self.assertEqual(
            definition[2],
            {"model_name": "Pony", "constraint": partial_unique_constraint},
        )
    def test_remove_partial_unique_constraint(self):
        """
        RemoveConstraint drops a conditional (partial) UniqueConstraint from
        state and the database; reversal restores enforcement on backends
        that support partial indexes.
        """
        project_state = self.set_up_test_model(
            "test_removepartialuniqueconstraint",
            constraints=[
                models.UniqueConstraint(
                    fields=["pink"],
                    condition=models.Q(weight__gt=5),
                    name="test_constraint_pony_pink_for_weight_gt_5_uniq",
                ),
            ],
        )
        gt_operation = migrations.RemoveConstraint(
            "Pony", "test_constraint_pony_pink_for_weight_gt_5_uniq"
        )
        self.assertEqual(
            gt_operation.describe(),
            "Remove constraint test_constraint_pony_pink_for_weight_gt_5_uniq from "
            "model Pony",
        )
        # Test state alteration
        new_state = project_state.clone()
        gt_operation.state_forwards("test_removepartialuniqueconstraint", new_state)
        self.assertEqual(
            len(
                new_state.models["test_removepartialuniqueconstraint", "pony"].options[
                    "constraints"
                ]
            ),
            0,
        )
        Pony = new_state.apps.get_model("test_removepartialuniqueconstraint", "Pony")
        self.assertEqual(len(Pony._meta.constraints), 0)
        # Test database alteration
        with connection.schema_editor() as editor:
            gt_operation.database_forwards(
                "test_removepartialuniqueconstraint", editor, project_state, new_state
            )
        # Test constraint doesn't work
        Pony.objects.create(pink=1, weight=4.0)
        Pony.objects.create(pink=1, weight=4.0)
        Pony.objects.create(pink=1, weight=6.0)
        # Deleted immediately so it doesn't collide once the constraint is
        # re-added by the reversal below.
        Pony.objects.create(pink=1, weight=7.0).delete()
        # Test reversal
        with connection.schema_editor() as editor:
            gt_operation.database_backwards(
                "test_removepartialuniqueconstraint", editor, new_state, project_state
            )
        # Test constraint works
        if connection.features.supports_partial_indexes:
            with self.assertRaises(IntegrityError), transaction.atomic():
                Pony.objects.create(pink=1, weight=7.0)
        else:
            Pony.objects.create(pink=1, weight=7.0)
        # Test deconstruction
        definition = gt_operation.deconstruct()
        self.assertEqual(definition[0], "RemoveConstraint")
        self.assertEqual(definition[1], [])
        self.assertEqual(
            definition[2],
            {
                "model_name": "Pony",
                "name": "test_constraint_pony_pink_for_weight_gt_5_uniq",
            },
        )
    def test_add_deferred_unique_constraint(self):
        """
        AddConstraint with a DEFERRED UniqueConstraint updates state; on
        supporting backends the constraint is checked at transaction commit
        and can be made IMMEDIATE via SET CONSTRAINTS. On other backends the
        operation emits no SQL.
        """
        app_label = "test_adddeferred_uc"
        project_state = self.set_up_test_model(app_label)
        deferred_unique_constraint = models.UniqueConstraint(
            fields=["pink"],
            name="deferred_pink_constraint_add",
            deferrable=models.Deferrable.DEFERRED,
        )
        operation = migrations.AddConstraint("Pony", deferred_unique_constraint)
        self.assertEqual(
            operation.describe(),
            "Create constraint deferred_pink_constraint_add on model Pony",
        )
        # Add constraint.
        new_state = project_state.clone()
        operation.state_forwards(app_label, new_state)
        self.assertEqual(
            len(new_state.models[app_label, "pony"].options["constraints"]), 1
        )
        Pony = new_state.apps.get_model(app_label, "Pony")
        self.assertEqual(len(Pony._meta.constraints), 1)
        with connection.schema_editor() as editor, CaptureQueriesContext(
            connection
        ) as ctx:
            operation.database_forwards(app_label, editor, project_state, new_state)
        Pony.objects.create(pink=1, weight=4.0)
        if connection.features.supports_deferrable_unique_constraints:
            # Unique constraint is deferred.
            # The duplicate is fixed (pink=2) before commit, so no error.
            with transaction.atomic():
                obj = Pony.objects.create(pink=1, weight=4.0)
                obj.pink = 2
                obj.save()
            # Constraint behavior can be changed with SET CONSTRAINTS.
            with self.assertRaises(IntegrityError):
                with transaction.atomic(), connection.cursor() as cursor:
                    quoted_name = connection.ops.quote_name(
                        deferred_unique_constraint.name
                    )
                    cursor.execute("SET CONSTRAINTS %s IMMEDIATE" % quoted_name)
                    obj = Pony.objects.create(pink=1, weight=4.0)
                    obj.pink = 3
                    obj.save()
        else:
            # Unsupported backends must run zero queries for this operation.
            self.assertEqual(len(ctx), 0)
            Pony.objects.create(pink=1, weight=4.0)
        # Reversal.
        with connection.schema_editor() as editor:
            operation.database_backwards(app_label, editor, new_state, project_state)
        # Constraint doesn't work.
        Pony.objects.create(pink=1, weight=4.0)
        # Deconstruction.
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "AddConstraint")
        self.assertEqual(definition[1], [])
        self.assertEqual(
            definition[2],
            {"model_name": "Pony", "constraint": deferred_unique_constraint},
        )
    def test_remove_deferred_unique_constraint(self):
        """
        RemoveConstraint drops a DEFERRED UniqueConstraint; reversal restores
        deferred enforcement on supporting backends, while unsupported
        backends run no SQL for the removal.
        """
        app_label = "test_removedeferred_uc"
        deferred_unique_constraint = models.UniqueConstraint(
            fields=["pink"],
            name="deferred_pink_constraint_rm",
            deferrable=models.Deferrable.DEFERRED,
        )
        project_state = self.set_up_test_model(
            app_label, constraints=[deferred_unique_constraint]
        )
        operation = migrations.RemoveConstraint("Pony", deferred_unique_constraint.name)
        self.assertEqual(
            operation.describe(),
            "Remove constraint deferred_pink_constraint_rm from model Pony",
        )
        # Remove constraint.
        new_state = project_state.clone()
        operation.state_forwards(app_label, new_state)
        self.assertEqual(
            len(new_state.models[app_label, "pony"].options["constraints"]), 0
        )
        Pony = new_state.apps.get_model(app_label, "Pony")
        self.assertEqual(len(Pony._meta.constraints), 0)
        with connection.schema_editor() as editor, CaptureQueriesContext(
            connection
        ) as ctx:
            operation.database_forwards(app_label, editor, project_state, new_state)
        # Constraint doesn't work.
        Pony.objects.create(pink=1, weight=4.0)
        # Duplicate deleted right away so the reversal below can re-add the
        # unique constraint without a pre-existing violation.
        Pony.objects.create(pink=1, weight=4.0).delete()
        if not connection.features.supports_deferrable_unique_constraints:
            # Unsupported backends must run zero queries for this operation.
            self.assertEqual(len(ctx), 0)
        # Reversal.
        with connection.schema_editor() as editor:
            operation.database_backwards(app_label, editor, new_state, project_state)
        if connection.features.supports_deferrable_unique_constraints:
            # Unique constraint is deferred.
            with transaction.atomic():
                obj = Pony.objects.create(pink=1, weight=4.0)
                obj.pink = 2
                obj.save()
            # Constraint behavior can be changed with SET CONSTRAINTS.
            with self.assertRaises(IntegrityError):
                with transaction.atomic(), connection.cursor() as cursor:
                    quoted_name = connection.ops.quote_name(
                        deferred_unique_constraint.name
                    )
                    cursor.execute("SET CONSTRAINTS %s IMMEDIATE" % quoted_name)
                    obj = Pony.objects.create(pink=1, weight=4.0)
                    obj.pink = 3
                    obj.save()
        else:
            Pony.objects.create(pink=1, weight=4.0)
        # Deconstruction.
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "RemoveConstraint")
        self.assertEqual(definition[1], [])
        self.assertEqual(
            definition[2],
            {
                "model_name": "Pony",
                "name": "deferred_pink_constraint_rm",
            },
        )
    def test_add_covering_unique_constraint(self):
        """
        AddConstraint with a covering UniqueConstraint (include=) updates
        state and enforces uniqueness on backends that support covering
        indexes; other backends run no SQL.
        """
        app_label = "test_addcovering_uc"
        project_state = self.set_up_test_model(app_label)
        covering_unique_constraint = models.UniqueConstraint(
            fields=["pink"],
            name="covering_pink_constraint_add",
            include=["weight"],
        )
        operation = migrations.AddConstraint("Pony", covering_unique_constraint)
        self.assertEqual(
            operation.describe(),
            "Create constraint covering_pink_constraint_add on model Pony",
        )
        # Add constraint.
        new_state = project_state.clone()
        operation.state_forwards(app_label, new_state)
        self.assertEqual(
            len(new_state.models[app_label, "pony"].options["constraints"]), 1
        )
        Pony = new_state.apps.get_model(app_label, "Pony")
        self.assertEqual(len(Pony._meta.constraints), 1)
        with connection.schema_editor() as editor, CaptureQueriesContext(
            connection
        ) as ctx:
            operation.database_forwards(app_label, editor, project_state, new_state)
        Pony.objects.create(pink=1, weight=4.0)
        if connection.features.supports_covering_indexes:
            with self.assertRaises(IntegrityError):
                Pony.objects.create(pink=1, weight=4.0)
        else:
            # Unsupported backends must run zero queries for this operation.
            self.assertEqual(len(ctx), 0)
            Pony.objects.create(pink=1, weight=4.0)
        # Reversal.
        with connection.schema_editor() as editor:
            operation.database_backwards(app_label, editor, new_state, project_state)
        # Constraint doesn't work.
        Pony.objects.create(pink=1, weight=4.0)
        # Deconstruction.
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "AddConstraint")
        self.assertEqual(definition[1], [])
        self.assertEqual(
            definition[2],
            {"model_name": "Pony", "constraint": covering_unique_constraint},
        )
    def test_remove_covering_unique_constraint(self):
        """
        RemoveConstraint drops a covering UniqueConstraint (include=);
        reversal restores enforcement on backends with covering-index
        support, while unsupported backends run no SQL for the removal.
        """
        app_label = "test_removecovering_uc"
        covering_unique_constraint = models.UniqueConstraint(
            fields=["pink"],
            name="covering_pink_constraint_rm",
            include=["weight"],
        )
        project_state = self.set_up_test_model(
            app_label, constraints=[covering_unique_constraint]
        )
        operation = migrations.RemoveConstraint("Pony", covering_unique_constraint.name)
        self.assertEqual(
            operation.describe(),
            "Remove constraint covering_pink_constraint_rm from model Pony",
        )
        # Remove constraint.
        new_state = project_state.clone()
        operation.state_forwards(app_label, new_state)
        self.assertEqual(
            len(new_state.models[app_label, "pony"].options["constraints"]), 0
        )
        Pony = new_state.apps.get_model(app_label, "Pony")
        self.assertEqual(len(Pony._meta.constraints), 0)
        with connection.schema_editor() as editor, CaptureQueriesContext(
            connection
        ) as ctx:
            operation.database_forwards(app_label, editor, project_state, new_state)
        # Constraint doesn't work.
        Pony.objects.create(pink=1, weight=4.0)
        # Duplicate deleted right away so the reversal below can re-add the
        # unique constraint without a pre-existing violation.
        Pony.objects.create(pink=1, weight=4.0).delete()
        if not connection.features.supports_covering_indexes:
            # Unsupported backends must run zero queries for this operation.
            self.assertEqual(len(ctx), 0)
        # Reversal.
        with connection.schema_editor() as editor:
            operation.database_backwards(app_label, editor, new_state, project_state)
        if connection.features.supports_covering_indexes:
            with self.assertRaises(IntegrityError):
                Pony.objects.create(pink=1, weight=4.0)
        else:
            Pony.objects.create(pink=1, weight=4.0)
        # Deconstruction.
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "RemoveConstraint")
        self.assertEqual(definition[1], [])
        self.assertEqual(
            definition[2],
            {
                "model_name": "Pony",
                "name": "covering_pink_constraint_rm",
            },
        )
    def test_alter_field_with_func_unique_constraint(self):
        """
        AlterField on a column referenced by a functional UniqueConstraint
        keeps the constraint's index in place in both migration directions
        (on backends that support expression indexes).
        """
        app_label = "test_alfuncuc"
        constraint_name = f"{app_label}_pony_uq"
        table_name = f"{app_label}_pony"
        project_state = self.set_up_test_model(
            app_label,
            constraints=[
                models.UniqueConstraint("pink", "weight", name=constraint_name)
            ],
        )
        operation = migrations.AlterField(
            "Pony", "pink", models.IntegerField(null=True)
        )
        new_state = project_state.clone()
        operation.state_forwards(app_label, new_state)
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, project_state, new_state)
        if connection.features.supports_expression_indexes:
            # Index must survive the forward field alteration.
            self.assertIndexNameExists(table_name, constraint_name)
        with connection.schema_editor() as editor:
            operation.database_backwards(app_label, editor, new_state, project_state)
        if connection.features.supports_expression_indexes:
            # ...and the backward one.
            self.assertIndexNameExists(table_name, constraint_name)
    def test_add_func_unique_constraint(self):
        """
        AddConstraint with an expression-based UniqueConstraint (Abs(weight))
        creates the backing index and enforces uniqueness on backends that
        support expression indexes; otherwise no index is created.
        """
        app_label = "test_adfuncuc"
        constraint_name = f"{app_label}_pony_abs_uq"
        table_name = f"{app_label}_pony"
        project_state = self.set_up_test_model(app_label)
        constraint = models.UniqueConstraint(Abs("weight"), name=constraint_name)
        operation = migrations.AddConstraint("Pony", constraint)
        self.assertEqual(
            operation.describe(),
            "Create constraint test_adfuncuc_pony_abs_uq on model Pony",
        )
        self.assertEqual(
            operation.migration_name_fragment,
            "pony_test_adfuncuc_pony_abs_uq",
        )
        new_state = project_state.clone()
        operation.state_forwards(app_label, new_state)
        self.assertEqual(
            len(new_state.models[app_label, "pony"].options["constraints"]), 1
        )
        self.assertIndexNameNotExists(table_name, constraint_name)
        # Add constraint.
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, project_state, new_state)
        Pony = new_state.apps.get_model(app_label, "Pony")
        Pony.objects.create(weight=4.0)
        if connection.features.supports_expression_indexes:
            self.assertIndexNameExists(table_name, constraint_name)
            # weight=-4.0 collides with weight=4.0 via Abs().
            with self.assertRaises(IntegrityError):
                Pony.objects.create(weight=-4.0)
        else:
            self.assertIndexNameNotExists(table_name, constraint_name)
            Pony.objects.create(weight=-4.0)
        # Reversal.
        with connection.schema_editor() as editor:
            operation.database_backwards(app_label, editor, new_state, project_state)
        self.assertIndexNameNotExists(table_name, constraint_name)
        # Constraint doesn't work.
        Pony.objects.create(weight=-4.0)
        # Deconstruction.
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "AddConstraint")
        self.assertEqual(definition[1], [])
        self.assertEqual(
            definition[2],
            {"model_name": "Pony", "constraint": constraint},
        )
    def test_remove_func_unique_constraint(self):
        """
        RemoveConstraint drops an expression-based UniqueConstraint and its
        backing index; reversal restores the index and enforcement on
        backends that support expression indexes.
        """
        app_label = "test_rmfuncuc"
        constraint_name = f"{app_label}_pony_abs_uq"
        table_name = f"{app_label}_pony"
        project_state = self.set_up_test_model(
            app_label,
            constraints=[
                models.UniqueConstraint(Abs("weight"), name=constraint_name),
            ],
        )
        self.assertTableExists(table_name)
        if connection.features.supports_expression_indexes:
            self.assertIndexNameExists(table_name, constraint_name)
        operation = migrations.RemoveConstraint("Pony", constraint_name)
        self.assertEqual(
            operation.describe(),
            "Remove constraint test_rmfuncuc_pony_abs_uq from model Pony",
        )
        self.assertEqual(
            operation.migration_name_fragment,
            "remove_pony_test_rmfuncuc_pony_abs_uq",
        )
        new_state = project_state.clone()
        operation.state_forwards(app_label, new_state)
        self.assertEqual(
            len(new_state.models[app_label, "pony"].options["constraints"]), 0
        )
        Pony = new_state.apps.get_model(app_label, "Pony")
        self.assertEqual(len(Pony._meta.constraints), 0)
        # Remove constraint.
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, project_state, new_state)
        self.assertIndexNameNotExists(table_name, constraint_name)
        # Constraint doesn't work.
        Pony.objects.create(pink=1, weight=4.0)
        # Deleted immediately so the Abs() collision with weight=4.0 doesn't
        # block the reversal below from re-adding the constraint.
        Pony.objects.create(pink=1, weight=-4.0).delete()
        # Reversal.
        with connection.schema_editor() as editor:
            operation.database_backwards(app_label, editor, new_state, project_state)
        if connection.features.supports_expression_indexes:
            self.assertIndexNameExists(table_name, constraint_name)
            with self.assertRaises(IntegrityError):
                Pony.objects.create(weight=-4.0)
        else:
            self.assertIndexNameNotExists(table_name, constraint_name)
            Pony.objects.create(weight=-4.0)
        # Deconstruction.
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "RemoveConstraint")
        self.assertEqual(definition[1], [])
        self.assertEqual(definition[2], {"model_name": "Pony", "name": constraint_name})
def test_alter_model_options(self):
"""
Tests the AlterModelOptions operation.
"""
project_state = self.set_up_test_model("test_almoop")
# Test the state alteration (no DB alteration to test)
operation = migrations.AlterModelOptions(
"Pony", {"permissions": [("can_groom", "Can groom")]}
)
self.assertEqual(operation.describe(), "Change Meta options on Pony")
self.assertEqual(operation.migration_name_fragment, "alter_pony_options")
new_state = project_state.clone()
operation.state_forwards("test_almoop", new_state)
self.assertEqual(
len(
project_state.models["test_almoop", "pony"].options.get(
"permissions", []
)
),
0,
)
self.assertEqual(
len(new_state.models["test_almoop", "pony"].options.get("permissions", [])),
1,
)
self.assertEqual(
new_state.models["test_almoop", "pony"].options["permissions"][0][0],
"can_groom",
)
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterModelOptions")
self.assertEqual(definition[1], [])
self.assertEqual(
definition[2],
{"name": "Pony", "options": {"permissions": [("can_groom", "Can groom")]}},
)
def test_alter_model_options_emptying(self):
"""
The AlterModelOptions operation removes keys from the dict (#23121)
"""
project_state = self.set_up_test_model("test_almoop", options=True)
# Test the state alteration (no DB alteration to test)
operation = migrations.AlterModelOptions("Pony", {})
self.assertEqual(operation.describe(), "Change Meta options on Pony")
new_state = project_state.clone()
operation.state_forwards("test_almoop", new_state)
self.assertEqual(
len(
project_state.models["test_almoop", "pony"].options.get(
"permissions", []
)
),
1,
)
self.assertEqual(
len(new_state.models["test_almoop", "pony"].options.get("permissions", [])),
0,
)
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "AlterModelOptions")
self.assertEqual(definition[1], [])
self.assertEqual(definition[2], {"name": "Pony", "options": {}})
    def test_alter_order_with_respect_to(self):
        """
        Tests the AlterOrderWithRespectTo operation.
        """
        project_state = self.set_up_test_model("test_alorwrtto", related_model=True)
        # Test the state alteration
        operation = migrations.AlterOrderWithRespectTo("Rider", "pony")
        self.assertEqual(
            operation.describe(), "Set order_with_respect_to on Rider to pony"
        )
        self.assertEqual(
            operation.migration_name_fragment,
            "alter_rider_order_with_respect_to",
        )
        new_state = project_state.clone()
        operation.state_forwards("test_alorwrtto", new_state)
        self.assertIsNone(
            project_state.models["test_alorwrtto", "rider"].options.get(
                "order_with_respect_to", None
            )
        )
        self.assertEqual(
            new_state.models["test_alorwrtto", "rider"].options.get(
                "order_with_respect_to", None
            ),
            "pony",
        )
        # Make sure there's no matching index
        self.assertColumnNotExists("test_alorwrtto_rider", "_order")
        # Create some rows before alteration
        rendered_state = project_state.apps
        pony = rendered_state.get_model("test_alorwrtto", "Pony").objects.create(
            weight=50
        )
        rider1 = rendered_state.get_model("test_alorwrtto", "Rider").objects.create(
            pony=pony
        )
        rider1.friend = rider1
        rider1.save()
        rider2 = rendered_state.get_model("test_alorwrtto", "Rider").objects.create(
            pony=pony
        )
        rider2.friend = rider2
        rider2.save()
        # Test the database alteration
        with connection.schema_editor() as editor:
            operation.database_forwards(
                "test_alorwrtto", editor, project_state, new_state
            )
        self.assertColumnExists("test_alorwrtto_rider", "_order")
        # Check for correct value in rows
        # Pre-existing rows get the column default, not a computed ordering.
        updated_riders = new_state.apps.get_model(
            "test_alorwrtto", "Rider"
        ).objects.all()
        self.assertEqual(updated_riders[0]._order, 0)
        self.assertEqual(updated_riders[1]._order, 0)
        # And test reversal
        with connection.schema_editor() as editor:
            operation.database_backwards(
                "test_alorwrtto", editor, new_state, project_state
            )
        self.assertColumnNotExists("test_alorwrtto_rider", "_order")
        # And deconstruction
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "AlterOrderWithRespectTo")
        self.assertEqual(definition[1], [])
        self.assertEqual(
            definition[2], {"name": "Rider", "order_with_respect_to": "pony"}
        )
def test_alter_model_managers(self):
"""
The managers on a model are set.
"""
project_state = self.set_up_test_model("test_almoma")
# Test the state alteration
operation = migrations.AlterModelManagers(
"Pony",
managers=[
("food_qs", FoodQuerySet.as_manager()),
("food_mgr", FoodManager("a", "b")),
("food_mgr_kwargs", FoodManager("x", "y", 3, 4)),
],
)
self.assertEqual(operation.describe(), "Change managers on Pony")
self.assertEqual(operation.migration_name_fragment, "alter_pony_managers")
managers = project_state.models["test_almoma", "pony"].managers
self.assertEqual(managers, [])
new_state = project_state.clone()
operation.state_forwards("test_almoma", new_state)
self.assertIn(("test_almoma", "pony"), new_state.models)
managers = new_state.models["test_almoma", "pony"].managers
self.assertEqual(managers[0][0], "food_qs")
self.assertIsInstance(managers[0][1], models.Manager)
self.assertEqual(managers[1][0], "food_mgr")
self.assertIsInstance(managers[1][1], FoodManager)
self.assertEqual(managers[1][1].args, ("a", "b", 1, 2))
self.assertEqual(managers[2][0], "food_mgr_kwargs")
self.assertIsInstance(managers[2][1], FoodManager)
self.assertEqual(managers[2][1].args, ("x", "y", 3, 4))
rendered_state = new_state.apps
model = rendered_state.get_model("test_almoma", "pony")
self.assertIsInstance(model.food_qs, models.Manager)
self.assertIsInstance(model.food_mgr, FoodManager)
self.assertIsInstance(model.food_mgr_kwargs, FoodManager)
def test_alter_model_managers_emptying(self):
"""
The managers on a model are set.
"""
project_state = self.set_up_test_model("test_almomae", manager_model=True)
# Test the state alteration
operation = migrations.AlterModelManagers("Food", managers=[])
self.assertEqual(operation.describe(), "Change managers on Food")
self.assertIn(("test_almomae", "food"), project_state.models)
managers = project_state.models["test_almomae", "food"].managers
self.assertEqual(managers[0][0], "food_qs")
self.assertIsInstance(managers[0][1], models.Manager)
self.assertEqual(managers[1][0], "food_mgr")
self.assertIsInstance(managers[1][1], FoodManager)
self.assertEqual(managers[1][1].args, ("a", "b", 1, 2))
self.assertEqual(managers[2][0], "food_mgr_kwargs")
self.assertIsInstance(managers[2][1], FoodManager)
self.assertEqual(managers[2][1].args, ("x", "y", 3, 4))
new_state = project_state.clone()
operation.state_forwards("test_almomae", new_state)
managers = new_state.models["test_almomae", "food"].managers
self.assertEqual(managers, [])
    def test_alter_fk(self):
        """
        Creating and then altering an FK works correctly
        and deals with the pending SQL (#23091)
        """
        project_state = self.set_up_test_model("test_alfk")
        # Test adding and then altering the FK in one go
        create_operation = migrations.CreateModel(
            name="Rider",
            fields=[
                ("id", models.AutoField(primary_key=True)),
                ("pony", models.ForeignKey("Pony", models.CASCADE)),
            ],
        )
        create_state = project_state.clone()
        create_operation.state_forwards("test_alfk", create_state)
        alter_operation = migrations.AlterField(
            model_name="Rider",
            name="pony",
            field=models.ForeignKey("Pony", models.CASCADE, editable=False),
        )
        alter_state = create_state.clone()
        alter_operation.state_forwards("test_alfk", alter_state)
        # Both operations run inside a single schema editor so the AlterField
        # executes while the CreateModel's deferred (pending) SQL is queued.
        with connection.schema_editor() as editor:
            create_operation.database_forwards(
                "test_alfk", editor, project_state, create_state
            )
            alter_operation.database_forwards(
                "test_alfk", editor, create_state, alter_state
            )
    def test_alter_fk_non_fk(self):
        """
        Altering an FK to a non-FK works (#23244)
        """
        # Test the state alteration
        operation = migrations.AlterField(
            model_name="Rider",
            name="pony",
            field=models.FloatField(),
        )
        project_state, new_state = self.make_test_state(
            "test_afknfk", operation, related_model=True
        )
        # Test the database alteration
        # FK columns are stored with an "_id" suffix; the plain column only
        # appears once the field stops being a relation.
        self.assertColumnExists("test_afknfk_rider", "pony_id")
        self.assertColumnNotExists("test_afknfk_rider", "pony")
        with connection.schema_editor() as editor:
            operation.database_forwards("test_afknfk", editor, project_state, new_state)
        self.assertColumnExists("test_afknfk_rider", "pony")
        self.assertColumnNotExists("test_afknfk_rider", "pony_id")
        # And test reversal
        with connection.schema_editor() as editor:
            operation.database_backwards(
                "test_afknfk", editor, new_state, project_state
            )
        self.assertColumnExists("test_afknfk_rider", "pony_id")
        self.assertColumnNotExists("test_afknfk_rider", "pony")
    def test_run_sql(self):
        """
        Tests the RunSQL operation.
        """
        project_state = self.set_up_test_model("test_runsql")
        # Create the operation
        operation = migrations.RunSQL(
            # Use a multi-line string with a comment to test splitting on
            # SQLite and MySQL respectively.
            "CREATE TABLE i_love_ponies (id int, special_thing varchar(15));\n"
            "INSERT INTO i_love_ponies (id, special_thing) "
            "VALUES (1, 'i love ponies'); -- this is magic!\n"
            "INSERT INTO i_love_ponies (id, special_thing) "
            "VALUES (2, 'i love django');\n"
            "UPDATE i_love_ponies SET special_thing = 'Ponies' "
            "WHERE special_thing LIKE '%%ponies';"
            "UPDATE i_love_ponies SET special_thing = 'Django' "
            "WHERE special_thing LIKE '%django';",
            # Run delete queries to test for parameter substitution failure
            # reported in #23426
            "DELETE FROM i_love_ponies WHERE special_thing LIKE '%Django%';"
            "DELETE FROM i_love_ponies WHERE special_thing LIKE '%%Ponies%%';"
            "DROP TABLE i_love_ponies",
            state_operations=[
                migrations.CreateModel(
                    "SomethingElse", [("id", models.AutoField(primary_key=True))]
                )
            ],
        )
        self.assertEqual(operation.describe(), "Raw SQL operation")
        # Test the state alteration
        # state_operations, not the raw SQL, drive the state change.
        new_state = project_state.clone()
        operation.state_forwards("test_runsql", new_state)
        self.assertEqual(
            len(new_state.models["test_runsql", "somethingelse"].fields), 1
        )
        # Make sure there's no table
        self.assertTableNotExists("i_love_ponies")
        # Test SQL collection
        # collect_sql mode records statements without executing them, and
        # must preserve literal %% without interpolation.
        with connection.schema_editor(collect_sql=True) as editor:
            operation.database_forwards("test_runsql", editor, project_state, new_state)
            self.assertIn("LIKE '%%ponies';", "\n".join(editor.collected_sql))
            operation.database_backwards(
                "test_runsql", editor, project_state, new_state
            )
            self.assertIn("LIKE '%%Ponies%%';", "\n".join(editor.collected_sql))
        # Test the database alteration
        with connection.schema_editor() as editor:
            operation.database_forwards("test_runsql", editor, project_state, new_state)
        self.assertTableExists("i_love_ponies")
        # Make sure all the SQL was processed
        with connection.cursor() as cursor:
            cursor.execute("SELECT COUNT(*) FROM i_love_ponies")
            self.assertEqual(cursor.fetchall()[0][0], 2)
            cursor.execute(
                "SELECT COUNT(*) FROM i_love_ponies WHERE special_thing = 'Django'"
            )
            self.assertEqual(cursor.fetchall()[0][0], 1)
            cursor.execute(
                "SELECT COUNT(*) FROM i_love_ponies WHERE special_thing = 'Ponies'"
            )
            self.assertEqual(cursor.fetchall()[0][0], 1)
        # And test reversal
        self.assertTrue(operation.reversible)
        with connection.schema_editor() as editor:
            operation.database_backwards(
                "test_runsql", editor, new_state, project_state
            )
        self.assertTableNotExists("i_love_ponies")
        # And deconstruction
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "RunSQL")
        self.assertEqual(definition[1], [])
        self.assertEqual(
            sorted(definition[2]), ["reverse_sql", "sql", "state_operations"]
        )
        # And elidable reduction
        self.assertIs(False, operation.reduce(operation, []))
        elidable_operation = migrations.RunSQL("SELECT 1 FROM void;", elidable=True)
        self.assertEqual(elidable_operation.reduce(operation, []), [operation])
    def test_run_sql_params(self):
        """
        #23426 - RunSQL should accept parameters.
        """
        project_state = self.set_up_test_model("test_runsql")
        # Create the operation
        operation = migrations.RunSQL(
            ["CREATE TABLE i_love_ponies (id int, special_thing varchar(15));"],
            ["DROP TABLE i_love_ponies"],
        )
        # Mixes the three accepted statement forms: a bare string, a
        # [sql, params-list] pair, and a (sql, params-tuple) pair.
        param_operation = migrations.RunSQL(
            # forwards
            (
                "INSERT INTO i_love_ponies (id, special_thing) VALUES (1, 'Django');",
                [
                    "INSERT INTO i_love_ponies (id, special_thing) VALUES (2, %s);",
                    ["Ponies"],
                ],
                (
                    "INSERT INTO i_love_ponies (id, special_thing) VALUES (%s, %s);",
                    (
                        3,
                        "Python",
                    ),
                ),
            ),
            # backwards
            [
                "DELETE FROM i_love_ponies WHERE special_thing = 'Django';",
                ["DELETE FROM i_love_ponies WHERE special_thing = 'Ponies';", None],
                (
                    "DELETE FROM i_love_ponies WHERE id = %s OR special_thing = %s;",
                    [3, "Python"],
                ),
            ],
        )
        # Make sure there's no table
        self.assertTableNotExists("i_love_ponies")
        new_state = project_state.clone()
        # Test the database alteration
        with connection.schema_editor() as editor:
            operation.database_forwards("test_runsql", editor, project_state, new_state)
        # Test parameter passing
        with connection.schema_editor() as editor:
            param_operation.database_forwards(
                "test_runsql", editor, project_state, new_state
            )
        # Make sure all the SQL was processed
        with connection.cursor() as cursor:
            cursor.execute("SELECT COUNT(*) FROM i_love_ponies")
            self.assertEqual(cursor.fetchall()[0][0], 3)
        # Backwards statements must delete all three inserted rows.
        with connection.schema_editor() as editor:
            param_operation.database_backwards(
                "test_runsql", editor, new_state, project_state
            )
        with connection.cursor() as cursor:
            cursor.execute("SELECT COUNT(*) FROM i_love_ponies")
            self.assertEqual(cursor.fetchall()[0][0], 0)
        # And test reversal
        with connection.schema_editor() as editor:
            operation.database_backwards(
                "test_runsql", editor, new_state, project_state
            )
        self.assertTableNotExists("i_love_ponies")
    def test_run_sql_params_invalid(self):
        """
        #23426 - RunSQL should fail when a list of statements with an incorrect
        number of tuples is given.
        """
        project_state = self.set_up_test_model("test_runsql")
        new_state = project_state.clone()
        operation = migrations.RunSQL(
            # forwards
            # 1-element list: missing the params half of the pair.
            [["INSERT INTO foo (bar) VALUES ('buz');"]],
            # backwards
            # 3-element tuple: one element too many for an (sql, params) pair.
            (("DELETE FROM foo WHERE bar = 'buz';", "invalid", "parameter count"),),
        )
        with connection.schema_editor() as editor:
            with self.assertRaisesMessage(ValueError, "Expected a 2-tuple but got 1"):
                operation.database_forwards(
                    "test_runsql", editor, project_state, new_state
                )
        with connection.schema_editor() as editor:
            with self.assertRaisesMessage(ValueError, "Expected a 2-tuple but got 3"):
                operation.database_backwards(
                    "test_runsql", editor, new_state, project_state
                )
def test_run_sql_noop(self):
"""
#24098 - Tests no-op RunSQL operations.
"""
operation = migrations.RunSQL(migrations.RunSQL.noop, migrations.RunSQL.noop)
with connection.schema_editor() as editor:
operation.database_forwards("test_runsql", editor, None, None)
operation.database_backwards("test_runsql", editor, None, None)
def test_run_sql_add_missing_semicolon_on_collect_sql(self):
project_state = self.set_up_test_model("test_runsql")
new_state = project_state.clone()
tests = [
"INSERT INTO test_runsql_pony (pink, weight) VALUES (1, 1);\n",
"INSERT INTO test_runsql_pony (pink, weight) VALUES (1, 1)\n",
]
for sql in tests:
with self.subTest(sql=sql):
operation = migrations.RunSQL(sql, migrations.RunPython.noop)
with connection.schema_editor(collect_sql=True) as editor:
operation.database_forwards(
"test_runsql", editor, project_state, new_state
)
collected_sql = "\n".join(editor.collected_sql)
self.assertEqual(collected_sql.count(";"), 1)
    def test_run_python(self):
        """
        Tests the RunPython operation
        """

        project_state = self.set_up_test_model("test_runpython", mti_model=True)

        # Create the operation
        def inner_method(models, schema_editor):
            # Forward code: creates two Pony rows.
            Pony = models.get_model("test_runpython", "Pony")
            Pony.objects.create(pink=1, weight=3.55)
            Pony.objects.create(weight=5)

        def inner_method_reverse(models, schema_editor):
            # Reverse code: deletes exactly the rows inner_method created.
            Pony = models.get_model("test_runpython", "Pony")
            Pony.objects.filter(pink=1, weight=3.55).delete()
            Pony.objects.filter(weight=5).delete()

        operation = migrations.RunPython(
            inner_method, reverse_code=inner_method_reverse
        )
        self.assertEqual(operation.describe(), "Raw Python operation")
        # Test the state alteration does nothing
        new_state = project_state.clone()
        operation.state_forwards("test_runpython", new_state)
        self.assertEqual(new_state, project_state)
        # Test the database alteration
        self.assertEqual(
            project_state.apps.get_model("test_runpython", "Pony").objects.count(), 0
        )
        with connection.schema_editor() as editor:
            operation.database_forwards(
                "test_runpython", editor, project_state, new_state
            )
        self.assertEqual(
            project_state.apps.get_model("test_runpython", "Pony").objects.count(), 2
        )
        # Now test reversal
        self.assertTrue(operation.reversible)
        with connection.schema_editor() as editor:
            operation.database_backwards(
                "test_runpython", editor, project_state, new_state
            )
        self.assertEqual(
            project_state.apps.get_model("test_runpython", "Pony").objects.count(), 0
        )
        # Now test we can't use a string
        with self.assertRaisesMessage(
            ValueError, "RunPython must be supplied with a callable"
        ):
            migrations.RunPython("print 'ahahaha'")
        # And deconstruction
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "RunPython")
        self.assertEqual(definition[1], [])
        self.assertEqual(sorted(definition[2]), ["code", "reverse_code"])

        # Also test reversal fails, with an operation identical to above but
        # without reverse_code set.
        no_reverse_operation = migrations.RunPython(inner_method)
        self.assertFalse(no_reverse_operation.reversible)
        with connection.schema_editor() as editor:
            no_reverse_operation.database_forwards(
                "test_runpython", editor, project_state, new_state
            )
            with self.assertRaises(NotImplementedError):
                no_reverse_operation.database_backwards(
                    "test_runpython", editor, new_state, project_state
                )
        # The failed reversal leaves the two forward-created rows in place.
        self.assertEqual(
            project_state.apps.get_model("test_runpython", "Pony").objects.count(), 2
        )

        def create_ponies(models, schema_editor):
            # Forward-only code that also asserts PKs are assigned.
            Pony = models.get_model("test_runpython", "Pony")
            pony1 = Pony.objects.create(pink=1, weight=3.55)
            self.assertIsNot(pony1.pk, None)
            pony2 = Pony.objects.create(weight=5)
            self.assertIsNot(pony2.pk, None)
            self.assertNotEqual(pony1.pk, pony2.pk)

        operation = migrations.RunPython(create_ponies)
        with connection.schema_editor() as editor:
            operation.database_forwards(
                "test_runpython", editor, project_state, new_state
            )
        # Running totals: 2 (from above) + 2 new ponies.
        self.assertEqual(
            project_state.apps.get_model("test_runpython", "Pony").objects.count(), 4
        )
        # And deconstruction
        definition = operation.deconstruct()
        self.assertEqual(definition[0], "RunPython")
        self.assertEqual(definition[1], [])
        self.assertEqual(sorted(definition[2]), ["code"])

        def create_shetlandponies(models, schema_editor):
            # Exercises the MTI child model (mti_model=True above).
            ShetlandPony = models.get_model("test_runpython", "ShetlandPony")
            pony1 = ShetlandPony.objects.create(weight=4.0)
            self.assertIsNot(pony1.pk, None)
            pony2 = ShetlandPony.objects.create(weight=5.0)
            self.assertIsNot(pony2.pk, None)
            self.assertNotEqual(pony1.pk, pony2.pk)

        operation = migrations.RunPython(create_shetlandponies)
        with connection.schema_editor() as editor:
            operation.database_forwards(
                "test_runpython", editor, project_state, new_state
            )
        # MTI children also count as Pony rows: 4 + 2 = 6.
        self.assertEqual(
            project_state.apps.get_model("test_runpython", "Pony").objects.count(), 6
        )
        self.assertEqual(
            project_state.apps.get_model(
                "test_runpython", "ShetlandPony"
            ).objects.count(),
            2,
        )
        # And elidable reduction
        self.assertIs(False, operation.reduce(operation, []))
        elidable_operation = migrations.RunPython(inner_method, elidable=True)
        self.assertEqual(elidable_operation.reduce(operation, []), [operation])
    def test_run_python_atomic(self):
        """
        Tests the RunPython operation correctly handles the "atomic" keyword
        """
        project_state = self.set_up_test_model("test_runpythonatomic", mti_model=True)
        def inner_method(models, schema_editor):
            # Insert a row and then raise: the row survives only when the
            # surrounding operation is NOT wrapped in a rolled-back
            # transaction.
            Pony = models.get_model("test_runpythonatomic", "Pony")
            Pony.objects.create(pink=1, weight=3.55)
            raise ValueError("Adrian hates ponies.")
        # Verify atomicity when applying.
        atomic_migration = Migration("test", "test_runpythonatomic")
        atomic_migration.operations = [
            migrations.RunPython(inner_method, reverse_code=inner_method)
        ]
        non_atomic_migration = Migration("test", "test_runpythonatomic")
        non_atomic_migration.operations = [
            migrations.RunPython(inner_method, reverse_code=inner_method, atomic=False)
        ]
        # If we're a fully-transactional database, both versions should rollback
        if connection.features.can_rollback_ddl:
            self.assertEqual(
                project_state.apps.get_model(
                    "test_runpythonatomic", "Pony"
                ).objects.count(),
                0,
            )
            with self.assertRaises(ValueError):
                with connection.schema_editor() as editor:
                    atomic_migration.apply(project_state, editor)
            self.assertEqual(
                project_state.apps.get_model(
                    "test_runpythonatomic", "Pony"
                ).objects.count(),
                0,
            )
            with self.assertRaises(ValueError):
                with connection.schema_editor() as editor:
                    non_atomic_migration.apply(project_state, editor)
            self.assertEqual(
                project_state.apps.get_model(
                    "test_runpythonatomic", "Pony"
                ).objects.count(),
                0,
            )
        # Otherwise, the non-atomic operation should leave a row there
        else:
            self.assertEqual(
                project_state.apps.get_model(
                    "test_runpythonatomic", "Pony"
                ).objects.count(),
                0,
            )
            with self.assertRaises(ValueError):
                with connection.schema_editor() as editor:
                    atomic_migration.apply(project_state, editor)
            self.assertEqual(
                project_state.apps.get_model(
                    "test_runpythonatomic", "Pony"
                ).objects.count(),
                0,
            )
            with self.assertRaises(ValueError):
                with connection.schema_editor() as editor:
                    non_atomic_migration.apply(project_state, editor)
            # atomic=False let the insert from inner_method survive the error.
            self.assertEqual(
                project_state.apps.get_model(
                    "test_runpythonatomic", "Pony"
                ).objects.count(),
                1,
            )
        # Reset object count to zero and verify atomicity when unapplying.
        project_state.apps.get_model(
            "test_runpythonatomic", "Pony"
        ).objects.all().delete()
        # On a fully-transactional database, both versions rollback.
        if connection.features.can_rollback_ddl:
            self.assertEqual(
                project_state.apps.get_model(
                    "test_runpythonatomic", "Pony"
                ).objects.count(),
                0,
            )
            with self.assertRaises(ValueError):
                with connection.schema_editor() as editor:
                    atomic_migration.unapply(project_state, editor)
            self.assertEqual(
                project_state.apps.get_model(
                    "test_runpythonatomic", "Pony"
                ).objects.count(),
                0,
            )
            with self.assertRaises(ValueError):
                with connection.schema_editor() as editor:
                    non_atomic_migration.unapply(project_state, editor)
            self.assertEqual(
                project_state.apps.get_model(
                    "test_runpythonatomic", "Pony"
                ).objects.count(),
                0,
            )
        # Otherwise, the non-atomic operation leaves a row there.
        else:
            self.assertEqual(
                project_state.apps.get_model(
                    "test_runpythonatomic", "Pony"
                ).objects.count(),
                0,
            )
            with self.assertRaises(ValueError):
                with connection.schema_editor() as editor:
                    atomic_migration.unapply(project_state, editor)
            self.assertEqual(
                project_state.apps.get_model(
                    "test_runpythonatomic", "Pony"
                ).objects.count(),
                0,
            )
            with self.assertRaises(ValueError):
                with connection.schema_editor() as editor:
                    non_atomic_migration.unapply(project_state, editor)
            # reverse_code also ran outside a transaction, so its row stuck.
            self.assertEqual(
                project_state.apps.get_model(
                    "test_runpythonatomic", "Pony"
                ).objects.count(),
                1,
            )
        # Verify deconstruction.
        definition = non_atomic_migration.operations[0].deconstruct()
        self.assertEqual(definition[0], "RunPython")
        self.assertEqual(definition[1], [])
        self.assertEqual(sorted(definition[2]), ["atomic", "code", "reverse_code"])
def test_run_python_related_assignment(self):
"""
#24282 - Model changes to a FK reverse side update the model
on the FK side as well.
"""
def inner_method(models, schema_editor):
Author = models.get_model("test_authors", "Author")
Book = models.get_model("test_books", "Book")
author = Author.objects.create(name="Hemingway")
Book.objects.create(title="Old Man and The Sea", author=author)
create_author = migrations.CreateModel(
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=100)),
],
options={},
)
create_book = migrations.CreateModel(
"Book",
[
("id", models.AutoField(primary_key=True)),
("title", models.CharField(max_length=100)),
("author", models.ForeignKey("test_authors.Author", models.CASCADE)),
],
options={},
)
add_hometown = migrations.AddField(
"Author",
"hometown",
models.CharField(max_length=100),
)
create_old_man = migrations.RunPython(inner_method, inner_method)
project_state = ProjectState()
new_state = project_state.clone()
with connection.schema_editor() as editor:
create_author.state_forwards("test_authors", new_state)
create_author.database_forwards(
"test_authors", editor, project_state, new_state
)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
create_book.state_forwards("test_books", new_state)
create_book.database_forwards(
"test_books", editor, project_state, new_state
)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
add_hometown.state_forwards("test_authors", new_state)
add_hometown.database_forwards(
"test_authors", editor, project_state, new_state
)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
create_old_man.state_forwards("test_books", new_state)
create_old_man.database_forwards(
"test_books", editor, project_state, new_state
)
def test_model_with_bigautofield(self):
"""
A model with BigAutoField can be created.
"""
def create_data(models, schema_editor):
Author = models.get_model("test_author", "Author")
Book = models.get_model("test_book", "Book")
author1 = Author.objects.create(name="Hemingway")
Book.objects.create(title="Old Man and The Sea", author=author1)
Book.objects.create(id=2**33, title="A farewell to arms", author=author1)
author2 = Author.objects.create(id=2**33, name="Remarque")
Book.objects.create(title="All quiet on the western front", author=author2)
Book.objects.create(title="Arc de Triomphe", author=author2)
create_author = migrations.CreateModel(
"Author",
[
("id", models.BigAutoField(primary_key=True)),
("name", models.CharField(max_length=100)),
],
options={},
)
create_book = migrations.CreateModel(
"Book",
[
("id", models.BigAutoField(primary_key=True)),
("title", models.CharField(max_length=100)),
(
"author",
models.ForeignKey(
to="test_author.Author", on_delete=models.CASCADE
),
),
],
options={},
)
fill_data = migrations.RunPython(create_data)
project_state = ProjectState()
new_state = project_state.clone()
with connection.schema_editor() as editor:
create_author.state_forwards("test_author", new_state)
create_author.database_forwards(
"test_author", editor, project_state, new_state
)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
create_book.state_forwards("test_book", new_state)
create_book.database_forwards("test_book", editor, project_state, new_state)
project_state = new_state
new_state = new_state.clone()
with connection.schema_editor() as editor:
fill_data.state_forwards("fill_data", new_state)
fill_data.database_forwards("fill_data", editor, project_state, new_state)
    def _test_autofield_foreignfield_growth(
        self, source_field, target_field, target_value
    ):
        """
        A field may be migrated in the following ways:
        - AutoField to BigAutoField
        - SmallAutoField to AutoField
        - SmallAutoField to BigAutoField
        """
        def create_initial_data(models, schema_editor):
            # Rows with auto-assigned ids, created while the pk columns are
            # still the narrower source_field.
            Article = models.get_model("test_article", "Article")
            Blog = models.get_model("test_blog", "Blog")
            blog = Blog.objects.create(name="web development done right")
            Article.objects.create(name="Frameworks", blog=blog)
            Article.objects.create(name="Programming Languages", blog=blog)
        def create_big_data(models, schema_editor):
            # Explicit ids of target_value — inserted only after the pk/FK
            # columns have been altered to target_field (see ordering below).
            Article = models.get_model("test_article", "Article")
            Blog = models.get_model("test_blog", "Blog")
            blog2 = Blog.objects.create(name="Frameworks", id=target_value)
            Article.objects.create(name="Django", blog=blog2)
            Article.objects.create(id=target_value, name="Django2", blog=blog2)
        create_blog = migrations.CreateModel(
            "Blog",
            [
                ("id", source_field(primary_key=True)),
                ("name", models.CharField(max_length=100)),
            ],
            options={},
        )
        create_article = migrations.CreateModel(
            "Article",
            [
                ("id", source_field(primary_key=True)),
                (
                    "blog",
                    models.ForeignKey(to="test_blog.Blog", on_delete=models.CASCADE),
                ),
                ("name", models.CharField(max_length=100)),
                ("data", models.TextField(default="")),
            ],
            options={},
        )
        fill_initial_data = migrations.RunPython(
            create_initial_data, create_initial_data
        )
        fill_big_data = migrations.RunPython(create_big_data, create_big_data)
        grow_article_id = migrations.AlterField(
            "Article", "id", target_field(primary_key=True)
        )
        grow_blog_id = migrations.AlterField(
            "Blog", "id", target_field(primary_key=True)
        )
        # Apply each operation in sequence, advancing the project state.
        project_state = ProjectState()
        new_state = project_state.clone()
        with connection.schema_editor() as editor:
            create_blog.state_forwards("test_blog", new_state)
            create_blog.database_forwards("test_blog", editor, project_state, new_state)
        project_state = new_state
        new_state = new_state.clone()
        with connection.schema_editor() as editor:
            create_article.state_forwards("test_article", new_state)
            create_article.database_forwards(
                "test_article", editor, project_state, new_state
            )
        project_state = new_state
        new_state = new_state.clone()
        with connection.schema_editor() as editor:
            fill_initial_data.state_forwards("fill_initial_data", new_state)
            fill_initial_data.database_forwards(
                "fill_initial_data", editor, project_state, new_state
            )
        project_state = new_state
        new_state = new_state.clone()
        with connection.schema_editor() as editor:
            grow_article_id.state_forwards("test_article", new_state)
            grow_article_id.database_forwards(
                "test_article", editor, project_state, new_state
            )
        # The in-memory model state must reflect the widened pk type.
        state = new_state.clone()
        article = state.apps.get_model("test_article.Article")
        self.assertIsInstance(article._meta.pk, target_field)
        project_state = new_state
        new_state = new_state.clone()
        with connection.schema_editor() as editor:
            grow_blog_id.state_forwards("test_blog", new_state)
            grow_blog_id.database_forwards(
                "test_blog", editor, project_state, new_state
            )
        state = new_state.clone()
        blog = state.apps.get_model("test_blog.Blog")
        self.assertIsInstance(blog._meta.pk, target_field)
        project_state = new_state
        new_state = new_state.clone()
        # Finally prove the widened columns accept target_value ids.
        with connection.schema_editor() as editor:
            fill_big_data.state_forwards("fill_big_data", new_state)
            fill_big_data.database_forwards(
                "fill_big_data", editor, project_state, new_state
            )
def test_autofield__bigautofield_foreignfield_growth(self):
"""A field may be migrated from AutoField to BigAutoField."""
self._test_autofield_foreignfield_growth(
models.AutoField,
models.BigAutoField,
2**33,
)
def test_smallfield_autofield_foreignfield_growth(self):
"""A field may be migrated from SmallAutoField to AutoField."""
self._test_autofield_foreignfield_growth(
models.SmallAutoField,
models.AutoField,
2**22,
)
def test_smallfield_bigautofield_foreignfield_growth(self):
"""A field may be migrated from SmallAutoField to BigAutoField."""
self._test_autofield_foreignfield_growth(
models.SmallAutoField,
models.BigAutoField,
2**33,
)
def test_run_python_noop(self):
"""
#24098 - Tests no-op RunPython operations.
"""
project_state = ProjectState()
new_state = project_state.clone()
operation = migrations.RunPython(
migrations.RunPython.noop, migrations.RunPython.noop
)
with connection.schema_editor() as editor:
operation.database_forwards(
"test_runpython", editor, project_state, new_state
)
operation.database_backwards(
"test_runpython", editor, new_state, project_state
)
def test_separate_database_and_state(self):
"""
Tests the SeparateDatabaseAndState operation.
"""
project_state = self.set_up_test_model("test_separatedatabaseandstate")
# Create the operation
database_operation = migrations.RunSQL(
"CREATE TABLE i_love_ponies (id int, special_thing int);",
"DROP TABLE i_love_ponies;",
)
state_operation = migrations.CreateModel(
"SomethingElse", [("id", models.AutoField(primary_key=True))]
)
operation = migrations.SeparateDatabaseAndState(
state_operations=[state_operation], database_operations=[database_operation]
)
self.assertEqual(
operation.describe(), "Custom state/database change combination"
)
# Test the state alteration
new_state = project_state.clone()
operation.state_forwards("test_separatedatabaseandstate", new_state)
self.assertEqual(
len(
new_state.models[
"test_separatedatabaseandstate", "somethingelse"
].fields
),
1,
)
# Make sure there's no table
self.assertTableNotExists("i_love_ponies")
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards(
"test_separatedatabaseandstate", editor, project_state, new_state
)
self.assertTableExists("i_love_ponies")
# And test reversal
self.assertTrue(operation.reversible)
with connection.schema_editor() as editor:
operation.database_backwards(
"test_separatedatabaseandstate", editor, new_state, project_state
)
self.assertTableNotExists("i_love_ponies")
# And deconstruction
definition = operation.deconstruct()
self.assertEqual(definition[0], "SeparateDatabaseAndState")
self.assertEqual(definition[1], [])
self.assertEqual(
sorted(definition[2]), ["database_operations", "state_operations"]
)
def test_separate_database_and_state2(self):
"""
A complex SeparateDatabaseAndState operation: Multiple operations both
for state and database. Verify the state dependencies within each list
and that state ops don't affect the database.
"""
app_label = "test_separatedatabaseandstate2"
project_state = self.set_up_test_model(app_label)
# Create the operation
database_operations = [
migrations.CreateModel(
"ILovePonies",
[("id", models.AutoField(primary_key=True))],
options={"db_table": "iloveponies"},
),
migrations.CreateModel(
"ILoveMorePonies",
# We use IntegerField and not AutoField because
# the model is going to be deleted immediately
# and with an AutoField this fails on Oracle
[("id", models.IntegerField(primary_key=True))],
options={"db_table": "ilovemoreponies"},
),
migrations.DeleteModel("ILoveMorePonies"),
migrations.CreateModel(
"ILoveEvenMorePonies",
[("id", models.AutoField(primary_key=True))],
options={"db_table": "iloveevenmoreponies"},
),
]
state_operations = [
migrations.CreateModel(
"SomethingElse",
[("id", models.AutoField(primary_key=True))],
options={"db_table": "somethingelse"},
),
migrations.DeleteModel("SomethingElse"),
migrations.CreateModel(
"SomethingCompletelyDifferent",
[("id", models.AutoField(primary_key=True))],
options={"db_table": "somethingcompletelydifferent"},
),
]
operation = migrations.SeparateDatabaseAndState(
state_operations=state_operations,
database_operations=database_operations,
)
# Test the state alteration
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
def assertModelsAndTables(after_db):
# Tables and models exist, or don't, as they should:
self.assertNotIn((app_label, "somethingelse"), new_state.models)
self.assertEqual(
len(new_state.models[app_label, "somethingcompletelydifferent"].fields),
1,
)
self.assertNotIn((app_label, "iloveponiesonies"), new_state.models)
self.assertNotIn((app_label, "ilovemoreponies"), new_state.models)
self.assertNotIn((app_label, "iloveevenmoreponies"), new_state.models)
self.assertTableNotExists("somethingelse")
self.assertTableNotExists("somethingcompletelydifferent")
self.assertTableNotExists("ilovemoreponies")
if after_db:
self.assertTableExists("iloveponies")
self.assertTableExists("iloveevenmoreponies")
else:
self.assertTableNotExists("iloveponies")
self.assertTableNotExists("iloveevenmoreponies")
assertModelsAndTables(after_db=False)
# Test the database alteration
with connection.schema_editor() as editor:
operation.database_forwards(app_label, editor, project_state, new_state)
assertModelsAndTables(after_db=True)
# And test reversal
self.assertTrue(operation.reversible)
with connection.schema_editor() as editor:
operation.database_backwards(app_label, editor, new_state, project_state)
assertModelsAndTables(after_db=False)
class SwappableOperationTests(OperationTestBase):
    """
    Key operations ignore swappable models
    (we don't want to replicate all of them here, as the functionality
    is in a common base class anyway)
    """

    available_apps = ["migrations"]

    @override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel")
    def test_create_ignore_swapped(self):
        """
        The CreateTable operation ignores swapped models.
        """
        operation = migrations.CreateModel(
            "Pony",
            [
                ("id", models.AutoField(primary_key=True)),
                ("pink", models.IntegerField(default=1)),
            ],
            options={"swappable": "TEST_SWAP_MODEL"},
        )
        # The state alteration still happens for a swapped-out model.
        pre_state = ProjectState()
        post_state = pre_state.clone()
        operation.state_forwards("test_crigsw", post_state)
        pony_state = post_state.models["test_crigsw", "pony"]
        self.assertEqual(pony_state.name, "Pony")
        self.assertEqual(len(pony_state.fields), 2)
        # The database, however, is left alone in both directions.
        self.assertTableNotExists("test_crigsw_pony")
        with connection.schema_editor() as editor:
            operation.database_forwards("test_crigsw", editor, pre_state, post_state)
        self.assertTableNotExists("test_crigsw_pony")
        with connection.schema_editor() as editor:
            operation.database_backwards("test_crigsw", editor, post_state, pre_state)
        self.assertTableNotExists("test_crigsw_pony")

    @override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel")
    def test_delete_ignore_swapped(self):
        """
        Tests the DeleteModel operation ignores swapped models.
        """
        operation = migrations.DeleteModel("Pony")
        pre_state, post_state = self.make_test_state("test_dligsw", operation)
        # Neither direction touches the database for a swapped model.
        self.assertTableNotExists("test_dligsw_pony")
        with connection.schema_editor() as editor:
            operation.database_forwards("test_dligsw", editor, pre_state, post_state)
        self.assertTableNotExists("test_dligsw_pony")
        with connection.schema_editor() as editor:
            operation.database_backwards("test_dligsw", editor, post_state, pre_state)
        self.assertTableNotExists("test_dligsw_pony")

    @override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel")
    def test_add_field_ignore_swapped(self):
        """
        Tests the AddField operation.
        """
        operation = migrations.AddField(
            "Pony",
            "height",
            models.FloatField(null=True, default=5),
        )
        pre_state, post_state = self.make_test_state("test_adfligsw", operation)
        # The table is never created, so neither direction may touch the db.
        self.assertTableNotExists("test_adfligsw_pony")
        with connection.schema_editor() as editor:
            operation.database_forwards("test_adfligsw", editor, pre_state, post_state)
        self.assertTableNotExists("test_adfligsw_pony")
        with connection.schema_editor() as editor:
            operation.database_backwards("test_adfligsw", editor, post_state, pre_state)
        self.assertTableNotExists("test_adfligsw_pony")

    @override_settings(TEST_SWAP_MODEL="migrations.SomeFakeModel")
    def test_indexes_ignore_swapped(self):
        """
        Add/RemoveIndex operations ignore swapped models.
        """
        index = models.Index(fields=["pink"], name="my_name_idx")
        cases = [
            ("test_adinigsw", migrations.AddIndex("Pony", index)),
            ("test_rminigsw", migrations.RemoveIndex("Pony", index)),
        ]
        for app_label, operation in cases:
            pre_state, post_state = self.make_test_state(app_label, operation)
            with connection.schema_editor() as editor:
                # No database queries should be run for swapped models
                operation.database_forwards(app_label, editor, pre_state, post_state)
                operation.database_backwards(app_label, editor, post_state, pre_state)
class TestCreateModel(SimpleTestCase):
    def test_references_model_mixin(self):
        # A CreateModel whose bases include a plain (non-model) mixin must
        # not crash when asked about model references.
        operation = migrations.CreateModel(
            "name",
            fields=[],
            bases=(Mixin, models.Model),
        )
        operation.references_model("other_model", "migrations")
class FieldOperationTests(SimpleTestCase):
    def test_references_model(self):
        operation = FieldOperation(
            "MoDel", "field", models.ForeignKey("Other", models.CASCADE)
        )
        # Model name match (case-insensitive).
        self.assertIs(operation.references_model("mOdEl", "migrations"), True)
        # Referenced field.
        self.assertIs(operation.references_model("oTher", "migrations"), True)
        # Doesn't reference.
        self.assertIs(operation.references_model("Whatever", "migrations"), False)

    def test_references_field_by_name(self):
        operation = FieldOperation("MoDel", "field", models.BooleanField(default=False))
        self.assertIs(operation.references_field("model", "field", "migrations"), True)

    def test_references_field_by_remote_field_model(self):
        operation = FieldOperation(
            "Model", "field", models.ForeignKey("Other", models.CASCADE)
        )
        # Any field of the FK target counts as referenced.
        self.assertIs(
            operation.references_field("Other", "whatever", "migrations"), True
        )
        self.assertIs(
            operation.references_field("Missing", "whatever", "migrations"), False
        )

    def test_references_field_by_from_fields(self):
        operation = FieldOperation(
            "Model",
            "field",
            models.fields.related.ForeignObject(
                "Other", models.CASCADE, ["from"], ["to"]
            ),
        )
        # from_fields live on the local model, to_fields on the remote one.
        self.assertIs(operation.references_field("Model", "from", "migrations"), True)
        self.assertIs(operation.references_field("Model", "to", "migrations"), False)
        self.assertIs(operation.references_field("Other", "from", "migrations"), False)
        # The original asserted the ("Model", "to") pair twice; the missing
        # case is the to_field on the remote model, which IS referenced.
        self.assertIs(operation.references_field("Other", "to", "migrations"), True)

    def test_references_field_by_to_fields(self):
        operation = FieldOperation(
            "Model",
            "field",
            models.ForeignKey("Other", models.CASCADE, to_field="field"),
        )
        # With an explicit to_field, only that remote field is referenced.
        self.assertIs(operation.references_field("Other", "field", "migrations"), True)
        self.assertIs(
            operation.references_field("Other", "whatever", "migrations"), False
        )
        self.assertIs(
            operation.references_field("Missing", "whatever", "migrations"), False
        )

    def test_references_field_by_through(self):
        operation = FieldOperation(
            "Model", "field", models.ManyToManyField("Other", through="Through")
        )
        # Without through_fields, any field of target or through counts.
        self.assertIs(
            operation.references_field("Other", "whatever", "migrations"), True
        )
        self.assertIs(
            operation.references_field("Through", "whatever", "migrations"), True
        )
        self.assertIs(
            operation.references_field("Missing", "whatever", "migrations"), False
        )

    def test_reference_field_by_through_fields(self):
        operation = FieldOperation(
            "Model",
            "field",
            models.ManyToManyField(
                "Other", through="Through", through_fields=("first", "second")
            ),
        )
        # through_fields restricts which fields of the through model count.
        self.assertIs(
            operation.references_field("Other", "whatever", "migrations"), True
        )
        self.assertIs(
            operation.references_field("Through", "whatever", "migrations"), False
        )
        self.assertIs(
            operation.references_field("Through", "first", "migrations"), True
        )
        self.assertIs(
            operation.references_field("Through", "second", "migrations"), True
        )
|
b63ef64bbb5e2809f631d2244f60aba3b30081c48d3a4dceb971799d4481f0ff | import os
import shutil
import tempfile
from contextlib import contextmanager
from importlib import import_module
from django.apps import apps
from django.db import connection, connections, migrations, models
from django.db.migrations.migration import Migration
from django.db.migrations.recorder import MigrationRecorder
from django.db.migrations.state import ProjectState
from django.test import TransactionTestCase
from django.test.utils import extend_sys_path
from django.utils.module_loading import module_dir
class MigrationTestBase(TransactionTestCase):
    """
    Contains an extended set of asserts for testing migrations and schema operations.
    """

    available_apps = ["migrations"]
    databases = {"default", "other"}

    def tearDown(self):
        # Reset applied-migrations state.
        for db in self.databases:
            recorder = MigrationRecorder(connections[db])
            recorder.migration_qs.filter(app="migrations").delete()

    def get_table_description(self, table, using="default"):
        """Return introspected column descriptions for ``table``."""
        with connections[using].cursor() as cursor:
            return connections[using].introspection.get_table_description(cursor, table)

    def assertTableExists(self, table, using="default"):
        with connections[using].cursor() as cursor:
            self.assertIn(table, connections[using].introspection.table_names(cursor))

    def assertTableNotExists(self, table, using="default"):
        with connections[using].cursor() as cursor:
            self.assertNotIn(
                table, connections[using].introspection.table_names(cursor)
            )

    def assertColumnExists(self, table, column, using="default"):
        self.assertIn(
            column, [c.name for c in self.get_table_description(table, using=using)]
        )

    def assertColumnNotExists(self, table, column, using="default"):
        self.assertNotIn(
            column, [c.name for c in self.get_table_description(table, using=using)]
        )

    def _get_column_allows_null(self, table, column, using):
        return [
            c.null_ok
            for c in self.get_table_description(table, using=using)
            if c.name == column
        ][0]

    def assertColumnNull(self, table, column, using="default"):
        self.assertTrue(self._get_column_allows_null(table, column, using))

    def assertColumnNotNull(self, table, column, using="default"):
        self.assertFalse(self._get_column_allows_null(table, column, using))

    def assertIndexExists(
        self, table, columns, value=True, using="default", index_type=None
    ):
        """Assert a non-unique index over ``columns`` exists (or not)."""
        with connections[using].cursor() as cursor:
            self.assertEqual(
                value,
                any(
                    c["index"]
                    for c in connections[using]
                    .introspection.get_constraints(cursor, table)
                    .values()
                    if (
                        c["columns"] == list(columns)
                        and (index_type is None or c["type"] == index_type)
                        and not c["unique"]
                    )
                ),
            )

    def assertIndexNotExists(self, table, columns):
        return self.assertIndexExists(table, columns, False)

    def assertIndexNameExists(self, table, index, using="default"):
        # Introspect via the aliased connection so the ``using`` argument is
        # honored; the previous code queried the default ``connection`` and
        # silently ignored ``using``.
        with connections[using].cursor() as cursor:
            self.assertIn(
                index,
                connections[using].introspection.get_constraints(cursor, table),
            )

    def assertIndexNameNotExists(self, table, index, using="default"):
        with connections[using].cursor() as cursor:
            self.assertNotIn(
                index,
                connections[using].introspection.get_constraints(cursor, table),
            )

    def assertConstraintExists(self, table, name, value=True, using="default"):
        with connections[using].cursor() as cursor:
            constraints = (
                connections[using].introspection.get_constraints(cursor, table).items()
            )
            self.assertEqual(
                value,
                any(c["check"] for n, c in constraints if n == name),
            )

    def assertConstraintNotExists(self, table, name):
        return self.assertConstraintExists(table, name, False)

    def assertUniqueConstraintExists(self, table, columns, value=True, using="default"):
        with connections[using].cursor() as cursor:
            constraints = (
                connections[using].introspection.get_constraints(cursor, table).values()
            )
            self.assertEqual(
                value,
                any(c["unique"] for c in constraints if c["columns"] == list(columns)),
            )

    def assertFKExists(self, table, columns, to, value=True, using="default"):
        with connections[using].cursor() as cursor:
            self.assertEqual(
                value,
                any(
                    c["foreign_key"] == to
                    for c in connections[using]
                    .introspection.get_constraints(cursor, table)
                    .values()
                    if c["columns"] == list(columns)
                ),
            )

    def assertFKNotExists(self, table, columns, to):
        return self.assertFKExists(table, columns, to, False)

    @contextmanager
    def temporary_migration_module(self, app_label="migrations", module=None):
        """
        Allows testing management commands in a temporary migrations module.

        Wrap all invocations to makemigrations and squashmigrations with this
        context manager in order to avoid creating migration files in your
        source tree inadvertently.

        Takes the application label that will be passed to makemigrations or
        squashmigrations and the Python path to a migrations module.

        The migrations module is used as a template for creating the temporary
        migrations module. If it isn't provided, the application's migrations
        module is used, if it exists.

        Returns the filesystem path to the temporary migrations module.
        """
        with tempfile.TemporaryDirectory() as temp_dir:
            target_dir = tempfile.mkdtemp(dir=temp_dir)
            with open(os.path.join(target_dir, "__init__.py"), "w"):
                pass
            target_migrations_dir = os.path.join(target_dir, "migrations")
            if module is None:
                module = apps.get_app_config(app_label).name + ".migrations"
            try:
                source_migrations_dir = module_dir(import_module(module))
            except (ImportError, ValueError):
                # No template module to copy; start with an empty directory.
                pass
            else:
                shutil.copytree(source_migrations_dir, target_migrations_dir)
            with extend_sys_path(temp_dir):
                new_module = os.path.basename(target_dir) + ".migrations"
                with self.settings(MIGRATION_MODULES={app_label: new_module}):
                    yield target_migrations_dir
class OperationTestBase(MigrationTestBase):
"""Common functions to help test operations."""
@classmethod
def setUpClass(cls):
super().setUpClass()
cls._initial_table_names = frozenset(connection.introspection.table_names())
    def tearDown(self):
        # Drop tables created during the test before the base class resets
        # the applied-migrations bookkeeping.
        self.cleanup_test_tables()
        super().tearDown()
def cleanup_test_tables(self):
table_names = (
frozenset(connection.introspection.table_names())
- self._initial_table_names
)
with connection.schema_editor() as editor:
with connection.constraint_checks_disabled():
for table_name in table_names:
editor.execute(
editor.sql_delete_table
% {
"table": editor.quote_name(table_name),
}
)
def apply_operations(self, app_label, project_state, operations, atomic=True):
migration = Migration("name", app_label)
migration.operations = operations
with connection.schema_editor(atomic=atomic) as editor:
return migration.apply(project_state, editor)
def unapply_operations(self, app_label, project_state, operations, atomic=True):
migration = Migration("name", app_label)
migration.operations = operations
with connection.schema_editor(atomic=atomic) as editor:
return migration.unapply(project_state, editor)
def make_test_state(self, app_label, operation, **kwargs):
"""
Makes a test state using set_up_test_model and returns the
original state and the state after the migration is applied.
"""
project_state = self.set_up_test_model(app_label, **kwargs)
new_state = project_state.clone()
operation.state_forwards(app_label, new_state)
return project_state, new_state
def set_up_test_model(
self,
app_label,
second_model=False,
third_model=False,
index=False,
multicol_index=False,
related_model=False,
mti_model=False,
proxy_model=False,
manager_model=False,
unique_together=False,
options=False,
db_table=None,
index_together=False,
constraints=None,
indexes=None,
):
"""Creates a test model state and database table."""
# Make the "current" state.
model_options = {
"swappable": "TEST_SWAP_MODEL",
"index_together": [["weight", "pink"]] if index_together else [],
"unique_together": [["pink", "weight"]] if unique_together else [],
}
if options:
model_options["permissions"] = [("can_groom", "Can groom")]
if db_table:
model_options["db_table"] = db_table
operations = [
migrations.CreateModel(
"Pony",
[
("id", models.AutoField(primary_key=True)),
("pink", models.IntegerField(default=3)),
("weight", models.FloatField()),
],
options=model_options,
)
]
if index:
operations.append(
migrations.AddIndex(
"Pony",
models.Index(fields=["pink"], name="pony_pink_idx"),
)
)
if multicol_index:
operations.append(
migrations.AddIndex(
"Pony",
models.Index(fields=["pink", "weight"], name="pony_test_idx"),
)
)
if indexes:
for index in indexes:
operations.append(migrations.AddIndex("Pony", index))
if constraints:
for constraint in constraints:
operations.append(migrations.AddConstraint("Pony", constraint))
if second_model:
operations.append(
migrations.CreateModel(
"Stable",
[
("id", models.AutoField(primary_key=True)),
],
)
)
if third_model:
operations.append(
migrations.CreateModel(
"Van",
[
("id", models.AutoField(primary_key=True)),
],
)
)
if related_model:
operations.append(
migrations.CreateModel(
"Rider",
[
("id", models.AutoField(primary_key=True)),
("pony", models.ForeignKey("Pony", models.CASCADE)),
(
"friend",
models.ForeignKey("self", models.CASCADE, null=True),
),
],
)
)
if mti_model:
operations.append(
migrations.CreateModel(
"ShetlandPony",
fields=[
(
"pony_ptr",
models.OneToOneField(
"Pony",
models.CASCADE,
auto_created=True,
parent_link=True,
primary_key=True,
to_field="id",
serialize=False,
),
),
("cuteness", models.IntegerField(default=1)),
],
bases=["%s.Pony" % app_label],
)
)
if proxy_model:
operations.append(
migrations.CreateModel(
"ProxyPony",
fields=[],
options={"proxy": True},
bases=["%s.Pony" % app_label],
)
)
if manager_model:
from .models import FoodManager, FoodQuerySet
operations.append(
migrations.CreateModel(
"Food",
fields=[
("id", models.AutoField(primary_key=True)),
],
managers=[
("food_qs", FoodQuerySet.as_manager()),
("food_mgr", FoodManager("a", "b")),
("food_mgr_kwargs", FoodManager("x", "y", 3, 4)),
],
)
)
return self.apply_operations(app_label, ProjectState(), operations)
|
4cc70b6ca4c64b0114e3a5143046fcd7cbcd783297c2a1f1c797931081ccb1ab | from django.core.management import call_command
from django.test import override_settings
from .test_base import MigrationTestBase
class Tests(MigrationTestBase):
    """
    Historic migrations that use deprecated model fields must still apply
    and roll back cleanly.
    """

    @override_settings(
        MIGRATION_MODULES={"migrations": "migrations.deprecated_field_migrations"}
    )
    def test_migrate(self):
        table = "migrations_ipaddressfield"
        # Nothing should exist before migrating.
        self.assertTableNotExists(table)
        # Applying the historic migrations creates the table.
        call_command("migrate", verbosity=0)
        self.assertTableExists(table)
        # Migrating back to zero drops it again.
        call_command("migrate", "migrations", "zero", verbosity=0)
        self.assertTableNotExists(table)
|
64ec9d4b7a72b68a9de8cd6d9808b131360be90fad794462128cb2ff9d790e0f | from django.db.migrations.exceptions import NodeNotFoundError
from django.test import SimpleTestCase
class ExceptionTests(SimpleTestCase):
    def test_node_not_found_error_repr(self):
        """repr() of NodeNotFoundError shows only the offending node."""
        missing_node = ("some_app_label", "some_migration_label")
        error = NodeNotFoundError("some message", missing_node)
        self.assertEqual(
            repr(error),
            "NodeNotFoundError(('some_app_label', 'some_migration_label'))",
        )
|
803d4a0692e7ecfc617432bb3e43a9727a278edcce1500045e50d9a09435d936 | from django.db import connection, migrations, models
from django.db.migrations.state import ProjectState
from django.test import override_settings
from .test_base import OperationTestBase
class AgnosticRouter:
    """A router with no opinion: it defers the migrate decision to others."""

    def allow_migrate(self, db, app_label, **hints):
        # None tells the router chain "no preference here".
        return None
class MigrateNothingRouter:
    """A router that vetoes every migration, regardless of hints."""

    def allow_migrate(self, db, app_label, **hints):
        return False
class MigrateEverythingRouter:
    """A router that approves every migration, regardless of hints."""

    def allow_migrate(self, db, app_label, **hints):
        return True
class MigrateWhenFooRouter:
    """A router whose verdict is whatever the caller passed as the "foo" hint."""

    def allow_migrate(self, db, app_label, **hints):
        # Missing hint means "don't migrate"; otherwise echo the hint value.
        if "foo" in hints:
            return hints["foo"]
        return False
class MultiDBOperationTests(OperationTestBase):
    # These tests exercise both configured database aliases.
    databases = {"default", "other"}
    def _test_create_model(self, app_label, should_run):
        """
        CreateModel honors multi-db settings.
        """
        operation = migrations.CreateModel(
            "Pony",
            [("id", models.AutoField(primary_key=True))],
        )
        # Test the state alteration
        project_state = ProjectState()
        new_state = project_state.clone()
        operation.state_forwards(app_label, new_state)
        # Test the database alteration
        self.assertTableNotExists("%s_pony" % app_label)
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, project_state, new_state)
        if should_run:
            self.assertTableExists("%s_pony" % app_label)
        else:
            self.assertTableNotExists("%s_pony" % app_label)
        # And test reversal
        with connection.schema_editor() as editor:
            operation.database_backwards(app_label, editor, new_state, project_state)
        self.assertTableNotExists("%s_pony" % app_label)
    @override_settings(DATABASE_ROUTERS=[AgnosticRouter()])
    def test_create_model(self):
        """
        Test when router doesn't have an opinion (i.e. CreateModel should run).
        """
        self._test_create_model("test_mltdb_crmo", should_run=True)
    @override_settings(DATABASE_ROUTERS=[MigrateNothingRouter()])
    def test_create_model2(self):
        """
        Test when router returns False (i.e. CreateModel shouldn't run).
        """
        self._test_create_model("test_mltdb_crmo2", should_run=False)
    @override_settings(DATABASE_ROUTERS=[MigrateEverythingRouter()])
    def test_create_model3(self):
        """
        Test when router returns True (i.e. CreateModel should run).
        """
        self._test_create_model("test_mltdb_crmo3", should_run=True)
    def test_create_model4(self):
        """
        Test multiple routers.
        """
        with override_settings(DATABASE_ROUTERS=[AgnosticRouter(), AgnosticRouter()]):
            self._test_create_model("test_mltdb_crmo4", should_run=True)
        # The expectations below show the first router with an opinion wins.
        with override_settings(
            DATABASE_ROUTERS=[MigrateNothingRouter(), MigrateEverythingRouter()]
        ):
            self._test_create_model("test_mltdb_crmo4", should_run=False)
        with override_settings(
            DATABASE_ROUTERS=[MigrateEverythingRouter(), MigrateNothingRouter()]
        ):
            self._test_create_model("test_mltdb_crmo4", should_run=True)
    def _test_run_sql(self, app_label, should_run, hints=None):
        # Create the table with routing wide open; the RunSQL itself runs
        # under whatever DATABASE_ROUTERS the calling test installed.
        with override_settings(DATABASE_ROUTERS=[MigrateEverythingRouter()]):
            project_state = self.set_up_test_model(app_label)
        sql = """
        INSERT INTO {0}_pony (pink, weight) VALUES (1, 3.55);
        INSERT INTO {0}_pony (pink, weight) VALUES (3, 5.0);
        """.format(
            app_label
        )
        operation = migrations.RunSQL(sql, hints=hints or {})
        # Test the state alteration does nothing
        new_state = project_state.clone()
        operation.state_forwards(app_label, new_state)
        self.assertEqual(new_state, project_state)
        # Test the database alteration
        self.assertEqual(
            project_state.apps.get_model(app_label, "Pony").objects.count(), 0
        )
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, project_state, new_state)
        Pony = project_state.apps.get_model(app_label, "Pony")
        if should_run:
            self.assertEqual(Pony.objects.count(), 2)
        else:
            self.assertEqual(Pony.objects.count(), 0)
    @override_settings(DATABASE_ROUTERS=[MigrateNothingRouter()])
    def test_run_sql_migrate_nothing_router(self):
        self._test_run_sql("test_mltdb_runsql", should_run=False)
    @override_settings(DATABASE_ROUTERS=[MigrateWhenFooRouter()])
    def test_run_sql_migrate_foo_router_without_hints(self):
        self._test_run_sql("test_mltdb_runsql2", should_run=False)
    @override_settings(DATABASE_ROUTERS=[MigrateWhenFooRouter()])
    def test_run_sql_migrate_foo_router_with_hints(self):
        self._test_run_sql("test_mltdb_runsql3", should_run=True, hints={"foo": True})
    def _test_run_python(self, app_label, should_run, hints=None):
        # Same shape as _test_run_sql, but via a RunPython operation.
        with override_settings(DATABASE_ROUTERS=[MigrateEverythingRouter()]):
            project_state = self.set_up_test_model(app_label)
        # Create the operation
        def inner_method(models, schema_editor):
            Pony = models.get_model(app_label, "Pony")
            Pony.objects.create(pink=1, weight=3.55)
            Pony.objects.create(weight=5)
        operation = migrations.RunPython(inner_method, hints=hints or {})
        # Test the state alteration does nothing
        new_state = project_state.clone()
        operation.state_forwards(app_label, new_state)
        self.assertEqual(new_state, project_state)
        # Test the database alteration
        self.assertEqual(
            project_state.apps.get_model(app_label, "Pony").objects.count(), 0
        )
        with connection.schema_editor() as editor:
            operation.database_forwards(app_label, editor, project_state, new_state)
        Pony = project_state.apps.get_model(app_label, "Pony")
        if should_run:
            self.assertEqual(Pony.objects.count(), 2)
        else:
            self.assertEqual(Pony.objects.count(), 0)
    @override_settings(DATABASE_ROUTERS=[MigrateNothingRouter()])
    def test_run_python_migrate_nothing_router(self):
        self._test_run_python("test_mltdb_runpython", should_run=False)
    @override_settings(DATABASE_ROUTERS=[MigrateWhenFooRouter()])
    def test_run_python_migrate_foo_router_without_hints(self):
        self._test_run_python("test_mltdb_runpython2", should_run=False)
    @override_settings(DATABASE_ROUTERS=[MigrateWhenFooRouter()])
    def test_run_python_migrate_foo_router_with_hints(self):
        self._test_run_python(
            "test_mltdb_runpython3", should_run=True, hints={"foo": True}
        )
|
4c1b1f30fde2d2ea5cac36733a3342c57460f78a774d09d7cf957bf41504c1c8 | import functools
import re
from unittest import mock
from django.apps import apps
from django.conf import settings
from django.contrib.auth.models import AbstractBaseUser
from django.core.validators import RegexValidator, validate_slug
from django.db import connection, migrations, models
from django.db.migrations.autodetector import MigrationAutodetector
from django.db.migrations.graph import MigrationGraph
from django.db.migrations.loader import MigrationLoader
from django.db.migrations.questioner import MigrationQuestioner
from django.db.migrations.state import ModelState, ProjectState
from django.test import SimpleTestCase, TestCase, override_settings
from django.test.utils import isolate_lru_cache
from .models import FoodManager, FoodQuerySet
class DeconstructibleObject:
    """
    A minimal deconstructible object: it simply remembers the arguments it
    was constructed with and reports them from deconstruct().
    """

    def __init__(self, *args, **kwargs):
        self.args = args
        self.kwargs = kwargs

    def deconstruct(self):
        # Mirror Django's deconstruction contract: (dotted path, args, kwargs).
        path = "%s.%s" % (self.__module__, self.__class__.__name__)
        return (path, self.args, self.kwargs)
class AutodetectorTests(TestCase):
"""
Tests the migration autodetector.
"""
author_empty = ModelState(
"testapp", "Author", [("id", models.AutoField(primary_key=True))]
)
author_name = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
],
)
author_name_null = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200, null=True)),
],
)
author_name_longer = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=400)),
],
)
author_name_renamed = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("names", models.CharField(max_length=200)),
],
)
author_name_default = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200, default="Ada Lovelace")),
],
)
author_name_check_constraint = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
],
{
"constraints": [
models.CheckConstraint(
check=models.Q(name__contains="Bob"), name="name_contains_bob"
)
]
},
)
author_dates_of_birth_auto_now = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("date_of_birth", models.DateField(auto_now=True)),
("date_time_of_birth", models.DateTimeField(auto_now=True)),
("time_of_birth", models.TimeField(auto_now=True)),
],
)
author_dates_of_birth_auto_now_add = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("date_of_birth", models.DateField(auto_now_add=True)),
("date_time_of_birth", models.DateTimeField(auto_now_add=True)),
("time_of_birth", models.TimeField(auto_now_add=True)),
],
)
author_name_deconstructible_1 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200, default=DeconstructibleObject())),
],
)
author_name_deconstructible_2 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200, default=DeconstructibleObject())),
],
)
author_name_deconstructible_3 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200, default=models.IntegerField())),
],
)
author_name_deconstructible_4 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200, default=models.IntegerField())),
],
)
author_name_deconstructible_list_1 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200, default=[DeconstructibleObject(), 123]
),
),
],
)
author_name_deconstructible_list_2 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200, default=[DeconstructibleObject(), 123]
),
),
],
)
author_name_deconstructible_list_3 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200, default=[DeconstructibleObject(), 999]
),
),
],
)
author_name_deconstructible_tuple_1 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200, default=(DeconstructibleObject(), 123)
),
),
],
)
author_name_deconstructible_tuple_2 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200, default=(DeconstructibleObject(), 123)
),
),
],
)
author_name_deconstructible_tuple_3 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200, default=(DeconstructibleObject(), 999)
),
),
],
)
author_name_deconstructible_dict_1 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200,
default={"item": DeconstructibleObject(), "otheritem": 123},
),
),
],
)
author_name_deconstructible_dict_2 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200,
default={"item": DeconstructibleObject(), "otheritem": 123},
),
),
],
)
author_name_deconstructible_dict_3 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200,
default={"item": DeconstructibleObject(), "otheritem": 999},
),
),
],
)
author_name_nested_deconstructible_1 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200,
default=DeconstructibleObject(
DeconstructibleObject(1),
(
DeconstructibleObject("t1"),
DeconstructibleObject("t2"),
),
a=DeconstructibleObject("A"),
b=DeconstructibleObject(B=DeconstructibleObject("c")),
),
),
),
],
)
author_name_nested_deconstructible_2 = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200,
default=DeconstructibleObject(
DeconstructibleObject(1),
(
DeconstructibleObject("t1"),
DeconstructibleObject("t2"),
),
a=DeconstructibleObject("A"),
b=DeconstructibleObject(B=DeconstructibleObject("c")),
),
),
),
],
)
author_name_nested_deconstructible_changed_arg = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200,
default=DeconstructibleObject(
DeconstructibleObject(1),
(
DeconstructibleObject("t1"),
DeconstructibleObject("t2-changed"),
),
a=DeconstructibleObject("A"),
b=DeconstructibleObject(B=DeconstructibleObject("c")),
),
),
),
],
)
author_name_nested_deconstructible_extra_arg = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200,
default=DeconstructibleObject(
DeconstructibleObject(1),
(
DeconstructibleObject("t1"),
DeconstructibleObject("t2"),
),
None,
a=DeconstructibleObject("A"),
b=DeconstructibleObject(B=DeconstructibleObject("c")),
),
),
),
],
)
author_name_nested_deconstructible_changed_kwarg = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200,
default=DeconstructibleObject(
DeconstructibleObject(1),
(
DeconstructibleObject("t1"),
DeconstructibleObject("t2"),
),
a=DeconstructibleObject("A"),
b=DeconstructibleObject(B=DeconstructibleObject("c-changed")),
),
),
),
],
)
author_name_nested_deconstructible_extra_kwarg = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200,
default=DeconstructibleObject(
DeconstructibleObject(1),
(
DeconstructibleObject("t1"),
DeconstructibleObject("t2"),
),
a=DeconstructibleObject("A"),
b=DeconstructibleObject(B=DeconstructibleObject("c")),
c=None,
),
),
),
],
)
author_custom_pk = ModelState(
"testapp", "Author", [("pk_field", models.IntegerField(primary_key=True))]
)
author_with_biography_non_blank = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField()),
("biography", models.TextField()),
],
)
author_with_biography_blank = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(blank=True)),
("biography", models.TextField(blank=True)),
],
)
author_with_book = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
],
)
author_with_book_order_wrt = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
],
options={"order_with_respect_to": "book"},
)
author_renamed_with_book = ModelState(
"testapp",
"Writer",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
],
)
author_with_publisher_string = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("publisher_name", models.CharField(max_length=200)),
],
)
author_with_publisher = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("publisher", models.ForeignKey("testapp.Publisher", models.CASCADE)),
],
)
author_with_user = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("user", models.ForeignKey("auth.User", models.CASCADE)),
],
)
author_with_custom_user = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=200)),
("user", models.ForeignKey("thirdapp.CustomUser", models.CASCADE)),
],
)
author_proxy = ModelState(
"testapp", "AuthorProxy", [], {"proxy": True}, ("testapp.author",)
)
author_proxy_options = ModelState(
"testapp",
"AuthorProxy",
[],
{
"proxy": True,
"verbose_name": "Super Author",
},
("testapp.author",),
)
author_proxy_notproxy = ModelState(
"testapp", "AuthorProxy", [], {}, ("testapp.author",)
)
author_proxy_third = ModelState(
"thirdapp", "AuthorProxy", [], {"proxy": True}, ("testapp.author",)
)
author_proxy_third_notproxy = ModelState(
"thirdapp", "AuthorProxy", [], {}, ("testapp.author",)
)
author_proxy_proxy = ModelState(
"testapp", "AAuthorProxyProxy", [], {"proxy": True}, ("testapp.authorproxy",)
)
author_unmanaged = ModelState(
"testapp", "AuthorUnmanaged", [], {"managed": False}, ("testapp.author",)
)
author_unmanaged_managed = ModelState(
"testapp", "AuthorUnmanaged", [], {}, ("testapp.author",)
)
author_unmanaged_default_pk = ModelState(
"testapp", "Author", [("id", models.AutoField(primary_key=True))]
)
author_unmanaged_custom_pk = ModelState(
"testapp",
"Author",
[
("pk_field", models.IntegerField(primary_key=True)),
],
)
author_with_m2m = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("publishers", models.ManyToManyField("testapp.Publisher")),
],
)
author_with_m2m_blank = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("publishers", models.ManyToManyField("testapp.Publisher", blank=True)),
],
)
author_with_m2m_through = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"publishers",
models.ManyToManyField("testapp.Publisher", through="testapp.Contract"),
),
],
)
author_with_renamed_m2m_through = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"publishers",
models.ManyToManyField("testapp.Publisher", through="testapp.Deal"),
),
],
)
author_with_former_m2m = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
("publishers", models.CharField(max_length=100)),
],
)
author_with_options = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
],
{
"permissions": [("can_hire", "Can hire")],
"verbose_name": "Authi",
},
)
author_with_db_table_options = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
],
{"db_table": "author_one"},
)
author_with_new_db_table_options = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
],
{"db_table": "author_two"},
)
author_renamed_with_db_table_options = ModelState(
"testapp",
"NewAuthor",
[
("id", models.AutoField(primary_key=True)),
],
{"db_table": "author_one"},
)
author_renamed_with_new_db_table_options = ModelState(
"testapp",
"NewAuthor",
[
("id", models.AutoField(primary_key=True)),
],
{"db_table": "author_three"},
)
contract = ModelState(
"testapp",
"Contract",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("publisher", models.ForeignKey("testapp.Publisher", models.CASCADE)),
],
)
contract_renamed = ModelState(
"testapp",
"Deal",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("publisher", models.ForeignKey("testapp.Publisher", models.CASCADE)),
],
)
publisher = ModelState(
"testapp",
"Publisher",
[
("id", models.AutoField(primary_key=True)),
("name", models.CharField(max_length=100)),
],
)
publisher_with_author = ModelState(
"testapp",
"Publisher",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("name", models.CharField(max_length=100)),
],
)
publisher_with_aardvark_author = ModelState(
"testapp",
"Publisher",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Aardvark", models.CASCADE)),
("name", models.CharField(max_length=100)),
],
)
publisher_with_book = ModelState(
"testapp",
"Publisher",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("otherapp.Book", models.CASCADE)),
("name", models.CharField(max_length=100)),
],
)
other_pony = ModelState(
"otherapp",
"Pony",
[
("id", models.AutoField(primary_key=True)),
],
)
other_pony_food = ModelState(
"otherapp",
"Pony",
[
("id", models.AutoField(primary_key=True)),
],
managers=[
("food_qs", FoodQuerySet.as_manager()),
("food_mgr", FoodManager("a", "b")),
("food_mgr_kwargs", FoodManager("x", "y", 3, 4)),
],
)
other_stable = ModelState(
"otherapp", "Stable", [("id", models.AutoField(primary_key=True))]
)
third_thing = ModelState(
"thirdapp", "Thing", [("id", models.AutoField(primary_key=True))]
)
book = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
)
book_proxy_fk = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("thirdapp.AuthorProxy", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
)
book_proxy_proxy_fk = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.AAuthorProxyProxy", models.CASCADE)),
],
)
book_migrations_fk = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("migrations.UnmigratedModel", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
)
book_with_no_author_fk = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.IntegerField()),
("title", models.CharField(max_length=200)),
],
)
book_with_no_author = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("title", models.CharField(max_length=200)),
],
)
book_with_author_renamed = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Writer", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
)
book_with_field_and_author_renamed = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("writer", models.ForeignKey("testapp.Writer", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
)
book_with_multiple_authors = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("authors", models.ManyToManyField("testapp.Author")),
("title", models.CharField(max_length=200)),
],
)
book_with_multiple_authors_through_attribution = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
(
"authors",
models.ManyToManyField(
"testapp.Author", through="otherapp.Attribution"
),
),
("title", models.CharField(max_length=200)),
],
)
book_indexes = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
{
"indexes": [
models.Index(fields=["author", "title"], name="book_title_author_idx")
],
},
)
book_unordered_indexes = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
{
"indexes": [
models.Index(fields=["title", "author"], name="book_author_title_idx")
],
},
)
book_foo_together = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
{
"index_together": {("author", "title")},
"unique_together": {("author", "title")},
},
)
book_foo_together_2 = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
{
"index_together": {("title", "author")},
"unique_together": {("title", "author")},
},
)
book_foo_together_3 = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("newfield", models.IntegerField()),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
{
"index_together": {("title", "newfield")},
"unique_together": {("title", "newfield")},
},
)
book_foo_together_4 = ModelState(
"otherapp",
"Book",
[
("id", models.AutoField(primary_key=True)),
("newfield2", models.IntegerField()),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("title", models.CharField(max_length=200)),
],
{
"index_together": {("title", "newfield2")},
"unique_together": {("title", "newfield2")},
},
)
attribution = ModelState(
"otherapp",
"Attribution",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("testapp.Author", models.CASCADE)),
("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
],
)
edition = ModelState(
"thirdapp",
"Edition",
[
("id", models.AutoField(primary_key=True)),
("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
],
)
custom_user = ModelState(
"thirdapp",
"CustomUser",
[
("id", models.AutoField(primary_key=True)),
("username", models.CharField(max_length=255)),
],
bases=(AbstractBaseUser,),
)
custom_user_no_inherit = ModelState(
"thirdapp",
"CustomUser",
[
("id", models.AutoField(primary_key=True)),
("username", models.CharField(max_length=255)),
],
)
aardvark = ModelState(
"thirdapp", "Aardvark", [("id", models.AutoField(primary_key=True))]
)
aardvark_testapp = ModelState(
"testapp", "Aardvark", [("id", models.AutoField(primary_key=True))]
)
aardvark_based_on_author = ModelState(
"testapp", "Aardvark", [], bases=("testapp.Author",)
)
aardvark_pk_fk_author = ModelState(
"testapp",
"Aardvark",
[
(
"id",
models.OneToOneField(
"testapp.Author", models.CASCADE, primary_key=True
),
),
],
)
knight = ModelState("eggs", "Knight", [("id", models.AutoField(primary_key=True))])
rabbit = ModelState(
"eggs",
"Rabbit",
[
("id", models.AutoField(primary_key=True)),
("knight", models.ForeignKey("eggs.Knight", models.CASCADE)),
("parent", models.ForeignKey("eggs.Rabbit", models.CASCADE)),
],
{
"unique_together": {("parent", "knight")},
"indexes": [
models.Index(
fields=["parent", "knight"], name="rabbit_circular_fk_index"
)
],
},
)
def repr_changes(self, changes, include_dependencies=False):
output = ""
for app_label, migrations_ in sorted(changes.items()):
output += " %s:\n" % app_label
for migration in migrations_:
output += " %s\n" % migration.name
for operation in migration.operations:
output += " %s\n" % operation
if include_dependencies:
output += " Dependencies:\n"
if migration.dependencies:
for dep in migration.dependencies:
output += " %s\n" % (dep,)
else:
output += " None\n"
return output
def assertNumberMigrations(self, changes, app_label, number):
if len(changes.get(app_label, [])) != number:
self.fail(
"Incorrect number of migrations (%s) for %s (expected %s)\n%s"
% (
len(changes.get(app_label, [])),
app_label,
number,
self.repr_changes(changes),
)
)
def assertMigrationDependencies(self, changes, app_label, position, dependencies):
if not changes.get(app_label):
self.fail(
"No migrations found for %s\n%s"
% (app_label, self.repr_changes(changes))
)
if len(changes[app_label]) < position + 1:
self.fail(
"No migration at index %s for %s\n%s"
% (position, app_label, self.repr_changes(changes))
)
migration = changes[app_label][position]
if set(migration.dependencies) != set(dependencies):
self.fail(
"Migration dependencies mismatch for %s.%s (expected %s):\n%s"
% (
app_label,
migration.name,
dependencies,
self.repr_changes(changes, include_dependencies=True),
)
)
def assertOperationTypes(self, changes, app_label, position, types):
if not changes.get(app_label):
self.fail(
"No migrations found for %s\n%s"
% (app_label, self.repr_changes(changes))
)
if len(changes[app_label]) < position + 1:
self.fail(
"No migration at index %s for %s\n%s"
% (position, app_label, self.repr_changes(changes))
)
migration = changes[app_label][position]
real_types = [
operation.__class__.__name__ for operation in migration.operations
]
if types != real_types:
self.fail(
"Operation type mismatch for %s.%s (expected %s):\n%s"
% (
app_label,
migration.name,
types,
self.repr_changes(changes),
)
)
def assertOperationAttributes(
self, changes, app_label, position, operation_position, **attrs
):
if not changes.get(app_label):
self.fail(
"No migrations found for %s\n%s"
% (app_label, self.repr_changes(changes))
)
if len(changes[app_label]) < position + 1:
self.fail(
"No migration at index %s for %s\n%s"
% (position, app_label, self.repr_changes(changes))
)
migration = changes[app_label][position]
if len(changes[app_label]) < position + 1:
self.fail(
"No operation at index %s for %s.%s\n%s"
% (
operation_position,
app_label,
migration.name,
self.repr_changes(changes),
)
)
operation = migration.operations[operation_position]
for attr, value in attrs.items():
if getattr(operation, attr, None) != value:
self.fail(
"Attribute mismatch for %s.%s op #%s, %s (expected %r, got %r):\n%s"
% (
app_label,
migration.name,
operation_position,
attr,
value,
getattr(operation, attr, None),
self.repr_changes(changes),
)
)
def assertOperationFieldAttributes(
self, changes, app_label, position, operation_position, **attrs
):
if not changes.get(app_label):
self.fail(
"No migrations found for %s\n%s"
% (app_label, self.repr_changes(changes))
)
if len(changes[app_label]) < position + 1:
self.fail(
"No migration at index %s for %s\n%s"
% (position, app_label, self.repr_changes(changes))
)
migration = changes[app_label][position]
if len(changes[app_label]) < position + 1:
self.fail(
"No operation at index %s for %s.%s\n%s"
% (
operation_position,
app_label,
migration.name,
self.repr_changes(changes),
)
)
operation = migration.operations[operation_position]
if not hasattr(operation, "field"):
self.fail(
"No field attribute for %s.%s op #%s."
% (
app_label,
migration.name,
operation_position,
)
)
field = operation.field
for attr, value in attrs.items():
if getattr(field, attr, None) != value:
self.fail(
"Field attribute mismatch for %s.%s op #%s, field.%s (expected %r, "
"got %r):\n%s"
% (
app_label,
migration.name,
operation_position,
attr,
value,
getattr(field, attr, None),
self.repr_changes(changes),
)
)
def make_project_state(self, model_states):
"Shortcut to make ProjectStates from lists of predefined models"
project_state = ProjectState()
for model_state in model_states:
project_state.add_model(model_state.clone())
return project_state
def get_changes(self, before_states, after_states, questioner=None):
if not isinstance(before_states, ProjectState):
before_states = self.make_project_state(before_states)
if not isinstance(after_states, ProjectState):
after_states = self.make_project_state(after_states)
return MigrationAutodetector(
before_states,
after_states,
questioner,
)._detect_changes()
    def test_arrange_for_graph(self):
        """Tests auto-naming of migrations for graph matching."""
        # Make a fake graph: two already-applied migrations in testapp, one in
        # otherapp.
        graph = MigrationGraph()
        graph.add_node(("testapp", "0001_initial"), None)
        graph.add_node(("testapp", "0002_foobar"), None)
        graph.add_node(("otherapp", "0001_initial"), None)
        graph.add_dependency(
            "testapp.0002_foobar",
            ("testapp", "0002_foobar"),
            ("testapp", "0001_initial"),
        )
        graph.add_dependency(
            "testapp.0002_foobar",
            ("testapp", "0002_foobar"),
            ("otherapp", "0001_initial"),
        )
        # Use project state to make a new migration change set
        before = self.make_project_state([self.publisher, self.other_pony])
        after = self.make_project_state(
            [
                self.author_empty,
                self.publisher,
                self.other_pony,
                self.other_stable,
            ]
        )
        autodetector = MigrationAutodetector(before, after)
        changes = autodetector._detect_changes()
        # Run through arrange_for_graph
        changes = autodetector.arrange_for_graph(changes, graph)
        # Make sure there's a new name, deps match, etc. New migrations
        # continue each app's numbering and depend on the graph's leaf nodes.
        self.assertEqual(changes["testapp"][0].name, "0003_author")
        self.assertEqual(
            changes["testapp"][0].dependencies, [("testapp", "0002_foobar")]
        )
        self.assertEqual(changes["otherapp"][0].name, "0002_stable")
        self.assertEqual(
            changes["otherapp"][0].dependencies, [("otherapp", "0001_initial")]
        )
    def test_arrange_for_graph_with_multiple_initial(self):
        """
        When one app needs two migrations to bootstrap (e.g. to break a
        circular dependency), both are named as initial (0001/0002) when the
        graph is empty.
        """
        # Make a fake graph.
        graph = MigrationGraph()
        # Use project state to make a new migration change set.
        before = self.make_project_state([])
        after = self.make_project_state(
            [self.author_with_book, self.book, self.attribution]
        )
        autodetector = MigrationAutodetector(
            before, after, MigrationQuestioner({"ask_initial": True})
        )
        changes = autodetector._detect_changes()
        changes = autodetector.arrange_for_graph(changes, graph)
        self.assertEqual(changes["otherapp"][0].name, "0001_initial")
        self.assertEqual(changes["otherapp"][0].dependencies, [])
        self.assertEqual(changes["otherapp"][1].name, "0002_initial")
        self.assertCountEqual(
            changes["otherapp"][1].dependencies,
            [("testapp", "0001_initial"), ("otherapp", "0001_initial")],
        )
        self.assertEqual(changes["testapp"][0].name, "0001_initial")
        self.assertEqual(
            changes["testapp"][0].dependencies, [("otherapp", "0001_initial")]
        )
    def test_trim_apps(self):
        """
        Trim does not remove dependencies but does remove unwanted apps.
        """
        # Use project state to make a new migration change set
        before = self.make_project_state([])
        after = self.make_project_state(
            [self.author_empty, self.other_pony, self.other_stable, self.third_thing]
        )
        autodetector = MigrationAutodetector(
            before, after, MigrationQuestioner({"ask_initial": True})
        )
        changes = autodetector._detect_changes()
        # Run through arrange_for_graph
        graph = MigrationGraph()
        changes = autodetector.arrange_for_graph(changes, graph)
        # Manually add a cross-app dependency so trimming has something to keep.
        changes["testapp"][0].dependencies.append(("otherapp", "0001_initial"))
        changes = autodetector._trim_to_apps(changes, {"testapp"})
        # Make sure there's the right set of migrations: otherapp survives as
        # a dependency of testapp, while thirdapp is dropped.
        self.assertEqual(changes["testapp"][0].name, "0001_initial")
        self.assertEqual(changes["otherapp"][0].name, "0001_initial")
        self.assertNotIn("thirdapp", changes)
    def test_custom_migration_name(self):
        """Tests custom naming of migrations for graph matching."""
        # Make a fake graph
        graph = MigrationGraph()
        graph.add_node(("testapp", "0001_initial"), None)
        graph.add_node(("testapp", "0002_foobar"), None)
        graph.add_node(("otherapp", "0001_initial"), None)
        graph.add_dependency(
            "testapp.0002_foobar",
            ("testapp", "0002_foobar"),
            ("testapp", "0001_initial"),
        )
        # Use project state to make a new migration change set
        before = self.make_project_state([])
        after = self.make_project_state(
            [self.author_empty, self.other_pony, self.other_stable]
        )
        autodetector = MigrationAutodetector(before, after)
        changes = autodetector._detect_changes()
        # Run through arrange_for_graph with an explicit migration_name; the
        # supplied name replaces the auto-generated suffix in every app.
        migration_name = "custom_name"
        changes = autodetector.arrange_for_graph(changes, graph, migration_name)
        # Make sure there's a new name, deps match, etc.
        self.assertEqual(changes["testapp"][0].name, "0003_%s" % migration_name)
        self.assertEqual(
            changes["testapp"][0].dependencies, [("testapp", "0002_foobar")]
        )
        self.assertEqual(changes["otherapp"][0].name, "0002_%s" % migration_name)
        self.assertEqual(
            changes["otherapp"][0].dependencies, [("otherapp", "0001_initial")]
        )
def test_new_model(self):
"""Tests autodetection of new models."""
changes = self.get_changes([], [self.other_pony_food])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["CreateModel"])
self.assertOperationAttributes(changes, "otherapp", 0, 0, name="Pony")
self.assertEqual(
[name for name, mgr in changes["otherapp"][0].operations[0].managers],
["food_qs", "food_mgr", "food_mgr_kwargs"],
)
def test_old_model(self):
"""Tests deletion of old models."""
changes = self.get_changes([self.author_empty], [])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["DeleteModel"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author")
def test_add_field(self):
"""Tests autodetection of new fields."""
changes = self.get_changes([self.author_empty], [self.author_name])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AddField"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="name")
    @mock.patch(
        "django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition",
        # side_effect raises, so the test fails if the questioner is invoked.
        side_effect=AssertionError("Should not have prompted for not null addition"),
    )
    def test_add_date_fields_with_auto_now_not_asking_for_default(
        self, mocked_ask_method
    ):
        """
        Adding auto_now date/time fields must not prompt for a default: the
        value is always computed at save time.
        """
        changes = self.get_changes(
            [self.author_empty], [self.author_dates_of_birth_auto_now]
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(
            changes, "testapp", 0, ["AddField", "AddField", "AddField"]
        )
        self.assertOperationFieldAttributes(changes, "testapp", 0, 0, auto_now=True)
        self.assertOperationFieldAttributes(changes, "testapp", 0, 1, auto_now=True)
        self.assertOperationFieldAttributes(changes, "testapp", 0, 2, auto_now=True)
    @mock.patch(
        "django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition",
        # side_effect raises, so the test fails if the questioner is invoked.
        side_effect=AssertionError("Should not have prompted for not null addition"),
    )
    def test_add_date_fields_with_auto_now_add_not_asking_for_null_addition(
        self, mocked_ask_method
    ):
        """
        Adding auto_now_add date/time fields must not trigger the generic
        not-null prompt (the auto_now_add-specific prompt applies instead).
        """
        changes = self.get_changes(
            [self.author_empty], [self.author_dates_of_birth_auto_now_add]
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(
            changes, "testapp", 0, ["AddField", "AddField", "AddField"]
        )
        self.assertOperationFieldAttributes(changes, "testapp", 0, 0, auto_now_add=True)
        self.assertOperationFieldAttributes(changes, "testapp", 0, 1, auto_now_add=True)
        self.assertOperationFieldAttributes(changes, "testapp", 0, 2, auto_now_add=True)
    @mock.patch(
        "django.db.migrations.questioner.MigrationQuestioner.ask_auto_now_add_addition"
    )
    def test_add_date_fields_with_auto_now_add_asking_for_default(
        self, mocked_ask_method
    ):
        """
        Adding auto_now_add date/time fields asks the auto_now_add-specific
        question once per added field.
        """
        changes = self.get_changes(
            [self.author_empty], [self.author_dates_of_birth_auto_now_add]
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(
            changes, "testapp", 0, ["AddField", "AddField", "AddField"]
        )
        self.assertOperationFieldAttributes(changes, "testapp", 0, 0, auto_now_add=True)
        self.assertOperationFieldAttributes(changes, "testapp", 0, 1, auto_now_add=True)
        self.assertOperationFieldAttributes(changes, "testapp", 0, 2, auto_now_add=True)
        # One prompt per added auto_now_add field.
        self.assertEqual(mocked_ask_method.call_count, 3)
def test_remove_field(self):
"""Tests autodetection of removed fields."""
changes = self.get_changes([self.author_name], [self.author_empty])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["RemoveField"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="name")
def test_alter_field(self):
"""Tests autodetection of new fields."""
changes = self.get_changes([self.author_name], [self.author_name_longer])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="name", preserve_default=True
)
    def test_supports_functools_partial(self):
        """
        functools.partial values on fields are compared by func/args/keywords:
        an identical partial is not a change, while changed args or keywords
        are detected as an AlterField.
        """
        def _content_file_name(instance, filename, key, **kwargs):
            return "{}/{}".format(instance, filename)
        def content_file_name(key, **kwargs):
            # Returns a fresh partial each call, so comparisons can't rely on
            # object identity.
            return functools.partial(_content_file_name, key, **kwargs)
        # An unchanged partial reference.
        before = [
            ModelState(
                "testapp",
                "Author",
                [
                    ("id", models.AutoField(primary_key=True)),
                    (
                        "file",
                        models.FileField(
                            max_length=200, upload_to=content_file_name("file")
                        ),
                    ),
                ],
            )
        ]
        after = [
            ModelState(
                "testapp",
                "Author",
                [
                    ("id", models.AutoField(primary_key=True)),
                    (
                        "file",
                        models.FileField(
                            max_length=200, upload_to=content_file_name("file")
                        ),
                    ),
                ],
            )
        ]
        changes = self.get_changes(before, after)
        self.assertNumberMigrations(changes, "testapp", 0)
        # A changed partial reference.
        args_changed = [
            ModelState(
                "testapp",
                "Author",
                [
                    ("id", models.AutoField(primary_key=True)),
                    (
                        "file",
                        models.FileField(
                            max_length=200, upload_to=content_file_name("other-file")
                        ),
                    ),
                ],
            )
        ]
        changes = self.get_changes(before, args_changed)
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])
        # Can't use assertOperationFieldAttributes because we need the
        # deconstructed version, i.e., the exploded func/args/keywords rather
        # than the partial: we don't care if it's not the same instance of the
        # partial, only if it's the same source function, args, and keywords.
        value = changes["testapp"][0].operations[0].field.upload_to
        self.assertEqual(
            (_content_file_name, ("other-file",), {}),
            (value.func, value.args, value.keywords),
        )
        # Same check for changed keyword arguments.
        kwargs_changed = [
            ModelState(
                "testapp",
                "Author",
                [
                    ("id", models.AutoField(primary_key=True)),
                    (
                        "file",
                        models.FileField(
                            max_length=200,
                            upload_to=content_file_name("file", spam="eggs"),
                        ),
                    ),
                ],
            )
        ]
        changes = self.get_changes(before, kwargs_changed)
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])
        value = changes["testapp"][0].operations[0].field.upload_to
        self.assertEqual(
            (_content_file_name, ("file",), {"spam": "eggs"}),
            (value.func, value.args, value.keywords),
        )
    @mock.patch(
        "django.db.migrations.questioner.MigrationQuestioner.ask_not_null_alteration",
        # side_effect raises, so the test fails if the questioner is invoked.
        side_effect=AssertionError("Should not have prompted for not null addition"),
    )
    def test_alter_field_to_not_null_with_default(self, mocked_ask_method):
        """
        #23609 - Tests autodetection of nullable to non-nullable alterations.
        """
        # The new field already declares a default, so no prompt is needed.
        changes = self.get_changes([self.author_name_null], [self.author_name_default])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])
        self.assertOperationAttributes(
            changes, "testapp", 0, 0, name="name", preserve_default=True
        )
        self.assertOperationFieldAttributes(
            changes, "testapp", 0, 0, default="Ada Lovelace"
        )
    @mock.patch(
        "django.db.migrations.questioner.MigrationQuestioner.ask_not_null_alteration",
        # Simulate the user choosing to ignore the missing default.
        return_value=models.NOT_PROVIDED,
    )
    def test_alter_field_to_not_null_without_default(self, mocked_ask_method):
        """
        #23609 - Tests autodetection of nullable to non-nullable alterations.
        """
        changes = self.get_changes([self.author_name_null], [self.author_name])
        self.assertEqual(mocked_ask_method.call_count, 1)
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])
        self.assertOperationAttributes(
            changes, "testapp", 0, 0, name="name", preserve_default=True
        )
        self.assertOperationFieldAttributes(
            changes, "testapp", 0, 0, default=models.NOT_PROVIDED
        )
    @mock.patch(
        "django.db.migrations.questioner.MigrationQuestioner.ask_not_null_alteration",
        # Simulate the user supplying a one-off default at the prompt.
        return_value="Some Name",
    )
    def test_alter_field_to_not_null_oneoff_default(self, mocked_ask_method):
        """
        #23609 - Tests autodetection of nullable to non-nullable alterations.
        """
        changes = self.get_changes([self.author_name_null], [self.author_name])
        self.assertEqual(mocked_ask_method.call_count, 1)
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])
        # preserve_default=False: the one-off default is applied once and
        # then dropped from the field definition.
        self.assertOperationAttributes(
            changes, "testapp", 0, 0, name="name", preserve_default=False
        )
        self.assertOperationFieldAttributes(
            changes, "testapp", 0, 0, default="Some Name"
        )
def test_rename_field(self):
"""Tests autodetection of renamed fields."""
changes = self.get_changes(
[self.author_name],
[self.author_name_renamed],
MigrationQuestioner({"ask_rename": True}),
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["RenameField"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, old_name="name", new_name="names"
)
    def test_rename_field_foreign_key_to_field(self):
        """
        Renaming a field that an FK targets via to_field is detected as a
        RenameField (the FK's to_field update is handled by the rename).
        """
        before = [
            ModelState(
                "app",
                "Foo",
                [
                    ("id", models.AutoField(primary_key=True)),
                    ("field", models.IntegerField(unique=True)),
                ],
            ),
            ModelState(
                "app",
                "Bar",
                [
                    ("id", models.AutoField(primary_key=True)),
                    (
                        "foo",
                        models.ForeignKey("app.Foo", models.CASCADE, to_field="field"),
                    ),
                ],
            ),
        ]
        after = [
            ModelState(
                "app",
                "Foo",
                [
                    ("id", models.AutoField(primary_key=True)),
                    ("renamed_field", models.IntegerField(unique=True)),
                ],
            ),
            ModelState(
                "app",
                "Bar",
                [
                    ("id", models.AutoField(primary_key=True)),
                    (
                        "foo",
                        models.ForeignKey(
                            "app.Foo", models.CASCADE, to_field="renamed_field"
                        ),
                    ),
                ],
            ),
        ]
        changes = self.get_changes(
            before, after, MigrationQuestioner({"ask_rename": True})
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "app", 1)
        self.assertOperationTypes(changes, "app", 0, ["RenameField"])
        self.assertOperationAttributes(
            changes, "app", 0, 0, old_name="field", new_name="renamed_field"
        )
    def test_rename_foreign_object_fields(self):
        """
        Renames of the fields composing a ForeignObject relation are detected
        whether they occur on the target model (to_fields) or on the model
        holding the relation (from_fields).
        """
        fields = ("first", "second")
        renamed_fields = ("first_renamed", "second_renamed")
        before = [
            ModelState(
                "app",
                "Foo",
                [
                    ("id", models.AutoField(primary_key=True)),
                    ("first", models.IntegerField()),
                    ("second", models.IntegerField()),
                ],
                options={"unique_together": {fields}},
            ),
            ModelState(
                "app",
                "Bar",
                [
                    ("id", models.AutoField(primary_key=True)),
                    ("first", models.IntegerField()),
                    ("second", models.IntegerField()),
                    (
                        "foo",
                        models.ForeignObject(
                            "app.Foo",
                            models.CASCADE,
                            from_fields=fields,
                            to_fields=fields,
                        ),
                    ),
                ],
            ),
        ]
        # Case 1: to_fields renames.
        after = [
            ModelState(
                "app",
                "Foo",
                [
                    ("id", models.AutoField(primary_key=True)),
                    ("first_renamed", models.IntegerField()),
                    ("second_renamed", models.IntegerField()),
                ],
                options={"unique_together": {renamed_fields}},
            ),
            ModelState(
                "app",
                "Bar",
                [
                    ("id", models.AutoField(primary_key=True)),
                    ("first", models.IntegerField()),
                    ("second", models.IntegerField()),
                    (
                        "foo",
                        models.ForeignObject(
                            "app.Foo",
                            models.CASCADE,
                            from_fields=fields,
                            to_fields=renamed_fields,
                        ),
                    ),
                ],
            ),
        ]
        changes = self.get_changes(
            before, after, MigrationQuestioner({"ask_rename": True})
        )
        # Renames on the target model also update its unique_together.
        self.assertNumberMigrations(changes, "app", 1)
        self.assertOperationTypes(
            changes, "app", 0, ["RenameField", "RenameField", "AlterUniqueTogether"]
        )
        self.assertOperationAttributes(
            changes,
            "app",
            0,
            0,
            model_name="foo",
            old_name="first",
            new_name="first_renamed",
        )
        self.assertOperationAttributes(
            changes,
            "app",
            0,
            1,
            model_name="foo",
            old_name="second",
            new_name="second_renamed",
        )
        # Case 2: from_fields renames.
        after = [
            ModelState(
                "app",
                "Foo",
                [
                    ("id", models.AutoField(primary_key=True)),
                    ("first", models.IntegerField()),
                    ("second", models.IntegerField()),
                ],
                options={"unique_together": {fields}},
            ),
            ModelState(
                "app",
                "Bar",
                [
                    ("id", models.AutoField(primary_key=True)),
                    ("first_renamed", models.IntegerField()),
                    ("second_renamed", models.IntegerField()),
                    (
                        "foo",
                        models.ForeignObject(
                            "app.Foo",
                            models.CASCADE,
                            from_fields=renamed_fields,
                            to_fields=fields,
                        ),
                    ),
                ],
            ),
        ]
        changes = self.get_changes(
            before, after, MigrationQuestioner({"ask_rename": True})
        )
        self.assertNumberMigrations(changes, "app", 1)
        self.assertOperationTypes(changes, "app", 0, ["RenameField", "RenameField"])
        self.assertOperationAttributes(
            changes,
            "app",
            0,
            0,
            model_name="bar",
            old_name="first",
            new_name="first_renamed",
        )
        self.assertOperationAttributes(
            changes,
            "app",
            0,
            1,
            model_name="bar",
            old_name="second",
            new_name="second_renamed",
        )
    def test_rename_referenced_primary_key(self):
        """
        Renaming a primary key that another model's FK points at (implicitly,
        with no to_field) is detected as a single RenameField.
        """
        before = [
            ModelState(
                "app",
                "Foo",
                [
                    ("id", models.CharField(primary_key=True, serialize=False)),
                ],
            ),
            ModelState(
                "app",
                "Bar",
                [
                    ("id", models.AutoField(primary_key=True)),
                    ("foo", models.ForeignKey("app.Foo", models.CASCADE)),
                ],
            ),
        ]
        after = [
            ModelState(
                "app",
                "Foo",
                [("renamed_id", models.CharField(primary_key=True, serialize=False))],
            ),
            ModelState(
                "app",
                "Bar",
                [
                    ("id", models.AutoField(primary_key=True)),
                    ("foo", models.ForeignKey("app.Foo", models.CASCADE)),
                ],
            ),
        ]
        changes = self.get_changes(
            before, after, MigrationQuestioner({"ask_rename": True})
        )
        self.assertNumberMigrations(changes, "app", 1)
        self.assertOperationTypes(changes, "app", 0, ["RenameField"])
        self.assertOperationAttributes(
            changes, "app", 0, 0, old_name="id", new_name="renamed_id"
        )
    def test_rename_field_preserved_db_column(self):
        """
        RenameField is used if a field is renamed and db_column equal to the
        old field's column is added.
        """
        before = [
            ModelState(
                "app",
                "Foo",
                [
                    ("id", models.AutoField(primary_key=True)),
                    ("field", models.IntegerField()),
                ],
            ),
        ]
        after = [
            ModelState(
                "app",
                "Foo",
                [
                    ("id", models.AutoField(primary_key=True)),
                    ("renamed_field", models.IntegerField(db_column="field")),
                ],
            ),
        ]
        changes = self.get_changes(
            before, after, MigrationQuestioner({"ask_rename": True})
        )
        # The AlterField (adding db_column) is emitted before the RenameField
        # so the rename is a pure state change with no column move.
        self.assertNumberMigrations(changes, "app", 1)
        self.assertOperationTypes(changes, "app", 0, ["AlterField", "RenameField"])
        self.assertOperationAttributes(
            changes,
            "app",
            0,
            0,
            model_name="foo",
            name="field",
        )
        self.assertEqual(
            changes["app"][0].operations[0].field.deconstruct(),
            (
                "field",
                "django.db.models.IntegerField",
                [],
                {"db_column": "field"},
            ),
        )
        self.assertOperationAttributes(
            changes,
            "app",
            0,
            1,
            model_name="foo",
            old_name="field",
            new_name="renamed_field",
        )
    def test_rename_related_field_preserved_db_column(self):
        """
        Same as test_rename_field_preserved_db_column, but for a ForeignKey:
        renaming the FK while pinning db_column to the old column name yields
        AlterField + RenameField rather than a remove/add pair.
        """
        before = [
            ModelState(
                "app",
                "Foo",
                [
                    ("id", models.AutoField(primary_key=True)),
                ],
            ),
            ModelState(
                "app",
                "Bar",
                [
                    ("id", models.AutoField(primary_key=True)),
                    ("foo", models.ForeignKey("app.Foo", models.CASCADE)),
                ],
            ),
        ]
        after = [
            ModelState(
                "app",
                "Foo",
                [
                    ("id", models.AutoField(primary_key=True)),
                ],
            ),
            ModelState(
                "app",
                "Bar",
                [
                    ("id", models.AutoField(primary_key=True)),
                    (
                        "renamed_foo",
                        models.ForeignKey(
                            "app.Foo", models.CASCADE, db_column="foo_id"
                        ),
                    ),
                ],
            ),
        ]
        changes = self.get_changes(
            before, after, MigrationQuestioner({"ask_rename": True})
        )
        self.assertNumberMigrations(changes, "app", 1)
        self.assertOperationTypes(changes, "app", 0, ["AlterField", "RenameField"])
        self.assertOperationAttributes(
            changes,
            "app",
            0,
            0,
            model_name="bar",
            name="foo",
        )
        self.assertEqual(
            changes["app"][0].operations[0].field.deconstruct(),
            (
                "foo",
                "django.db.models.ForeignKey",
                [],
                {"to": "app.foo", "on_delete": models.CASCADE, "db_column": "foo_id"},
            ),
        )
        self.assertOperationAttributes(
            changes,
            "app",
            0,
            1,
            model_name="bar",
            old_name="foo",
            new_name="renamed_foo",
        )
    def test_rename_field_with_renamed_model(self):
        """
        Renaming a model and one of its fields in the same step is detected
        as RenameModel followed by RenameField.
        """
        changes = self.get_changes(
            [self.author_name],
            [
                ModelState(
                    "testapp",
                    "RenamedAuthor",
                    [
                        ("id", models.AutoField(primary_key=True)),
                        ("renamed_name", models.CharField(max_length=200)),
                    ],
                ),
            ],
            MigrationQuestioner({"ask_rename_model": True, "ask_rename": True}),
        )
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["RenameModel", "RenameField"])
        self.assertOperationAttributes(
            changes,
            "testapp",
            0,
            0,
            old_name="Author",
            new_name="RenamedAuthor",
        )
        self.assertOperationAttributes(
            changes,
            "testapp",
            0,
            1,
            old_name="name",
            new_name="renamed_name",
        )
def test_rename_model(self):
"""Tests autodetection of renamed models."""
changes = self.get_changes(
[self.author_with_book, self.book],
[self.author_renamed_with_book, self.book_with_author_renamed],
MigrationQuestioner({"ask_rename_model": True}),
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["RenameModel"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, old_name="Author", new_name="Writer"
)
# Now that RenameModel handles related fields too, there should be
# no AlterField for the related field.
self.assertNumberMigrations(changes, "otherapp", 0)
    def test_rename_model_case(self):
        """
        Model name is case-insensitive. Changing case doesn't lead to any
        autodetected operations.
        """
        # Same model, lowercase name only.
        author_renamed = ModelState(
            "testapp",
            "author",
            [
                ("id", models.AutoField(primary_key=True)),
            ],
        )
        changes = self.get_changes(
            [self.author_empty, self.book],
            [author_renamed, self.book],
            questioner=MigrationQuestioner({"ask_rename_model": True}),
        )
        # No changes in either the renamed app or the app with the FK to it.
        self.assertNumberMigrations(changes, "testapp", 0)
        self.assertNumberMigrations(changes, "otherapp", 0)
    def test_renamed_referenced_m2m_model_case(self):
        """
        A case-only rename of a model referenced through a ManyToManyField
        produces no operations in either app.
        """
        # Same model, lowercase name only.
        publisher_renamed = ModelState(
            "testapp",
            "publisher",
            [
                ("id", models.AutoField(primary_key=True)),
                ("name", models.CharField(max_length=100)),
            ],
        )
        changes = self.get_changes(
            [self.publisher, self.author_with_m2m],
            [publisher_renamed, self.author_with_m2m],
            questioner=MigrationQuestioner({"ask_rename_model": True}),
        )
        self.assertNumberMigrations(changes, "testapp", 0)
        self.assertNumberMigrations(changes, "otherapp", 0)
def test_rename_m2m_through_model(self):
"""
Tests autodetection of renamed models that are used in M2M relations as
through models.
"""
changes = self.get_changes(
[self.author_with_m2m_through, self.publisher, self.contract],
[
self.author_with_renamed_m2m_through,
self.publisher,
self.contract_renamed,
],
MigrationQuestioner({"ask_rename_model": True}),
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["RenameModel"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, old_name="Contract", new_name="Deal"
)
    def test_rename_model_with_renamed_rel_field(self):
        """
        Tests autodetection of renamed models while simultaneously renaming one
        of the fields that relate to the renamed model.
        """
        changes = self.get_changes(
            [self.author_with_book, self.book],
            [self.author_renamed_with_book, self.book_with_field_and_author_renamed],
            MigrationQuestioner({"ask_rename": True, "ask_rename_model": True}),
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["RenameModel"])
        self.assertOperationAttributes(
            changes, "testapp", 0, 0, old_name="Author", new_name="Writer"
        )
        # Right number/type of migrations for related field rename?
        # Alter is already taken care of.
        self.assertNumberMigrations(changes, "otherapp", 1)
        self.assertOperationTypes(changes, "otherapp", 0, ["RenameField"])
        self.assertOperationAttributes(
            changes, "otherapp", 0, 0, old_name="author", new_name="writer"
        )
    def test_rename_model_with_fks_in_different_position(self):
        """
        #24537 - The order of fields in a model does not influence
        the RenameModel detection.
        """
        before = [
            ModelState(
                "testapp",
                "EntityA",
                [
                    ("id", models.AutoField(primary_key=True)),
                ],
            ),
            ModelState(
                "testapp",
                "EntityB",
                [
                    ("id", models.AutoField(primary_key=True)),
                    ("some_label", models.CharField(max_length=255)),
                    ("entity_a", models.ForeignKey("testapp.EntityA", models.CASCADE)),
                ],
            ),
        ]
        # Same fields as EntityB, but with some_label and entity_a swapped.
        after = [
            ModelState(
                "testapp",
                "EntityA",
                [
                    ("id", models.AutoField(primary_key=True)),
                ],
            ),
            ModelState(
                "testapp",
                "RenamedEntityB",
                [
                    ("id", models.AutoField(primary_key=True)),
                    ("entity_a", models.ForeignKey("testapp.EntityA", models.CASCADE)),
                    ("some_label", models.CharField(max_length=255)),
                ],
            ),
        ]
        changes = self.get_changes(
            before, after, MigrationQuestioner({"ask_rename_model": True})
        )
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["RenameModel"])
        self.assertOperationAttributes(
            changes, "testapp", 0, 0, old_name="EntityB", new_name="RenamedEntityB"
        )
    def test_rename_model_reverse_relation_dependencies(self):
        """
        The migration to rename a model pointed to by a foreign key in another
        app must run after the other app's migration that adds the foreign key
        with model's original name. Therefore, the renaming migration has a
        dependency on that other migration.
        """
        before = [
            ModelState(
                "testapp",
                "EntityA",
                [
                    ("id", models.AutoField(primary_key=True)),
                ],
            ),
            ModelState(
                "otherapp",
                "EntityB",
                [
                    ("id", models.AutoField(primary_key=True)),
                    ("entity_a", models.ForeignKey("testapp.EntityA", models.CASCADE)),
                ],
            ),
        ]
        after = [
            ModelState(
                "testapp",
                "RenamedEntityA",
                [
                    ("id", models.AutoField(primary_key=True)),
                ],
            ),
            ModelState(
                "otherapp",
                "EntityB",
                [
                    ("id", models.AutoField(primary_key=True)),
                    (
                        "entity_a",
                        models.ForeignKey("testapp.RenamedEntityA", models.CASCADE),
                    ),
                ],
            ),
        ]
        changes = self.get_changes(
            before, after, MigrationQuestioner({"ask_rename_model": True})
        )
        self.assertNumberMigrations(changes, "testapp", 1)
        # The rename depends on otherapp's first migration (the FK creator).
        self.assertMigrationDependencies(
            changes, "testapp", 0, [("otherapp", "__first__")]
        )
        self.assertOperationTypes(changes, "testapp", 0, ["RenameModel"])
        self.assertOperationAttributes(
            changes, "testapp", 0, 0, old_name="EntityA", new_name="RenamedEntityA"
        )
    def test_fk_dependency(self):
        """Having a ForeignKey automatically adds a dependency."""
        # Note that testapp (author) has no dependencies,
        # otherapp (book) depends on testapp (author),
        # thirdapp (edition) depends on otherapp (book)
        changes = self.get_changes([], [self.author_name, self.book, self.edition])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author")
        self.assertMigrationDependencies(changes, "testapp", 0, [])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "otherapp", 1)
        self.assertOperationTypes(changes, "otherapp", 0, ["CreateModel"])
        self.assertOperationAttributes(changes, "otherapp", 0, 0, name="Book")
        self.assertMigrationDependencies(
            changes, "otherapp", 0, [("testapp", "auto_1")]
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "thirdapp", 1)
        self.assertOperationTypes(changes, "thirdapp", 0, ["CreateModel"])
        self.assertOperationAttributes(changes, "thirdapp", 0, 0, name="Edition")
        self.assertMigrationDependencies(
            changes, "thirdapp", 0, [("otherapp", "auto_1")]
        )
    def test_proxy_fk_dependency(self):
        """FK dependencies still work on proxy models."""
        # Note that testapp (author) has no dependencies,
        # otherapp (book) depends on thirdapp (authorproxy), which in turn
        # depends on testapp (the proxied author).
        changes = self.get_changes(
            [], [self.author_empty, self.author_proxy_third, self.book_proxy_fk]
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author")
        self.assertMigrationDependencies(changes, "testapp", 0, [])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "otherapp", 1)
        self.assertOperationTypes(changes, "otherapp", 0, ["CreateModel"])
        self.assertOperationAttributes(changes, "otherapp", 0, 0, name="Book")
        self.assertMigrationDependencies(
            changes, "otherapp", 0, [("thirdapp", "auto_1")]
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "thirdapp", 1)
        self.assertOperationTypes(changes, "thirdapp", 0, ["CreateModel"])
        self.assertOperationAttributes(changes, "thirdapp", 0, 0, name="AuthorProxy")
        self.assertMigrationDependencies(
            changes, "thirdapp", 0, [("testapp", "auto_1")]
        )
def test_same_app_no_fk_dependency(self):
"""
A migration with a FK between two models of the same app
does not have a dependency to itself.
"""
changes = self.get_changes([], [self.author_with_publisher, self.publisher])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel", "CreateModel"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="Publisher")
self.assertOperationAttributes(changes, "testapp", 0, 1, name="Author")
self.assertMigrationDependencies(changes, "testapp", 0, [])
    def test_circular_fk_dependency(self):
        """
        Having a circular ForeignKey dependency automatically
        resolves the situation into 2 migrations on one side and 1 on the other.
        """
        changes = self.get_changes(
            [], [self.author_with_book, self.book, self.publisher_with_book]
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["CreateModel", "CreateModel"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="Publisher")
        self.assertOperationAttributes(changes, "testapp", 0, 1, name="Author")
        self.assertMigrationDependencies(
            changes, "testapp", 0, [("otherapp", "auto_1")]
        )
        # Right number/type of migrations? The cycle is broken by deferring
        # the FK on the otherapp side into a second AddField migration.
        self.assertNumberMigrations(changes, "otherapp", 2)
        self.assertOperationTypes(changes, "otherapp", 0, ["CreateModel"])
        self.assertOperationTypes(changes, "otherapp", 1, ["AddField"])
        self.assertMigrationDependencies(changes, "otherapp", 0, [])
        self.assertMigrationDependencies(
            changes, "otherapp", 1, [("otherapp", "auto_1"), ("testapp", "auto_1")]
        )
        # both split migrations should be `initial`
        self.assertTrue(changes["otherapp"][0].initial)
        self.assertTrue(changes["otherapp"][1].initial)
    def test_same_app_circular_fk_dependency(self):
        """
        A migration with a FK between two models of the same app does
        not have a dependency to itself.
        """
        changes = self.get_changes(
            [], [self.author_with_publisher, self.publisher_with_author]
        )
        # Right number/type of migrations? Within one app the cycle is broken
        # by creating both models first and adding one FK afterwards, all in
        # the same migration.
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(
            changes, "testapp", 0, ["CreateModel", "CreateModel", "AddField"]
        )
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author")
        self.assertOperationAttributes(changes, "testapp", 0, 1, name="Publisher")
        self.assertOperationAttributes(changes, "testapp", 0, 2, name="publisher")
        self.assertMigrationDependencies(changes, "testapp", 0, [])
    def test_same_app_circular_fk_dependency_with_unique_together_and_indexes(self):
        """
        #22275 - A migration with circular FK dependency does not try
        to create unique together constraint and indexes before creating all
        required fields first.
        """
        changes = self.get_changes([], [self.knight, self.rabbit])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "eggs", 1)
        self.assertOperationTypes(
            changes,
            "eggs",
            0,
            ["CreateModel", "CreateModel", "AddIndex", "AlterUniqueTogether"],
        )
        # The unique_together option must not be embedded in the CreateModel
        # operations themselves — it is applied separately afterwards.
        self.assertNotIn("unique_together", changes["eggs"][0].operations[0].options)
        self.assertNotIn("unique_together", changes["eggs"][0].operations[1].options)
        self.assertMigrationDependencies(changes, "eggs", 0, [])
def test_alter_db_table_add(self):
"""Tests detection for adding db_table in model's options."""
changes = self.get_changes(
[self.author_empty], [self.author_with_db_table_options]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterModelTable"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="author", table="author_one"
)
def test_alter_db_table_change(self):
"""Tests detection for changing db_table in model's options'."""
changes = self.get_changes(
[self.author_with_db_table_options], [self.author_with_new_db_table_options]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterModelTable"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="author", table="author_two"
)
def test_alter_db_table_remove(self):
"""Tests detection for removing db_table in model's options."""
changes = self.get_changes(
[self.author_with_db_table_options], [self.author_empty]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AlterModelTable"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="author", table=None
)
def test_alter_db_table_no_changes(self):
"""
Alter_db_table doesn't generate a migration if no changes have been made.
"""
changes = self.get_changes(
[self.author_with_db_table_options], [self.author_with_db_table_options]
)
# Right number of migrations?
self.assertEqual(len(changes), 0)
    def test_keep_db_table_with_model_change(self):
        """
        Tests when model changes but db_table stays as-is, autodetector must not
        create more than one operation.
        """
        # The questioner confirms the rename so RenameModel is used instead of
        # a delete/create pair.
        changes = self.get_changes(
            [self.author_with_db_table_options],
            [self.author_renamed_with_db_table_options],
            MigrationQuestioner({"ask_rename_model": True}),
        )
        # Right number/type of migrations? No AlterModelTable since db_table
        # is unchanged.
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["RenameModel"])
        self.assertOperationAttributes(
            changes, "testapp", 0, 0, old_name="Author", new_name="NewAuthor"
        )
    def test_alter_db_table_with_model_change(self):
        """
        Tests when model and db_table changes, autodetector must create two
        operations.
        """
        changes = self.get_changes(
            [self.author_with_db_table_options],
            [self.author_renamed_with_new_db_table_options],
            MigrationQuestioner({"ask_rename_model": True}),
        )
        # Right number/type of migrations? The model is renamed first, then
        # the table altered on the renamed model.
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(
            changes, "testapp", 0, ["RenameModel", "AlterModelTable"]
        )
        self.assertOperationAttributes(
            changes, "testapp", 0, 0, old_name="Author", new_name="NewAuthor"
        )
        self.assertOperationAttributes(
            changes, "testapp", 0, 1, name="newauthor", table="author_three"
        )
    def test_identical_regex_doesnt_alter(self):
        """
        An inline RegexValidator that deconstructs to the same value as
        validate_slug doesn't trigger an AlterField.
        """
        from_state = ModelState(
            "testapp",
            "model",
            [
                (
                    "id",
                    models.AutoField(
                        primary_key=True,
                        validators=[
                            RegexValidator(
                                re.compile("^[-a-zA-Z0-9_]+\\Z"),
                                "Enter a valid “slug” consisting of letters, numbers, "
                                "underscores or hyphens.",
                                "invalid",
                            )
                        ],
                    ),
                )
            ],
        )
        to_state = ModelState(
            "testapp",
            "model",
            [("id", models.AutoField(primary_key=True, validators=[validate_slug]))],
        )
        changes = self.get_changes([from_state], [to_state])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "testapp", 0)
    def test_different_regex_does_alter(self):
        """
        A RegexValidator whose pattern/flags differ from validate_slug's does
        trigger an AlterField.
        """
        from_state = ModelState(
            "testapp",
            "model",
            [
                (
                    "id",
                    models.AutoField(
                        primary_key=True,
                        validators=[
                            RegexValidator(
                                # Different pattern and explicit flags (32).
                                re.compile("^[a-z]+\\Z", 32),
                                "Enter a valid “slug” consisting of letters, numbers, "
                                "underscores or hyphens.",
                                "invalid",
                            )
                        ],
                    ),
                )
            ],
        )
        to_state = ModelState(
            "testapp",
            "model",
            [("id", models.AutoField(primary_key=True, validators=[validate_slug]))],
        )
        changes = self.get_changes([from_state], [to_state])
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])
def test_empty_foo_together(self):
"""
#23452 - Empty unique/index_together shouldn't generate a migration.
"""
# Explicitly testing for not specified, since this is the case after
# a CreateModel operation w/o any definition on the original model
model_state_not_specified = ModelState(
"a", "model", [("id", models.AutoField(primary_key=True))]
)
# Explicitly testing for None, since this was the issue in #23452 after
# an AlterFooTogether operation with e.g. () as value
model_state_none = ModelState(
"a",
"model",
[("id", models.AutoField(primary_key=True))],
{
"index_together": None,
"unique_together": None,
},
)
# Explicitly testing for the empty set, since we now always have sets.
# During removal (('col1', 'col2'),) --> () this becomes set([])
model_state_empty = ModelState(
"a",
"model",
[("id", models.AutoField(primary_key=True))],
{
"index_together": set(),
"unique_together": set(),
},
)
def test(from_state, to_state, msg):
changes = self.get_changes([from_state], [to_state])
if changes:
ops = ", ".join(
o.__class__.__name__ for o in changes["a"][0].operations
)
self.fail("Created operation(s) %s from %s" % (ops, msg))
tests = (
(
model_state_not_specified,
model_state_not_specified,
'"not specified" to "not specified"',
),
(model_state_not_specified, model_state_none, '"not specified" to "None"'),
(
model_state_not_specified,
model_state_empty,
'"not specified" to "empty"',
),
(model_state_none, model_state_not_specified, '"None" to "not specified"'),
(model_state_none, model_state_none, '"None" to "None"'),
(model_state_none, model_state_empty, '"None" to "empty"'),
(
model_state_empty,
model_state_not_specified,
'"empty" to "not specified"',
),
(model_state_empty, model_state_none, '"empty" to "None"'),
(model_state_empty, model_state_empty, '"empty" to "empty"'),
)
for t in tests:
test(*t)
    def test_create_model_with_indexes(self):
        """Test creation of new model with indexes already defined."""
        author = ModelState(
            "otherapp",
            "Author",
            [
                ("id", models.AutoField(primary_key=True)),
                ("name", models.CharField(max_length=200)),
            ],
            {
                "indexes": [
                    models.Index(fields=["name"], name="create_model_with_indexes_idx")
                ]
            },
        )
        changes = self.get_changes([], [author])
        added_index = models.Index(
            fields=["name"], name="create_model_with_indexes_idx"
        )
        # Right number of migrations?
        self.assertEqual(len(changes["otherapp"]), 1)
        # Right number of actions?
        migration = changes["otherapp"][0]
        self.assertEqual(len(migration.operations), 2)
        # Right actions order? The index is added in a separate AddIndex
        # operation after the model is created.
        self.assertOperationTypes(changes, "otherapp", 0, ["CreateModel", "AddIndex"])
        self.assertOperationAttributes(changes, "otherapp", 0, 0, name="Author")
        self.assertOperationAttributes(
            changes, "otherapp", 0, 1, model_name="author", index=added_index
        )
def test_add_indexes(self):
"""Test change detection of new indexes."""
changes = self.get_changes(
[self.author_empty, self.book], [self.author_empty, self.book_indexes]
)
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["AddIndex"])
added_index = models.Index(
fields=["author", "title"], name="book_title_author_idx"
)
self.assertOperationAttributes(
changes, "otherapp", 0, 0, model_name="book", index=added_index
)
def test_remove_indexes(self):
"""Test change detection of removed indexes."""
changes = self.get_changes(
[self.author_empty, self.book_indexes], [self.author_empty, self.book]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(changes, "otherapp", 0, ["RemoveIndex"])
self.assertOperationAttributes(
changes, "otherapp", 0, 0, model_name="book", name="book_title_author_idx"
)
    def test_order_fields_indexes(self):
        """Test change detection of reordering of fields in indexes."""
        changes = self.get_changes(
            [self.author_empty, self.book_indexes],
            [self.author_empty, self.book_unordered_indexes],
        )
        # Reordering is expressed as removing the old index then adding the
        # reordered one.
        self.assertNumberMigrations(changes, "otherapp", 1)
        self.assertOperationTypes(changes, "otherapp", 0, ["RemoveIndex", "AddIndex"])
        self.assertOperationAttributes(
            changes, "otherapp", 0, 0, model_name="book", name="book_title_author_idx"
        )
        added_index = models.Index(
            fields=["title", "author"], name="book_author_title_idx"
        )
        self.assertOperationAttributes(
            changes, "otherapp", 0, 1, model_name="book", index=added_index
        )
    def test_create_model_with_check_constraint(self):
        """Test creation of new model with constraints already defined."""
        author = ModelState(
            "otherapp",
            "Author",
            [
                ("id", models.AutoField(primary_key=True)),
                ("name", models.CharField(max_length=200)),
            ],
            {
                "constraints": [
                    models.CheckConstraint(
                        check=models.Q(name__contains="Bob"), name="name_contains_bob"
                    )
                ]
            },
        )
        changes = self.get_changes([], [author])
        added_constraint = models.CheckConstraint(
            check=models.Q(name__contains="Bob"), name="name_contains_bob"
        )
        # Right number of migrations?
        self.assertEqual(len(changes["otherapp"]), 1)
        # Right number of actions?
        migration = changes["otherapp"][0]
        self.assertEqual(len(migration.operations), 2)
        # Right actions order? The constraint is added in a separate
        # AddConstraint operation after the model is created.
        self.assertOperationTypes(
            changes, "otherapp", 0, ["CreateModel", "AddConstraint"]
        )
        self.assertOperationAttributes(changes, "otherapp", 0, 0, name="Author")
        self.assertOperationAttributes(
            changes, "otherapp", 0, 1, model_name="author", constraint=added_constraint
        )
def test_add_constraints(self):
"""Test change detection of new constraints."""
changes = self.get_changes(
[self.author_name], [self.author_name_check_constraint]
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AddConstraint"])
added_constraint = models.CheckConstraint(
check=models.Q(name__contains="Bob"), name="name_contains_bob"
)
self.assertOperationAttributes(
changes, "testapp", 0, 0, model_name="author", constraint=added_constraint
)
def test_remove_constraints(self):
"""Test change detection of removed constraints."""
changes = self.get_changes(
[self.author_name_check_constraint], [self.author_name]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["RemoveConstraint"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, model_name="author", name="name_contains_bob"
)
    def test_add_foo_together(self):
        """Tests index/unique_together detection."""
        changes = self.get_changes(
            [self.author_empty, self.book], [self.author_empty, self.book_foo_together]
        )
        # Right number/type of migrations? Both options are added in one
        # migration, unique_together first.
        self.assertNumberMigrations(changes, "otherapp", 1)
        self.assertOperationTypes(
            changes, "otherapp", 0, ["AlterUniqueTogether", "AlterIndexTogether"]
        )
        self.assertOperationAttributes(
            changes,
            "otherapp",
            0,
            0,
            name="book",
            unique_together={("author", "title")},
        )
        self.assertOperationAttributes(
            changes, "otherapp", 0, 1, name="book", index_together={("author", "title")}
        )
def test_remove_foo_together(self):
"""Tests index/unique_together detection."""
changes = self.get_changes(
[self.author_empty, self.book_foo_together], [self.author_empty, self.book]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "otherapp", 1)
self.assertOperationTypes(
changes, "otherapp", 0, ["AlterUniqueTogether", "AlterIndexTogether"]
)
self.assertOperationAttributes(
changes, "otherapp", 0, 0, name="book", unique_together=set()
)
self.assertOperationAttributes(
changes, "otherapp", 0, 1, name="book", index_together=set()
)
    def test_foo_together_remove_fk(self):
        """Tests unique_together and field removal detection & ordering"""
        changes = self.get_changes(
            [self.author_empty, self.book_foo_together],
            [self.author_empty, self.book_with_no_author],
        )
        # Right number/type of migrations? The together options referencing
        # the field must be cleared before the field itself is removed.
        self.assertNumberMigrations(changes, "otherapp", 1)
        self.assertOperationTypes(
            changes,
            "otherapp",
            0,
            ["AlterUniqueTogether", "AlterIndexTogether", "RemoveField"],
        )
        self.assertOperationAttributes(
            changes, "otherapp", 0, 0, name="book", unique_together=set()
        )
        self.assertOperationAttributes(
            changes, "otherapp", 0, 1, name="book", index_together=set()
        )
        self.assertOperationAttributes(
            changes, "otherapp", 0, 2, model_name="book", name="author"
        )
def test_foo_together_no_changes(self):
"""
index/unique_together doesn't generate a migration if no
changes have been made.
"""
changes = self.get_changes(
[self.author_empty, self.book_foo_together],
[self.author_empty, self.book_foo_together],
)
# Right number of migrations?
self.assertEqual(len(changes), 0)
    def test_foo_together_ordering(self):
        """
        index/unique_together also triggers on ordering changes.
        """
        changes = self.get_changes(
            [self.author_empty, self.book_foo_together],
            [self.author_empty, self.book_foo_together_2],
        )
        # Right number/type of migrations? Column order within the tuples is
        # significant, so swapping it produces Alter operations.
        self.assertNumberMigrations(changes, "otherapp", 1)
        self.assertOperationTypes(
            changes,
            "otherapp",
            0,
            [
                "AlterUniqueTogether",
                "AlterIndexTogether",
            ],
        )
        self.assertOperationAttributes(
            changes,
            "otherapp",
            0,
            0,
            name="book",
            unique_together={("title", "author")},
        )
        self.assertOperationAttributes(
            changes,
            "otherapp",
            0,
            1,
            name="book",
            index_together={("title", "author")},
        )
    def test_add_field_and_foo_together(self):
        """
        Added fields will be created before using them in index/unique_together.
        """
        changes = self.get_changes(
            [self.author_empty, self.book],
            [self.author_empty, self.book_foo_together_3],
        )
        # Right number/type of migrations? AddField comes first so the
        # together options can reference the new field.
        self.assertNumberMigrations(changes, "otherapp", 1)
        self.assertOperationTypes(
            changes,
            "otherapp",
            0,
            ["AddField", "AlterUniqueTogether", "AlterIndexTogether"],
        )
        self.assertOperationAttributes(
            changes,
            "otherapp",
            0,
            1,
            name="book",
            unique_together={("title", "newfield")},
        )
        self.assertOperationAttributes(
            changes,
            "otherapp",
            0,
            2,
            name="book",
            index_together={("title", "newfield")},
        )
    def test_create_model_and_unique_together(self):
        """
        Creating a model whose FK participates in index/unique_together emits
        the together options only after the model and FK field exist.
        """
        author = ModelState(
            "otherapp",
            "Author",
            [
                ("id", models.AutoField(primary_key=True)),
                ("name", models.CharField(max_length=200)),
            ],
        )
        book_with_author = ModelState(
            "otherapp",
            "Book",
            [
                ("id", models.AutoField(primary_key=True)),
                ("author", models.ForeignKey("otherapp.Author", models.CASCADE)),
                ("title", models.CharField(max_length=200)),
            ],
            {
                "index_together": {("title", "author")},
                "unique_together": {("title", "author")},
            },
        )
        changes = self.get_changes(
            [self.book_with_no_author], [author, book_with_author]
        )
        # Right number of migrations?
        self.assertEqual(len(changes["otherapp"]), 1)
        # Right number of actions?
        migration = changes["otherapp"][0]
        self.assertEqual(len(migration.operations), 4)
        # Right actions order?
        self.assertOperationTypes(
            changes,
            "otherapp",
            0,
            ["CreateModel", "AddField", "AlterUniqueTogether", "AlterIndexTogether"],
        )
    def test_remove_field_and_foo_together(self):
        """
        Removed fields will be removed after updating index/unique_together.
        """
        changes = self.get_changes(
            [self.author_empty, self.book_foo_together_3],
            [self.author_empty, self.book_foo_together],
        )
        # Right number/type of migrations? The together options are updated
        # first, then the no-longer-referenced field is dropped.
        self.assertNumberMigrations(changes, "otherapp", 1)
        self.assertOperationTypes(
            changes,
            "otherapp",
            0,
            [
                "AlterUniqueTogether",
                "AlterIndexTogether",
                "RemoveField",
            ],
        )
        self.assertOperationAttributes(
            changes,
            "otherapp",
            0,
            0,
            name="book",
            unique_together={("author", "title")},
        )
        self.assertOperationAttributes(
            changes,
            "otherapp",
            0,
            1,
            name="book",
            index_together={("author", "title")},
        )
        self.assertOperationAttributes(
            changes,
            "otherapp",
            0,
            2,
            model_name="book",
            name="newfield",
        )
    def test_alter_field_and_foo_together(self):
        """Fields are altered after deleting some index/unique_together."""
        # Start: unique_together on name, db_index on age.
        initial_author = ModelState(
            "testapp",
            "Author",
            [
                ("id", models.AutoField(primary_key=True)),
                ("name", models.CharField(max_length=200)),
                ("age", models.IntegerField(db_index=True)),
            ],
            {
                "unique_together": {("name",)},
            },
        )
        # End: the constraints are swapped — unique on name (field-level),
        # index_together on age.
        author_reversed_constraints = ModelState(
            "testapp",
            "Author",
            [
                ("id", models.AutoField(primary_key=True)),
                ("name", models.CharField(max_length=200, unique=True)),
                ("age", models.IntegerField()),
            ],
            {
                "index_together": {("age",)},
            },
        )
        changes = self.get_changes([initial_author], [author_reversed_constraints])
        # unique_together is dropped before the field alterations, and
        # index_together is added only afterwards.
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(
            changes,
            "testapp",
            0,
            [
                "AlterUniqueTogether",
                "AlterField",
                "AlterField",
                "AlterIndexTogether",
            ],
        )
        self.assertOperationAttributes(
            changes,
            "testapp",
            0,
            0,
            name="author",
            unique_together=set(),
        )
        self.assertOperationAttributes(
            changes,
            "testapp",
            0,
            1,
            model_name="author",
            name="age",
        )
        self.assertOperationAttributes(
            changes,
            "testapp",
            0,
            2,
            model_name="author",
            name="name",
        )
        self.assertOperationAttributes(
            changes,
            "testapp",
            0,
            3,
            name="author",
            index_together={("age",)},
        )
    def test_partly_alter_foo_together(self):
        """
        Partially changing unique_together/index_together (removing some
        tuples, adding others) results in one Alter operation per option.
        """
        initial_author = ModelState(
            "testapp",
            "Author",
            [
                ("id", models.AutoField(primary_key=True)),
                ("name", models.CharField(max_length=200)),
                ("age", models.IntegerField()),
            ],
            {
                "unique_together": {("name",), ("age",)},
                "index_together": {("name",)},
            },
        )
        author_reversed_constraints = ModelState(
            "testapp",
            "Author",
            [
                ("id", models.AutoField(primary_key=True)),
                ("name", models.CharField(max_length=200)),
                ("age", models.IntegerField()),
            ],
            {
                "unique_together": {("age",)},
                "index_together": {("name",), ("age",)},
            },
        )
        changes = self.get_changes([initial_author], [author_reversed_constraints])
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(
            changes,
            "testapp",
            0,
            [
                "AlterUniqueTogether",
                "AlterIndexTogether",
            ],
        )
        self.assertOperationAttributes(
            changes,
            "testapp",
            0,
            0,
            name="author",
            unique_together={("age",)},
        )
        self.assertOperationAttributes(
            changes,
            "testapp",
            0,
            1,
            name="author",
            index_together={("name",), ("age",)},
        )
    def test_rename_field_and_foo_together(self):
        """Fields are renamed before updating index/unique_together."""
        # The questioner confirms the rename so RenameField is used instead
        # of a remove/add pair.
        changes = self.get_changes(
            [self.author_empty, self.book_foo_together_3],
            [self.author_empty, self.book_foo_together_4],
            MigrationQuestioner({"ask_rename": True}),
        )
        # Right number/type of migrations? RenameField first, then the
        # together options referencing the renamed field.
        self.assertNumberMigrations(changes, "otherapp", 1)
        self.assertOperationTypes(
            changes,
            "otherapp",
            0,
            [
                "RenameField",
                "AlterUniqueTogether",
                "AlterIndexTogether",
            ],
        )
        self.assertOperationAttributes(
            changes,
            "otherapp",
            0,
            1,
            name="book",
            unique_together={("title", "newfield2")},
        )
        self.assertOperationAttributes(
            changes,
            "otherapp",
            0,
            2,
            name="book",
            index_together={("title", "newfield2")},
        )
    def test_proxy(self):
        """The autodetector correctly deals with proxy models."""
        # First, we test adding a proxy model
        changes = self.get_changes(
            [self.author_empty], [self.author_empty, self.author_proxy]
        )
        # Right number/type of migrations? The proxy is a CreateModel with
        # proxy=True in its options.
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
        self.assertOperationAttributes(
            changes,
            "testapp",
            0,
            0,
            name="AuthorProxy",
            options={"proxy": True, "indexes": [], "constraints": []},
        )
        # Now, we test turning a proxy model into a non-proxy model
        # It should delete the proxy then make the real one
        changes = self.get_changes(
            [self.author_empty, self.author_proxy],
            [self.author_empty, self.author_proxy_notproxy],
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["DeleteModel", "CreateModel"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="AuthorProxy")
        self.assertOperationAttributes(
            changes, "testapp", 0, 1, name="AuthorProxy", options={}
        )
    def test_proxy_non_model_parent(self):
        """
        A proxy model with a plain (non-model) mixin in its bases is created
        with those bases preserved.
        """
        class Mixin:
            pass
        author_proxy_non_model_parent = ModelState(
            "testapp",
            "AuthorProxy",
            [],
            {"proxy": True},
            # Bases mix a plain class with a reference to the concrete model.
            (Mixin, "testapp.author"),
        )
        changes = self.get_changes(
            [self.author_empty],
            [self.author_empty, author_proxy_non_model_parent],
        )
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
        self.assertOperationAttributes(
            changes,
            "testapp",
            0,
            0,
            name="AuthorProxy",
            options={"proxy": True, "indexes": [], "constraints": []},
            bases=(Mixin, "testapp.author"),
        )
    def test_proxy_custom_pk(self):
        """
        #23415 - The autodetector must correctly deal with custom FK on proxy
        models.
        """
        # First, we test the default pk field name
        changes = self.get_changes(
            [], [self.author_empty, self.author_proxy_third, self.book_proxy_fk]
        )
        # The model the FK is pointing from and to.
        self.assertEqual(
            changes["otherapp"][0].operations[0].fields[2][1].remote_field.model,
            "thirdapp.AuthorProxy",
        )
        # Now, we test the custom pk field name
        changes = self.get_changes(
            [], [self.author_custom_pk, self.author_proxy_third, self.book_proxy_fk]
        )
        # The model the FK is pointing from and to.
        self.assertEqual(
            changes["otherapp"][0].operations[0].fields[2][1].remote_field.model,
            "thirdapp.AuthorProxy",
        )
    def test_proxy_to_mti_with_fk_to_proxy(self):
        """
        Turning a proxy into an MTI child re-points an FK to it at the new
        concrete model while keeping the reference string unchanged.
        """
        # First, test the pk table and field name.
        to_state = self.make_project_state(
            [self.author_empty, self.author_proxy_third, self.book_proxy_fk],
        )
        changes = self.get_changes([], to_state)
        fk_field = changes["otherapp"][0].operations[0].fields[2][1]
        # The proxy's concrete model is still testapp.author.
        self.assertEqual(
            to_state.get_concrete_model_key(fk_field.remote_field.model),
            ("testapp", "author"),
        )
        self.assertEqual(fk_field.remote_field.model, "thirdapp.AuthorProxy")
        # Change AuthorProxy to use MTI.
        from_state = to_state.clone()
        to_state = self.make_project_state(
            [self.author_empty, self.author_proxy_third_notproxy, self.book_proxy_fk],
        )
        changes = self.get_changes(from_state, to_state)
        # Right number/type of migrations for the AuthorProxy model?
        self.assertNumberMigrations(changes, "thirdapp", 1)
        self.assertOperationTypes(
            changes, "thirdapp", 0, ["DeleteModel", "CreateModel"]
        )
        # Right number/type of migrations for the Book model with a FK to
        # AuthorProxy?
        self.assertNumberMigrations(changes, "otherapp", 1)
        self.assertOperationTypes(changes, "otherapp", 0, ["AlterField"])
        # otherapp should depend on thirdapp.
        self.assertMigrationDependencies(
            changes, "otherapp", 0, [("thirdapp", "auto_1")]
        )
        # Now, test the pk table and field name: the concrete model moved to
        # thirdapp.authorproxy.
        fk_field = changes["otherapp"][0].operations[0].field
        self.assertEqual(
            to_state.get_concrete_model_key(fk_field.remote_field.model),
            ("thirdapp", "authorproxy"),
        )
        self.assertEqual(fk_field.remote_field.model, "thirdapp.AuthorProxy")
    def test_proxy_to_mti_with_fk_to_proxy_proxy(self):
        """
        Same as test_proxy_to_mti_with_fk_to_proxy, but with the FK pointing
        at a proxy of a proxy.
        """
        # First, test the pk table and field name.
        to_state = self.make_project_state(
            [
                self.author_empty,
                self.author_proxy,
                self.author_proxy_proxy,
                self.book_proxy_proxy_fk,
            ]
        )
        changes = self.get_changes([], to_state)
        fk_field = changes["otherapp"][0].operations[0].fields[1][1]
        # The double proxy still resolves to the concrete testapp.author.
        self.assertEqual(
            to_state.get_concrete_model_key(fk_field.remote_field.model),
            ("testapp", "author"),
        )
        self.assertEqual(fk_field.remote_field.model, "testapp.AAuthorProxyProxy")
        # Change AuthorProxy to use MTI. FK still points to AAuthorProxyProxy,
        # a proxy of AuthorProxy.
        from_state = to_state.clone()
        to_state = self.make_project_state(
            [
                self.author_empty,
                self.author_proxy_notproxy,
                self.author_proxy_proxy,
                self.book_proxy_proxy_fk,
            ]
        )
        changes = self.get_changes(from_state, to_state)
        # Right number/type of migrations for the AuthorProxy model?
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["DeleteModel", "CreateModel"])
        # Right number/type of migrations for the Book model with a FK to
        # AAuthorProxyProxy?
        self.assertNumberMigrations(changes, "otherapp", 1)
        self.assertOperationTypes(changes, "otherapp", 0, ["AlterField"])
        # otherapp should depend on testapp.
        self.assertMigrationDependencies(
            changes, "otherapp", 0, [("testapp", "auto_1")]
        )
        # Now, test the pk table and field name: the concrete model is now
        # the MTI child testapp.authorproxy.
        fk_field = changes["otherapp"][0].operations[0].field
        self.assertEqual(
            to_state.get_concrete_model_key(fk_field.remote_field.model),
            ("testapp", "authorproxy"),
        )
        self.assertEqual(fk_field.remote_field.model, "testapp.AAuthorProxyProxy")
def test_unmanaged_create(self):
"""The autodetector correctly deals with managed models."""
# First, we test adding an unmanaged model
changes = self.get_changes(
[self.author_empty], [self.author_empty, self.author_unmanaged]
)
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
self.assertOperationAttributes(
changes, "testapp", 0, 0, name="AuthorUnmanaged", options={"managed": False}
)
def test_unmanaged_delete(self):
changes = self.get_changes(
[self.author_empty, self.author_unmanaged], [self.author_empty]
)
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["DeleteModel"])
    def test_unmanaged_to_managed(self):
        """
        Turning an unmanaged model into a managed one is detected as an
        options change, not a model recreation.
        """
        # Now, we test turning an unmanaged model into a managed model
        changes = self.get_changes(
            [self.author_empty, self.author_unmanaged],
            [self.author_empty, self.author_unmanaged_managed],
        )
        # Right number/type of migrations? options={} means managed reverts
        # to its default.
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"])
        self.assertOperationAttributes(
            changes, "testapp", 0, 0, name="authorunmanaged", options={}
        )
    def test_managed_to_unmanaged(self):
        """
        Turning a managed model into an unmanaged one is detected as an
        options change, not a model deletion.
        """
        # Now, we turn managed to unmanaged.
        changes = self.get_changes(
            [self.author_empty, self.author_unmanaged_managed],
            [self.author_empty, self.author_unmanaged],
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"])
        self.assertOperationAttributes(
            changes, "testapp", 0, 0, name="authorunmanaged", options={"managed": False}
        )
    def test_unmanaged_custom_pk(self):
        """
        #23415 - The autodetector must correctly deal with custom FK on
        unmanaged models.
        """
        # First, we test the default pk field name
        changes = self.get_changes([], [self.author_unmanaged_default_pk, self.book])
        # The model the FK on the book model points to.
        fk_field = changes["otherapp"][0].operations[0].fields[2][1]
        self.assertEqual(fk_field.remote_field.model, "testapp.Author")
        # Now, we test the custom pk field name
        changes = self.get_changes([], [self.author_unmanaged_custom_pk, self.book])
        # The model the FK on the book model points to.
        fk_field = changes["otherapp"][0].operations[0].fields[2][1]
        self.assertEqual(fk_field.remote_field.model, "testapp.Author")
    @override_settings(AUTH_USER_MODEL="thirdapp.CustomUser")
    def test_swappable(self):
        """
        An FK to the swappable user model produces a dependency on the
        AUTH_USER_MODEL setting, not on a concrete app.
        """
        # isolate_lru_cache prevents the cached swappable-settings lookup
        # from leaking between tests.
        with isolate_lru_cache(apps.get_swappable_settings_name):
            changes = self.get_changes(
                [self.custom_user], [self.custom_user, self.author_with_custom_user]
            )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author")
        self.assertMigrationDependencies(
            changes, "testapp", 0, [("__setting__", "AUTH_USER_MODEL")]
        )
    def test_swappable_lowercase(self):
        """
        A lowercased reference to the swappable user model is still resolved
        to a dependency on the AUTH_USER_MODEL setting.
        """
        model_state = ModelState(
            "testapp",
            "Document",
            [
                ("id", models.AutoField(primary_key=True)),
                (
                    "owner",
                    models.ForeignKey(
                        # Deliberately lowercased model reference.
                        settings.AUTH_USER_MODEL.lower(),
                        models.CASCADE,
                    ),
                ),
            ],
        )
        with isolate_lru_cache(apps.get_swappable_settings_name):
            changes = self.get_changes([], [model_state])
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="Document")
        self.assertMigrationDependencies(
            changes,
            "testapp",
            0,
            [("__setting__", "AUTH_USER_MODEL")],
        )
    @override_settings(AUTH_USER_MODEL="thirdapp.CustomUser")
    def test_swappable_many_to_many_model_case(self):
        """
        Changing only the casing of a swappable M2M target reference isn't
        detected as a change.
        """
        document_lowercase = ModelState(
            "testapp",
            "Document",
            [
                ("id", models.AutoField(primary_key=True)),
                ("owners", models.ManyToManyField(settings.AUTH_USER_MODEL.lower())),
            ],
        )
        document = ModelState(
            "testapp",
            "Document",
            [
                ("id", models.AutoField(primary_key=True)),
                ("owners", models.ManyToManyField(settings.AUTH_USER_MODEL)),
            ],
        )
        with isolate_lru_cache(apps.get_swappable_settings_name):
            changes = self.get_changes(
                [self.custom_user, document_lowercase],
                [self.custom_user, document],
            )
        self.assertEqual(len(changes), 0)
    def test_swappable_changed(self):
        """
        Changing AUTH_USER_MODEL between states makes the autodetector emit
        an AlterField re-pointing the FK at the new user model.
        """
        with isolate_lru_cache(apps.get_swappable_settings_name):
            before = self.make_project_state([self.custom_user, self.author_with_user])
            # Build the "after" state under the overridden setting so the FK
            # resolves to the custom user model.
            with override_settings(AUTH_USER_MODEL="thirdapp.CustomUser"):
                after = self.make_project_state(
                    [self.custom_user, self.author_with_custom_user]
                )
            autodetector = MigrationAutodetector(before, after)
            changes = autodetector._detect_changes()
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])
        self.assertOperationAttributes(
            changes, "testapp", 0, 0, model_name="author", name="user"
        )
        fk_field = changes["testapp"][0].operations[0].field
        self.assertEqual(fk_field.remote_field.model, "thirdapp.CustomUser")
def test_add_field_with_default(self):
"""#22030 - Adding a field with a default should work."""
changes = self.get_changes([self.author_empty], [self.author_name_default])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["AddField"])
self.assertOperationAttributes(changes, "testapp", 0, 0, name="name")
def test_custom_deconstructible(self):
"""
Two instances which deconstruct to the same value aren't considered a
change.
"""
changes = self.get_changes(
[self.author_name_deconstructible_1], [self.author_name_deconstructible_2]
)
# Right number of migrations?
self.assertEqual(len(changes), 0)
    def test_deconstruct_field_kwarg(self):
        """Field instances are handled correctly by nested deconstruction."""
        # Fields passed as kwargs to other fields must be deconstructed
        # recursively rather than compared by identity.
        changes = self.get_changes(
            [self.author_name_deconstructible_3], [self.author_name_deconstructible_4]
        )
        self.assertEqual(changes, {})
    def test_deconstructible_list(self):
        """Nested deconstruction descends into lists."""
        # When lists contain items that deconstruct to identical values, those lists
        # should be considered equal for the purpose of detecting state changes
        # (even if the original items are unequal).
        changes = self.get_changes(
            [self.author_name_deconstructible_list_1],
            [self.author_name_deconstructible_list_2],
        )
        self.assertEqual(changes, {})
        # Legitimate differences within the deconstructed lists should be reported
        # as a change
        changes = self.get_changes(
            [self.author_name_deconstructible_list_1],
            [self.author_name_deconstructible_list_3],
        )
        self.assertEqual(len(changes), 1)
    def test_deconstructible_tuple(self):
        """Nested deconstruction descends into tuples."""
        # When tuples contain items that deconstruct to identical values, those tuples
        # should be considered equal for the purpose of detecting state changes
        # (even if the original items are unequal).
        changes = self.get_changes(
            [self.author_name_deconstructible_tuple_1],
            [self.author_name_deconstructible_tuple_2],
        )
        self.assertEqual(changes, {})
        # Legitimate differences within the deconstructed tuples should be reported
        # as a change
        changes = self.get_changes(
            [self.author_name_deconstructible_tuple_1],
            [self.author_name_deconstructible_tuple_3],
        )
        self.assertEqual(len(changes), 1)
    def test_deconstructible_dict(self):
        """Nested deconstruction descends into dict values."""
        # When dicts contain items whose values deconstruct to identical values,
        # those dicts should be considered equal for the purpose of detecting
        # state changes (even if the original values are unequal).
        changes = self.get_changes(
            [self.author_name_deconstructible_dict_1],
            [self.author_name_deconstructible_dict_2],
        )
        self.assertEqual(changes, {})
        # Legitimate differences within the deconstructed dicts should be reported
        # as a change
        changes = self.get_changes(
            [self.author_name_deconstructible_dict_1],
            [self.author_name_deconstructible_dict_3],
        )
        self.assertEqual(len(changes), 1)
    def test_nested_deconstructible_objects(self):
        """
        Nested deconstruction is applied recursively to the args/kwargs of
        deconstructed objects.
        """
        # If the items within a deconstructed object's args/kwargs have the same
        # deconstructed values - whether or not the items themselves are different
        # instances - then the object as a whole is regarded as unchanged.
        changes = self.get_changes(
            [self.author_name_nested_deconstructible_1],
            [self.author_name_nested_deconstructible_2],
        )
        self.assertEqual(changes, {})
        # Differences that exist solely within the args list of a deconstructed object
        # should be reported as changes
        changes = self.get_changes(
            [self.author_name_nested_deconstructible_1],
            [self.author_name_nested_deconstructible_changed_arg],
        )
        self.assertEqual(len(changes), 1)
        # Additional args should also be reported as a change
        changes = self.get_changes(
            [self.author_name_nested_deconstructible_1],
            [self.author_name_nested_deconstructible_extra_arg],
        )
        self.assertEqual(len(changes), 1)
        # Differences that exist solely within the kwargs dict of a deconstructed object
        # should be reported as changes
        changes = self.get_changes(
            [self.author_name_nested_deconstructible_1],
            [self.author_name_nested_deconstructible_changed_kwarg],
        )
        self.assertEqual(len(changes), 1)
        # Additional kwargs should also be reported as a change
        changes = self.get_changes(
            [self.author_name_nested_deconstructible_1],
            [self.author_name_nested_deconstructible_extra_kwarg],
        )
        self.assertEqual(len(changes), 1)
def test_deconstruct_type(self):
"""
#22951 -- Uninstantiated classes with deconstruct are correctly returned
by deep_deconstruct during serialization.
"""
author = ModelState(
"testapp",
"Author",
[
("id", models.AutoField(primary_key=True)),
(
"name",
models.CharField(
max_length=200,
# IntegerField intentionally not instantiated.
default=models.IntegerField,
),
),
],
)
changes = self.get_changes([], [author])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "testapp", 1)
self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
    def test_replace_string_with_foreignkey(self):
        """
        #22300 - Adding an FK in the same "spot" as a deleted CharField should
        work.
        """
        changes = self.get_changes(
            [self.author_with_publisher_string],
            [self.author_with_publisher, self.publisher],
        )
        # Right number/type of migrations?
        # The target model is created first, then the old CharField is removed
        # before the FK is added.
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(
            changes, "testapp", 0, ["CreateModel", "RemoveField", "AddField"]
        )
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="Publisher")
        self.assertOperationAttributes(changes, "testapp", 0, 1, name="publisher_name")
        self.assertOperationAttributes(changes, "testapp", 0, 2, name="publisher")
    def test_foreign_key_removed_before_target_model(self):
        """
        Removing an FK and the model it targets in the same change must remove
        the FK field before the model to maintain consistency.
        """
        changes = self.get_changes(
            [self.author_with_publisher, self.publisher], [self.author_name]
        )  # removes both the model and FK
        # Right number/type of migrations?
        # RemoveField must precede DeleteModel so the FK never dangles.
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["RemoveField", "DeleteModel"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="publisher")
        self.assertOperationAttributes(changes, "testapp", 0, 1, name="Publisher")
    @mock.patch(
        "django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition",
        side_effect=AssertionError("Should not have prompted for not null addition"),
    )
    def test_add_many_to_many(self, mocked_ask_method):
        """#22435 - Adding a ManyToManyField should not prompt for a default."""
        # The patched questioner raises AssertionError if the autodetector
        # asks for a not-null default, failing the test.
        changes = self.get_changes(
            [self.author_empty, self.publisher], [self.author_with_m2m, self.publisher]
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["AddField"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="publishers")
    def test_alter_many_to_many(self):
        """
        Changing the attributes of a ManyToManyField produces an AlterField
        operation.
        """
        changes = self.get_changes(
            [self.author_with_m2m, self.publisher],
            [self.author_with_m2m_blank, self.publisher],
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["AlterField"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="publishers")
    def test_create_with_through_model(self):
        """
        Adding a m2m with a through model and the models that use it should be
        ordered correctly.
        """
        changes = self.get_changes(
            [], [self.author_with_m2m_through, self.publisher, self.contract]
        )
        # Right number/type of migrations?
        # All three models are created first; the m2m field referencing the
        # through model is added last.
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(
            changes,
            "testapp",
            0,
            [
                "CreateModel",
                "CreateModel",
                "CreateModel",
                "AddField",
            ],
        )
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author")
        self.assertOperationAttributes(changes, "testapp", 0, 1, name="Publisher")
        self.assertOperationAttributes(changes, "testapp", 0, 2, name="Contract")
        self.assertOperationAttributes(
            changes, "testapp", 0, 3, model_name="author", name="publishers"
        )
    def test_many_to_many_removed_before_through_model(self):
        """
        Removing a ManyToManyField and the "through" model in the same change
        must remove the field before the model to maintain consistency.
        """
        changes = self.get_changes(
            [
                self.book_with_multiple_authors_through_attribution,
                self.author_name,
                self.attribution,
            ],
            [self.book_with_no_author, self.author_name],
        )
        # Remove both the through model and ManyToMany
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "otherapp", 1)
        self.assertOperationTypes(
            changes, "otherapp", 0, ["RemoveField", "DeleteModel"]
        )
        self.assertOperationAttributes(
            changes, "otherapp", 0, 0, name="authors", model_name="book"
        )
        self.assertOperationAttributes(changes, "otherapp", 0, 1, name="Attribution")
    def test_many_to_many_removed_before_through_model_2(self):
        """
        Removing a model that contains a ManyToManyField and the "through" model
        in the same change must remove the field before the model to maintain
        consistency.
        """
        changes = self.get_changes(
            [
                self.book_with_multiple_authors_through_attribution,
                self.author_name,
                self.attribution,
            ],
            [self.author_name],
        )
        # Remove both the through model and ManyToMany
        # Right number/type of migrations?
        # The m2m field goes first, then the through model, then the owner.
        self.assertNumberMigrations(changes, "otherapp", 1)
        self.assertOperationTypes(
            changes, "otherapp", 0, ["RemoveField", "DeleteModel", "DeleteModel"]
        )
        self.assertOperationAttributes(
            changes, "otherapp", 0, 0, name="authors", model_name="book"
        )
        self.assertOperationAttributes(changes, "otherapp", 0, 1, name="Attribution")
        self.assertOperationAttributes(changes, "otherapp", 0, 2, name="Book")
    def test_m2m_w_through_multistep_remove(self):
        """
        A model with a m2m field that specifies a "through" model cannot be
        removed in the same migration as that through model as the schema will
        pass through an inconsistent state. The autodetector should produce two
        migrations to avoid this issue.
        """
        changes = self.get_changes(
            [self.author_with_m2m_through, self.publisher, self.contract],
            [self.publisher],
        )
        # Right number/type of migrations?
        # Both FKs on the through model are removed before either model is
        # deleted.
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(
            changes,
            "testapp",
            0,
            ["RemoveField", "RemoveField", "DeleteModel", "DeleteModel"],
        )
        self.assertOperationAttributes(
            changes, "testapp", 0, 0, name="author", model_name="contract"
        )
        self.assertOperationAttributes(
            changes, "testapp", 0, 1, name="publisher", model_name="contract"
        )
        self.assertOperationAttributes(changes, "testapp", 0, 2, name="Author")
        self.assertOperationAttributes(changes, "testapp", 0, 3, name="Contract")
    def test_concrete_field_changed_to_many_to_many(self):
        """
        #23938 - Changing a concrete field into a ManyToManyField
        first removes the concrete field and then adds the m2m field.
        """
        changes = self.get_changes(
            [self.author_with_former_m2m], [self.author_with_m2m, self.publisher]
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(
            changes, "testapp", 0, ["CreateModel", "RemoveField", "AddField"]
        )
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="Publisher")
        # "publishers" is removed as a concrete field, then re-added as a m2m.
        self.assertOperationAttributes(
            changes, "testapp", 0, 1, name="publishers", model_name="author"
        )
        self.assertOperationAttributes(
            changes, "testapp", 0, 2, name="publishers", model_name="author"
        )
    def test_many_to_many_changed_to_concrete_field(self):
        """
        #23938 - Changing a ManyToManyField into a concrete field
        first removes the m2m field and then adds the concrete field.
        """
        changes = self.get_changes(
            [self.author_with_m2m, self.publisher], [self.author_with_former_m2m]
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(
            changes, "testapp", 0, ["RemoveField", "AddField", "DeleteModel"]
        )
        self.assertOperationAttributes(
            changes, "testapp", 0, 0, name="publishers", model_name="author"
        )
        self.assertOperationAttributes(
            changes, "testapp", 0, 1, name="publishers", model_name="author"
        )
        self.assertOperationAttributes(changes, "testapp", 0, 2, name="Publisher")
        # The replacement concrete field carries max_length=100.
        self.assertOperationFieldAttributes(changes, "testapp", 0, 1, max_length=100)
    def test_non_circular_foreignkey_dependency_removal(self):
        """
        If two models with a ForeignKey from one to the other are removed at the
        same time, the autodetector should remove them in the correct order.
        """
        changes = self.get_changes(
            [self.author_with_publisher, self.publisher_with_author], []
        )
        # Right number/type of migrations?
        # One FK side is detached first so neither DeleteModel dangles.
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(
            changes, "testapp", 0, ["RemoveField", "DeleteModel", "DeleteModel"]
        )
        self.assertOperationAttributes(
            changes, "testapp", 0, 0, name="author", model_name="publisher"
        )
        self.assertOperationAttributes(changes, "testapp", 0, 1, name="Author")
        self.assertOperationAttributes(changes, "testapp", 0, 2, name="Publisher")
    def test_alter_model_options(self):
        """Changing a model's options should make a change."""
        changes = self.get_changes([self.author_empty], [self.author_with_options])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"])
        self.assertOperationAttributes(
            changes,
            "testapp",
            0,
            0,
            options={
                "permissions": [("can_hire", "Can hire")],
                "verbose_name": "Authi",
            },
        )
        # Changing them back to empty should also make a change
        changes = self.get_changes([self.author_with_options], [self.author_empty])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"])
        # Options dict is reset to empty, not merely left untouched.
        self.assertOperationAttributes(
            changes, "testapp", 0, 0, name="author", options={}
        )
    def test_alter_model_options_proxy(self):
        """Changing a proxy model's options should also make a change."""
        changes = self.get_changes(
            [self.author_proxy, self.author_empty],
            [self.author_proxy_options, self.author_empty],
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["AlterModelOptions"])
        self.assertOperationAttributes(
            changes,
            "testapp",
            0,
            0,
            name="authorproxy",
            options={"verbose_name": "Super Author"},
        )
    def test_set_alter_order_with_respect_to(self):
        """Setting order_with_respect_to adds a field."""
        changes = self.get_changes(
            [self.book, self.author_with_book],
            [self.book, self.author_with_book_order_wrt],
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["AlterOrderWithRespectTo"])
        self.assertOperationAttributes(
            changes, "testapp", 0, 0, name="author", order_with_respect_to="book"
        )
    def test_add_alter_order_with_respect_to(self):
        """
        Setting order_with_respect_to when adding the FK too does
        things in the right order.
        """
        changes = self.get_changes(
            [self.author_name], [self.book, self.author_with_book_order_wrt]
        )
        # Right number/type of migrations?
        # The FK must exist before order_with_respect_to can reference it.
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(
            changes, "testapp", 0, ["AddField", "AlterOrderWithRespectTo"]
        )
        self.assertOperationAttributes(
            changes, "testapp", 0, 0, model_name="author", name="book"
        )
        self.assertOperationAttributes(
            changes, "testapp", 0, 1, name="author", order_with_respect_to="book"
        )
    def test_remove_alter_order_with_respect_to(self):
        """
        Removing order_with_respect_to when removing the FK too does
        things in the right order.
        """
        changes = self.get_changes(
            [self.book, self.author_with_book_order_wrt], [self.author_name]
        )
        # Right number/type of migrations?
        # order_with_respect_to is cleared (set to None) before the FK it
        # referenced is removed.
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(
            changes, "testapp", 0, ["AlterOrderWithRespectTo", "RemoveField"]
        )
        self.assertOperationAttributes(
            changes, "testapp", 0, 0, name="author", order_with_respect_to=None
        )
        self.assertOperationAttributes(
            changes, "testapp", 0, 1, model_name="author", name="book"
        )
    def test_add_model_order_with_respect_to(self):
        """
        Setting order_with_respect_to when adding the whole model
        does things in the right order.
        """
        changes = self.get_changes([], [self.book, self.author_with_book_order_wrt])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
        self.assertOperationAttributes(
            changes,
            "testapp",
            0,
            0,
            name="Author",
            options={"order_with_respect_to": "book"},
        )
        # The implicit "_order" column must not leak into the CreateModel's
        # explicit field list.
        self.assertNotIn(
            "_order",
            [name for name, field in changes["testapp"][0].operations[0].fields],
        )
    def test_add_model_order_with_respect_to_index_foo_together(self):
        """
        index_together/unique_together referencing "_order" are kept in the
        CreateModel options when order_with_respect_to is also set.
        """
        changes = self.get_changes(
            [],
            [
                self.book,
                ModelState(
                    "testapp",
                    "Author",
                    [
                        ("id", models.AutoField(primary_key=True)),
                        ("name", models.CharField(max_length=200)),
                        ("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
                    ],
                    options={
                        "order_with_respect_to": "book",
                        "index_together": {("name", "_order")},
                        "unique_together": {("id", "_order")},
                    },
                ),
            ],
        )
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
        self.assertOperationAttributes(
            changes,
            "testapp",
            0,
            0,
            name="Author",
            options={
                "order_with_respect_to": "book",
                "index_together": {("name", "_order")},
                "unique_together": {("id", "_order")},
            },
        )
    def test_add_model_order_with_respect_to_index_constraint(self):
        """
        Indexes/constraints referencing "_order" are split out of CreateModel
        into a follow-up AddIndex/AddConstraint operation.
        """
        tests = [
            (
                "AddIndex",
                {
                    "indexes": [
                        models.Index(fields=["_order"], name="book_order_idx"),
                    ]
                },
            ),
            (
                "AddConstraint",
                {
                    "constraints": [
                        models.CheckConstraint(
                            check=models.Q(_order__gt=1),
                            name="book_order_gt_1",
                        ),
                    ]
                },
            ),
        ]
        for operation, extra_option in tests:
            with self.subTest(operation=operation):
                after = ModelState(
                    "testapp",
                    "Author",
                    [
                        ("id", models.AutoField(primary_key=True)),
                        ("name", models.CharField(max_length=200)),
                        ("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
                    ],
                    options={
                        "order_with_respect_to": "book",
                        **extra_option,
                    },
                )
                changes = self.get_changes([], [self.book, after])
                self.assertNumberMigrations(changes, "testapp", 1)
                self.assertOperationTypes(
                    changes,
                    "testapp",
                    0,
                    [
                        "CreateModel",
                        operation,
                    ],
                )
                # CreateModel's options keep only order_with_respect_to;
                # the index/constraint arrives in the second operation.
                self.assertOperationAttributes(
                    changes,
                    "testapp",
                    0,
                    0,
                    name="Author",
                    options={"order_with_respect_to": "book"},
                )
    def test_set_alter_order_with_respect_to_index_constraint_foo_together(self):
        """
        Adding order_with_respect_to together with an index/constraint/
        *_together referencing "_order" emits AlterOrderWithRespectTo first so
        the "_order" column exists before the dependent operation runs.
        """
        tests = [
            (
                "AddIndex",
                {
                    "indexes": [
                        models.Index(fields=["_order"], name="book_order_idx"),
                    ]
                },
            ),
            (
                "AddConstraint",
                {
                    "constraints": [
                        models.CheckConstraint(
                            check=models.Q(_order__gt=1),
                            name="book_order_gt_1",
                        ),
                    ]
                },
            ),
            ("AlterIndexTogether", {"index_together": {("name", "_order")}}),
            ("AlterUniqueTogether", {"unique_together": {("id", "_order")}}),
        ]
        for operation, extra_option in tests:
            with self.subTest(operation=operation):
                after = ModelState(
                    "testapp",
                    "Author",
                    [
                        ("id", models.AutoField(primary_key=True)),
                        ("name", models.CharField(max_length=200)),
                        ("book", models.ForeignKey("otherapp.Book", models.CASCADE)),
                    ],
                    options={
                        "order_with_respect_to": "book",
                        **extra_option,
                    },
                )
                changes = self.get_changes(
                    [self.book, self.author_with_book],
                    [self.book, after],
                )
                self.assertNumberMigrations(changes, "testapp", 1)
                self.assertOperationTypes(
                    changes,
                    "testapp",
                    0,
                    [
                        "AlterOrderWithRespectTo",
                        operation,
                    ],
                )
    def test_alter_model_managers(self):
        """
        Changing the model managers adds a new operation.
        """
        changes = self.get_changes([self.other_pony], [self.other_pony_food])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "otherapp", 1)
        self.assertOperationTypes(changes, "otherapp", 0, ["AlterModelManagers"])
        self.assertOperationAttributes(changes, "otherapp", 0, 0, name="pony")
        # Manager order and constructor args survive serialization intact.
        self.assertEqual(
            [name for name, mgr in changes["otherapp"][0].operations[0].managers],
            ["food_qs", "food_mgr", "food_mgr_kwargs"],
        )
        self.assertEqual(
            changes["otherapp"][0].operations[0].managers[1][1].args, ("a", "b", 1, 2)
        )
        self.assertEqual(
            changes["otherapp"][0].operations[0].managers[2][1].args, ("x", "y", 3, 4)
        )
    def test_swappable_first_inheritance(self):
        """Swappable models get their CreateModel first."""
        changes = self.get_changes([], [self.custom_user, self.aardvark])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "thirdapp", 1)
        self.assertOperationTypes(
            changes, "thirdapp", 0, ["CreateModel", "CreateModel"]
        )
        self.assertOperationAttributes(changes, "thirdapp", 0, 0, name="CustomUser")
        self.assertOperationAttributes(changes, "thirdapp", 0, 1, name="Aardvark")
    def test_default_related_name_option(self):
        """
        default_related_name is included in CreateModel options, and dropping
        it later produces an AlterModelOptions resetting options to empty.
        """
        model_state = ModelState(
            "app",
            "model",
            [
                ("id", models.AutoField(primary_key=True)),
            ],
            options={"default_related_name": "related_name"},
        )
        changes = self.get_changes([], [model_state])
        self.assertNumberMigrations(changes, "app", 1)
        self.assertOperationTypes(changes, "app", 0, ["CreateModel"])
        self.assertOperationAttributes(
            changes,
            "app",
            0,
            0,
            name="model",
            options={"default_related_name": "related_name"},
        )
        altered_model_state = ModelState(
            "app",
            "Model",
            [
                ("id", models.AutoField(primary_key=True)),
            ],
        )
        changes = self.get_changes([model_state], [altered_model_state])
        self.assertNumberMigrations(changes, "app", 1)
        self.assertOperationTypes(changes, "app", 0, ["AlterModelOptions"])
        self.assertOperationAttributes(changes, "app", 0, 0, name="model", options={})
    @override_settings(AUTH_USER_MODEL="thirdapp.CustomUser")
    def test_swappable_first_setting(self):
        """Swappable models get their CreateModel first."""
        with isolate_lru_cache(apps.get_swappable_settings_name):
            changes = self.get_changes([], [self.custom_user_no_inherit, self.aardvark])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "thirdapp", 1)
        self.assertOperationTypes(
            changes, "thirdapp", 0, ["CreateModel", "CreateModel"]
        )
        self.assertOperationAttributes(changes, "thirdapp", 0, 0, name="CustomUser")
        self.assertOperationAttributes(changes, "thirdapp", 0, 1, name="Aardvark")
    def test_bases_first(self):
        """Bases of other models come first."""
        # Aardvark inherits from Author, so Author must be created first even
        # though it is listed second in the target state.
        changes = self.get_changes(
            [], [self.aardvark_based_on_author, self.author_name]
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["CreateModel", "CreateModel"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author")
        self.assertOperationAttributes(changes, "testapp", 0, 1, name="Aardvark")
    def test_bases_first_mixed_case_app_label(self):
        """
        Base-before-subclass and FK-target-before-FK ordering also holds when
        the app label is mixed case.
        """
        app_label = "MiXedCaseApp"
        changes = self.get_changes(
            [],
            [
                ModelState(
                    app_label,
                    "owner",
                    [
                        ("id", models.AutoField(primary_key=True)),
                    ],
                ),
                ModelState(
                    app_label,
                    "place",
                    [
                        ("id", models.AutoField(primary_key=True)),
                        (
                            "owner",
                            models.ForeignKey("MiXedCaseApp.owner", models.CASCADE),
                        ),
                    ],
                ),
                ModelState(app_label, "restaurant", [], bases=("MiXedCaseApp.place",)),
            ],
        )
        self.assertNumberMigrations(changes, app_label, 1)
        self.assertOperationTypes(
            changes,
            app_label,
            0,
            [
                "CreateModel",
                "CreateModel",
                "CreateModel",
            ],
        )
        self.assertOperationAttributes(changes, app_label, 0, 0, name="owner")
        self.assertOperationAttributes(changes, app_label, 0, 1, name="place")
        self.assertOperationAttributes(changes, app_label, 0, 2, name="restaurant")
def test_multiple_bases(self):
"""
Inheriting models doesn't move *_ptr fields into AddField operations.
"""
A = ModelState("app", "A", [("a_id", models.AutoField(primary_key=True))])
B = ModelState("app", "B", [("b_id", models.AutoField(primary_key=True))])
C = ModelState("app", "C", [], bases=("app.A", "app.B"))
D = ModelState("app", "D", [], bases=("app.A", "app.B"))
E = ModelState("app", "E", [], bases=("app.A", "app.B"))
changes = self.get_changes([], [A, B, C, D, E])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "app", 1)
self.assertOperationTypes(
changes,
"app",
0,
["CreateModel", "CreateModel", "CreateModel", "CreateModel", "CreateModel"],
)
self.assertOperationAttributes(changes, "app", 0, 0, name="A")
self.assertOperationAttributes(changes, "app", 0, 1, name="B")
self.assertOperationAttributes(changes, "app", 0, 2, name="C")
self.assertOperationAttributes(changes, "app", 0, 3, name="D")
self.assertOperationAttributes(changes, "app", 0, 4, name="E")
    def test_proxy_bases_first(self):
        """Bases of proxies come first."""
        # A proxy of a proxy: each base must be created before its proxy.
        changes = self.get_changes(
            [], [self.author_empty, self.author_proxy, self.author_proxy_proxy]
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(
            changes, "testapp", 0, ["CreateModel", "CreateModel", "CreateModel"]
        )
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author")
        self.assertOperationAttributes(changes, "testapp", 0, 1, name="AuthorProxy")
        self.assertOperationAttributes(
            changes, "testapp", 0, 2, name="AAuthorProxyProxy"
        )
    def test_pk_fk_included(self):
        """
        A relation used as the primary key is kept as part of CreateModel.
        """
        changes = self.get_changes([], [self.aardvark_pk_fk_author, self.author_name])
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["CreateModel", "CreateModel"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="Author")
        self.assertOperationAttributes(changes, "testapp", 0, 1, name="Aardvark")
    def test_first_dependency(self):
        """
        A dependency to an app with no migrations uses __first__.
        """
        # Load graph
        loader = MigrationLoader(connection)
        before = self.make_project_state([])
        after = self.make_project_state([self.book_migrations_fk])
        # "migrations" is treated as an installed app outside the project
        # state being autodetected.
        after.real_apps = {"migrations"}
        autodetector = MigrationAutodetector(before, after)
        changes = autodetector._detect_changes(graph=loader.graph)
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "otherapp", 1)
        self.assertOperationTypes(changes, "otherapp", 0, ["CreateModel"])
        self.assertOperationAttributes(changes, "otherapp", 0, 0, name="Book")
        self.assertMigrationDependencies(
            changes, "otherapp", 0, [("migrations", "__first__")]
        )
    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
    def test_last_dependency(self):
        """
        A dependency to an app with existing migrations uses the
        last migration of that app.
        """
        # Load graph
        loader = MigrationLoader(connection)
        before = self.make_project_state([])
        after = self.make_project_state([self.book_migrations_fk])
        after.real_apps = {"migrations"}
        autodetector = MigrationAutodetector(before, after)
        changes = autodetector._detect_changes(graph=loader.graph)
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "otherapp", 1)
        self.assertOperationTypes(changes, "otherapp", 0, ["CreateModel"])
        self.assertOperationAttributes(changes, "otherapp", 0, 0, name="Book")
        # The dependency names the migration app's latest migration.
        self.assertMigrationDependencies(
            changes, "otherapp", 0, [("migrations", "0002_second")]
        )
    def test_alter_fk_before_model_deletion(self):
        """
        ForeignKeys are altered _before_ the model they used to
        refer to are deleted.
        """
        changes = self.get_changes(
            [self.author_name, self.publisher_with_author],
            [self.aardvark_testapp, self.publisher_with_aardvark_author],
        )
        # Right number/type of migrations?
        # The new FK target is created, the FK is retargeted, and only then
        # is the old target deleted.
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(
            changes, "testapp", 0, ["CreateModel", "AlterField", "DeleteModel"]
        )
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="Aardvark")
        self.assertOperationAttributes(changes, "testapp", 0, 1, name="author")
        self.assertOperationAttributes(changes, "testapp", 0, 2, name="Author")
    def test_fk_dependency_other_app(self):
        """
        #23100 - ForeignKeys correctly depend on other apps' models.
        """
        changes = self.get_changes(
            [self.author_name, self.book], [self.author_with_book, self.book]
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["AddField"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="book")
        # The migration depends on the FK target's app, not just its own.
        self.assertMigrationDependencies(
            changes, "testapp", 0, [("otherapp", "__first__")]
        )
    def test_alter_field_to_fk_dependency_other_app(self):
        """
        Altering a field into an FK pointing at another app adds a dependency
        on that app's migrations.
        """
        changes = self.get_changes(
            [self.author_empty, self.book_with_no_author_fk],
            [self.author_empty, self.book],
        )
        self.assertNumberMigrations(changes, "otherapp", 1)
        self.assertOperationTypes(changes, "otherapp", 0, ["AlterField"])
        self.assertMigrationDependencies(
            changes, "otherapp", 0, [("testapp", "__first__")]
        )
def test_circular_dependency_mixed_addcreate(self):
"""
#23315 - The dependency resolver knows to put all CreateModel
before AddField and not become unsolvable.
"""
address = ModelState(
"a",
"Address",
[
("id", models.AutoField(primary_key=True)),
("country", models.ForeignKey("b.DeliveryCountry", models.CASCADE)),
],
)
person = ModelState(
"a",
"Person",
[
("id", models.AutoField(primary_key=True)),
],
)
apackage = ModelState(
"b",
"APackage",
[
("id", models.AutoField(primary_key=True)),
("person", models.ForeignKey("a.Person", models.CASCADE)),
],
)
country = ModelState(
"b",
"DeliveryCountry",
[
("id", models.AutoField(primary_key=True)),
],
)
changes = self.get_changes([], [address, person, apackage, country])
# Right number/type of migrations?
self.assertNumberMigrations(changes, "a", 2)
self.assertNumberMigrations(changes, "b", 1)
self.assertOperationTypes(changes, "a", 0, ["CreateModel", "CreateModel"])
self.assertOperationTypes(changes, "a", 1, ["AddField"])
self.assertOperationTypes(changes, "b", 0, ["CreateModel", "CreateModel"])
    @override_settings(AUTH_USER_MODEL="a.Tenant")
    def test_circular_dependency_swappable(self):
        """
        #23322 - The dependency resolver knows to explicitly resolve
        swappable models.
        """
        with isolate_lru_cache(apps.get_swappable_settings_name):
            tenant = ModelState(
                "a",
                "Tenant",
                [
                    ("id", models.AutoField(primary_key=True)),
                    ("primary_address", models.ForeignKey("b.Address", models.CASCADE)),
                ],
                bases=(AbstractBaseUser,),
            )
            address = ModelState(
                "b",
                "Address",
                [
                    ("id", models.AutoField(primary_key=True)),
                    (
                        "tenant",
                        models.ForeignKey(settings.AUTH_USER_MODEL, models.CASCADE),
                    ),
                ],
            )
            changes = self.get_changes([], [address, tenant])
        # Right number/type of migrations?
        # Tenant's FK to Address is deferred to a second app-"a" migration.
        self.assertNumberMigrations(changes, "a", 2)
        self.assertOperationTypes(changes, "a", 0, ["CreateModel"])
        self.assertOperationTypes(changes, "a", 1, ["AddField"])
        self.assertMigrationDependencies(changes, "a", 0, [])
        self.assertMigrationDependencies(
            changes, "a", 1, [("a", "auto_1"), ("b", "auto_1")]
        )
        # Right number/type of migrations?
        # App "b" depends on the swappable setting, not on "a" directly.
        self.assertNumberMigrations(changes, "b", 1)
        self.assertOperationTypes(changes, "b", 0, ["CreateModel"])
        self.assertMigrationDependencies(
            changes, "b", 0, [("__setting__", "AUTH_USER_MODEL")]
        )
    @override_settings(AUTH_USER_MODEL="b.Tenant")
    def test_circular_dependency_swappable2(self):
        """
        #23322 - The dependency resolver knows to explicitly resolve
        swappable models but with the swappable not being the first migrated
        model.
        """
        with isolate_lru_cache(apps.get_swappable_settings_name):
            address = ModelState(
                "a",
                "Address",
                [
                    ("id", models.AutoField(primary_key=True)),
                    (
                        "tenant",
                        models.ForeignKey(settings.AUTH_USER_MODEL, models.CASCADE),
                    ),
                ],
            )
            tenant = ModelState(
                "b",
                "Tenant",
                [
                    ("id", models.AutoField(primary_key=True)),
                    ("primary_address", models.ForeignKey("a.Address", models.CASCADE)),
                ],
                bases=(AbstractBaseUser,),
            )
            changes = self.get_changes([], [address, tenant])
        # Right number/type of migrations?
        # Address's FK to the swappable user is deferred to a second
        # migration that depends on both the setting and app "a" itself.
        self.assertNumberMigrations(changes, "a", 2)
        self.assertOperationTypes(changes, "a", 0, ["CreateModel"])
        self.assertOperationTypes(changes, "a", 1, ["AddField"])
        self.assertMigrationDependencies(changes, "a", 0, [])
        self.assertMigrationDependencies(
            changes, "a", 1, [("__setting__", "AUTH_USER_MODEL"), ("a", "auto_1")]
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "b", 1)
        self.assertOperationTypes(changes, "b", 0, ["CreateModel"])
        self.assertMigrationDependencies(changes, "b", 0, [("a", "auto_1")])
    @override_settings(AUTH_USER_MODEL="a.Person")
    def test_circular_dependency_swappable_self(self):
        """
        #23322 - The dependency resolver knows to explicitly resolve
        swappable models.
        """
        # Person references the swappable setting which points back at itself.
        with isolate_lru_cache(apps.get_swappable_settings_name):
            person = ModelState(
                "a",
                "Person",
                [
                    ("id", models.AutoField(primary_key=True)),
                    (
                        "parent1",
                        models.ForeignKey(
                            settings.AUTH_USER_MODEL,
                            models.CASCADE,
                            related_name="children",
                        ),
                    ),
                ],
            )
            changes = self.get_changes([], [person])
        # Right number/type of migrations?
        # A self-referential swappable FK needs no extra dependencies.
        self.assertNumberMigrations(changes, "a", 1)
        self.assertOperationTypes(changes, "a", 0, ["CreateModel"])
        self.assertMigrationDependencies(changes, "a", 0, [])
    @override_settings(AUTH_USER_MODEL="a.User")
    def test_swappable_circular_multi_mti(self):
        """
        A swappable user model at the end of an MTI chain that itself FKs back
        to the user is resolved into a single solvable migration.
        """
        with isolate_lru_cache(apps.get_swappable_settings_name):
            parent = ModelState(
                "a",
                "Parent",
                [("user", models.ForeignKey(settings.AUTH_USER_MODEL, models.CASCADE))],
            )
            child = ModelState("a", "Child", [], bases=("a.Parent",))
            user = ModelState("a", "User", [], bases=(AbstractBaseUser, "a.Child"))
            changes = self.get_changes([], [parent, child, user])
        self.assertNumberMigrations(changes, "a", 1)
        self.assertOperationTypes(
            changes, "a", 0, ["CreateModel", "CreateModel", "CreateModel", "AddField"]
        )
    @mock.patch(
        "django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition",
        side_effect=AssertionError("Should not have prompted for not null addition"),
    )
    def test_add_blank_textfield_and_charfield(self, mocked_ask_method):
        """
        #23405 - Adding a NOT NULL and blank `CharField` or `TextField`
        without default should not prompt for a default.
        """
        # The mock's side_effect raises AssertionError if the questioner is
        # ever invoked, so merely completing get_changes() proves no prompt.
        changes = self.get_changes(
            [self.author_empty], [self.author_with_biography_blank]
        )
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["AddField", "AddField"])
        self.assertOperationAttributes(changes, "testapp", 0, 0)
    @mock.patch(
        "django.db.migrations.questioner.MigrationQuestioner.ask_not_null_addition"
    )
    def test_add_non_blank_textfield_and_charfield(self, mocked_ask_method):
        """
        #23405 - Adding a NOT NULL and non-blank `CharField` or `TextField`
        without default should prompt for a default.
        """
        changes = self.get_changes(
            [self.author_empty], [self.author_with_biography_non_blank]
        )
        # One prompt per added NOT NULL field (two fields -> two calls).
        self.assertEqual(mocked_ask_method.call_count, 2)
        # Right number/type of migrations?
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["AddField", "AddField"])
        self.assertOperationAttributes(changes, "testapp", 0, 0)
def test_mti_inheritance_model_removal(self):
Animal = ModelState(
"app",
"Animal",
[
("id", models.AutoField(primary_key=True)),
],
)
Dog = ModelState("app", "Dog", [], bases=("app.Animal",))
changes = self.get_changes([Animal, Dog], [Animal])
self.assertNumberMigrations(changes, "app", 1)
self.assertOperationTypes(changes, "app", 0, ["DeleteModel"])
self.assertOperationAttributes(changes, "app", 0, 0, name="Dog")
    def test_add_model_with_field_removed_from_base_model(self):
        """
        Removing a base field takes place before adding a new inherited model
        that has a field with the same name.
        """
        # Before: a single "readable" model owning the "title" field.
        before = [
            ModelState(
                "app",
                "readable",
                [
                    ("id", models.AutoField(primary_key=True)),
                    ("title", models.CharField(max_length=200)),
                ],
            ),
        ]
        # After: "title" moves down to a new MTI child, "book".
        after = [
            ModelState(
                "app",
                "readable",
                [
                    ("id", models.AutoField(primary_key=True)),
                ],
            ),
            ModelState(
                "app",
                "book",
                [
                    ("title", models.CharField(max_length=200)),
                ],
                bases=("app.readable",),
            ),
        ]
        changes = self.get_changes(before, after)
        self.assertNumberMigrations(changes, "app", 1)
        # RemoveField must be ordered before CreateModel so the field name
        # is free when the child model is created.
        self.assertOperationTypes(changes, "app", 0, ["RemoveField", "CreateModel"])
        self.assertOperationAttributes(
            changes, "app", 0, 0, name="title", model_name="readable"
        )
        self.assertOperationAttributes(changes, "app", 0, 1, name="book")
def test_parse_number(self):
tests = [
("no_number", None),
("0001_initial", 1),
("0002_model3", 2),
("0002_auto_20380101_1112", 2),
("0002_squashed_0003", 3),
("0002_model2_squashed_0003_other4", 3),
("0002_squashed_0003_squashed_0004", 4),
("0002_model2_squashed_0003_other4_squashed_0005_other6", 5),
("0002_custom_name_20380101_1112_squashed_0003_model", 3),
("2_squashed_4", 4),
]
for migration_name, expected_number in tests:
with self.subTest(migration_name=migration_name):
self.assertEqual(
MigrationAutodetector.parse_number(migration_name),
expected_number,
)
    def test_add_custom_fk_with_hardcoded_to(self):
        """
        A custom FK subclass that hard-codes its `to` argument (and strips it
        from deconstruct()) is still detected as a plain model addition.
        """
        class HardcodedForeignKey(models.ForeignKey):
            def __init__(self, *args, **kwargs):
                # Force the target model regardless of caller arguments.
                kwargs["to"] = "testapp.Author"
                super().__init__(*args, **kwargs)
            def deconstruct(self):
                # Hide `to` from the deconstructed form, mimicking field
                # classes whose target is fixed by the class itself.
                name, path, args, kwargs = super().deconstruct()
                del kwargs["to"]
                return name, path, args, kwargs
        book_hardcoded_fk_to = ModelState(
            "testapp",
            "Book",
            [
                ("author", HardcodedForeignKey(on_delete=models.CASCADE)),
            ],
        )
        changes = self.get_changes(
            [self.author_empty],
            [self.author_empty, book_hardcoded_fk_to],
        )
        self.assertNumberMigrations(changes, "testapp", 1)
        self.assertOperationTypes(changes, "testapp", 0, ["CreateModel"])
        self.assertOperationAttributes(changes, "testapp", 0, 0, name="Book")
class MigrationSuggestNameTests(SimpleTestCase):
    """Tests for Migration.suggest_name() naming heuristics."""
    def test_no_operations(self):
        # With no operations to describe, fall back to an "auto_..." name.
        class Migration(migrations.Migration):
            operations = []
        migration = Migration("some_migration", "test_app")
        self.assertIs(migration.suggest_name().startswith("auto_"), True)
    def test_no_operations_initial(self):
        # initial=True always yields the plain name "initial".
        class Migration(migrations.Migration):
            initial = True
            operations = []
        migration = Migration("some_migration", "test_app")
        self.assertEqual(migration.suggest_name(), "initial")
    def test_single_operation(self):
        # A lone CreateModel is named after the model...
        class Migration(migrations.Migration):
            operations = [migrations.CreateModel("Person", fields=[])]
        migration = Migration("0001_initial", "test_app")
        self.assertEqual(migration.suggest_name(), "person")
        # ...and a lone DeleteModel gets a "delete_" prefix.
        class Migration(migrations.Migration):
            operations = [migrations.DeleteModel("Person")]
        migration = Migration("0002_initial", "test_app")
        self.assertEqual(migration.suggest_name(), "delete_person")
    def test_single_operation_long_name(self):
        # A single 53-character model name is kept as-is, untruncated.
        class Migration(migrations.Migration):
            operations = [migrations.CreateModel("A" * 53, fields=[])]
        migration = Migration("some_migration", "test_app")
        self.assertEqual(migration.suggest_name(), "a" * 53)
    def test_two_operations(self):
        # Fragments from each operation are joined with underscores.
        class Migration(migrations.Migration):
            operations = [
                migrations.CreateModel("Person", fields=[]),
                migrations.DeleteModel("Animal"),
            ]
        migration = Migration("some_migration", "test_app")
        self.assertEqual(migration.suggest_name(), "person_delete_animal")
    def test_two_create_models(self):
        class Migration(migrations.Migration):
            operations = [
                migrations.CreateModel("Person", fields=[]),
                migrations.CreateModel("Animal", fields=[]),
            ]
        migration = Migration("0001_initial", "test_app")
        self.assertEqual(migration.suggest_name(), "person_animal")
    def test_two_create_models_with_initial_true(self):
        # initial=True wins over naming after the created models.
        class Migration(migrations.Migration):
            initial = True
            operations = [
                migrations.CreateModel("Person", fields=[]),
                migrations.CreateModel("Animal", fields=[]),
            ]
        migration = Migration("0001_initial", "test_app")
        self.assertEqual(migration.suggest_name(), "initial")
    def test_many_operations_suffix(self):
        # Beyond a few operations, the tail collapses into "_and_more".
        class Migration(migrations.Migration):
            operations = [
                migrations.CreateModel("Person1", fields=[]),
                migrations.CreateModel("Person2", fields=[]),
                migrations.CreateModel("Person3", fields=[]),
                migrations.DeleteModel("Person4"),
                migrations.DeleteModel("Person5"),
            ]
        migration = Migration("some_migration", "test_app")
        self.assertEqual(
            migration.suggest_name(),
            "person1_person2_person3_delete_person4_and_more",
        )
    def test_operation_with_no_suggested_name(self):
        # RunSQL contributes no fragment, so the suggestion degrades to
        # an "auto_..." name for the whole migration.
        class Migration(migrations.Migration):
            operations = [
                migrations.CreateModel("Person", fields=[]),
                migrations.RunSQL("SELECT 1 FROM person;"),
            ]
        migration = Migration("some_migration", "test_app")
        self.assertIs(migration.suggest_name().startswith("auto_"), True)
    def test_none_name(self):
        class Migration(migrations.Migration):
            operations = [migrations.RunSQL("SELECT 1 FROM person;")]
        migration = Migration("0001_initial", "test_app")
        suggest_name = migration.suggest_name()
        self.assertIs(suggest_name.startswith("auto_"), True)
    def test_none_name_with_initial_true(self):
        class Migration(migrations.Migration):
            initial = True
            operations = [migrations.RunSQL("SELECT 1 FROM person;")]
        migration = Migration("0001_initial", "test_app")
        self.assertEqual(migration.suggest_name(), "initial")
    def test_auto(self):
        # A bare Migration instance (no operations attribute overridden)
        # also falls back to "auto_...".
        migration = migrations.Migration("0001_initial", "test_app")
        suggest_name = migration.suggest_name()
        self.assertIs(suggest_name.startswith("auto_"), True)
|
094c678a1b4af9fc316369dcd25d43a0d8b423606f965b4c5f8bc998337d8fb0 | from django.apps.registry import Apps
from django.contrib.contenttypes.fields import GenericForeignKey
from django.db import models
from django.db.migrations.exceptions import InvalidBasesError
from django.db.migrations.operations import (
AddField,
AlterField,
DeleteModel,
RemoveField,
)
from django.db.migrations.state import (
ModelState,
ProjectState,
get_related_models_recursive,
)
from django.test import SimpleTestCase, override_settings
from django.test.utils import isolate_apps
from .models import (
FoodManager,
FoodQuerySet,
ModelWithCustomBase,
NoMigrationFoodManager,
UnicodeModel,
)
class StateTests(SimpleTestCase):
"""
Tests state construction, rendering and modification by operations.
"""
    def test_create(self):
        """
        Tests making a ProjectState from an Apps
        """
        new_apps = Apps(["migrations"])
        class Author(models.Model):
            name = models.CharField(max_length=255)
            bio = models.TextField()
            age = models.IntegerField(blank=True, null=True)
            class Meta:
                app_label = "migrations"
                apps = new_apps
                unique_together = ["name", "bio"]
                index_together = ["bio", "age"]
        class AuthorProxy(Author):
            class Meta:
                app_label = "migrations"
                apps = new_apps
                proxy = True
                ordering = ["name"]
        class SubAuthor(Author):
            width = models.FloatField(null=True)
            class Meta:
                app_label = "migrations"
                apps = new_apps
        class Book(models.Model):
            title = models.CharField(max_length=1000)
            author = models.ForeignKey(Author, models.CASCADE)
            contributors = models.ManyToManyField(Author)
            class Meta:
                app_label = "migrations"
                apps = new_apps
                verbose_name = "tome"
                db_table = "test_tome"
                indexes = [models.Index(fields=["title"])]
        class Food(models.Model):
            food_mgr = FoodManager("a", "b")
            food_qs = FoodQuerySet.as_manager()
            food_no_mgr = NoMigrationFoodManager("x", "y")
            class Meta:
                app_label = "migrations"
                apps = new_apps
        class FoodNoManagers(models.Model):
            class Meta:
                app_label = "migrations"
                apps = new_apps
        class FoodNoDefaultManager(models.Model):
            food_no_mgr = NoMigrationFoodManager("x", "y")
            food_mgr = FoodManager("a", "b")
            food_qs = FoodQuerySet.as_manager()
            class Meta:
                app_label = "migrations"
                apps = new_apps
        # NOTE: mgr1 is instantiated before mgr2, so its creation counter is
        # lower; FoodOrderedManagers below relies on that ordering.
        mgr1 = FoodManager("a", "b")
        mgr2 = FoodManager("x", "y", c=3, d=4)
        class FoodOrderedManagers(models.Model):
            # The managers on this model should be ordered by their creation
            # counter and not by the order in model body
            food_no_mgr = NoMigrationFoodManager("x", "y")
            food_mgr2 = mgr2
            food_mgr1 = mgr1
            class Meta:
                app_label = "migrations"
                apps = new_apps
        # Capture everything above into a ProjectState and pull out the
        # per-model states for inspection.
        project_state = ProjectState.from_apps(new_apps)
        author_state = project_state.models["migrations", "author"]
        author_proxy_state = project_state.models["migrations", "authorproxy"]
        sub_author_state = project_state.models["migrations", "subauthor"]
        book_state = project_state.models["migrations", "book"]
        food_state = project_state.models["migrations", "food"]
        food_no_managers_state = project_state.models["migrations", "foodnomanagers"]
        food_no_default_manager_state = project_state.models[
            "migrations", "foodnodefaultmanager"
        ]
        food_order_manager_state = project_state.models[
            "migrations", "foodorderedmanagers"
        ]
        book_index = models.Index(fields=["title"])
        book_index.set_name_with_model(Book)
        # Plain model: fields, options and bases round-trip into the state.
        self.assertEqual(author_state.app_label, "migrations")
        self.assertEqual(author_state.name, "Author")
        self.assertEqual(list(author_state.fields), ["id", "name", "bio", "age"])
        self.assertEqual(author_state.fields["name"].max_length, 255)
        self.assertIs(author_state.fields["bio"].null, False)
        self.assertIs(author_state.fields["age"].null, True)
        self.assertEqual(
            author_state.options,
            {
                "unique_together": {("name", "bio")},
                "index_together": {("bio", "age")},
                "indexes": [],
                "constraints": [],
            },
        )
        self.assertEqual(author_state.bases, (models.Model,))
        self.assertEqual(book_state.app_label, "migrations")
        self.assertEqual(book_state.name, "Book")
        self.assertEqual(
            list(book_state.fields), ["id", "title", "author", "contributors"]
        )
        self.assertEqual(book_state.fields["title"].max_length, 1000)
        self.assertIs(book_state.fields["author"].null, False)
        self.assertEqual(
            book_state.fields["contributors"].__class__.__name__, "ManyToManyField"
        )
        self.assertEqual(
            book_state.options,
            {
                "verbose_name": "tome",
                "db_table": "test_tome",
                "indexes": [book_index],
                "constraints": [],
            },
        )
        self.assertEqual(book_state.bases, (models.Model,))
        # Proxy and MTI subclasses reference their parent by label string.
        self.assertEqual(author_proxy_state.app_label, "migrations")
        self.assertEqual(author_proxy_state.name, "AuthorProxy")
        self.assertEqual(author_proxy_state.fields, {})
        self.assertEqual(
            author_proxy_state.options,
            {"proxy": True, "ordering": ["name"], "indexes": [], "constraints": []},
        )
        self.assertEqual(author_proxy_state.bases, ("migrations.author",))
        self.assertEqual(sub_author_state.app_label, "migrations")
        self.assertEqual(sub_author_state.name, "SubAuthor")
        self.assertEqual(len(sub_author_state.fields), 2)
        self.assertEqual(sub_author_state.bases, ("migrations.author",))
        # The default manager is used in migrations
        self.assertEqual([name for name, mgr in food_state.managers], ["food_mgr"])
        self.assertTrue(all(isinstance(name, str) for name, mgr in food_state.managers))
        self.assertEqual(food_state.managers[0][1].args, ("a", "b", 1, 2))
        # No explicit managers defined. Migrations will fall back to the default
        self.assertEqual(food_no_managers_state.managers, [])
        # food_mgr is used in migration but isn't the default mgr, hence add the
        # default
        self.assertEqual(
            [name for name, mgr in food_no_default_manager_state.managers],
            ["food_no_mgr", "food_mgr"],
        )
        self.assertTrue(
            all(
                isinstance(name, str)
                for name, mgr in food_no_default_manager_state.managers
            )
        )
        self.assertEqual(
            food_no_default_manager_state.managers[0][1].__class__, models.Manager
        )
        self.assertIsInstance(food_no_default_manager_state.managers[1][1], FoodManager)
        # Managers come out ordered mgr1 then mgr2, despite the model body
        # listing food_mgr2 first (see comment on FoodOrderedManagers).
        self.assertEqual(
            [name for name, mgr in food_order_manager_state.managers],
            ["food_mgr1", "food_mgr2"],
        )
        self.assertTrue(
            all(
                isinstance(name, str) for name, mgr in food_order_manager_state.managers
            )
        )
        self.assertEqual(
            [mgr.args for name, mgr in food_order_manager_state.managers],
            [("a", "b", 1, 2), ("x", "y", 3, 4)],
        )
    def test_custom_default_manager_added_to_the_model_state(self):
        """
        When the default manager of the model is a custom manager,
        it needs to be added to the model state.
        """
        new_apps = Apps(["migrations"])
        custom_manager = models.Manager()
        class Author(models.Model):
            # "objects" is deliberately a *field* here, not a manager, so
            # "authors" becomes the model's default manager.
            objects = models.TextField()
            authors = custom_manager
            class Meta:
                app_label = "migrations"
                apps = new_apps
        project_state = ProjectState.from_apps(new_apps)
        author_state = project_state.models["migrations", "author"]
        self.assertEqual(author_state.managers, [("authors", custom_manager)])
def test_custom_default_manager_named_objects_with_false_migration_flag(self):
"""
When a manager is added with a name of 'objects' but it does not
have `use_in_migrations = True`, no migration should be added to the
model state (#26643).
"""
new_apps = Apps(["migrations"])
class Author(models.Model):
objects = models.Manager()
class Meta:
app_label = "migrations"
apps = new_apps
project_state = ProjectState.from_apps(new_apps)
author_state = project_state.models["migrations", "author"]
self.assertEqual(author_state.managers, [])
    def test_no_duplicate_managers(self):
        """
        When a manager is added with `use_in_migrations = True` and a parent
        model had a manager with the same name and `use_in_migrations = True`,
        the parent's manager shouldn't appear in the model state (#26881).
        """
        new_apps = Apps(["migrations"])
        class PersonManager(models.Manager):
            use_in_migrations = True
        class Person(models.Model):
            objects = PersonManager()
            class Meta:
                abstract = True
        class BossManager(PersonManager):
            use_in_migrations = True
        class Boss(Person):
            # Shadows the inherited "objects" manager with the same name.
            objects = BossManager()
            class Meta:
                app_label = "migrations"
                apps = new_apps
        project_state = ProjectState.from_apps(new_apps)
        boss_state = project_state.models["migrations", "boss"]
        # Only the child's manager survives; the shadowed parent one is gone.
        self.assertEqual(boss_state.managers, [("objects", Boss.objects)])
    def test_custom_default_manager(self):
        """Only the Meta.default_manager_name manager ends up in the state."""
        new_apps = Apps(["migrations"])
        class Author(models.Model):
            manager1 = models.Manager()
            manager2 = models.Manager()
            class Meta:
                app_label = "migrations"
                apps = new_apps
                default_manager_name = "manager2"
        project_state = ProjectState.from_apps(new_apps)
        author_state = project_state.models["migrations", "author"]
        self.assertEqual(author_state.options["default_manager_name"], "manager2")
        # NOTE(review): the assertion compares against Author.manager1 even
        # though the default is manager2 — presumably manager equality here is
        # by class/constructor args, making the two interchangeable; confirm
        # against django.db.models.Manager equality semantics.
        self.assertEqual(author_state.managers, [("manager2", Author.manager1)])
    def test_custom_base_manager(self):
        """
        Meta.base_manager_name is recorded in the state options, and the
        managers kept in the state depend on which manager is the base one.
        """
        new_apps = Apps(["migrations"])
        class Author(models.Model):
            manager1 = models.Manager()
            manager2 = models.Manager()
            class Meta:
                app_label = "migrations"
                apps = new_apps
                base_manager_name = "manager2"
        class Author2(models.Model):
            manager1 = models.Manager()
            manager2 = models.Manager()
            class Meta:
                app_label = "migrations"
                apps = new_apps
                base_manager_name = "manager1"
        project_state = ProjectState.from_apps(new_apps)
        author_state = project_state.models["migrations", "author"]
        self.assertEqual(author_state.options["base_manager_name"], "manager2")
        # Base manager is manager2 (not the default manager1), so both are
        # kept in the state.
        self.assertEqual(
            author_state.managers,
            [
                ("manager1", Author.manager1),
                ("manager2", Author.manager2),
            ],
        )
        author2_state = project_state.models["migrations", "author2"]
        self.assertEqual(author2_state.options["base_manager_name"], "manager1")
        # Base manager coincides with the first manager; only it is kept.
        self.assertEqual(
            author2_state.managers,
            [
                ("manager1", Author2.manager1),
            ],
        )
def test_apps_bulk_update(self):
"""
StateApps.bulk_update() should update apps.ready to False and reset
the value afterward.
"""
project_state = ProjectState()
apps = project_state.apps
with apps.bulk_update():
self.assertFalse(apps.ready)
self.assertTrue(apps.ready)
with self.assertRaises(ValueError):
with apps.bulk_update():
self.assertFalse(apps.ready)
raise ValueError()
self.assertTrue(apps.ready)
    def test_render(self):
        """
        Tests rendering a ProjectState into an Apps.
        """
        project_state = ProjectState()
        project_state.add_model(
            ModelState(
                app_label="migrations",
                name="Tag",
                fields=[
                    ("id", models.AutoField(primary_key=True)),
                    ("name", models.CharField(max_length=100)),
                    ("hidden", models.BooleanField()),
                ],
            )
        )
        # SubTag inherits from Tag via an explicit parent-link OneToOneField.
        project_state.add_model(
            ModelState(
                app_label="migrations",
                name="SubTag",
                fields=[
                    (
                        "tag_ptr",
                        models.OneToOneField(
                            "migrations.Tag",
                            models.CASCADE,
                            auto_created=True,
                            parent_link=True,
                            primary_key=True,
                            to_field="id",
                            serialize=False,
                        ),
                    ),
                    ("awesome", models.BooleanField()),
                ],
                bases=("migrations.Tag",),
            )
        )
        base_mgr = models.Manager()
        mgr1 = FoodManager("a", "b")
        mgr2 = FoodManager("x", "y", c=3, d=4)
        project_state.add_model(
            ModelState(
                app_label="migrations",
                name="Food",
                fields=[
                    ("id", models.AutoField(primary_key=True)),
                ],
                managers=[
                    # The ordering we really want is objects, mgr1, mgr2
                    ("default", base_mgr),
                    ("food_mgr2", mgr2),
                    ("food_mgr1", mgr1),
                ],
            )
        )
        new_apps = project_state.apps
        # Field attributes survive the round trip through rendering.
        self.assertEqual(
            new_apps.get_model("migrations", "Tag")._meta.get_field("name").max_length,
            100,
        )
        self.assertIs(
            new_apps.get_model("migrations", "Tag")._meta.get_field("hidden").null,
            False,
        )
        self.assertEqual(
            len(new_apps.get_model("migrations", "SubTag")._meta.local_fields), 2
        )
        Food = new_apps.get_model("migrations", "Food")
        # The rendered model reorders the managers (the input listed
        # food_mgr2 before food_mgr1).
        self.assertEqual(
            [mgr.name for mgr in Food._meta.managers],
            ["default", "food_mgr1", "food_mgr2"],
        )
        self.assertTrue(all(isinstance(mgr.name, str) for mgr in Food._meta.managers))
        self.assertEqual(
            [mgr.__class__ for mgr in Food._meta.managers],
            [models.Manager, FoodManager, FoodManager],
        )
    def test_render_model_inheritance(self):
        """Rendering a child ModelState requires its base to be registered first."""
        class Book(models.Model):
            title = models.CharField(max_length=1000)
            class Meta:
                app_label = "migrations"
                apps = Apps()
        class Novel(Book):
            class Meta:
                app_label = "migrations"
                apps = Apps()
        # First, test rendering individually
        apps = Apps(["migrations"])
        # We shouldn't be able to render yet
        ms = ModelState.from_model(Novel)
        with self.assertRaises(InvalidBasesError):
            ms.render(apps)
        # Once the parent model is in the app registry, it should be fine
        ModelState.from_model(Book).render(apps)
        ModelState.from_model(Novel).render(apps)
    def test_render_model_with_multiple_inheritance(self):
        """
        Multiple and abstract inheritance render once all concrete bases are
        registered, and the recorded bases skip abstract intermediates.
        """
        class Foo(models.Model):
            class Meta:
                app_label = "migrations"
                apps = Apps()
        class Bar(models.Model):
            class Meta:
                app_label = "migrations"
                apps = Apps()
        class FooBar(Foo, Bar):
            class Meta:
                app_label = "migrations"
                apps = Apps()
        class AbstractSubFooBar(FooBar):
            class Meta:
                abstract = True
                apps = Apps()
        class SubFooBar(AbstractSubFooBar):
            class Meta:
                app_label = "migrations"
                apps = Apps()
        apps = Apps(["migrations"])
        # We shouldn't be able to render yet
        ms = ModelState.from_model(FooBar)
        with self.assertRaises(InvalidBasesError):
            ms.render(apps)
        # Once the parent models are in the app registry, it should be fine
        ModelState.from_model(Foo).render(apps)
        self.assertSequenceEqual(ModelState.from_model(Foo).bases, [models.Model])
        ModelState.from_model(Bar).render(apps)
        self.assertSequenceEqual(ModelState.from_model(Bar).bases, [models.Model])
        ModelState.from_model(FooBar).render(apps)
        self.assertSequenceEqual(
            ModelState.from_model(FooBar).bases, ["migrations.foo", "migrations.bar"]
        )
        ModelState.from_model(SubFooBar).render(apps)
        # The abstract intermediate does not appear in the bases.
        self.assertSequenceEqual(
            ModelState.from_model(SubFooBar).bases, ["migrations.foobar"]
        )
    def test_render_project_dependencies(self):
        """
        The ProjectState render method correctly renders models
        to account for inter-model base dependencies.
        """
        new_apps = Apps()
        # Inheritance graph: B <- A; C <- B; D <- A; E proxies B; F proxies D.
        class A(models.Model):
            class Meta:
                app_label = "migrations"
                apps = new_apps
        class B(A):
            class Meta:
                app_label = "migrations"
                apps = new_apps
        class C(B):
            class Meta:
                app_label = "migrations"
                apps = new_apps
        class D(A):
            class Meta:
                app_label = "migrations"
                apps = new_apps
        class E(B):
            class Meta:
                app_label = "migrations"
                apps = new_apps
                proxy = True
        class F(D):
            class Meta:
                app_label = "migrations"
                apps = new_apps
                proxy = True
        # Make a ProjectState and render it
        project_state = ProjectState()
        project_state.add_model(ModelState.from_model(A))
        project_state.add_model(ModelState.from_model(B))
        project_state.add_model(ModelState.from_model(C))
        project_state.add_model(ModelState.from_model(D))
        project_state.add_model(ModelState.from_model(E))
        project_state.add_model(ModelState.from_model(F))
        final_apps = project_state.apps
        self.assertEqual(len(final_apps.get_models()), 6)
        # Now make an invalid ProjectState and make sure it fails
        # (F's base D is missing from the state).
        project_state = ProjectState()
        project_state.add_model(ModelState.from_model(A))
        project_state.add_model(ModelState.from_model(B))
        project_state.add_model(ModelState.from_model(C))
        project_state.add_model(ModelState.from_model(F))
        with self.assertRaises(InvalidBasesError):
            project_state.apps
def test_render_unique_app_labels(self):
"""
The ProjectState render method doesn't raise an
ImproperlyConfigured exception about unique labels if two dotted app
names have the same last part.
"""
class A(models.Model):
class Meta:
app_label = "django.contrib.auth"
class B(models.Model):
class Meta:
app_label = "vendor.auth"
# Make a ProjectState and render it
project_state = ProjectState()
project_state.add_model(ModelState.from_model(A))
project_state.add_model(ModelState.from_model(B))
self.assertEqual(len(project_state.apps.get_models()), 2)
    def test_reload_related_model_on_non_relational_fields(self):
        """
        The model is reloaded even on changes that are not involved in
        relations. Other models pointing to or from it are also reloaded.
        """
        project_state = ProjectState()
        project_state.apps  # Render project state.
        # Relation graph: B -> A, C -> B, D -> A.
        project_state.add_model(ModelState("migrations", "A", []))
        project_state.add_model(
            ModelState(
                "migrations",
                "B",
                [
                    ("a", models.ForeignKey("A", models.CASCADE)),
                ],
            )
        )
        project_state.add_model(
            ModelState(
                "migrations",
                "C",
                [
                    ("b", models.ForeignKey("B", models.CASCADE)),
                    ("name", models.TextField()),
                ],
            )
        )
        project_state.add_model(
            ModelState(
                "migrations",
                "D",
                [
                    ("a", models.ForeignKey("A", models.CASCADE)),
                ],
            )
        )
        # Alter a non-relational field on C, then force a delayed reload of A.
        operation = AlterField(
            model_name="C",
            name="name",
            field=models.TextField(blank=True),
        )
        operation.state_forwards("migrations", project_state)
        project_state.reload_model("migrations", "a", delay=True)
        A = project_state.apps.get_model("migrations.A")
        B = project_state.apps.get_model("migrations.B")
        D = project_state.apps.get_model("migrations.D")
        # Models pointing at A must reference the freshly reloaded class.
        self.assertIs(B._meta.get_field("a").related_model, A)
        self.assertIs(D._meta.get_field("a").related_model, A)
    def test_reload_model_relationship_consistency(self):
        """Reloading a model keeps the chain of related_objects intact."""
        project_state = ProjectState()
        # Relation chain: C -> B -> A.
        project_state.add_model(ModelState("migrations", "A", []))
        project_state.add_model(
            ModelState(
                "migrations",
                "B",
                [
                    ("a", models.ForeignKey("A", models.CASCADE)),
                ],
            )
        )
        project_state.add_model(
            ModelState(
                "migrations",
                "C",
                [
                    ("b", models.ForeignKey("B", models.CASCADE)),
                ],
            )
        )
        A = project_state.apps.get_model("migrations.A")
        B = project_state.apps.get_model("migrations.B")
        C = project_state.apps.get_model("migrations.C")
        self.assertEqual([r.related_model for r in A._meta.related_objects], [B])
        self.assertEqual([r.related_model for r in B._meta.related_objects], [C])
        self.assertEqual([r.related_model for r in C._meta.related_objects], [])
        # Reload A and verify the same reverse relations still hold for the
        # re-fetched model classes.
        project_state.reload_model("migrations", "a", delay=True)
        A = project_state.apps.get_model("migrations.A")
        B = project_state.apps.get_model("migrations.B")
        C = project_state.apps.get_model("migrations.C")
        self.assertEqual([r.related_model for r in A._meta.related_objects], [B])
        self.assertEqual([r.related_model for r in B._meta.related_objects], [C])
        self.assertEqual([r.related_model for r in C._meta.related_objects], [])
    def test_add_relations(self):
        """
        #24573 - Adding relations to existing models should reload the
        referenced models too.
        """
        new_apps = Apps()
        class A(models.Model):
            class Meta:
                app_label = "something"
                apps = new_apps
        class B(A):
            class Meta:
                app_label = "something"
                apps = new_apps
        class C(models.Model):
            class Meta:
                app_label = "something"
                apps = new_apps
        project_state = ProjectState()
        project_state.add_model(ModelState.from_model(A))
        project_state.add_model(ModelState.from_model(B))
        project_state.add_model(ModelState.from_model(C))
        project_state.apps  # We need to work with rendered models
        old_state = project_state.clone()
        model_a_old = old_state.apps.get_model("something", "A")
        model_b_old = old_state.apps.get_model("something", "B")
        model_c_old = old_state.apps.get_model("something", "C")
        # The relations between the old models are correct
        self.assertIs(model_a_old._meta.get_field("b").related_model, model_b_old)
        self.assertIs(model_b_old._meta.get_field("a_ptr").related_model, model_a_old)
        # Add C -> A; this should trigger a reload of A and, through the MTI
        # link, of B as well.
        operation = AddField(
            "c",
            "to_a",
            models.OneToOneField(
                "something.A",
                models.CASCADE,
                related_name="from_c",
            ),
        )
        operation.state_forwards("something", project_state)
        model_a_new = project_state.apps.get_model("something", "A")
        model_b_new = project_state.apps.get_model("something", "B")
        model_c_new = project_state.apps.get_model("something", "C")
        # All models have changed
        self.assertIsNot(model_a_old, model_a_new)
        self.assertIsNot(model_b_old, model_b_new)
        self.assertIsNot(model_c_old, model_c_new)
        # The relations between the old models still hold
        self.assertIs(model_a_old._meta.get_field("b").related_model, model_b_old)
        self.assertIs(model_b_old._meta.get_field("a_ptr").related_model, model_a_old)
        # The relations between the new models are correct
        self.assertIs(model_a_new._meta.get_field("b").related_model, model_b_new)
        self.assertIs(model_b_new._meta.get_field("a_ptr").related_model, model_a_new)
        self.assertIs(model_a_new._meta.get_field("from_c").related_model, model_c_new)
        self.assertIs(model_c_new._meta.get_field("to_a").related_model, model_a_new)
    def test_remove_relations(self):
        """
        #24225 - Relations between models are updated while
        retaining the relations and references for models of an old state.
        """
        new_apps = Apps()
        class A(models.Model):
            class Meta:
                app_label = "something"
                apps = new_apps
        class B(models.Model):
            to_a = models.ForeignKey(A, models.CASCADE)
            class Meta:
                app_label = "something"
                apps = new_apps
        def get_model_a(state):
            # Helper: fetch the rendered "a" model from a state's apps.
            return [
                mod for mod in state.apps.get_models() if mod._meta.model_name == "a"
            ][0]
        project_state = ProjectState()
        project_state.add_model(ModelState.from_model(A))
        project_state.add_model(ModelState.from_model(B))
        self.assertEqual(len(get_model_a(project_state)._meta.related_objects), 1)
        old_state = project_state.clone()
        # Removing the FK drops A's reverse relation in the new state only.
        operation = RemoveField("b", "to_a")
        operation.state_forwards("something", project_state)
        # Model from old_state still has the relation
        model_a_old = get_model_a(old_state)
        model_a_new = get_model_a(project_state)
        self.assertIsNot(model_a_old, model_a_new)
        self.assertEqual(len(model_a_old._meta.related_objects), 1)
        self.assertEqual(len(model_a_new._meta.related_objects), 0)
        # Same test for deleted model
        project_state = ProjectState()
        project_state.add_model(ModelState.from_model(A))
        project_state.add_model(ModelState.from_model(B))
        old_state = project_state.clone()
        operation = DeleteModel("b")
        operation.state_forwards("something", project_state)
        model_a_old = get_model_a(old_state)
        model_a_new = get_model_a(project_state)
        self.assertIsNot(model_a_old, model_a_new)
        self.assertEqual(len(model_a_old._meta.related_objects), 1)
        self.assertEqual(len(model_a_new._meta.related_objects), 0)
    def test_self_relation(self):
        """
        #24513 - Modifying an object pointing to itself would cause it to be
        rendered twice and thus breaking its related M2M through objects.
        """
        class A(models.Model):
            to_a = models.ManyToManyField("something.A", symmetrical=False)
            class Meta:
                app_label = "something"
        def get_model_a(state):
            # Helper: fetch the rendered "a" model from a state's apps.
            return [
                mod for mod in state.apps.get_models() if mod._meta.model_name == "a"
            ][0]
        project_state = ProjectState()
        project_state.add_model(ModelState.from_model(A))
        self.assertEqual(len(get_model_a(project_state)._meta.related_objects), 1)
        old_state = project_state.clone()
        operation = AlterField(
            model_name="a",
            name="to_a",
            field=models.ManyToManyField("something.A", symmetrical=False, blank=True),
        )
        # At this point the model would be rendered twice causing its related
        # M2M through objects to point to an old copy and thus breaking their
        # attribute lookup.
        operation.state_forwards("something", project_state)
        model_a_old = get_model_a(old_state)
        model_a_new = get_model_a(project_state)
        self.assertIsNot(model_a_old, model_a_new)
        # The old model's _meta is still consistent
        field_to_a_old = model_a_old._meta.get_field("to_a")
        self.assertEqual(field_to_a_old.m2m_field_name(), "from_a")
        self.assertEqual(field_to_a_old.m2m_reverse_field_name(), "to_a")
        self.assertIs(field_to_a_old.related_model, model_a_old)
        # Both FKs on the auto-created through model point at the old class.
        self.assertIs(
            field_to_a_old.remote_field.through._meta.get_field("to_a").related_model,
            model_a_old,
        )
        self.assertIs(
            field_to_a_old.remote_field.through._meta.get_field("from_a").related_model,
            model_a_old,
        )
        # The new model's _meta is still consistent
        field_to_a_new = model_a_new._meta.get_field("to_a")
        self.assertEqual(field_to_a_new.m2m_field_name(), "from_a")
        self.assertEqual(field_to_a_new.m2m_reverse_field_name(), "to_a")
        self.assertIs(field_to_a_new.related_model, model_a_new)
        self.assertIs(
            field_to_a_new.remote_field.through._meta.get_field("to_a").related_model,
            model_a_new,
        )
        self.assertIs(
            field_to_a_new.remote_field.through._meta.get_field("from_a").related_model,
            model_a_new,
        )
    def test_equality(self):
        """
        == and != are implemented correctly.
        """
        # Test two things that should be equal
        project_state = ProjectState()
        project_state.add_model(
            ModelState(
                "migrations",
                "Tag",
                [
                    ("id", models.AutoField(primary_key=True)),
                    ("name", models.CharField(max_length=100)),
                    ("hidden", models.BooleanField()),
                ],
                {},
                None,
            )
        )
        project_state.apps  # Fill the apps cached property
        other_state = project_state.clone()
        self.assertEqual(project_state, project_state)
        self.assertEqual(project_state, other_state)
        self.assertIs(project_state != project_state, False)
        self.assertIs(project_state != other_state, False)
        # Equal states nevertheless have distinct rendered apps registries.
        self.assertNotEqual(project_state.apps, other_state.apps)
        # Make a very small change (max_len 99) and see if that affects it
        project_state = ProjectState()
        project_state.add_model(
            ModelState(
                "migrations",
                "Tag",
                [
                    ("id", models.AutoField(primary_key=True)),
                    ("name", models.CharField(max_length=99)),
                    ("hidden", models.BooleanField()),
                ],
                {},
                None,
            )
        )
        self.assertNotEqual(project_state, other_state)
        self.assertIs(project_state == other_state, False)
    def test_dangling_references_throw_error(self):
        """
        Rendering a ProjectState whose FK/M2M targets are missing raises a
        ValueError listing every dangling lazy reference, sorted and
        newline-separated.
        """
        new_apps = Apps()
        class Author(models.Model):
            name = models.TextField()
            class Meta:
                app_label = "migrations"
                apps = new_apps
        class Publisher(models.Model):
            name = models.TextField()
            class Meta:
                app_label = "migrations"
                apps = new_apps
        class Book(models.Model):
            author = models.ForeignKey(Author, models.CASCADE)
            publisher = models.ForeignKey(Publisher, models.CASCADE)
            class Meta:
                app_label = "migrations"
                apps = new_apps
        class Magazine(models.Model):
            authors = models.ManyToManyField(Author)
            class Meta:
                app_label = "migrations"
                apps = new_apps
        # Make a valid ProjectState and render it
        project_state = ProjectState()
        project_state.add_model(ModelState.from_model(Author))
        project_state.add_model(ModelState.from_model(Publisher))
        project_state.add_model(ModelState.from_model(Book))
        project_state.add_model(ModelState.from_model(Magazine))
        self.assertEqual(len(project_state.apps.get_models()), 4)
        # now make an invalid one with a ForeignKey
        project_state = ProjectState()
        project_state.add_model(ModelState.from_model(Book))
        msg = (
            "The field migrations.Book.author was declared with a lazy reference "
            "to 'migrations.author', but app 'migrations' doesn't provide model "
            "'author'.\n"
            "The field migrations.Book.publisher was declared with a lazy reference "
            "to 'migrations.publisher', but app 'migrations' doesn't provide model "
            "'publisher'."
        )
        with self.assertRaisesMessage(ValueError, msg):
            project_state.apps
        # And another with ManyToManyField.
        # Note: the auto-created through model's FK is reported as well.
        project_state = ProjectState()
        project_state.add_model(ModelState.from_model(Magazine))
        msg = (
            "The field migrations.Magazine.authors was declared with a lazy reference "
            "to 'migrations.author', but app 'migrations' doesn't provide model "
            "'author'.\n"
            "The field migrations.Magazine_authors.author was declared with a lazy "
            "reference to 'migrations.author', but app 'migrations' doesn't provide "
            "model 'author'."
        )
        with self.assertRaisesMessage(ValueError, msg):
            project_state.apps
        # And now with multiple models and multiple fields.
        project_state.add_model(ModelState.from_model(Book))
        msg = (
            "The field migrations.Book.author was declared with a lazy reference "
            "to 'migrations.author', but app 'migrations' doesn't provide model "
            "'author'.\n"
            "The field migrations.Book.publisher was declared with a lazy reference "
            "to 'migrations.publisher', but app 'migrations' doesn't provide model "
            "'publisher'.\n"
            "The field migrations.Magazine.authors was declared with a lazy reference "
            "to 'migrations.author', but app 'migrations' doesn't provide model "
            "'author'.\n"
            "The field migrations.Magazine_authors.author was declared with a lazy "
            "reference to 'migrations.author', but app 'migrations' doesn't provide "
            "model 'author'."
        )
        with self.assertRaisesMessage(ValueError, msg):
            project_state.apps
    def test_reference_mixed_case_app_label(self):
        """
        Lazy references resolve correctly when the app label contains mixed
        case — FK and M2M targets in 'MiXedCase_migrations' all render.
        """
        new_apps = Apps()
        class Author(models.Model):
            class Meta:
                app_label = "MiXedCase_migrations"
                apps = new_apps
        class Book(models.Model):
            author = models.ForeignKey(Author, models.CASCADE)
            class Meta:
                app_label = "MiXedCase_migrations"
                apps = new_apps
        class Magazine(models.Model):
            authors = models.ManyToManyField(Author)
            class Meta:
                app_label = "MiXedCase_migrations"
                apps = new_apps
        project_state = ProjectState()
        project_state.add_model(ModelState.from_model(Author))
        project_state.add_model(ModelState.from_model(Book))
        project_state.add_model(ModelState.from_model(Magazine))
        self.assertEqual(len(project_state.apps.get_models()), 3)
    def test_real_apps(self):
        """
        Including real apps can resolve dangling FK errors.
        This test relies on the fact that contenttypes is always loaded.
        """
        new_apps = Apps()
        class TestModel(models.Model):
            ct = models.ForeignKey("contenttypes.ContentType", models.CASCADE)
            class Meta:
                app_label = "migrations"
                apps = new_apps
        # If we just stick it into an empty state it should fail
        project_state = ProjectState()
        project_state.add_model(ModelState.from_model(TestModel))
        with self.assertRaises(ValueError):
            project_state.apps
        # If we include the real app it should succeed
        project_state = ProjectState(real_apps={"contenttypes"})
        project_state.add_model(ModelState.from_model(TestModel))
        rendered_state = project_state.apps
        # Only the in-state model counts as a 'migrations' model; the real
        # contenttypes models are borrowed, not rendered.
        self.assertEqual(
            len(
                [
                    x
                    for x in rendered_state.get_models()
                    if x._meta.app_label == "migrations"
                ]
            ),
            1,
        )
    def test_real_apps_non_set(self):
        # ProjectState requires real_apps to be a set, not any iterable.
        with self.assertRaises(AssertionError):
            ProjectState(real_apps=["contenttypes"])
    def test_ignore_order_wrt(self):
        """
        Makes sure ProjectState doesn't include OrderWrt fields when
        making from existing models.
        """
        new_apps = Apps()
        class Author(models.Model):
            name = models.TextField()
            class Meta:
                app_label = "migrations"
                apps = new_apps
        class Book(models.Model):
            author = models.ForeignKey(Author, models.CASCADE)
            class Meta:
                app_label = "migrations"
                apps = new_apps
                order_with_respect_to = "author"
        # Make a valid ProjectState and render it
        project_state = ProjectState()
        project_state.add_model(ModelState.from_model(Author))
        project_state.add_model(ModelState.from_model(Book))
        # The implicit "_order" field is excluded from the model state.
        self.assertEqual(
            list(project_state.models["migrations", "book"].fields),
            ["id", "author"],
        )
    def test_modelstate_get_field_order_wrt(self):
        """
        ModelState.get_field('_order') on a model using
        order_with_respect_to returns the underlying ForeignKey.
        """
        new_apps = Apps()
        class Author(models.Model):
            name = models.TextField()
            class Meta:
                app_label = "migrations"
                apps = new_apps
        class Book(models.Model):
            author = models.ForeignKey(Author, models.CASCADE)
            class Meta:
                app_label = "migrations"
                apps = new_apps
                order_with_respect_to = "author"
        model_state = ModelState.from_model(Book)
        order_wrt_field = model_state.get_field("_order")
        self.assertIsInstance(order_wrt_field, models.ForeignKey)
        self.assertEqual(order_wrt_field.related_model, "migrations.author")
    def test_modelstate_get_field_no_order_wrt_order_field(self):
        """
        A user-declared field literally named '_order' (without
        order_with_respect_to) is returned as-is by get_field().
        """
        new_apps = Apps()
        class HistoricalRecord(models.Model):
            _order = models.PositiveSmallIntegerField()
            class Meta:
                app_label = "migrations"
                apps = new_apps
        model_state = ModelState.from_model(HistoricalRecord)
        order_field = model_state.get_field("_order")
        self.assertIsNone(order_field.related_model)
        self.assertIsInstance(order_field, models.PositiveSmallIntegerField)
    def test_manager_refer_correct_model_version(self):
        """
        #24147 - Managers refer to the correct version of a
        historical model
        """
        project_state = ProjectState()
        project_state.add_model(
            ModelState(
                app_label="migrations",
                name="Tag",
                fields=[
                    ("id", models.AutoField(primary_key=True)),
                    ("hidden", models.BooleanField()),
                ],
                managers=[
                    ("food_mgr", FoodManager("a", "b")),
                    ("food_qs", FoodQuerySet.as_manager()),
                ],
            )
        )
        old_model = project_state.apps.get_model("migrations", "tag")
        new_state = project_state.clone()
        operation = RemoveField("tag", "hidden")
        operation.state_forwards("migrations", new_state)
        new_model = new_state.apps.get_model("migrations", "tag")
        self.assertIsNot(old_model, new_model)
        # Each rendered model gets its own manager instances, each bound to
        # its own model class — no cross-version sharing.
        self.assertIs(old_model, old_model.food_mgr.model)
        self.assertIs(old_model, old_model.food_qs.model)
        self.assertIs(new_model, new_model.food_mgr.model)
        self.assertIs(new_model, new_model.food_qs.model)
        self.assertIsNot(old_model.food_mgr, new_model.food_mgr)
        self.assertIsNot(old_model.food_qs, new_model.food_qs)
        self.assertIsNot(old_model.food_mgr.model, new_model.food_mgr.model)
        self.assertIsNot(old_model.food_qs.model, new_model.food_qs.model)
    def test_choices_iterator(self):
        """
        #24483 - ProjectState.from_apps should not destructively consume
        Field.choices iterators.
        """
        new_apps = Apps(["migrations"])
        choices = [("a", "A"), ("b", "B")]
        class Author(models.Model):
            name = models.CharField(max_length=255)
            # Deliberately pass an iterator, not a list.
            choice = models.CharField(max_length=255, choices=iter(choices))
            class Meta:
                app_label = "migrations"
                apps = new_apps
        ProjectState.from_apps(new_apps)
        # The original field can still enumerate all its choices.
        choices_field = Author._meta.get_field("choice")
        self.assertEqual(list(choices_field.choices), choices)
class StateRelationsTests(SimpleTestCase):
    """
    Tests for ProjectState's lazily-built ``relations`` mapping:
    {referenced (app_label, model): {referring (app_label, model):
    {field_name: field}}}. Each mutation method (add/remove/rename/alter
    model or field) must keep this index consistent.
    """
    def get_base_project_state(self):
        """
        Build a fresh three-model state: User; Comment (FK to User, self
        M2M); Post (M2M to User).
        """
        new_apps = Apps()
        class User(models.Model):
            class Meta:
                app_label = "tests"
                apps = new_apps
        class Comment(models.Model):
            text = models.TextField()
            user = models.ForeignKey(User, models.CASCADE)
            comments = models.ManyToManyField("self")
            class Meta:
                app_label = "tests"
                apps = new_apps
        class Post(models.Model):
            text = models.TextField()
            authors = models.ManyToManyField(User)
            class Meta:
                app_label = "tests"
                apps = new_apps
        project_state = ProjectState()
        project_state.add_model(ModelState.from_model(User))
        project_state.add_model(ModelState.from_model(Comment))
        project_state.add_model(ModelState.from_model(Post))
        return project_state
    def test_relations_population(self):
        # Every mutating method triggers (lazy) population of _relations.
        tests = [
            (
                "add_model",
                [
                    ModelState(
                        app_label="migrations",
                        name="Tag",
                        fields=[("id", models.AutoField(primary_key=True))],
                    ),
                ],
            ),
            ("remove_model", ["tests", "comment"]),
            ("rename_model", ["tests", "comment", "opinion"]),
            (
                "add_field",
                [
                    "tests",
                    "post",
                    "next_post",
                    models.ForeignKey("self", models.CASCADE),
                    True,
                ],
            ),
            ("remove_field", ["tests", "post", "text"]),
            ("rename_field", ["tests", "comment", "user", "author"]),
            (
                "alter_field",
                [
                    "tests",
                    "comment",
                    "user",
                    models.IntegerField(),
                    True,
                ],
            ),
        ]
        for method, args in tests:
            with self.subTest(method=method):
                project_state = self.get_base_project_state()
                getattr(project_state, method)(*args)
                # ProjectState's `_relations` are populated on `relations` access.
                self.assertIsNone(project_state._relations)
                self.assertEqual(project_state.relations, project_state._relations)
                self.assertIsNotNone(project_state._relations)
    def test_add_model(self):
        project_state = self.get_base_project_state()
        self.assertEqual(
            list(project_state.relations["tests", "user"]),
            [("tests", "comment"), ("tests", "post")],
        )
        # Comment's self-referential M2M.
        self.assertEqual(
            list(project_state.relations["tests", "comment"]),
            [("tests", "comment")],
        )
        # Nothing refers to Post, so it has no relations entry.
        self.assertNotIn(("tests", "post"), project_state.relations)
    def test_add_model_no_relations(self):
        project_state = ProjectState()
        project_state.add_model(
            ModelState(
                app_label="migrations",
                name="Tag",
                fields=[("id", models.AutoField(primary_key=True))],
            )
        )
        self.assertEqual(project_state.relations, {})
    def test_add_model_other_app(self):
        # A model added from another app shows up under the referenced model.
        project_state = self.get_base_project_state()
        self.assertEqual(
            list(project_state.relations["tests", "user"]),
            [("tests", "comment"), ("tests", "post")],
        )
        project_state.add_model(
            ModelState(
                app_label="tests_other",
                name="comment",
                fields=[
                    ("id", models.AutoField(primary_key=True)),
                    ("user", models.ForeignKey("tests.user", models.CASCADE)),
                ],
            )
        )
        self.assertEqual(
            list(project_state.relations["tests", "user"]),
            [("tests", "comment"), ("tests", "post"), ("tests_other", "comment")],
        )
    def test_remove_model(self):
        project_state = self.get_base_project_state()
        self.assertEqual(
            list(project_state.relations["tests", "user"]),
            [("tests", "comment"), ("tests", "post")],
        )
        self.assertEqual(
            list(project_state.relations["tests", "comment"]),
            [("tests", "comment")],
        )
        # Removing a model drops both its incoming and outgoing entries.
        project_state.remove_model("tests", "comment")
        self.assertEqual(
            list(project_state.relations["tests", "user"]),
            [("tests", "post")],
        )
        self.assertNotIn(("tests", "comment"), project_state.relations)
        project_state.remove_model("tests", "post")
        self.assertEqual(project_state.relations, {})
        project_state.remove_model("tests", "user")
        self.assertEqual(project_state.relations, {})
    def test_rename_model(self):
        project_state = self.get_base_project_state()
        self.assertEqual(
            list(project_state.relations["tests", "user"]),
            [("tests", "comment"), ("tests", "post")],
        )
        self.assertEqual(
            list(project_state.relations["tests", "comment"]),
            [("tests", "comment")],
        )
        related_field = project_state.relations["tests", "user"]["tests", "comment"]
        project_state.rename_model("tests", "comment", "opinion")
        self.assertEqual(
            list(project_state.relations["tests", "user"]),
            [("tests", "post"), ("tests", "opinion")],
        )
        self.assertEqual(
            list(project_state.relations["tests", "opinion"]),
            [("tests", "opinion")],
        )
        self.assertNotIn(("tests", "comment"), project_state.relations)
        # The same field mapping is carried over under the new key.
        self.assertEqual(
            project_state.relations["tests", "user"]["tests", "opinion"],
            related_field,
        )
        project_state.rename_model("tests", "user", "author")
        self.assertEqual(
            list(project_state.relations["tests", "author"]),
            [("tests", "post"), ("tests", "opinion")],
        )
        self.assertNotIn(("tests", "user"), project_state.relations)
    def test_rename_model_no_relations(self):
        project_state = self.get_base_project_state()
        self.assertEqual(
            list(project_state.relations["tests", "user"]),
            [("tests", "comment"), ("tests", "post")],
        )
        related_field = project_state.relations["tests", "user"]["tests", "post"]
        self.assertNotIn(("tests", "post"), project_state.relations)
        # Rename a model without relations.
        project_state.rename_model("tests", "post", "blog")
        self.assertEqual(
            list(project_state.relations["tests", "user"]),
            [("tests", "comment"), ("tests", "blog")],
        )
        self.assertNotIn(("tests", "blog"), project_state.relations)
        self.assertEqual(
            related_field,
            project_state.relations["tests", "user"]["tests", "blog"],
        )
    def test_add_field(self):
        project_state = self.get_base_project_state()
        self.assertNotIn(("tests", "post"), project_state.relations)
        # Add a self-referential foreign key.
        new_field = models.ForeignKey("self", models.CASCADE)
        project_state.add_field(
            "tests",
            "post",
            "next_post",
            new_field,
            preserve_default=True,
        )
        self.assertEqual(
            list(project_state.relations["tests", "post"]),
            [("tests", "post")],
        )
        self.assertEqual(
            project_state.relations["tests", "post"]["tests", "post"],
            {"next_post": new_field},
        )
        # Add a foreign key.
        new_field = models.ForeignKey("tests.post", models.CASCADE)
        project_state.add_field(
            "tests",
            "comment",
            "post",
            new_field,
            preserve_default=True,
        )
        self.assertEqual(
            list(project_state.relations["tests", "post"]),
            [("tests", "post"), ("tests", "comment")],
        )
        self.assertEqual(
            project_state.relations["tests", "post"]["tests", "comment"],
            {"post": new_field},
        )
    def test_add_field_m2m_with_through(self):
        project_state = self.get_base_project_state()
        project_state.add_model(
            ModelState(
                app_label="tests",
                name="Tag",
                fields=[("id", models.AutoField(primary_key=True))],
            )
        )
        project_state.add_model(
            ModelState(
                app_label="tests",
                name="PostTag",
                fields=[
                    ("id", models.AutoField(primary_key=True)),
                    ("post", models.ForeignKey("tests.post", models.CASCADE)),
                    ("tag", models.ForeignKey("tests.tag", models.CASCADE)),
                ],
            )
        )
        self.assertEqual(
            list(project_state.relations["tests", "post"]),
            [("tests", "posttag")],
        )
        self.assertEqual(
            list(project_state.relations["tests", "tag"]),
            [("tests", "posttag")],
        )
        # Add a many-to-many field with the through model.
        new_field = models.ManyToManyField("tests.tag", through="tests.posttag")
        project_state.add_field(
            "tests",
            "post",
            "tags",
            new_field,
            preserve_default=True,
        )
        self.assertEqual(
            list(project_state.relations["tests", "post"]),
            [("tests", "posttag")],
        )
        self.assertEqual(
            list(project_state.relations["tests", "tag"]),
            [("tests", "posttag"), ("tests", "post")],
        )
        self.assertEqual(
            project_state.relations["tests", "tag"]["tests", "post"],
            {"tags": new_field},
        )
    def test_remove_field(self):
        project_state = self.get_base_project_state()
        self.assertEqual(
            list(project_state.relations["tests", "user"]),
            [("tests", "comment"), ("tests", "post")],
        )
        # Remove a many-to-many field.
        project_state.remove_field("tests", "post", "authors")
        self.assertEqual(
            list(project_state.relations["tests", "user"]),
            [("tests", "comment")],
        )
        # Remove a foreign key.
        project_state.remove_field("tests", "comment", "user")
        self.assertEqual(project_state.relations["tests", "user"], {})
    def test_remove_field_no_relations(self):
        project_state = self.get_base_project_state()
        self.assertEqual(
            list(project_state.relations["tests", "user"]),
            [("tests", "comment"), ("tests", "post")],
        )
        # Remove a non-relation field.
        project_state.remove_field("tests", "post", "text")
        self.assertEqual(
            list(project_state.relations["tests", "user"]),
            [("tests", "comment"), ("tests", "post")],
        )
    def test_rename_field(self):
        project_state = self.get_base_project_state()
        field = project_state.models["tests", "comment"].fields["user"]
        self.assertEqual(
            project_state.relations["tests", "user"]["tests", "comment"],
            {"user": field},
        )
        project_state.rename_field("tests", "comment", "user", "author")
        renamed_field = project_state.models["tests", "comment"].fields["author"]
        self.assertEqual(
            project_state.relations["tests", "user"]["tests", "comment"],
            {"author": renamed_field},
        )
        self.assertEqual(field, renamed_field)
    def test_rename_field_no_relations(self):
        project_state = self.get_base_project_state()
        self.assertEqual(
            list(project_state.relations["tests", "user"]),
            [("tests", "comment"), ("tests", "post")],
        )
        # Rename a non-relation field.
        project_state.rename_field("tests", "post", "text", "description")
        self.assertEqual(
            list(project_state.relations["tests", "user"]),
            [("tests", "comment"), ("tests", "post")],
        )
    def test_alter_field(self):
        project_state = self.get_base_project_state()
        self.assertEqual(
            list(project_state.relations["tests", "user"]),
            [("tests", "comment"), ("tests", "post")],
        )
        # Alter a foreign key to a non-relation field.
        project_state.alter_field(
            "tests",
            "comment",
            "user",
            models.IntegerField(),
            preserve_default=True,
        )
        self.assertEqual(
            list(project_state.relations["tests", "user"]),
            [("tests", "post")],
        )
        # Alter a non-relation field to a many-to-many field.
        m2m_field = models.ManyToManyField("tests.user")
        project_state.alter_field(
            "tests",
            "comment",
            "user",
            m2m_field,
            preserve_default=True,
        )
        self.assertEqual(
            list(project_state.relations["tests", "user"]),
            [("tests", "post"), ("tests", "comment")],
        )
        self.assertEqual(
            project_state.relations["tests", "user"]["tests", "comment"],
            {"user": m2m_field},
        )
    def test_alter_field_m2m_to_fk(self):
        project_state = self.get_base_project_state()
        project_state.add_model(
            ModelState(
                app_label="tests_other",
                name="user_other",
                fields=[("id", models.AutoField(primary_key=True))],
            )
        )
        self.assertEqual(
            list(project_state.relations["tests", "user"]),
            [("tests", "comment"), ("tests", "post")],
        )
        self.assertNotIn(("tests_other", "user_other"), project_state.relations)
        # Alter a many-to-many field to a foreign key.
        foreign_key = models.ForeignKey("tests_other.user_other", models.CASCADE)
        project_state.alter_field(
            "tests",
            "post",
            "authors",
            foreign_key,
            preserve_default=True,
        )
        self.assertEqual(
            list(project_state.relations["tests", "user"]),
            [("tests", "comment")],
        )
        self.assertEqual(
            list(project_state.relations["tests_other", "user_other"]),
            [("tests", "post")],
        )
        self.assertEqual(
            project_state.relations["tests_other", "user_other"]["tests", "post"],
            {"authors": foreign_key},
        )
    def test_many_relations_to_same_model(self):
        project_state = self.get_base_project_state()
        new_field = models.ForeignKey("tests.user", models.CASCADE)
        project_state.add_field(
            "tests",
            "comment",
            "reviewer",
            new_field,
            preserve_default=True,
        )
        self.assertEqual(
            list(project_state.relations["tests", "user"]),
            [("tests", "comment"), ("tests", "post")],
        )
        comment_rels = project_state.relations["tests", "user"]["tests", "comment"]
        # Two foreign keys to the same model.
        self.assertEqual(len(comment_rels), 2)
        self.assertEqual(comment_rels["reviewer"], new_field)
        # Rename the second foreign key.
        project_state.rename_field("tests", "comment", "reviewer", "supervisor")
        self.assertEqual(len(comment_rels), 2)
        self.assertEqual(comment_rels["supervisor"], new_field)
        # Remove the first foreign key.
        project_state.remove_field("tests", "comment", "user")
        self.assertEqual(comment_rels, {"supervisor": new_field})
class ModelStateTests(SimpleTestCase):
    """
    Tests for ModelState construction, validation (unbound fields, string
    references only), rendering, and ModelState.from_model() behavior for
    swappable models, inherited options, indexes, and constraints.
    """
    def test_custom_model_base(self):
        state = ModelState.from_model(ModelWithCustomBase)
        self.assertEqual(state.bases, (models.Model,))
    def test_bound_field_sanity_check(self):
        # Fields already bound to a model class must be rejected.
        field = models.CharField(max_length=1)
        field.model = models.Model
        with self.assertRaisesMessage(
            ValueError, 'ModelState.fields cannot be bound to a model - "field" is.'
        ):
            ModelState("app", "Model", [("field", field)])
    def test_sanity_check_to(self):
        # FK targets must be string references, not model classes.
        field = models.ForeignKey(UnicodeModel, models.CASCADE)
        with self.assertRaisesMessage(
            ValueError,
            'ModelState.fields cannot refer to a model class - "field.to" does. '
            "Use a string reference instead.",
        ):
            ModelState("app", "Model", [("field", field)])
    def test_sanity_check_through(self):
        # M2M through models must be string references as well.
        field = models.ManyToManyField("UnicodeModel")
        field.remote_field.through = UnicodeModel
        with self.assertRaisesMessage(
            ValueError,
            'ModelState.fields cannot refer to a model class - "field.through" does. '
            "Use a string reference instead.",
        ):
            ModelState("app", "Model", [("field", field)])
    def test_sanity_index_name(self):
        # Unnamed indexes cannot be carried in a ModelState.
        field = models.IntegerField()
        options = {"indexes": [models.Index(fields=["field"])]}
        msg = (
            "Indexes passed to ModelState require a name attribute. <Index: "
            "fields=['field']> doesn't have one."
        )
        with self.assertRaisesMessage(ValueError, msg):
            ModelState("app", "Model", [("field", field)], options=options)
    def test_fields_immutability(self):
        """
        Rendering a model state doesn't alter its internal fields.
        """
        apps = Apps()
        field = models.CharField(max_length=1)
        state = ModelState("app", "Model", [("name", field)])
        Model = state.render(apps)
        # The rendered model gets a clone of the field, not the original.
        self.assertNotEqual(Model._meta.get_field("name"), field)
    def test_repr(self):
        field = models.CharField(max_length=1)
        state = ModelState(
            "app", "Model", [("name", field)], bases=["app.A", "app.B", "app.C"]
        )
        self.assertEqual(repr(state), "<ModelState: 'app.Model'>")
        project_state = ProjectState()
        project_state.add_model(state)
        # The repr is also used in the unresolvable-bases error message.
        with self.assertRaisesMessage(
            InvalidBasesError, "Cannot resolve bases for [<ModelState: 'app.Model'>]"
        ):
            project_state.apps
    def test_fields_ordering_equality(self):
        # Field order does not affect ModelState equality.
        state = ModelState(
            "migrations",
            "Tag",
            [
                ("id", models.AutoField(primary_key=True)),
                ("name", models.CharField(max_length=100)),
                ("hidden", models.BooleanField()),
            ],
        )
        reordered_state = ModelState(
            "migrations",
            "Tag",
            [
                ("id", models.AutoField(primary_key=True)),
                # Purposely re-ordered.
                ("hidden", models.BooleanField()),
                ("name", models.CharField(max_length=100)),
            ],
        )
        self.assertEqual(state, reordered_state)
    @override_settings(TEST_SWAPPABLE_MODEL="migrations.SomeFakeModel")
    def test_create_swappable(self):
        """
        Tests making a ProjectState from an Apps with a swappable model
        """
        new_apps = Apps(["migrations"])
        class Author(models.Model):
            name = models.CharField(max_length=255)
            bio = models.TextField()
            age = models.IntegerField(blank=True, null=True)
            class Meta:
                app_label = "migrations"
                apps = new_apps
                swappable = "TEST_SWAPPABLE_MODEL"
        author_state = ModelState.from_model(Author)
        self.assertEqual(author_state.app_label, "migrations")
        self.assertEqual(author_state.name, "Author")
        self.assertEqual(list(author_state.fields), ["id", "name", "bio", "age"])
        self.assertEqual(author_state.fields["name"].max_length, 255)
        self.assertIs(author_state.fields["bio"].null, False)
        self.assertIs(author_state.fields["age"].null, True)
        self.assertEqual(
            author_state.options,
            {"swappable": "TEST_SWAPPABLE_MODEL", "indexes": [], "constraints": []},
        )
        self.assertEqual(author_state.bases, (models.Model,))
        self.assertEqual(author_state.managers, [])
    @override_settings(TEST_SWAPPABLE_MODEL="migrations.SomeFakeModel")
    def test_create_swappable_from_abstract(self):
        """
        A swappable model inheriting from a hierarchy:
        concrete -> abstract -> concrete.
        """
        new_apps = Apps(["migrations"])
        class SearchableLocation(models.Model):
            keywords = models.CharField(max_length=256)
            class Meta:
                app_label = "migrations"
                apps = new_apps
        class Station(SearchableLocation):
            name = models.CharField(max_length=128)
            class Meta:
                abstract = True
        class BusStation(Station):
            bus_routes = models.CharField(max_length=128)
            inbound = models.BooleanField(default=False)
            class Meta(Station.Meta):
                app_label = "migrations"
                apps = new_apps
                swappable = "TEST_SWAPPABLE_MODEL"
        station_state = ModelState.from_model(BusStation)
        self.assertEqual(station_state.app_label, "migrations")
        self.assertEqual(station_state.name, "BusStation")
        # Fields inherited through the abstract Station are flattened in.
        self.assertEqual(
            list(station_state.fields),
            ["searchablelocation_ptr", "name", "bus_routes", "inbound"],
        )
        self.assertEqual(station_state.fields["name"].max_length, 128)
        self.assertIs(station_state.fields["bus_routes"].null, False)
        self.assertEqual(
            station_state.options,
            {
                "abstract": False,
                "swappable": "TEST_SWAPPABLE_MODEL",
                "indexes": [],
                "constraints": [],
            },
        )
        self.assertEqual(station_state.bases, ("migrations.searchablelocation",))
        self.assertEqual(station_state.managers, [])
    @override_settings(TEST_SWAPPABLE_MODEL="migrations.SomeFakeModel")
    def test_custom_manager_swappable(self):
        """
        Tests making a ProjectState from unused models with custom managers
        """
        new_apps = Apps(["migrations"])
        class Food(models.Model):
            food_mgr = FoodManager("a", "b")
            food_qs = FoodQuerySet.as_manager()
            food_no_mgr = NoMigrationFoodManager("x", "y")
            class Meta:
                app_label = "migrations"
                apps = new_apps
                swappable = "TEST_SWAPPABLE_MODEL"
        food_state = ModelState.from_model(Food)
        # The default manager is used in migrations
        self.assertEqual([name for name, mgr in food_state.managers], ["food_mgr"])
        self.assertEqual(food_state.managers[0][1].args, ("a", "b", 1, 2))
    @isolate_apps("migrations", "django.contrib.contenttypes")
    def test_order_with_respect_to_private_field(self):
        class PrivateFieldModel(models.Model):
            content_type = models.ForeignKey("contenttypes.ContentType", models.CASCADE)
            object_id = models.PositiveIntegerField()
            private = GenericForeignKey()
            class Meta:
                order_with_respect_to = "private"
        state = ModelState.from_model(PrivateFieldModel)
        # order_with_respect_to on a private field isn't serialized.
        self.assertNotIn("order_with_respect_to", state.options)
    @isolate_apps("migrations")
    def test_abstract_model_children_inherit_indexes(self):
        class Abstract(models.Model):
            name = models.CharField(max_length=50)
            class Meta:
                app_label = "migrations"
                abstract = True
                indexes = [models.Index(fields=["name"])]
        class Child1(Abstract):
            pass
        class Child2(Abstract):
            pass
        child1_state = ModelState.from_model(Child1)
        child2_state = ModelState.from_model(Child2)
        # Each child gets its own autogenerated (hash-suffixed) index name.
        index_names = [index.name for index in child1_state.options["indexes"]]
        self.assertEqual(index_names, ["migrations__name_b0afd7_idx"])
        index_names = [index.name for index in child2_state.options["indexes"]]
        self.assertEqual(index_names, ["migrations__name_016466_idx"])
        # Modifying the state doesn't modify the index on the model.
        child1_state.options["indexes"][0].name = "bar"
        self.assertEqual(Child1._meta.indexes[0].name, "migrations__name_b0afd7_idx")
    @isolate_apps("migrations")
    def test_explicit_index_name(self):
        class TestModel(models.Model):
            name = models.CharField(max_length=50)
            class Meta:
                app_label = "migrations"
                indexes = [models.Index(fields=["name"], name="foo_idx")]
        model_state = ModelState.from_model(TestModel)
        index_names = [index.name for index in model_state.options["indexes"]]
        self.assertEqual(index_names, ["foo_idx"])
    @isolate_apps("migrations")
    def test_from_model_constraints(self):
        class ModelWithConstraints(models.Model):
            size = models.IntegerField()
            class Meta:
                constraints = [
                    models.CheckConstraint(check=models.Q(size__gt=1), name="size_gt_1")
                ]
        state = ModelState.from_model(ModelWithConstraints)
        model_constraints = ModelWithConstraints._meta.constraints
        state_constraints = state.options["constraints"]
        # Equal but deep-copied: the state never aliases the model's objects.
        self.assertEqual(model_constraints, state_constraints)
        self.assertIsNot(model_constraints, state_constraints)
        self.assertIsNot(model_constraints[0], state_constraints[0])
class RelatedModelsTests(SimpleTestCase):
    """
    Tests for get_related_models_recursive() across FK, M2M, proxy, and
    inheritance graphs built from dynamically created models.
    """
    def setUp(self):
        # Fresh isolated app registry per test.
        self.apps = Apps(["migrations.related_models_app"])
def create_model(
self, name, foreign_keys=[], bases=(), abstract=False, proxy=False
):
test_name = "related_models_app"
assert not (abstract and proxy)
meta_contents = {
"abstract": abstract,
"app_label": test_name,
"apps": self.apps,
"proxy": proxy,
}
meta = type("Meta", (), meta_contents)
if not bases:
bases = (models.Model,)
body = {
"Meta": meta,
"__module__": "__fake__",
}
fname_base = fname = "%s_%%d" % name.lower()
for i, fk in enumerate(foreign_keys, 1):
fname = fname_base % i
body[fname] = fk
return type(name, bases, body)
def assertRelated(self, model, needle):
self.assertEqual(
get_related_models_recursive(model),
{(n._meta.app_label, n._meta.model_name) for n in needle},
)
    def test_unrelated(self):
        # Two standalone models: neither is related to anything.
        A = self.create_model("A")
        B = self.create_model("B")
        self.assertRelated(A, [])
        self.assertRelated(B, [])
    def test_direct_fk(self):
        # A --FK--> B: the relation is visible from both sides.
        A = self.create_model(
            "A", foreign_keys=[models.ForeignKey("B", models.CASCADE)]
        )
        B = self.create_model("B")
        self.assertRelated(A, [B])
        self.assertRelated(B, [A])
    def test_direct_hidden_fk(self):
        # related_name="+" hides the reverse accessor but not the relation.
        A = self.create_model(
            "A", foreign_keys=[models.ForeignKey("B", models.CASCADE, related_name="+")]
        )
        B = self.create_model("B")
        self.assertRelated(A, [B])
        self.assertRelated(B, [A])
    def test_fk_through_proxy(self):
        # D --FK--> C(proxy of B) --proxy--> B --proxy--> A: proxies link
        # the whole chain together.
        A = self.create_model("A")
        B = self.create_model("B", bases=(A,), proxy=True)
        C = self.create_model("C", bases=(B,), proxy=True)
        D = self.create_model(
            "D", foreign_keys=[models.ForeignKey("C", models.CASCADE)]
        )
        self.assertRelated(A, [B, C, D])
        self.assertRelated(B, [A, C, D])
        self.assertRelated(C, [A, B, D])
        self.assertRelated(D, [A, B, C])
    def test_nested_fk(self):
        # A --FK--> B --FK--> C: relatedness is transitive.
        A = self.create_model(
            "A", foreign_keys=[models.ForeignKey("B", models.CASCADE)]
        )
        B = self.create_model(
            "B", foreign_keys=[models.ForeignKey("C", models.CASCADE)]
        )
        C = self.create_model("C")
        self.assertRelated(A, [B, C])
        self.assertRelated(B, [A, C])
        self.assertRelated(C, [A, B])
    def test_two_sided(self):
        # Mutual FKs: A <--FK--> B.
        A = self.create_model(
            "A", foreign_keys=[models.ForeignKey("B", models.CASCADE)]
        )
        B = self.create_model(
            "B", foreign_keys=[models.ForeignKey("A", models.CASCADE)]
        )
        self.assertRelated(A, [B])
        self.assertRelated(B, [A])
    def test_circle(self):
        # FK cycle A -> B -> C -> A: every model reaches the other two.
        A = self.create_model(
            "A", foreign_keys=[models.ForeignKey("B", models.CASCADE)]
        )
        B = self.create_model(
            "B", foreign_keys=[models.ForeignKey("C", models.CASCADE)]
        )
        C = self.create_model(
            "C", foreign_keys=[models.ForeignKey("A", models.CASCADE)]
        )
        self.assertRelated(A, [B, C])
        self.assertRelated(B, [A, C])
        self.assertRelated(C, [A, B])
    def test_base(self):
        # Concrete inheritance counts as a relation in both directions.
        A = self.create_model("A")
        B = self.create_model("B", bases=(A,))
        self.assertRelated(A, [B])
        self.assertRelated(B, [A])
    def test_nested_base(self):
        # Inheritance chain A <- B <- C: transitively related.
        A = self.create_model("A")
        B = self.create_model("B", bases=(A,))
        C = self.create_model("C", bases=(B,))
        self.assertRelated(A, [B, C])
        self.assertRelated(B, [A, C])
        self.assertRelated(C, [A, B])
    def test_multiple_bases(self):
        # Multiple inheritance: C(A, B) ties A and B together via C.
        A = self.create_model("A")
        B = self.create_model("B")
        C = self.create_model(
            "C",
            bases=(
                A,
                B,
            ),
        )
        self.assertRelated(A, [B, C])
        self.assertRelated(B, [A, C])
        self.assertRelated(C, [A, B])
    def test_multiple_nested_bases(self):
        # Two inheritance trees joined by F(C, E) form one related component
        # {A..F}; the separate Y <- Z tree stays its own component.
        A = self.create_model("A")
        B = self.create_model("B")
        C = self.create_model(
            "C",
            bases=(
                A,
                B,
            ),
        )
        D = self.create_model("D")
        E = self.create_model("E", bases=(D,))
        F = self.create_model(
            "F",
            bases=(
                C,
                E,
            ),
        )
        Y = self.create_model("Y")
        Z = self.create_model("Z", bases=(Y,))
        self.assertRelated(A, [B, C, D, E, F])
        self.assertRelated(B, [A, C, D, E, F])
        self.assertRelated(C, [A, B, D, E, F])
        self.assertRelated(D, [A, B, C, E, F])
        self.assertRelated(E, [A, B, C, D, F])
        self.assertRelated(F, [A, B, C, D, E])
        self.assertRelated(Y, [Z])
        self.assertRelated(Z, [Y])
    def test_base_to_base_fk(self):
        # FK from base A to base Y connects both inheritance trees.
        A = self.create_model(
            "A", foreign_keys=[models.ForeignKey("Y", models.CASCADE)]
        )
        B = self.create_model("B", bases=(A,))
        Y = self.create_model("Y")
        Z = self.create_model("Z", bases=(Y,))
        self.assertRelated(A, [B, Y, Z])
        self.assertRelated(B, [A, Y, Z])
        self.assertRelated(Y, [A, B, Z])
        self.assertRelated(Z, [A, B, Y])
    def test_base_to_subclass_fk(self):
        # FK from base A to subclass Z likewise connects both trees.
        A = self.create_model(
            "A", foreign_keys=[models.ForeignKey("Z", models.CASCADE)]
        )
        B = self.create_model("B", bases=(A,))
        Y = self.create_model("Y")
        Z = self.create_model("Z", bases=(Y,))
        self.assertRelated(A, [B, Y, Z])
        self.assertRelated(B, [A, Y, Z])
        self.assertRelated(Y, [A, B, Z])
        self.assertRelated(Z, [A, B, Y])
    def test_direct_m2m(self):
        # An auto-created M2M through model is part of the related set.
        A = self.create_model("A", foreign_keys=[models.ManyToManyField("B")])
        B = self.create_model("B")
        self.assertRelated(A, [A.a_1.rel.through, B])
        self.assertRelated(B, [A, A.a_1.rel.through])
    def test_direct_m2m_self(self):
        # Self M2M: only the auto-created through model is related.
        A = self.create_model("A", foreign_keys=[models.ManyToManyField("A")])
        self.assertRelated(A, [A.a_1.rel.through])
    def test_intermediate_m2m_self(self):
        # Explicit self M2M through T: A and T relate only to each other.
        A = self.create_model(
            "A", foreign_keys=[models.ManyToManyField("A", through="T")]
        )
        T = self.create_model(
            "T",
            foreign_keys=[
                models.ForeignKey("A", models.CASCADE),
                models.ForeignKey("A", models.CASCADE),
            ],
        )
        self.assertRelated(A, [T])
        self.assertRelated(T, [A])
def test_intermediate_m2m(self):
A = self.create_model(
"A", foreign_keys=[models.ManyToManyField("B", through="T")]
)
B = self.create_model("B")
T = self.create_model(
"T",
foreign_keys=[
models.ForeignKey("A", models.CASCADE),
models.ForeignKey("B", models.CASCADE),
],
)
self.assertRelated(A, [B, T])
self.assertRelated(B, [A, T])
self.assertRelated(T, [A, B])
def test_intermediate_m2m_extern_fk(self):
A = self.create_model(
"A", foreign_keys=[models.ManyToManyField("B", through="T")]
)
B = self.create_model("B")
Z = self.create_model("Z")
T = self.create_model(
"T",
foreign_keys=[
models.ForeignKey("A", models.CASCADE),
models.ForeignKey("B", models.CASCADE),
models.ForeignKey("Z", models.CASCADE),
],
)
self.assertRelated(A, [B, T, Z])
self.assertRelated(B, [A, T, Z])
self.assertRelated(T, [A, B, Z])
self.assertRelated(Z, [A, B, T])
def test_intermediate_m2m_base(self):
A = self.create_model(
"A", foreign_keys=[models.ManyToManyField("B", through="T")]
)
B = self.create_model("B")
S = self.create_model("S")
T = self.create_model(
"T",
foreign_keys=[
models.ForeignKey("A", models.CASCADE),
models.ForeignKey("B", models.CASCADE),
],
bases=(S,),
)
self.assertRelated(A, [B, S, T])
self.assertRelated(B, [A, S, T])
self.assertRelated(S, [A, B, T])
self.assertRelated(T, [A, B, S])
def test_generic_fk(self):
A = self.create_model(
"A",
foreign_keys=[
models.ForeignKey("B", models.CASCADE),
GenericForeignKey(),
],
)
B = self.create_model(
"B",
foreign_keys=[
models.ForeignKey("C", models.CASCADE),
],
)
self.assertRelated(A, [B])
self.assertRelated(B, [A])
def test_abstract_base(self):
A = self.create_model("A", abstract=True)
B = self.create_model("B", bases=(A,))
self.assertRelated(A, [B])
self.assertRelated(B, [])
def test_nested_abstract_base(self):
A = self.create_model("A", abstract=True)
B = self.create_model("B", bases=(A,), abstract=True)
C = self.create_model("C", bases=(B,))
self.assertRelated(A, [B, C])
self.assertRelated(B, [C])
self.assertRelated(C, [])
def test_proxy_base(self):
A = self.create_model("A")
B = self.create_model("B", bases=(A,), proxy=True)
self.assertRelated(A, [B])
self.assertRelated(B, [])
def test_nested_proxy_base(self):
A = self.create_model("A")
B = self.create_model("B", bases=(A,), proxy=True)
C = self.create_model("C", bases=(B,), proxy=True)
self.assertRelated(A, [B, C])
self.assertRelated(B, [C])
self.assertRelated(C, [])
def test_multiple_mixed_bases(self):
A = self.create_model("A", abstract=True)
M = self.create_model("M")
P = self.create_model("P")
Q = self.create_model("Q", bases=(P,), proxy=True)
Z = self.create_model("Z", bases=(A, M, Q))
# M has a pointer O2O field p_ptr to P
self.assertRelated(A, [M, P, Q, Z])
self.assertRelated(M, [P, Q, Z])
self.assertRelated(P, [M, Q, Z])
self.assertRelated(Q, [M, P, Z])
self.assertRelated(Z, [M, P, Q])
|
719e001e36c630ee1e421fad4da9723fd406bb3c81800fde745fee7ba30ea6bf | import re
from django.forms import CharField, Form, Media
from django.http import HttpRequest, HttpResponse
from django.middleware.csrf import (
CSRF_TOKEN_LENGTH,
CsrfViewMiddleware,
_unmask_cipher_token,
get_token,
)
from django.template import TemplateDoesNotExist, TemplateSyntaxError
from django.template.backends.dummy import TemplateStrings
from django.test import SimpleTestCase
class TemplateStringsTests(SimpleTestCase):
    """Shared template-backend test suite.

    Runs here against the dummy TemplateStrings backend; other backends'
    test suites subclass this and override engine_class / backend_name /
    options to reuse the same assertions.
    """

    engine_class = TemplateStrings
    backend_name = "dummy"
    options = {}

    @classmethod
    def setUpClass(cls):
        """Build a single engine instance shared by all tests in the class."""
        super().setUpClass()
        params = {
            "DIRS": [],
            "APP_DIRS": True,
            "NAME": cls.backend_name,
            "OPTIONS": cls.options,
        }
        cls.engine = cls.engine_class(params)

    def test_from_string(self):
        template = self.engine.from_string("Hello!\n")
        content = template.render()
        self.assertEqual(content, "Hello!\n")

    def test_get_template(self):
        template = self.engine.get_template("template_backends/hello.html")
        content = template.render({"name": "world"})
        self.assertEqual(content, "Hello world!\n")

    def test_get_template_nonexistent(self):
        # The exception must reference the backend that failed so the debug
        # page can attribute the miss to an engine.
        with self.assertRaises(TemplateDoesNotExist) as e:
            self.engine.get_template("template_backends/nonexistent.html")
        self.assertEqual(e.exception.backend, self.engine)

    def test_get_template_syntax_error(self):
        # There's no way to trigger a syntax error with the dummy backend.
        # The test still lives here to factor it between other backends.
        if self.backend_name == "dummy":
            self.skipTest("test doesn't apply to dummy backend")
        with self.assertRaises(TemplateSyntaxError):
            self.engine.get_template("template_backends/syntax_error.html")

    def test_html_escaping(self):
        template = self.engine.get_template("template_backends/hello.html")
        context = {"name": '<script>alert("XSS!");</script>'}
        content = template.render(context)
        # BUG FIX: the positive assertion must check for the *escaped* form.
        # The original asserted both In and NotIn on the raw "<script>" tag,
        # which can never hold simultaneously.
        self.assertIn("&lt;script&gt;", content)
        self.assertNotIn("<script>", content)

    def test_django_html_escaping(self):
        if self.backend_name == "dummy":
            self.skipTest("test doesn't apply to dummy backend")

        class TestForm(Form):
            test_field = CharField()

        media = Media(js=["my-script.js"])
        form = TestForm()
        template = self.engine.get_template("template_backends/django_escaping.html")
        content = template.render({"media": media, "test_form": form})
        expected = "{}\n\n{}\n\n{}".format(media, form, form["test_field"])
        self.assertHTMLEqual(content, expected)

    def check_tokens_equivalent(self, token1, token2):
        """Assert two masked CSRF tokens unmask to the same secret."""
        self.assertEqual(len(token1), CSRF_TOKEN_LENGTH)
        self.assertEqual(len(token2), CSRF_TOKEN_LENGTH)
        token1, token2 = map(_unmask_cipher_token, (token1, token2))
        self.assertEqual(token1, token2)

    def test_csrf_token(self):
        request = HttpRequest()
        CsrfViewMiddleware(lambda req: HttpResponse()).process_view(
            request, lambda r: None, (), {}
        )
        template = self.engine.get_template("template_backends/csrf.html")
        content = template.render(request=request)
        expected = '<input type="hidden" name="csrfmiddlewaretoken" value="([^"]+)">'
        # Attribute quoting style may differ between backends; accept both.
        match = re.match(expected, content) or re.match(
            expected.replace('"', "'"), content
        )
        self.assertTrue(match, "hidden csrftoken field not found in output")
        self.check_tokens_equivalent(match[1], get_token(request))

    def test_no_directory_traversal(self):
        with self.assertRaises(TemplateDoesNotExist):
            self.engine.get_template("../forbidden/template_backends/hello.html")

    def test_non_ascii_characters(self):
        template = self.engine.get_template("template_backends/hello.html")
        content = template.render({"name": "Jérôme"})
        self.assertEqual(content, "Hello Jérôme!\n")
|
df75038facd0dc19cb8154bc931b059594542bab21b52a32ba71880be7c4052a | from django.core.exceptions import ImproperlyConfigured
from django.template import engines
from django.test import SimpleTestCase, override_settings
class TemplateUtilsTests(SimpleTestCase):
    """Error handling when building template engines from settings.TEMPLATES."""

    @override_settings(TEMPLATES=[{"BACKEND": "raise.import.error"}])
    def test_backend_import_error(self):
        """
        Failing to import a backend keeps raising the original import error
        (#24265).
        """
        # The second call checks that the failure is re-raised, not cached
        # away after the first attempt.
        for _ in range(2):
            with self.assertRaisesMessage(ImportError, "No module named 'raise"):
                engines.all()

    @override_settings(
        TEMPLATES=[
            {
                "BACKEND": "django.template.backends.django.DjangoTemplates",
                # Incorrect: APP_DIRS and loaders are mutually incompatible.
                "APP_DIRS": True,
                "OPTIONS": {"loaders": []},
            }
        ]
    )
    def test_backend_improperly_configured(self):
        """
        Failing to initialize a backend keeps raising the original exception
        (#24265).
        """
        expected = "app_dirs must not be set when loaders is defined."
        for _ in range(2):
            with self.assertRaisesMessage(ImproperlyConfigured, expected):
                engines.all()

    @override_settings(
        TEMPLATES=[
            {"BACKEND": "django.template.backends.django.DjangoTemplates"},
            {"BACKEND": "django.template.backends.django.DjangoTemplates"},
        ]
    )
    def test_backend_names_must_be_unique(self):
        expected = (
            "Template engine aliases aren't unique, duplicates: django. Set "
            "a unique NAME for each engine in settings.TEMPLATES."
        )
        with self.assertRaisesMessage(ImproperlyConfigured, expected):
            engines.all()
|
dd2c08b56834ba719704da5ff803cd8469d1821b0162337cec69e304f2392038 | from pathlib import Path
from template_tests.test_response import test_processor_name
from django.template import Context, EngineHandler, RequestContext
from django.template.backends.django import DjangoTemplates
from django.template.library import InvalidTemplateLibrary
from django.test import RequestFactory, override_settings
from .test_dummy import TemplateStringsTests
class DjangoTemplatesTests(TemplateStringsTests):
    """Run the shared backend suite against the DjangoTemplates backend, plus
    Django-specific behavior: context processors, templatetag library
    discovery, builtins, default loaders, and autoescaping options.
    """

    engine_class = DjangoTemplates
    backend_name = "django"
    request_factory = RequestFactory()

    def test_context_has_priority_over_template_context_processors(self):
        # See ticket #23789.
        engine = DjangoTemplates(
            {
                "DIRS": [],
                "APP_DIRS": False,
                "NAME": "django",
                "OPTIONS": {
                    "context_processors": [test_processor_name],
                },
            }
        )
        template = engine.from_string("{{ processors }}")
        request = self.request_factory.get("/")
        # Context processors run
        content = template.render({}, request)
        self.assertEqual(content, "yes")
        # Context overrides context processors
        content = template.render({"processors": "no"}, request)
        self.assertEqual(content, "no")

    def test_render_requires_dict(self):
        """django.Template.render() requires a dict."""
        engine = DjangoTemplates(
            {
                "DIRS": [],
                "APP_DIRS": False,
                "NAME": "django",
                "OPTIONS": {},
            }
        )
        template = engine.from_string("")
        context = Context()
        request_context = RequestContext(self.request_factory.get("/"), {})
        msg = "context must be a dict rather than Context."
        with self.assertRaisesMessage(TypeError, msg):
            template.render(context)
        msg = "context must be a dict rather than RequestContext."
        with self.assertRaisesMessage(TypeError, msg):
            template.render(request_context)

    @override_settings(INSTALLED_APPS=["template_backends.apps.good"])
    def test_templatetag_discovery(self):
        """Tag libraries come from installed apps, django.templatetags, and
        OPTIONS["libraries"], in increasing order of precedence."""
        engine = DjangoTemplates(
            {
                "DIRS": [],
                "APP_DIRS": False,
                "NAME": "django",
                "OPTIONS": {
                    "libraries": {
                        "alternate": (
                            "template_backends.apps.good.templatetags.good_tags"
                        ),
                        "override": (
                            "template_backends.apps.good.templatetags.good_tags"
                        ),
                    },
                },
            }
        )
        # libraries are discovered from installed applications
        self.assertEqual(
            engine.engine.libraries["good_tags"],
            "template_backends.apps.good.templatetags.good_tags",
        )
        self.assertEqual(
            engine.engine.libraries["subpackage.tags"],
            "template_backends.apps.good.templatetags.subpackage.tags",
        )
        # libraries are discovered from django.templatetags
        self.assertEqual(
            engine.engine.libraries["static"],
            "django.templatetags.static",
        )
        # libraries passed in OPTIONS are registered
        self.assertEqual(
            engine.engine.libraries["alternate"],
            "template_backends.apps.good.templatetags.good_tags",
        )
        # libraries passed in OPTIONS take precedence over discovered ones
        self.assertEqual(
            engine.engine.libraries["override"],
            "template_backends.apps.good.templatetags.good_tags",
        )

    @override_settings(INSTALLED_APPS=["template_backends.apps.importerror"])
    def test_templatetag_discovery_import_error(self):
        """
        Import errors in tag modules should be reraised with a helpful message.
        """
        with self.assertRaisesMessage(
            InvalidTemplateLibrary,
            "ImportError raised when trying to load "
            "'template_backends.apps.importerror.templatetags.broken_tags'",
        ) as cm:
            DjangoTemplates(
                {
                    "DIRS": [],
                    "APP_DIRS": False,
                    "NAME": "django",
                    "OPTIONS": {},
                }
            )
        self.assertIsInstance(cm.exception.__cause__, ImportError)

    def test_builtins_discovery(self):
        """OPTIONS["builtins"] are appended after Django's default builtins."""
        engine = DjangoTemplates(
            {
                "DIRS": [],
                "APP_DIRS": False,
                "NAME": "django",
                "OPTIONS": {
                    "builtins": ["template_backends.apps.good.templatetags.good_tags"],
                },
            }
        )
        self.assertEqual(
            engine.engine.builtins,
            [
                "django.template.defaulttags",
                "django.template.defaultfilters",
                "django.template.loader_tags",
                "template_backends.apps.good.templatetags.good_tags",
            ],
        )

    def test_autoescape_off(self):
        templates = [
            {
                "BACKEND": "django.template.backends.django.DjangoTemplates",
                "OPTIONS": {"autoescape": False},
            }
        ]
        engines = EngineHandler(templates=templates)
        self.assertEqual(
            engines["django"]
            .from_string("Hello, {{ name }}")
            .render({"name": "Bob & Jim"}),
            "Hello, Bob & Jim",
        )

    def test_autoescape_default(self):
        templates = [
            {
                "BACKEND": "django.template.backends.django.DjangoTemplates",
            }
        ]
        engines = EngineHandler(templates=templates)
        self.assertEqual(
            engines["django"]
            .from_string("Hello, {{ name }}")
            .render({"name": "Bob & Jim"}),
            # BUG FIX: autoescaping is on by default, so the ampersand must
            # be HTML-escaped in the output. The previous expected value was
            # identical to the autoescape-off case, which defeated the test.
            "Hello, Bob &amp; Jim",
        )

    default_loaders = [
        "django.template.loaders.filesystem.Loader",
        "django.template.loaders.app_directories.Loader",
    ]

    @override_settings(DEBUG=False)
    def test_non_debug_default_template_loaders(self):
        # Outside DEBUG, the defaults are wrapped in the cached loader.
        engine = DjangoTemplates(
            {"DIRS": [], "APP_DIRS": True, "NAME": "django", "OPTIONS": {}}
        )
        self.assertEqual(
            engine.engine.loaders,
            [("django.template.loaders.cached.Loader", self.default_loaders)],
        )

    @override_settings(DEBUG=True)
    def test_debug_default_template_loaders(self):
        # With DEBUG on, templates are not cached so edits show up immediately.
        engine = DjangoTemplates(
            {"DIRS": [], "APP_DIRS": True, "NAME": "django", "OPTIONS": {}}
        )
        self.assertEqual(engine.engine.loaders, self.default_loaders)

    def test_dirs_pathlib(self):
        # DIRS entries may be pathlib.Path objects, not only strings.
        engine = DjangoTemplates(
            {
                "DIRS": [Path(__file__).parent / "templates" / "template_backends"],
                "APP_DIRS": False,
                "NAME": "django",
                "OPTIONS": {},
            }
        )
        template = engine.get_template("hello.html")
        self.assertEqual(template.render({"name": "Joe"}), "Hello Joe!\n")
|
03a28098fec6f833235fcbd6656fb782994fabf29e0cd0993f8290a8784c4149 | from pathlib import Path
from unittest import mock, skipIf
from django.template import TemplateSyntaxError
from django.test import RequestFactory
from .test_dummy import TemplateStringsTests
try:
import jinja2
except ImportError:
jinja2 = None
Jinja2 = None
else:
from django.template.backends.jinja2 import Jinja2
@skipIf(jinja2 is None, "this test requires jinja2")
class Jinja2Tests(TemplateStringsTests):
    """Run the shared backend suite against the Jinja2 backend, plus
    Jinja2-specific checks: template origins, context processors, and the
    template_debug payload attached to TemplateSyntaxError.
    """

    engine_class = Jinja2
    backend_name = "jinja2"
    options = {
        # Match Django-template behavior of preserving the final newline.
        "keep_trailing_newline": True,
        "context_processors": [
            "django.template.context_processors.static",
        ],
    }

    def test_origin(self):
        # File-loaded templates record both the filesystem path and the
        # template name they were requested under.
        template = self.engine.get_template("template_backends/hello.html")
        self.assertTrue(template.origin.name.endswith("hello.html"))
        self.assertEqual(template.origin.template_name, "template_backends/hello.html")

    def test_origin_from_string(self):
        # String templates get the "<template>" placeholder and no name.
        template = self.engine.from_string("Hello!\n")
        self.assertEqual(template.origin.name, "<template>")
        self.assertIsNone(template.origin.template_name)

    def test_self_context(self):
        """
        Using 'self' in the context should not throw errors (#24538).
        """
        # self will be overridden to be a TemplateReference, so the self
        # variable will not come through. Attempting to use one though should
        # not throw an error.
        template = self.engine.from_string("hello {{ foo }}!")
        content = template.render(context={"self": "self", "foo": "world"})
        self.assertEqual(content, "hello world!")

    def test_exception_debug_info_min_context(self):
        # A one-line template produces a minimal debug window.
        with self.assertRaises(TemplateSyntaxError) as e:
            self.engine.get_template("template_backends/syntax_error.html")
        debug = e.exception.template_debug
        self.assertEqual(debug["after"], "")
        self.assertEqual(debug["before"], "")
        self.assertEqual(debug["during"], "{% block %}")
        self.assertEqual(debug["bottom"], 1)
        self.assertEqual(debug["top"], 0)
        self.assertEqual(debug["line"], 1)
        self.assertEqual(debug["total"], 1)
        self.assertEqual(len(debug["source_lines"]), 1)
        self.assertTrue(debug["name"].endswith("syntax_error.html"))
        self.assertIn("message", debug)

    def test_exception_debug_info_max_context(self):
        # A long template only includes a window of lines around the error,
        # not the whole source.
        with self.assertRaises(TemplateSyntaxError) as e:
            self.engine.get_template("template_backends/syntax_error2.html")
        debug = e.exception.template_debug
        self.assertEqual(debug["after"], "")
        self.assertEqual(debug["before"], "")
        self.assertEqual(debug["during"], "{% block %}")
        self.assertEqual(debug["bottom"], 26)
        self.assertEqual(debug["top"], 5)
        self.assertEqual(debug["line"], 16)
        self.assertEqual(debug["total"], 31)
        self.assertEqual(len(debug["source_lines"]), 21)
        self.assertTrue(debug["name"].endswith("syntax_error2.html"))
        self.assertIn("message", debug)

    def test_context_processors(self):
        # Context processors from OPTIONS run when a request is supplied,
        # and re-read settings on each render.
        request = RequestFactory().get("/")
        template = self.engine.from_string("Static URL: {{ STATIC_URL }}")
        content = template.render(request=request)
        self.assertEqual(content, "Static URL: /static/")
        with self.settings(STATIC_URL="/s/"):
            content = template.render(request=request)
            self.assertEqual(content, "Static URL: /s/")

    def test_dirs_pathlib(self):
        # DIRS entries may be pathlib.Path objects, not only strings.
        engine = Jinja2(
            {
                "DIRS": [Path(__file__).parent / "templates" / "template_backends"],
                "APP_DIRS": False,
                "NAME": "jinja2",
                "OPTIONS": {},
            }
        )
        template = engine.get_template("hello.html")
        self.assertEqual(template.render({"name": "Joe"}), "Hello Joe!")

    def test_template_render_nested_error(self):
        # Errors raised from an included template point at the included file.
        template = self.engine.get_template(
            "template_backends/syntax_error_include.html"
        )
        with self.assertRaises(TemplateSyntaxError) as e:
            template.render(context={})
        debug = e.exception.template_debug
        self.assertEqual(debug["after"], "")
        self.assertEqual(debug["before"], "")
        self.assertEqual(debug["during"], "{% block %}")
        self.assertEqual(debug["bottom"], 1)
        self.assertEqual(debug["top"], 0)
        self.assertEqual(debug["line"], 1)
        self.assertEqual(debug["total"], 1)
        self.assertEqual(len(debug["source_lines"]), 1)
        self.assertTrue(debug["name"].endswith("syntax_error.html"))
        self.assertIn("message", debug)

    def test_template_render_error_nonexistent_source(self):
        # Debug info degrades gracefully (empty window, zero totals) when the
        # reported source file cannot be read.
        template = self.engine.get_template("template_backends/hello.html")
        with mock.patch(
            "jinja2.environment.Template.render",
            side_effect=jinja2.TemplateSyntaxError("", 1, filename="nonexistent.html"),
        ):
            with self.assertRaises(TemplateSyntaxError) as e:
                template.render(context={})
            debug = e.exception.template_debug
            self.assertEqual(debug["after"], "")
            self.assertEqual(debug["before"], "")
            self.assertEqual(debug["during"], "")
            self.assertEqual(debug["bottom"], 0)
            self.assertEqual(debug["top"], 0)
            self.assertEqual(debug["line"], 1)
            self.assertEqual(debug["total"], 0)
            self.assertEqual(len(debug["source_lines"]), 0)
            self.assertTrue(debug["name"].endswith("nonexistent.html"))
            self.assertIn("message", debug)
|
53f40121f8da1396445416fbca7fc776d5d2c9cd57f1272469a003b7664201fd | import gzip
import os
import sys
import tempfile
import unittest
import warnings
from io import StringIO
from unittest import mock
from django.apps import apps
from django.contrib.sites.models import Site
from django.core import management
from django.core.files.temp import NamedTemporaryFile
from django.core.management import CommandError
from django.core.management.commands.dumpdata import ProxyModelWarning
from django.core.serializers.base import ProgressBar
from django.db import IntegrityError, connection
from django.test import TestCase, TransactionTestCase, skipUnlessDBFeature
from .models import (
Article,
Category,
CircularA,
CircularB,
NaturalKeyThing,
PrimaryKeyUUIDModel,
ProxySpy,
Spy,
Tag,
Visa,
)
try:
import bz2 # NOQA
HAS_BZ2 = True
except ImportError:
HAS_BZ2 = False
try:
import lzma # NOQA
HAS_LZMA = True
except ImportError:
HAS_LZMA = False
class TestCaseFixtureLoadingTests(TestCase):
    # Class-level fixtures loaded for every test in this case.
    fixtures = ["fixture1.json", "fixture2.json"]

    def test_class_fixtures(self):
        "Test case has installed 3 fixture objects"
        headlines = Article.objects.values_list("headline", flat=True)
        expected = [
            "Django conquers world!",
            "Copyright is fine the way it is",
            "Poker has no place on ESPN",
        ]
        self.assertSequenceEqual(headlines, expected)
class SubclassTestCaseFixtureLoadingTests(TestCaseFixtureLoadingTests):
    """
    Make sure that subclasses can remove fixtures from parent class (#21089).
    """

    # Emptying the list disables the fixtures inherited from the parent.
    fixtures = []

    def test_class_fixtures(self):
        "There were no fixture objects installed"
        installed = Article.objects.count()
        self.assertEqual(installed, 0)
class DumpDataAssertMixin:
    """Mixin providing a helper that runs ``dumpdata`` and asserts its output."""

    def _dumpdata_assert(
        self,
        args,
        output,
        format="json",
        filename=None,
        natural_foreign_keys=False,
        natural_primary_keys=False,
        use_base_manager=False,
        exclude_list=None,
        primary_keys="",
    ):
        """
        Call the ``dumpdata`` management command with the given options and
        assert that the serialized result equals ``output``.

        When ``filename`` is given, the dump is written to a file under the
        system temp directory, read back (decompressed according to the
        file extension), and removed; otherwise the command's stdout is
        compared. Comparison is format-aware for JSON and XML.
        """
        # BUG FIX: default was a shared mutable list (exclude_list=[]).
        if exclude_list is None:
            exclude_list = []
        new_io = StringIO()
        filename = filename and os.path.join(tempfile.gettempdir(), filename)
        management.call_command(
            "dumpdata",
            *args,
            format=format,
            stdout=new_io,
            stderr=new_io,
            output=filename,
            use_natural_foreign_keys=natural_foreign_keys,
            use_natural_primary_keys=natural_primary_keys,
            use_base_manager=use_base_manager,
            exclude=exclude_list,
            primary_keys=primary_keys,
        )
        if filename:
            file_root, file_ext = os.path.splitext(filename)
            # When the optional compression module is unavailable, fall back
            # to reading the uncompressed root path as plain text.
            compression_formats = {
                ".bz2": (open, file_root),
                ".gz": (gzip.open, filename),
                ".lzma": (open, file_root),
                ".xz": (open, file_root),
                ".zip": (open, file_root),
            }
            if HAS_BZ2:
                compression_formats[".bz2"] = (bz2.open, filename)
            if HAS_LZMA:
                compression_formats[".lzma"] = (lzma.open, filename)
                compression_formats[".xz"] = (lzma.open, filename)
            try:
                open_method, file_path = compression_formats[file_ext]
            except KeyError:
                open_method, file_path = open, filename
            with open_method(file_path, "rt") as f:
                command_output = f.read()
            os.remove(file_path)
        else:
            command_output = new_io.getvalue().strip()
        if format == "json":
            self.assertJSONEqual(command_output, output)
        elif format == "xml":
            self.assertXMLEqual(command_output, output)
        else:
            self.assertEqual(command_output, output)
class FixtureLoadingTests(DumpDataAssertMixin, TestCase):
def test_loading_and_dumping(self):
apps.clear_cache()
Site.objects.all().delete()
# Load fixture 1. Single JSON file, with two objects.
management.call_command("loaddata", "fixture1.json", verbosity=0)
self.assertSequenceEqual(
Article.objects.values_list("headline", flat=True),
["Time to reform copyright", "Poker has no place on ESPN"],
)
# Dump the current contents of the database as a JSON fixture
self._dumpdata_assert(
["fixtures"],
'[{"pk": 1, "model": "fixtures.category", "fields": '
'{"description": "Latest news stories", "title": "News Stories"}}, '
'{"pk": 2, "model": "fixtures.article", "fields": '
'{"headline": "Poker has no place on ESPN", '
'"pub_date": "2006-06-16T12:00:00"}}, '
'{"pk": 3, "model": "fixtures.article", "fields": '
'{"headline": "Time to reform copyright", '
'"pub_date": "2006-06-16T13:00:00"}}]',
)
# Try just dumping the contents of fixtures.Category
self._dumpdata_assert(
["fixtures.Category"],
'[{"pk": 1, "model": "fixtures.category", "fields": '
'{"description": "Latest news stories", "title": "News Stories"}}]',
)
# ...and just fixtures.Article
self._dumpdata_assert(
["fixtures.Article"],
'[{"pk": 2, "model": "fixtures.article", "fields": '
'{"headline": "Poker has no place on ESPN", '
'"pub_date": "2006-06-16T12:00:00"}}, '
'{"pk": 3, "model": "fixtures.article", "fields": '
'{"headline": "Time to reform copyright", '
'"pub_date": "2006-06-16T13:00:00"}}]',
)
# ...and both
self._dumpdata_assert(
["fixtures.Category", "fixtures.Article"],
'[{"pk": 1, "model": "fixtures.category", "fields": '
'{"description": "Latest news stories", "title": "News Stories"}}, '
'{"pk": 2, "model": "fixtures.article", "fields": '
'{"headline": "Poker has no place on ESPN", '
'"pub_date": "2006-06-16T12:00:00"}}, '
'{"pk": 3, "model": "fixtures.article", "fields": '
'{"headline": "Time to reform copyright", '
'"pub_date": "2006-06-16T13:00:00"}}]',
)
# Specify a specific model twice
self._dumpdata_assert(
["fixtures.Article", "fixtures.Article"],
(
'[{"pk": 2, "model": "fixtures.article", "fields": '
'{"headline": "Poker has no place on ESPN", '
'"pub_date": "2006-06-16T12:00:00"}}, '
'{"pk": 3, "model": "fixtures.article", "fields": '
'{"headline": "Time to reform copyright", '
'"pub_date": "2006-06-16T13:00:00"}}]'
),
)
# Specify a dump that specifies Article both explicitly and implicitly
self._dumpdata_assert(
["fixtures.Article", "fixtures"],
'[{"pk": 1, "model": "fixtures.category", "fields": '
'{"description": "Latest news stories", "title": "News Stories"}}, '
'{"pk": 2, "model": "fixtures.article", "fields": '
'{"headline": "Poker has no place on ESPN", '
'"pub_date": "2006-06-16T12:00:00"}}, '
'{"pk": 3, "model": "fixtures.article", "fields": '
'{"headline": "Time to reform copyright", '
'"pub_date": "2006-06-16T13:00:00"}}]',
)
# Specify a dump that specifies Article both explicitly and implicitly,
# but lists the app first (#22025).
self._dumpdata_assert(
["fixtures", "fixtures.Article"],
'[{"pk": 1, "model": "fixtures.category", "fields": '
'{"description": "Latest news stories", "title": "News Stories"}}, '
'{"pk": 2, "model": "fixtures.article", "fields": '
'{"headline": "Poker has no place on ESPN", '
'"pub_date": "2006-06-16T12:00:00"}}, '
'{"pk": 3, "model": "fixtures.article", "fields": '
'{"headline": "Time to reform copyright", '
'"pub_date": "2006-06-16T13:00:00"}}]',
)
# Same again, but specify in the reverse order
self._dumpdata_assert(
["fixtures"],
'[{"pk": 1, "model": "fixtures.category", "fields": '
'{"description": "Latest news stories", "title": "News Stories"}}, '
'{"pk": 2, "model": "fixtures.article", "fields": '
'{"headline": "Poker has no place on ESPN", '
'"pub_date": "2006-06-16T12:00:00"}}, '
'{"pk": 3, "model": "fixtures.article", "fields": '
'{"headline": "Time to reform copyright", '
'"pub_date": "2006-06-16T13:00:00"}}]',
)
# Specify one model from one application, and an entire other application.
self._dumpdata_assert(
["fixtures.Category", "sites"],
'[{"pk": 1, "model": "fixtures.category", "fields": '
'{"description": "Latest news stories", "title": "News Stories"}}, '
'{"pk": 1, "model": "sites.site", "fields": '
'{"domain": "example.com", "name": "example.com"}}]',
)
# Load fixture 2. JSON file imported by default. Overwrites some
# existing objects.
management.call_command("loaddata", "fixture2.json", verbosity=0)
self.assertSequenceEqual(
Article.objects.values_list("headline", flat=True),
[
"Django conquers world!",
"Copyright is fine the way it is",
"Poker has no place on ESPN",
],
)
# Load fixture 3, XML format.
management.call_command("loaddata", "fixture3.xml", verbosity=0)
self.assertSequenceEqual(
Article.objects.values_list("headline", flat=True),
[
"XML identified as leading cause of cancer",
"Django conquers world!",
"Copyright is fine the way it is",
"Poker on TV is great!",
],
)
# Load fixture 6, JSON file with dynamic ContentType fields. Testing ManyToOne.
management.call_command("loaddata", "fixture6.json", verbosity=0)
self.assertQuerysetEqual(
Tag.objects.all(),
[
'<Tag: <Article: Copyright is fine the way it is> tagged "copyright">',
'<Tag: <Article: Copyright is fine the way it is> tagged "law">',
],
transform=repr,
ordered=False,
)
# Load fixture 7, XML file with dynamic ContentType fields. Testing ManyToOne.
management.call_command("loaddata", "fixture7.xml", verbosity=0)
self.assertQuerysetEqual(
Tag.objects.all(),
[
'<Tag: <Article: Copyright is fine the way it is> tagged "copyright">',
'<Tag: <Article: Copyright is fine the way it is> tagged "legal">',
'<Tag: <Article: Django conquers world!> tagged "django">',
'<Tag: <Article: Django conquers world!> tagged "world domination">',
],
transform=repr,
ordered=False,
)
# Load fixture 8, JSON file with dynamic Permission fields. Testing ManyToMany.
management.call_command("loaddata", "fixture8.json", verbosity=0)
self.assertQuerysetEqual(
Visa.objects.all(),
[
"<Visa: Django Reinhardt Can add user, Can change user, Can delete "
"user>",
"<Visa: Stephane Grappelli Can add user>",
"<Visa: Prince >",
],
transform=repr,
ordered=False,
)
# Load fixture 9, XML file with dynamic Permission fields. Testing ManyToMany.
management.call_command("loaddata", "fixture9.xml", verbosity=0)
self.assertQuerysetEqual(
Visa.objects.all(),
[
"<Visa: Django Reinhardt Can add user, Can change user, Can delete "
"user>",
"<Visa: Stephane Grappelli Can add user, Can delete user>",
'<Visa: Artist formerly known as "Prince" Can change user>',
],
transform=repr,
ordered=False,
)
# object list is unaffected
self.assertSequenceEqual(
Article.objects.values_list("headline", flat=True),
[
"XML identified as leading cause of cancer",
"Django conquers world!",
"Copyright is fine the way it is",
"Poker on TV is great!",
],
)
# By default, you get raw keys on dumpdata
self._dumpdata_assert(
["fixtures.book"],
'[{"pk": 1, "model": "fixtures.book", "fields": '
'{"name": "Music for all ages", "authors": [3, 1]}}]',
)
# But you can get natural keys if you ask for them and they are available
self._dumpdata_assert(
["fixtures.book"],
'[{"pk": 1, "model": "fixtures.book", "fields": '
'{"name": "Music for all ages", "authors": '
'[["Artist formerly known as \\"Prince\\""], ["Django Reinhardt"]]}}]',
natural_foreign_keys=True,
)
# You can also omit the primary keys for models that we can get later
# with natural keys.
self._dumpdata_assert(
["fixtures.person"],
'[{"fields": {"name": "Django Reinhardt"}, "model": "fixtures.person"}, '
'{"fields": {"name": "Stephane Grappelli"}, "model": "fixtures.person"}, '
'{"fields": {"name": "Artist formerly known as \\"Prince\\""}, '
'"model": "fixtures.person"}]',
natural_primary_keys=True,
)
# Dump the current contents of the database as a JSON fixture
self._dumpdata_assert(
["fixtures"],
'[{"pk": 1, "model": "fixtures.category", "fields": '
'{"description": "Latest news stories", "title": "News Stories"}}, '
'{"pk": 2, "model": "fixtures.article", "fields": '
'{"headline": "Poker on TV is great!", '
'"pub_date": "2006-06-16T11:00:00"}}, '
'{"pk": 3, "model": "fixtures.article", "fields": '
'{"headline": "Copyright is fine the way it is", '
'"pub_date": "2006-06-16T14:00:00"}}, '
'{"pk": 4, "model": "fixtures.article", "fields": '
'{"headline": "Django conquers world!", '
'"pub_date": "2006-06-16T15:00:00"}}, '
'{"pk": 5, "model": "fixtures.article", "fields": '
'{"headline": "XML identified as leading cause of cancer", '
'"pub_date": "2006-06-16T16:00:00"}}, '
'{"pk": 1, "model": "fixtures.tag", "fields": '
'{"tagged_type": ["fixtures", "article"], "name": "copyright", '
'"tagged_id": 3}}, '
'{"pk": 2, "model": "fixtures.tag", "fields": '
'{"tagged_type": ["fixtures", "article"], "name": "legal", '
'"tagged_id": 3}}, '
'{"pk": 3, "model": "fixtures.tag", "fields": '
'{"tagged_type": ["fixtures", "article"], "name": "django", '
'"tagged_id": 4}}, '
'{"pk": 4, "model": "fixtures.tag", "fields": '
'{"tagged_type": ["fixtures", "article"], "name": "world domination", '
'"tagged_id": 4}}, '
'{"pk": 1, "model": "fixtures.person", '
'"fields": {"name": "Django Reinhardt"}}, '
'{"pk": 2, "model": "fixtures.person", '
'"fields": {"name": "Stephane Grappelli"}}, '
'{"pk": 3, "model": "fixtures.person", '
'"fields": {"name": "Artist formerly known as \\"Prince\\""}}, '
'{"pk": 1, "model": "fixtures.visa", '
'"fields": {"person": ["Django Reinhardt"], "permissions": '
'[["add_user", "auth", "user"], ["change_user", "auth", "user"], '
'["delete_user", "auth", "user"]]}}, '
'{"pk": 2, "model": "fixtures.visa", "fields": '
'{"person": ["Stephane Grappelli"], "permissions": '
'[["add_user", "auth", "user"], ["delete_user", "auth", "user"]]}}, '
'{"pk": 3, "model": "fixtures.visa", "fields": '
'{"person": ["Artist formerly known as \\"Prince\\""], "permissions": '
'[["change_user", "auth", "user"]]}}, '
'{"pk": 1, "model": "fixtures.book", "fields": '
'{"name": "Music for all ages", "authors": '
'[["Artist formerly known as \\"Prince\\""], ["Django Reinhardt"]]}}]',
natural_foreign_keys=True,
)
# Dump the current contents of the database as an XML fixture
self._dumpdata_assert(
["fixtures"],
'<?xml version="1.0" encoding="utf-8"?><django-objects version="1.0">'
'<object pk="1" model="fixtures.category">'
'<field type="CharField" name="title">News Stories</field>'
'<field type="TextField" name="description">Latest news stories</field>'
"</object>"
'<object pk="2" model="fixtures.article">'
'<field type="CharField" name="headline">Poker on TV is great!</field>'
'<field type="DateTimeField" name="pub_date">2006-06-16T11:00:00</field>'
"</object>"
'<object pk="3" model="fixtures.article">'
'<field type="CharField" name="headline">Copyright is fine the way it is'
"</field>"
'<field type="DateTimeField" name="pub_date">2006-06-16T14:00:00</field>'
"</object>"
'<object pk="4" model="fixtures.article">'
'<field type="CharField" name="headline">Django conquers world!</field>'
'<field type="DateTimeField" name="pub_date">2006-06-16T15:00:00</field>'
"</object>"
'<object pk="5" model="fixtures.article">'
'<field type="CharField" name="headline">'
"XML identified as leading cause of cancer</field>"
'<field type="DateTimeField" name="pub_date">2006-06-16T16:00:00</field>'
"</object>"
'<object pk="1" model="fixtures.tag">'
'<field type="CharField" name="name">copyright</field>'
'<field to="contenttypes.contenttype" name="tagged_type" '
'rel="ManyToOneRel"><natural>fixtures</natural><natural>article</natural>'
"</field>"
'<field type="PositiveIntegerField" name="tagged_id">3</field>'
"</object>"
'<object pk="2" model="fixtures.tag">'
'<field type="CharField" name="name">legal</field>'
'<field to="contenttypes.contenttype" name="tagged_type" '
'rel="ManyToOneRel"><natural>fixtures</natural><natural>article</natural>'
"</field>"
'<field type="PositiveIntegerField" name="tagged_id">3</field></object>'
'<object pk="3" model="fixtures.tag">'
'<field type="CharField" name="name">django</field>'
'<field to="contenttypes.contenttype" name="tagged_type" '
'rel="ManyToOneRel"><natural>fixtures</natural><natural>article</natural>'
"</field>"
'<field type="PositiveIntegerField" name="tagged_id">4</field>'
"</object>"
'<object pk="4" model="fixtures.tag">'
'<field type="CharField" name="name">world domination</field>'
'<field to="contenttypes.contenttype" name="tagged_type" '
'rel="ManyToOneRel"><natural>fixtures</natural><natural>article</natural>'
"</field>"
'<field type="PositiveIntegerField" name="tagged_id">4</field>'
"</object>"
'<object pk="1" model="fixtures.person">'
'<field type="CharField" name="name">Django Reinhardt</field>'
"</object>"
'<object pk="2" model="fixtures.person">'
'<field type="CharField" name="name">Stephane Grappelli</field>'
"</object>"
'<object pk="3" model="fixtures.person">'
'<field type="CharField" name="name">Artist formerly known as "Prince"'
"</field>"
"</object>"
'<object pk="1" model="fixtures.visa">'
'<field to="fixtures.person" name="person" rel="ManyToOneRel">'
"<natural>Django Reinhardt</natural></field>"
'<field to="auth.permission" name="permissions" rel="ManyToManyRel">'
"<object><natural>add_user</natural><natural>auth</natural>"
"<natural>user</natural></object><object><natural>change_user</natural>"
"<natural>auth</natural><natural>user</natural></object>"
"<object><natural>delete_user</natural><natural>auth</natural>"
"<natural>user</natural></object></field>"
"</object>"
'<object pk="2" model="fixtures.visa">'
'<field to="fixtures.person" name="person" rel="ManyToOneRel">'
"<natural>Stephane Grappelli</natural></field>"
'<field to="auth.permission" name="permissions" rel="ManyToManyRel">'
"<object><natural>add_user</natural><natural>auth</natural>"
"<natural>user</natural></object>"
"<object><natural>delete_user</natural><natural>auth</natural>"
"<natural>user</natural></object></field>"
"</object>"
'<object pk="3" model="fixtures.visa">'
'<field to="fixtures.person" name="person" rel="ManyToOneRel">'
'<natural>Artist formerly known as "Prince"</natural></field>'
'<field to="auth.permission" name="permissions" rel="ManyToManyRel">'
"<object><natural>change_user</natural><natural>auth</natural>"
"<natural>user</natural></object></field>"
"</object>"
'<object pk="1" model="fixtures.book">'
'<field type="CharField" name="name">Music for all ages</field>'
'<field to="fixtures.person" name="authors" rel="ManyToManyRel">'
'<object><natural>Artist formerly known as "Prince"</natural></object>'
"<object><natural>Django Reinhardt</natural></object></field>"
"</object></django-objects>",
format="xml",
natural_foreign_keys=True,
)
    def test_dumpdata_with_excludes(self):
        """dumpdata --exclude skips whole apps or individual models."""
        # Load fixture1 which has a site, two articles, and a category
        Site.objects.all().delete()
        management.call_command("loaddata", "fixture1.json", verbosity=0)
        # Excluding fixtures app should only leave sites
        self._dumpdata_assert(
            ["sites", "fixtures"],
            '[{"pk": 1, "model": "sites.site", "fields": '
            '{"domain": "example.com", "name": "example.com"}}]',
            exclude_list=["fixtures"],
        )
        # Excluding fixtures.Article/Book should leave fixtures.Category
        self._dumpdata_assert(
            ["sites", "fixtures"],
            '[{"pk": 1, "model": "sites.site", '
            '"fields": {"domain": "example.com", "name": "example.com"}}, '
            '{"pk": 1, "model": "fixtures.category", "fields": '
            '{"description": "Latest news stories", "title": "News Stories"}}]',
            exclude_list=["fixtures.Article", "fixtures.Book"],
        )
        # Excluding fixtures and fixtures.Article/Book should be a no-op
        self._dumpdata_assert(
            ["sites", "fixtures"],
            '[{"pk": 1, "model": "sites.site", '
            '"fields": {"domain": "example.com", "name": "example.com"}}, '
            '{"pk": 1, "model": "fixtures.category", '
            '"fields": {"description": "Latest news stories", '
            '"title": "News Stories"}}]',
            exclude_list=["fixtures.Article", "fixtures.Book"],
        )
        # Excluding sites and fixtures.Article/Book should only leave fixtures.Category
        self._dumpdata_assert(
            ["sites", "fixtures"],
            '[{"pk": 1, "model": "fixtures.category", "fields": '
            '{"description": "Latest news stories", "title": "News Stories"}}]',
            exclude_list=["fixtures.Article", "fixtures.Book", "sites"],
        )
        # Excluding a bogus app should throw an error
        with self.assertRaisesMessage(
            management.CommandError, "No installed app with label 'foo_app'."
        ):
            self._dumpdata_assert(["fixtures", "sites"], "", exclude_list=["foo_app"])
        # Excluding a bogus model should throw an error
        with self.assertRaisesMessage(
            management.CommandError, "Unknown model: fixtures.FooModel"
        ):
            self._dumpdata_assert(
                ["fixtures", "sites"], "", exclude_list=["fixtures.FooModel"]
            )
@unittest.skipIf(
sys.platform == "win32", "Windows doesn't support '?' in filenames."
)
def test_load_fixture_with_special_characters(self):
management.call_command("loaddata", "fixture_with[special]chars", verbosity=0)
self.assertEqual(
Article.objects.get().headline,
"How To Deal With Special Characters",
)
    def test_dumpdata_with_filtering_manager(self):
        """dumpdata respects the default manager unless told otherwise."""
        spy1 = Spy.objects.create(name="Paul")
        spy2 = Spy.objects.create(name="Alex", cover_blown=True)
        # SpyManager hides blown covers, so only spy1 is visible by default.
        self.assertSequenceEqual(Spy.objects.all(), [spy1])
        # Use the default manager
        self._dumpdata_assert(
            ["fixtures.Spy"],
            '[{"pk": %d, "model": "fixtures.spy", "fields": {"cover_blown": false}}]'
            % spy1.pk,
        )
        # Dump using Django's base manager. Should return all objects,
        # even those normally filtered by the manager
        self._dumpdata_assert(
            ["fixtures.Spy"],
            '[{"pk": %d, "model": "fixtures.spy", "fields": {"cover_blown": true}}, '
            '{"pk": %d, "model": "fixtures.spy", "fields": {"cover_blown": false}}]'
            % (spy2.pk, spy1.pk),
            use_base_manager=True,
        )
    def test_dumpdata_with_pks(self):
        """dumpdata --pks filters objects, and is limited to a single model."""
        management.call_command("loaddata", "fixture1.json", verbosity=0)
        management.call_command("loaddata", "fixture2.json", verbosity=0)
        self._dumpdata_assert(
            ["fixtures.Article"],
            '[{"pk": 2, "model": "fixtures.article", '
            '"fields": {"headline": "Poker has no place on ESPN", '
            '"pub_date": "2006-06-16T12:00:00"}}, '
            '{"pk": 3, "model": "fixtures.article", "fields": '
            '{"headline": "Copyright is fine the way it is", '
            '"pub_date": "2006-06-16T14:00:00"}}]',
            primary_keys="2,3",
        )
        self._dumpdata_assert(
            ["fixtures.Article"],
            '[{"pk": 2, "model": "fixtures.article", '
            '"fields": {"headline": "Poker has no place on ESPN", '
            '"pub_date": "2006-06-16T12:00:00"}}]',
            primary_keys="2",
        )
        # --pks with a whole app label is rejected.
        with self.assertRaisesMessage(
            management.CommandError, "You can only use --pks option with one model"
        ):
            self._dumpdata_assert(
                ["fixtures"],
                '[{"pk": 2, "model": "fixtures.article", "fields": '
                '{"headline": "Poker has no place on ESPN", '
                '"pub_date": "2006-06-16T12:00:00"}}, '
                '{"pk": 3, "model": "fixtures.article", "fields": '
                '{"headline": "Copyright is fine the way it is", '
                '"pub_date": "2006-06-16T14:00:00"}}]',
                primary_keys="2,3",
            )
        # --pks with no model list is rejected.
        with self.assertRaisesMessage(
            management.CommandError, "You can only use --pks option with one model"
        ):
            self._dumpdata_assert(
                "",
                '[{"pk": 2, "model": "fixtures.article", "fields": '
                '{"headline": "Poker has no place on ESPN", '
                '"pub_date": "2006-06-16T12:00:00"}}, '
                '{"pk": 3, "model": "fixtures.article", "fields": '
                '{"headline": "Copyright is fine the way it is", '
                '"pub_date": "2006-06-16T14:00:00"}}]',
                primary_keys="2,3",
            )
        # --pks with more than one model is rejected.
        with self.assertRaisesMessage(
            management.CommandError, "You can only use --pks option with one model"
        ):
            self._dumpdata_assert(
                ["fixtures.Article", "fixtures.category"],
                '[{"pk": 2, "model": "fixtures.article", "fields": '
                '{"headline": "Poker has no place on ESPN", '
                '"pub_date": "2006-06-16T12:00:00"}}, '
                '{"pk": 3, "model": "fixtures.article", "fields": '
                '{"headline": "Copyright is fine the way it is", '
                '"pub_date": "2006-06-16T14:00:00"}}]',
                primary_keys="2,3",
            )
def test_dumpdata_with_uuid_pks(self):
m1 = PrimaryKeyUUIDModel.objects.create()
m2 = PrimaryKeyUUIDModel.objects.create()
output = StringIO()
management.call_command(
"dumpdata",
"fixtures.PrimaryKeyUUIDModel",
"--pks",
", ".join([str(m1.id), str(m2.id)]),
stdout=output,
)
result = output.getvalue()
self.assertIn('"pk": "%s"' % m1.id, result)
self.assertIn('"pk": "%s"' % m2.id, result)
    def test_dumpdata_with_file_output(self):
        """dumpdata --output writes the serialized data to a plain file."""
        management.call_command("loaddata", "fixture1.json", verbosity=0)
        self._dumpdata_assert(
            ["fixtures"],
            '[{"pk": 1, "model": "fixtures.category", "fields": '
            '{"description": "Latest news stories", "title": "News Stories"}}, '
            '{"pk": 2, "model": "fixtures.article", "fields": '
            '{"headline": "Poker has no place on ESPN", '
            '"pub_date": "2006-06-16T12:00:00"}}, '
            '{"pk": 3, "model": "fixtures.article", "fields": '
            '{"headline": "Time to reform copyright", '
            '"pub_date": "2006-06-16T13:00:00"}}]',
            filename="dumpdata.json",
        )
    def test_dumpdata_with_file_gzip_output(self):
        """dumpdata --output with a .gz suffix writes gzip-compressed data."""
        management.call_command("loaddata", "fixture1.json", verbosity=0)
        self._dumpdata_assert(
            ["fixtures"],
            '[{"pk": 1, "model": "fixtures.category", "fields": '
            '{"description": "Latest news stories", "title": "News Stories"}}, '
            '{"pk": 2, "model": "fixtures.article", "fields": '
            '{"headline": "Poker has no place on ESPN", '
            '"pub_date": "2006-06-16T12:00:00"}}, '
            '{"pk": 3, "model": "fixtures.article", "fields": '
            '{"headline": "Time to reform copyright", '
            '"pub_date": "2006-06-16T13:00:00"}}]',
            filename="dumpdata.json.gz",
        )
    @unittest.skipUnless(HAS_BZ2, "No bz2 library detected.")
    def test_dumpdata_with_file_bz2_output(self):
        """dumpdata --output with a .bz2 suffix writes bzip2-compressed data."""
        management.call_command("loaddata", "fixture1.json", verbosity=0)
        self._dumpdata_assert(
            ["fixtures"],
            '[{"pk": 1, "model": "fixtures.category", "fields": '
            '{"description": "Latest news stories", "title": "News Stories"}}, '
            '{"pk": 2, "model": "fixtures.article", "fields": '
            '{"headline": "Poker has no place on ESPN", '
            '"pub_date": "2006-06-16T12:00:00"}}, '
            '{"pk": 3, "model": "fixtures.article", "fields": '
            '{"headline": "Time to reform copyright", '
            '"pub_date": "2006-06-16T13:00:00"}}]',
            filename="dumpdata.json.bz2",
        )
    @unittest.skipUnless(HAS_LZMA, "No lzma library detected.")
    def test_dumpdata_with_file_lzma_output(self):
        """dumpdata --output with a .lzma suffix writes LZMA-compressed data."""
        management.call_command("loaddata", "fixture1.json", verbosity=0)
        self._dumpdata_assert(
            ["fixtures"],
            '[{"pk": 1, "model": "fixtures.category", "fields": '
            '{"description": "Latest news stories", "title": "News Stories"}}, '
            '{"pk": 2, "model": "fixtures.article", "fields": '
            '{"headline": "Poker has no place on ESPN", '
            '"pub_date": "2006-06-16T12:00:00"}}, '
            '{"pk": 3, "model": "fixtures.article", "fields": '
            '{"headline": "Time to reform copyright", '
            '"pub_date": "2006-06-16T13:00:00"}}]',
            filename="dumpdata.json.lzma",
        )
    @unittest.skipUnless(HAS_LZMA, "No lzma library detected.")
    def test_dumpdata_with_file_xz_output(self):
        """dumpdata --output with a .xz suffix writes xz-compressed data."""
        management.call_command("loaddata", "fixture1.json", verbosity=0)
        self._dumpdata_assert(
            ["fixtures"],
            '[{"pk": 1, "model": "fixtures.category", "fields": '
            '{"description": "Latest news stories", "title": "News Stories"}}, '
            '{"pk": 2, "model": "fixtures.article", "fields": '
            '{"headline": "Poker has no place on ESPN", '
            '"pub_date": "2006-06-16T12:00:00"}}, '
            '{"pk": 3, "model": "fixtures.article", "fields": '
            '{"headline": "Time to reform copyright", '
            '"pub_date": "2006-06-16T13:00:00"}}]',
            filename="dumpdata.json.xz",
        )
    def test_dumpdata_with_file_zip_output(self):
        """An unsupported .zip suffix warns and falls back to the bare name."""
        management.call_command("loaddata", "fixture1.json", verbosity=0)
        msg = "Unsupported file extension (.zip). Fixtures saved in 'dumpdata.json'."
        with self.assertWarnsMessage(RuntimeWarning, msg):
            self._dumpdata_assert(
                ["fixtures"],
                '[{"pk": 1, "model": "fixtures.category", "fields": '
                '{"description": "Latest news stories", "title": "News Stories"}}, '
                '{"pk": 2, "model": "fixtures.article", "fields": '
                '{"headline": "Poker has no place on ESPN", '
                '"pub_date": "2006-06-16T12:00:00"}}, '
                '{"pk": 3, "model": "fixtures.article", "fields": '
                '{"headline": "Time to reform copyright", '
                '"pub_date": "2006-06-16T13:00:00"}}]',
                filename="dumpdata.json.zip",
            )
    def test_dumpdata_progressbar(self):
        """
        Dumpdata shows a progress bar on the command line when --output is set,
        stdout is a tty, and verbosity > 0.
        """
        management.call_command("loaddata", "fixture1.json", verbosity=0)
        new_io = StringIO()
        # Fake a terminal: the progress bar is only drawn when isatty() is True.
        new_io.isatty = lambda: True
        with NamedTemporaryFile() as file:
            options = {
                "format": "json",
                "stdout": new_io,
                "stderr": new_io,
                "output": file.name,
            }
            management.call_command("dumpdata", "fixtures", **options)
            self.assertTrue(
                new_io.getvalue().endswith(
                    "[" + "." * ProgressBar.progress_width + "]\n"
                )
            )

            # Test no progress bar when verbosity = 0
            options["verbosity"] = 0
            new_io = StringIO()
            new_io.isatty = lambda: True
            options.update({"stdout": new_io, "stderr": new_io})
            management.call_command("dumpdata", "fixtures", **options)
            self.assertEqual(new_io.getvalue(), "")
def test_dumpdata_proxy_without_concrete(self):
"""
A warning is displayed if a proxy model is dumped without its concrete
parent.
"""
ProxySpy.objects.create(name="Paul")
msg = "fixtures.ProxySpy is a proxy model and won't be serialized."
with self.assertWarnsMessage(ProxyModelWarning, msg):
self._dumpdata_assert(["fixtures.ProxySpy"], "[]")
    def test_dumpdata_proxy_with_concrete(self):
        """
        A warning isn't displayed if a proxy model is dumped with its concrete
        parent.
        """
        spy = ProxySpy.objects.create(name="Paul")

        # Capture all warnings so the absence of ProxyModelWarning is provable.
        with warnings.catch_warnings(record=True) as warning_list:
            warnings.simplefilter("always")
            self._dumpdata_assert(
                ["fixtures.ProxySpy", "fixtures.Spy"],
                '[{"pk": %d, "model": "fixtures.spy", '
                '"fields": {"cover_blown": false}}]' % spy.pk,
            )
        self.assertEqual(len(warning_list), 0)
def test_compress_format_loading(self):
# Load fixture 4 (compressed), using format specification
management.call_command("loaddata", "fixture4.json", verbosity=0)
self.assertEqual(Article.objects.get().headline, "Django pets kitten")
def test_compressed_specified_loading(self):
# Load fixture 5 (compressed), using format *and* compression specification
management.call_command("loaddata", "fixture5.json.zip", verbosity=0)
self.assertEqual(
Article.objects.get().headline,
"WoW subscribers now outnumber readers",
)
def test_compressed_loading(self):
# Load fixture 5 (compressed), only compression specification
management.call_command("loaddata", "fixture5.zip", verbosity=0)
self.assertEqual(
Article.objects.get().headline,
"WoW subscribers now outnumber readers",
)
def test_compressed_loading_gzip(self):
management.call_command("loaddata", "fixture5.json.gz", verbosity=0)
self.assertEqual(
Article.objects.get().headline,
"WoW subscribers now outnumber readers",
)
@unittest.skipUnless(HAS_BZ2, "No bz2 library detected.")
def test_compressed_loading_bz2(self):
management.call_command("loaddata", "fixture5.json.bz2", verbosity=0)
self.assertEqual(
Article.objects.get().headline,
"WoW subscribers now outnumber readers",
)
@unittest.skipUnless(HAS_LZMA, "No lzma library detected.")
def test_compressed_loading_lzma(self):
management.call_command("loaddata", "fixture5.json.lzma", verbosity=0)
self.assertEqual(
Article.objects.get().headline,
"WoW subscribers now outnumber readers",
)
@unittest.skipUnless(HAS_LZMA, "No lzma library detected.")
def test_compressed_loading_xz(self):
management.call_command("loaddata", "fixture5.json.xz", verbosity=0)
self.assertEqual(
Article.objects.get().headline,
"WoW subscribers now outnumber readers",
)
def test_ambiguous_compressed_fixture(self):
# The name "fixture5" is ambiguous, so loading raises an error.
msg = "Multiple fixtures named 'fixture5'"
with self.assertRaisesMessage(management.CommandError, msg):
management.call_command("loaddata", "fixture5", verbosity=0)
def test_db_loading(self):
# Load db fixtures 1 and 2. These will load using the 'default'
# database identifier implicitly.
management.call_command("loaddata", "db_fixture_1", verbosity=0)
management.call_command("loaddata", "db_fixture_2", verbosity=0)
self.assertSequenceEqual(
Article.objects.values_list("headline", flat=True),
[
"Who needs more than one database?",
"Who needs to use compressed data?",
],
)
    def test_loaddata_error_message(self):
        """
        Loading a fixture which contains an invalid object outputs an error
        message which contains the pk of the object that triggered the error.
        """
        # MySQL needs a little prodding to reject invalid data.
        # This won't affect other tests because the database connection
        # is closed at the end of each test.
        if connection.vendor == "mysql":
            with connection.cursor() as cursor:
                cursor.execute("SET sql_mode = 'TRADITIONAL'")
        # The offending object's model and pk must appear in the message.
        msg = "Could not load fixtures.Article(pk=1):"
        with self.assertRaisesMessage(IntegrityError, msg):
            management.call_command("loaddata", "invalid.json", verbosity=0)
    @unittest.skipUnless(
        connection.vendor == "postgresql", "psycopg2 prohibits null characters in data."
    )
    def test_loaddata_null_characters_on_postgresql(self):
        """NUL characters in fixture data produce a clear error on PostgreSQL."""
        msg = (
            "Could not load fixtures.Article(pk=2): "
            "A string literal cannot contain NUL (0x00) characters."
        )
        with self.assertRaisesMessage(ValueError, msg):
            management.call_command("loaddata", "null_character_in_field_value.json")
def test_loaddata_app_option(self):
with self.assertRaisesMessage(
CommandError, "No fixture named 'db_fixture_1' found."
):
management.call_command(
"loaddata", "db_fixture_1", verbosity=0, app_label="someotherapp"
)
self.assertQuerysetEqual(Article.objects.all(), [])
management.call_command(
"loaddata", "db_fixture_1", verbosity=0, app_label="fixtures"
)
self.assertEqual(
Article.objects.get().headline,
"Who needs more than one database?",
)
def test_loaddata_verbosity_three(self):
output = StringIO()
management.call_command(
"loaddata", "fixture1.json", verbosity=3, stdout=output, stderr=output
)
command_output = output.getvalue()
self.assertIn(
"\rProcessed 1 object(s).\rProcessed 2 object(s)."
"\rProcessed 3 object(s).\rProcessed 4 object(s).\n",
command_output,
)
def test_loading_using(self):
# Load fixtures 1 and 2. These will load using the 'default' database
# identifier explicitly.
management.call_command(
"loaddata", "db_fixture_1", verbosity=0, database="default"
)
management.call_command(
"loaddata", "db_fixture_2", verbosity=0, database="default"
)
self.assertSequenceEqual(
Article.objects.values_list("headline", flat=True),
[
"Who needs more than one database?",
"Who needs to use compressed data?",
],
)
def test_unmatched_identifier_loading(self):
# Db fixture 3 won't load because the database identifier doesn't
# match.
with self.assertRaisesMessage(
CommandError, "No fixture named 'db_fixture_3' found."
):
management.call_command("loaddata", "db_fixture_3", verbosity=0)
with self.assertRaisesMessage(
CommandError, "No fixture named 'db_fixture_3' found."
):
management.call_command(
"loaddata", "db_fixture_3", verbosity=0, database="default"
)
self.assertQuerysetEqual(Article.objects.all(), [])
    def test_output_formats(self):
        """dumpdata round-trips the same data as both JSON and XML."""
        # Load back in fixture 1, we need the articles from it
        management.call_command("loaddata", "fixture1", verbosity=0)

        # Try to load fixture 6 using format discovery
        management.call_command("loaddata", "fixture6", verbosity=0)
        self.assertQuerysetEqual(
            Tag.objects.all(),
            [
                '<Tag: <Article: Time to reform copyright> tagged "copyright">',
                '<Tag: <Article: Time to reform copyright> tagged "law">',
            ],
            transform=repr,
            ordered=False,
        )

        # Dump the current contents of the database as a JSON fixture
        self._dumpdata_assert(
            ["fixtures"],
            '[{"pk": 1, "model": "fixtures.category", "fields": '
            '{"description": "Latest news stories", "title": "News Stories"}}, '
            '{"pk": 2, "model": "fixtures.article", "fields": '
            '{"headline": "Poker has no place on ESPN", '
            '"pub_date": "2006-06-16T12:00:00"}}, '
            '{"pk": 3, "model": "fixtures.article", "fields": '
            '{"headline": "Time to reform copyright", '
            '"pub_date": "2006-06-16T13:00:00"}}, '
            '{"pk": 1, "model": "fixtures.tag", "fields": '
            '{"tagged_type": ["fixtures", "article"], "name": "copyright", '
            '"tagged_id": 3}}, '
            '{"pk": 2, "model": "fixtures.tag", "fields": '
            '{"tagged_type": ["fixtures", "article"], "name": "law", "tagged_id": 3}}, '
            '{"pk": 1, "model": "fixtures.person", "fields": '
            '{"name": "Django Reinhardt"}}, '
            '{"pk": 2, "model": "fixtures.person", "fields": '
            '{"name": "Stephane Grappelli"}}, '
            '{"pk": 3, "model": "fixtures.person", "fields": {"name": "Prince"}}]',
            natural_foreign_keys=True,
        )

        # Dump the current contents of the database as an XML fixture
        self._dumpdata_assert(
            ["fixtures"],
            '<?xml version="1.0" encoding="utf-8"?><django-objects version="1.0">'
            '<object pk="1" model="fixtures.category">'
            '<field type="CharField" name="title">News Stories</field>'
            '<field type="TextField" name="description">Latest news stories</field>'
            "</object>"
            '<object pk="2" model="fixtures.article">'
            '<field type="CharField" name="headline">Poker has no place on ESPN</field>'
            '<field type="DateTimeField" name="pub_date">2006-06-16T12:00:00</field>'
            "</object>"
            '<object pk="3" model="fixtures.article">'
            '<field type="CharField" name="headline">Time to reform copyright</field>'
            '<field type="DateTimeField" name="pub_date">2006-06-16T13:00:00</field>'
            "</object>"
            '<object pk="1" model="fixtures.tag">'
            '<field type="CharField" name="name">copyright</field>'
            '<field to="contenttypes.contenttype" name="tagged_type" '
            'rel="ManyToOneRel"><natural>fixtures</natural>'
            "<natural>article</natural></field>"
            '<field type="PositiveIntegerField" name="tagged_id">3</field>'
            "</object>"
            '<object pk="2" model="fixtures.tag">'
            '<field type="CharField" name="name">law</field>'
            '<field to="contenttypes.contenttype" name="tagged_type" '
            'rel="ManyToOneRel"><natural>fixtures</natural>'
            "<natural>article</natural></field>"
            '<field type="PositiveIntegerField" name="tagged_id">3</field>'
            "</object>"
            '<object pk="1" model="fixtures.person">'
            '<field type="CharField" name="name">Django Reinhardt</field>'
            "</object>"
            '<object pk="2" model="fixtures.person">'
            '<field type="CharField" name="name">Stephane Grappelli</field>'
            "</object>"
            '<object pk="3" model="fixtures.person">'
            '<field type="CharField" name="name">Prince</field>'
            "</object></django-objects>",
            format="xml",
            natural_foreign_keys=True,
        )
def test_loading_with_exclude_app(self):
Site.objects.all().delete()
management.call_command(
"loaddata", "fixture1", exclude=["fixtures"], verbosity=0
)
self.assertFalse(Article.objects.exists())
self.assertFalse(Category.objects.exists())
self.assertEqual(Site.objects.get().domain, "example.com")
def test_loading_with_exclude_model(self):
Site.objects.all().delete()
management.call_command(
"loaddata", "fixture1", exclude=["fixtures.Article"], verbosity=0
)
self.assertFalse(Article.objects.exists())
self.assertEqual(Category.objects.get().title, "News Stories")
self.assertEqual(Site.objects.get().domain, "example.com")
def test_exclude_option_errors(self):
"""Excluding a bogus app or model should raise an error."""
msg = "No installed app with label 'foo_app'."
with self.assertRaisesMessage(management.CommandError, msg):
management.call_command(
"loaddata", "fixture1", exclude=["foo_app"], verbosity=0
)
msg = "Unknown model: fixtures.FooModel"
with self.assertRaisesMessage(management.CommandError, msg):
management.call_command(
"loaddata", "fixture1", exclude=["fixtures.FooModel"], verbosity=0
)
def test_stdin_without_format(self):
"""Reading from stdin raises an error if format isn't specified."""
msg = "--format must be specified when reading from stdin."
with self.assertRaisesMessage(management.CommandError, msg):
management.call_command("loaddata", "-", verbosity=0)
def test_loading_stdin(self):
"""Loading fixtures from stdin with json and xml."""
tests_dir = os.path.dirname(__file__)
fixture_json = os.path.join(tests_dir, "fixtures", "fixture1.json")
fixture_xml = os.path.join(tests_dir, "fixtures", "fixture3.xml")
with mock.patch(
"django.core.management.commands.loaddata.sys.stdin", open(fixture_json)
):
management.call_command("loaddata", "--format=json", "-", verbosity=0)
self.assertSequenceEqual(
Article.objects.values_list("headline", flat=True),
["Time to reform copyright", "Poker has no place on ESPN"],
)
with mock.patch(
"django.core.management.commands.loaddata.sys.stdin", open(fixture_xml)
):
management.call_command("loaddata", "--format=xml", "-", verbosity=0)
self.assertSequenceEqual(
Article.objects.values_list("headline", flat=True),
[
"XML identified as leading cause of cancer",
"Time to reform copyright",
"Poker on TV is great!",
],
)
class NonexistentFixtureTests(TestCase):
    """
    Tests for loaddata when no fixture matches the given name.
    """

    def test_loaddata_not_existent_fixture_file(self):
        """A missing fixture name raises CommandError."""
        stdout_output = StringIO()
        with self.assertRaisesMessage(
            CommandError, "No fixture named 'this_fixture_doesnt_exist' found."
        ):
            management.call_command(
                "loaddata", "this_fixture_doesnt_exist", stdout=stdout_output
            )

    @mock.patch("django.db.connection.enable_constraint_checking")
    @mock.patch("django.db.connection.disable_constraint_checking")
    def test_nonexistent_fixture_no_constraint_checking(
        self, disable_constraint_checking, enable_constraint_checking
    ):
        """
        If no fixtures match the loaddata command, constraints checks on the
        database shouldn't be disabled. This is performance critical on MSSQL.
        """
        with self.assertRaisesMessage(
            CommandError, "No fixture named 'this_fixture_doesnt_exist' found."
        ):
            management.call_command(
                "loaddata", "this_fixture_doesnt_exist", verbosity=0
            )
        # Neither constraint-checking hook was touched.
        disable_constraint_checking.assert_not_called()
        enable_constraint_checking.assert_not_called()
class FixtureTransactionTests(DumpDataAssertMixin, TransactionTestCase):
    """
    Fixture-loading tests that need real transactions (TransactionTestCase)
    rather than the per-test transaction wrapper.
    """

    available_apps = [
        "fixtures",
        "django.contrib.sites",
    ]

    @skipUnlessDBFeature("supports_forward_references")
    def test_format_discovery(self):
        """loaddata discovers the serialization format from the file found."""
        # Load fixture 1 again, using format discovery
        management.call_command("loaddata", "fixture1", verbosity=0)
        self.assertSequenceEqual(
            Article.objects.values_list("headline", flat=True),
            ["Time to reform copyright", "Poker has no place on ESPN"],
        )

        # Try to load fixture 2 using format discovery; this will fail
        # because there are two fixture2's in the fixtures directory
        msg = "Multiple fixtures named 'fixture2'"
        with self.assertRaisesMessage(management.CommandError, msg):
            management.call_command("loaddata", "fixture2", verbosity=0)

        # object list is unaffected
        self.assertSequenceEqual(
            Article.objects.values_list("headline", flat=True),
            ["Time to reform copyright", "Poker has no place on ESPN"],
        )

        # Dump the current contents of the database as a JSON fixture
        self._dumpdata_assert(
            ["fixtures"],
            '[{"pk": 1, "model": "fixtures.category", "fields": '
            '{"description": "Latest news stories", "title": "News Stories"}}, '
            '{"pk": 2, "model": "fixtures.article", "fields": '
            '{"headline": "Poker has no place on ESPN", '
            '"pub_date": "2006-06-16T12:00:00"}}, '
            '{"pk": 3, "model": "fixtures.article", "fields": '
            '{"headline": "Time to reform copyright", '
            '"pub_date": "2006-06-16T13:00:00"}}]',
        )

        # Load fixture 4 (compressed), using format discovery
        management.call_command("loaddata", "fixture4", verbosity=0)
        self.assertSequenceEqual(
            Article.objects.values_list("headline", flat=True),
            [
                "Django pets kitten",
                "Time to reform copyright",
                "Poker has no place on ESPN",
            ],
        )
class ForwardReferenceTests(DumpDataAssertMixin, TestCase):
    """
    Fixtures may reference objects (by pk or natural key) that appear later
    in the same fixture; loaddata resolves these forward references.
    """

    def test_forward_reference_fk(self):
        """Two objects referencing each other by pk load correctly."""
        management.call_command("loaddata", "forward_reference_fk.json", verbosity=0)
        t1, t2 = NaturalKeyThing.objects.all()
        self.assertEqual(t1.other_thing, t2)
        self.assertEqual(t2.other_thing, t1)
        self._dumpdata_assert(
            ["fixtures"],
            '[{"model": "fixtures.naturalkeything", "pk": 1, '
            '"fields": {"key": "t1", "other_thing": 2, "other_things": []}}, '
            '{"model": "fixtures.naturalkeything", "pk": 2, '
            '"fields": {"key": "t2", "other_thing": 1, "other_things": []}}]',
        )

    def test_forward_reference_fk_natural_key(self):
        """Mutually-referencing FKs expressed as natural keys load correctly."""
        management.call_command(
            "loaddata",
            "forward_reference_fk_natural_key.json",
            verbosity=0,
        )
        t1, t2 = NaturalKeyThing.objects.all()
        self.assertEqual(t1.other_thing, t2)
        self.assertEqual(t2.other_thing, t1)
        self._dumpdata_assert(
            ["fixtures"],
            '[{"model": "fixtures.naturalkeything", '
            '"fields": {"key": "t1", "other_thing": ["t2"], "other_things": []}}, '
            '{"model": "fixtures.naturalkeything", '
            '"fields": {"key": "t2", "other_thing": ["t1"], "other_things": []}}]',
            natural_primary_keys=True,
            natural_foreign_keys=True,
        )

    def test_forward_reference_m2m(self):
        """M2M entries pointing at later objects (by pk) load correctly."""
        management.call_command("loaddata", "forward_reference_m2m.json", verbosity=0)
        self.assertEqual(NaturalKeyThing.objects.count(), 3)
        t1 = NaturalKeyThing.objects.get_by_natural_key("t1")
        self.assertSequenceEqual(
            t1.other_things.order_by("key").values_list("key", flat=True),
            ["t2", "t3"],
        )
        self._dumpdata_assert(
            ["fixtures"],
            '[{"model": "fixtures.naturalkeything", "pk": 1, '
            '"fields": {"key": "t1", "other_thing": null, "other_things": [2, 3]}}, '
            '{"model": "fixtures.naturalkeything", "pk": 2, '
            '"fields": {"key": "t2", "other_thing": null, "other_things": []}}, '
            '{"model": "fixtures.naturalkeything", "pk": 3, '
            '"fields": {"key": "t3", "other_thing": null, "other_things": []}}]',
        )

    def test_forward_reference_m2m_natural_key(self):
        """M2M forward references expressed as natural keys load correctly."""
        management.call_command(
            "loaddata",
            "forward_reference_m2m_natural_key.json",
            verbosity=0,
        )
        self.assertEqual(NaturalKeyThing.objects.count(), 3)
        t1 = NaturalKeyThing.objects.get_by_natural_key("t1")
        self.assertSequenceEqual(
            t1.other_things.order_by("key").values_list("key", flat=True),
            ["t2", "t3"],
        )
        self._dumpdata_assert(
            ["fixtures"],
            '[{"model": "fixtures.naturalkeything", '
            '"fields": {"key": "t1", "other_thing": null, '
            '"other_things": [["t2"], ["t3"]]}}, '
            '{"model": "fixtures.naturalkeything", '
            '"fields": {"key": "t2", "other_thing": null, "other_things": []}}, '
            '{"model": "fixtures.naturalkeything", '
            '"fields": {"key": "t3", "other_thing": null, "other_things": []}}]',
            natural_primary_keys=True,
            natural_foreign_keys=True,
        )
class CircularReferenceTests(DumpDataAssertMixin, TestCase):
    """
    Fixtures containing two objects that reference each other (a cycle)
    load and round-trip through dumpdata.
    """

    def test_circular_reference(self):
        """A pk-based A→B→A cycle loads and dumps correctly."""
        management.call_command("loaddata", "circular_reference.json", verbosity=0)
        obj_a = CircularA.objects.get()
        obj_b = CircularB.objects.get()
        self.assertEqual(obj_a.obj, obj_b)
        self.assertEqual(obj_b.obj, obj_a)
        self._dumpdata_assert(
            ["fixtures"],
            '[{"model": "fixtures.circulara", "pk": 1, '
            '"fields": {"key": "x", "obj": 1}}, '
            '{"model": "fixtures.circularb", "pk": 1, '
            '"fields": {"key": "y", "obj": 1}}]',
        )

    def test_circular_reference_natural_key(self):
        """A natural-key-based A→B→A cycle loads and dumps correctly."""
        management.call_command(
            "loaddata",
            "circular_reference_natural_key.json",
            verbosity=0,
        )
        obj_a = CircularA.objects.get()
        obj_b = CircularB.objects.get()
        self.assertEqual(obj_a.obj, obj_b)
        self.assertEqual(obj_b.obj, obj_a)
        self._dumpdata_assert(
            ["fixtures"],
            '[{"model": "fixtures.circulara", '
            '"fields": {"key": "x", "obj": ["y"]}}, '
            '{"model": "fixtures.circularb", '
            '"fields": {"key": "y", "obj": ["x"]}}]',
            natural_primary_keys=True,
            natural_foreign_keys=True,
        )
|
d08fe272f7ce7b1c039589d5472f94b7ac5c22edf87ebb429a680d27652200b4 | """
Fixtures.
Fixtures are a way of loading data into the database in bulk. Fixture data
can be stored in any serializable format (including JSON and XML). Fixtures
are identified by name, and are stored in either a directory named 'fixtures'
in the application directory, or in one of the directories named in the
``FIXTURE_DIRS`` setting.
"""
import uuid
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.db import models
class Category(models.Model):
    """Simple titled category, ordered alphabetically by title."""
    title = models.CharField(max_length=100)
    description = models.TextField()
    class Meta:
        ordering = ("title",)
    def __str__(self):
        return self.title
class Article(models.Model):
    """Dated article; newest first, then by headline."""
    headline = models.CharField(max_length=100, default="Default headline")
    pub_date = models.DateTimeField()
    class Meta:
        ordering = ("-pub_date", "headline")
    def __str__(self):
        return self.headline
class Blog(models.Model):
    """Blog with one featured Article (FK) and an optional set of Articles
    (M2M); distinct related_names keep the two reverse accessors apart."""
    name = models.CharField(max_length=100)
    featured = models.ForeignKey(
        Article, models.CASCADE, related_name="fixtures_featured_set"
    )
    articles = models.ManyToManyField(
        Article, blank=True, related_name="fixtures_articles_set"
    )
    def __str__(self):
        return self.name
class Tag(models.Model):
    """Tag attachable to any model via a contenttypes generic foreign key."""
    name = models.CharField(max_length=100)
    # Content type + object id pair backing the generic relation below.
    tagged_type = models.ForeignKey(
        ContentType, models.CASCADE, related_name="fixtures_tag_set"
    )
    tagged_id = models.PositiveIntegerField(default=0)
    tagged = GenericForeignKey(ct_field="tagged_type", fk_field="tagged_id")
    def __str__(self):
        return '<%s: %s> tagged "%s"' % (
            self.tagged.__class__.__name__,
            self.tagged,
            self.name,
        )
class PersonManager(models.Manager):
    """Manager providing natural-key lookup by unique name."""
    def get_by_natural_key(self, name):
        return self.get(name=name)
class Person(models.Model):
    """Person identified by a unique name; supports natural-key
    (de)serialization via PersonManager/natural_key()."""
    objects = PersonManager()
    name = models.CharField(max_length=100, unique=True)
    class Meta:
        ordering = ("name",)
    def __str__(self):
        return self.name
    def natural_key(self):
        return (self.name,)
class SpyManager(PersonManager):
    """Default manager for Spy: hides rows whose cover is blown."""
    def get_queryset(self):
        return super().get_queryset().filter(cover_blown=False)
class Spy(Person):
    """Multi-table subclass of Person; SpyManager filters out blown covers."""
    objects = SpyManager()
    cover_blown = models.BooleanField(default=False)
class ProxySpy(Spy):
    """Proxy of Spy — same table, used to exercise proxy-model fixtures."""
    class Meta:
        proxy = True
class Visa(models.Model):
    """Links a Person to a set of auth Permissions."""
    person = models.ForeignKey(Person, models.CASCADE)
    permissions = models.ManyToManyField(Permission, blank=True)
    def __str__(self):
        # "name perm1, perm2, ..." — permission list may be empty.
        return "%s %s" % (
            self.person.name,
            ", ".join(p.name for p in self.permissions.all()),
        )
class Book(models.Model):
    """Book with M2M authors, ordered by name."""
    name = models.CharField(max_length=100)
    authors = models.ManyToManyField(Person)
    class Meta:
        ordering = ("name",)
    def __str__(self):
        # "Title by A and B"; falls back to the bare title when author-less.
        authors = " and ".join(a.name for a in self.authors.all())
        return "%s by %s" % (self.name, authors) if authors else self.name
class PrimaryKeyUUIDModel(models.Model):
    """Model whose primary key is a client-generated UUID."""
    id = models.UUIDField(primary_key=True, default=uuid.uuid4)
class NaturalKeyManager(models.Manager):
    """Manager providing natural-key lookup by the unique ``key`` field."""
    def get_by_natural_key(self, key):
        return self.get(key=key)
class NaturalKeyThing(models.Model):
    """Self-referential model (FK and M2M to itself) with a natural key, used
    to test forward references during fixture loading."""
    key = models.CharField(max_length=100, unique=True)
    other_thing = models.ForeignKey(
        "NaturalKeyThing", on_delete=models.CASCADE, null=True
    )
    other_things = models.ManyToManyField(
        "NaturalKeyThing", related_name="thing_m2m_set"
    )
    objects = NaturalKeyManager()
    def natural_key(self):
        return (self.key,)
    def __str__(self):
        return self.key
class CircularA(models.Model):
    """One half of a circular FK pair (points at CircularB); natural key is
    the unique ``key``."""
    key = models.CharField(max_length=3, unique=True)
    obj = models.ForeignKey("CircularB", models.SET_NULL, null=True)
    objects = NaturalKeyManager()
    def natural_key(self):
        return (self.key,)
class CircularB(models.Model):
    """Other half of the circular FK pair (points back at CircularA)."""
    key = models.CharField(max_length=3, unique=True)
    obj = models.ForeignKey("CircularA", models.SET_NULL, null=True)
    objects = NaturalKeyManager()
    def natural_key(self):
        return (self.key,)
|
f26fe4be6f65ee201743eabb66e7e19b4249c5ea2b1888d346ee63164e4a8aef | from django import forms
from django.core.exceptions import NON_FIELD_ERRORS
from django.test import TestCase
from django.utils.functional import lazy
from . import ValidationAssertions
from .models import (
Article,
Author,
GenericIPAddressTestModel,
GenericIPAddrUnpackUniqueTest,
ModelToValidate,
)
class BaseModelValidationTests(ValidationAssertions, TestCase):
    """Model.full_clean() behavior on ModelToValidate: required fields, the
    custom clean() hook, FK existence/limit_choices_to, email/URL/slug
    validation, and max_length enforcement."""
    def test_missing_required_field_raises_error(self):
        mtv = ModelToValidate(f_with_custom_validator=42)
        self.assertFailsValidation(mtv.full_clean, ["name", "number"])
    def test_with_correct_value_model_validates(self):
        mtv = ModelToValidate(number=10, name="Some Name")
        self.assertIsNone(mtv.full_clean())
    def test_custom_validate_method(self):
        # ModelToValidate.clean() rejects number == 11 with a non-field error;
        # the missing required "name" is reported alongside it.
        mtv = ModelToValidate(number=11)
        self.assertFailsValidation(mtv.full_clean, [NON_FIELD_ERRORS, "name"])
    def test_wrong_FK_value_raises_error(self):
        # Nonexistent pk target.
        mtv = ModelToValidate(number=10, name="Some Name", parent_id=3)
        self.assertFieldFailsValidationWithMessage(
            mtv.full_clean,
            "parent",
            ["model to validate instance with id %r does not exist." % mtv.parent_id],
        )
        # Nonexistent to_field target (ufm points at unique_charfield).
        mtv = ModelToValidate(number=10, name="Some Name", ufm_id="Some Name")
        self.assertFieldFailsValidationWithMessage(
            mtv.full_clean,
            "ufm",
            [
                "unique fields model instance with unique_charfield %r does not exist."
                % mtv.name
            ],
        )
    def test_correct_FK_value_validates(self):
        parent = ModelToValidate.objects.create(number=10, name="Some Name")
        mtv = ModelToValidate(number=10, name="Some Name", parent_id=parent.pk)
        self.assertIsNone(mtv.full_clean())
    def test_limited_FK_raises_error(self):
        # The limit_choices_to on the parent field says that a parent object's
        # number attribute must be 10, so this should fail validation.
        parent = ModelToValidate.objects.create(number=11, name="Other Name")
        mtv = ModelToValidate(number=10, name="Some Name", parent_id=parent.pk)
        self.assertFailsValidation(mtv.full_clean, ["parent"])
    def test_FK_validates_using_base_manager(self):
        # Archived articles are not available through the default manager, only
        # the base manager.
        author = Author.objects.create(name="Randy", archived=True)
        article = Article(title="My Article", author=author)
        self.assertIsNone(article.full_clean())
    def test_wrong_email_value_raises_error(self):
        mtv = ModelToValidate(number=10, name="Some Name", email="not-an-email")
        self.assertFailsValidation(mtv.full_clean, ["email"])
    def test_correct_email_value_passes(self):
        mtv = ModelToValidate(number=10, name="Some Name", email="[email protected]")
        self.assertIsNone(mtv.full_clean())
    def test_wrong_url_value_raises_error(self):
        mtv = ModelToValidate(number=10, name="Some Name", url="not a url")
        self.assertFieldFailsValidationWithMessage(
            mtv.full_clean, "url", ["Enter a valid URL."]
        )
    def test_text_greater_that_charfields_max_length_raises_errors(self):
        mtv = ModelToValidate(number=10, name="Some Name" * 100)
        self.assertFailsValidation(mtv.full_clean, ["name"])
    def test_malformed_slug_raises_error(self):
        mtv = ModelToValidate(number=10, name="Some Name", slug="##invalid##")
        self.assertFailsValidation(mtv.full_clean, ["slug"])
    def test_full_clean_does_not_mutate_exclude(self):
        # The caller's exclude list must come back unchanged.
        mtv = ModelToValidate(f_with_custom_validator=42)
        exclude = ["number"]
        self.assertFailsValidation(mtv.full_clean, ["name"], exclude=exclude)
        self.assertEqual(len(exclude), 1)
        self.assertEqual(exclude[0], "number")
class ArticleForm(forms.ModelForm):
    """ModelForm over Article that deliberately omits the author field."""
    class Meta:
        model = Article
        exclude = ["author"]
class ModelFormsTests(TestCase):
    """Interaction between ModelForm validation and Article's model-level
    clean() (which fills a missing pub_date)."""
    @classmethod
    def setUpTestData(cls):
        cls.author = Author.objects.create(name="Joseph Kocherhans")
    def test_partial_validation(self):
        # Make sure the "commit=False and set field values later" idiom still
        # works with model validation.
        data = {
            "title": "The state of model validation",
            "pub_date": "2010-1-10 14:49:00",
        }
        form = ArticleForm(data)
        self.assertEqual(list(form.errors), [])
        article = form.save(commit=False)
        article.author = self.author
        article.save()
    def test_validation_with_empty_blank_field(self):
        # Since a value for pub_date wasn't provided and the field is
        # blank=True, model-validation should pass.
        # Also, Article.clean() should be run, so pub_date will be filled after
        # validation, so the form should save cleanly even though pub_date is
        # not allowed to be null.
        data = {
            "title": "The state of model validation",
        }
        article = Article(author_id=self.author.id)
        form = ArticleForm(data, instance=article)
        self.assertEqual(list(form.errors), [])
        self.assertIsNotNone(form.instance.pub_date)
        article = form.save()
    def test_validation_with_invalid_blank_field(self):
        # Even though pub_date is set to blank=True, an invalid value was
        # provided, so it should fail validation.
        data = {"title": "The state of model validation", "pub_date": "never"}
        article = Article(author_id=self.author.id)
        form = ArticleForm(data, instance=article)
        self.assertEqual(list(form.errors), ["pub_date"])
class GenericIPAddressFieldTests(ValidationAssertions, TestCase):
    """Validation and uniqueness behavior of GenericIPAddressField variants
    (generic, protocol="ipv4", protocol="ipv6", unpack_ipv4)."""
    def test_correct_generic_ip_passes(self):
        giptm = GenericIPAddressTestModel(generic_ip="1.2.3.4")
        self.assertIsNone(giptm.full_clean())
        # Surrounding whitespace/newlines are tolerated.
        giptm = GenericIPAddressTestModel(generic_ip=" 1.2.3.4 ")
        self.assertIsNone(giptm.full_clean())
        giptm = GenericIPAddressTestModel(generic_ip="1.2.3.4\n")
        self.assertIsNone(giptm.full_clean())
        giptm = GenericIPAddressTestModel(generic_ip="2001::2")
        self.assertIsNone(giptm.full_clean())
    def test_invalid_generic_ip_raises_error(self):
        giptm = GenericIPAddressTestModel(generic_ip="294.4.2.1")
        self.assertFailsValidation(giptm.full_clean, ["generic_ip"])
        giptm = GenericIPAddressTestModel(generic_ip="1:2")
        self.assertFailsValidation(giptm.full_clean, ["generic_ip"])
        # Non-string values (including lazily evaluated ones) are invalid too.
        giptm = GenericIPAddressTestModel(generic_ip=1)
        self.assertFailsValidation(giptm.full_clean, ["generic_ip"])
        giptm = GenericIPAddressTestModel(generic_ip=lazy(lambda: 1, int))
        self.assertFailsValidation(giptm.full_clean, ["generic_ip"])
    def test_correct_v4_ip_passes(self):
        giptm = GenericIPAddressTestModel(v4_ip="1.2.3.4")
        self.assertIsNone(giptm.full_clean())
    def test_invalid_v4_ip_raises_error(self):
        giptm = GenericIPAddressTestModel(v4_ip="294.4.2.1")
        self.assertFailsValidation(giptm.full_clean, ["v4_ip"])
        # IPv6 values are rejected by a protocol="ipv4" field.
        giptm = GenericIPAddressTestModel(v4_ip="2001::2")
        self.assertFailsValidation(giptm.full_clean, ["v4_ip"])
    def test_correct_v6_ip_passes(self):
        giptm = GenericIPAddressTestModel(v6_ip="2001::2")
        self.assertIsNone(giptm.full_clean())
    def test_invalid_v6_ip_raises_error(self):
        giptm = GenericIPAddressTestModel(v6_ip="1.2.3.4")
        self.assertFailsValidation(giptm.full_clean, ["v6_ip"])
        giptm = GenericIPAddressTestModel(v6_ip="1:2")
        self.assertFailsValidation(giptm.full_clean, ["v6_ip"])
    def test_v6_uniqueness_detection(self):
        # These two addresses are the same with different syntax
        giptm = GenericIPAddressTestModel(generic_ip="2001::1:0:0:0:0:2")
        giptm.save()
        # Fixed: the second spelling was "2001:0:1:2", which is not a valid
        # IPv6 literal at all (fewer than 8 groups and no "::"), so the
        # assertion passed on a format error rather than the uniqueness
        # collision this test is about. "2001:0:1::2" expands to the same
        # address as "2001::1:0:0:0:0:2" (2001:0:1:0:0:0:0:2).
        giptm = GenericIPAddressTestModel(generic_ip="2001:0:1::2")
        self.assertFailsValidation(giptm.full_clean, ["generic_ip"])
    def test_v4_unpack_uniqueness_detection(self):
        # These two are different, because we are not doing IPv4 unpacking
        giptm = GenericIPAddressTestModel(generic_ip="::ffff:10.10.10.10")
        giptm.save()
        giptm = GenericIPAddressTestModel(generic_ip="10.10.10.10")
        self.assertIsNone(giptm.full_clean())
        # These two are the same, because we are doing IPv4 unpacking
        giptm = GenericIPAddrUnpackUniqueTest(generic_v4unpack_ip="::ffff:18.52.18.52")
        giptm.save()
        giptm = GenericIPAddrUnpackUniqueTest(generic_v4unpack_ip="18.52.18.52")
        self.assertFailsValidation(giptm.full_clean, ["generic_v4unpack_ip"])
    def test_empty_generic_ip_passes(self):
        # blank/null are both allowed on the generic field.
        giptm = GenericIPAddressTestModel(generic_ip="")
        self.assertIsNone(giptm.full_clean())
        giptm = GenericIPAddressTestModel(generic_ip=None)
        self.assertIsNone(giptm.full_clean())
|
b0882b8c6bbadb577ae204bf59aeb820e42cadb0a69219df96436d28069045fc | from unittest import TestCase
from django.core.exceptions import ValidationError
from django.db import models
class ValidationMessagesTest(TestCase):
    """Default error messages produced by model fields' clean() for invalid
    values (integer, boolean, float, decimal, date, datetime, time)."""
    def _test_validation_messages(self, field, value, expected):
        # Helper: clean() must raise ValidationError carrying exactly
        # the `expected` list of messages.
        with self.assertRaises(ValidationError) as cm:
            field.clean(value, None)
        self.assertEqual(cm.exception.messages, expected)
    def test_autofield_field_raises_error_message(self):
        f = models.AutoField(primary_key=True)
        self._test_validation_messages(f, "fõo", ["“fõo” value must be an integer."])
    def test_integer_field_raises_error_message(self):
        f = models.IntegerField()
        self._test_validation_messages(f, "fõo", ["“fõo” value must be an integer."])
    def test_boolean_field_raises_error_message(self):
        f = models.BooleanField()
        self._test_validation_messages(
            f, "fõo", ["“fõo” value must be either True or False."]
        )
    def test_nullable_boolean_field_raises_error_message(self):
        f = models.BooleanField(null=True)
        self._test_validation_messages(
            f, "fõo", ["“fõo” value must be either True, False, or None."]
        )
    def test_float_field_raises_error_message(self):
        f = models.FloatField()
        self._test_validation_messages(f, "fõo", ["“fõo” value must be a float."])
    def test_decimal_field_raises_error_message(self):
        f = models.DecimalField()
        self._test_validation_messages(
            f, "fõo", ["“fõo” value must be a decimal number."]
        )
    # NOTE(review): identical body to
    # test_nullable_boolean_field_raises_error_message above — presumably kept
    # for historical naming; confirm before deduplicating.
    def test_null_boolean_field_raises_error_message(self):
        f = models.BooleanField(null=True)
        self._test_validation_messages(
            f, "fõo", ["“fõo” value must be either True, False, or None."]
        )
    def test_date_field_raises_error_message(self):
        f = models.DateField()
        # Not a date at all.
        self._test_validation_messages(
            f,
            "fõo",
            [
                "“fõo” value has an invalid date format. It must be in YYYY-MM-DD "
                "format."
            ],
        )
        self._test_validation_messages(
            f,
            "aaaa-10-10",
            [
                "“aaaa-10-10” value has an invalid date format. It must be in "
                "YYYY-MM-DD format."
            ],
        )
        # Correct format but impossible month/day.
        self._test_validation_messages(
            f,
            "2011-13-10",
            [
                "“2011-13-10” value has the correct format (YYYY-MM-DD) but it is an "
                "invalid date."
            ],
        )
        self._test_validation_messages(
            f,
            "2011-10-32",
            [
                "“2011-10-32” value has the correct format (YYYY-MM-DD) but it is an "
                "invalid date."
            ],
        )
    def test_datetime_field_raises_error_message(self):
        f = models.DateTimeField()
        # Wrong format
        self._test_validation_messages(
            f,
            "fõo",
            [
                "“fõo” value has an invalid format. It must be in "
                "YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ] format."
            ],
        )
        # Correct format but invalid date
        self._test_validation_messages(
            f,
            "2011-10-32",
            [
                "“2011-10-32” value has the correct format (YYYY-MM-DD) but it is an "
                "invalid date."
            ],
        )
        # Correct format but invalid date/time
        self._test_validation_messages(
            f,
            "2011-10-32 10:10",
            [
                "“2011-10-32 10:10” value has the correct format "
                "(YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ]) but it is an invalid date/time."
            ],
        )
    def test_time_field_raises_error_message(self):
        f = models.TimeField()
        # Wrong format
        self._test_validation_messages(
            f,
            "fõo",
            [
                "“fõo” value has an invalid format. It must be in HH:MM[:ss[.uuuuuu]] "
                "format."
            ],
        )
        # Correct format but invalid time
        self._test_validation_messages(
            f,
            "25:50",
            [
                "“25:50” value has the correct format (HH:MM[:ss[.uuuuuu]]) but it is "
                "an invalid time."
            ],
        )
|
7117994958604d9dd754e95746b485fb7b9de90deba8cca266691c4bbe28d620 | from datetime import datetime
from django.core.exceptions import ValidationError
from django.db import models
from django.db.models.functions import Lower
def validate_answer_to_universe(value):
    """Field validator: accept only 42; otherwise raise a ValidationError
    with code "not42"."""
    if value == 42:
        return
    raise ValidationError(
        "This is not the answer to life, universe and everything!", code="not42"
    )
class ModelToValidate(models.Model):
    """Kitchen-sink model for the validation tests: required fields, a
    self-FK with limit_choices_to, a to_field FK, custom validators, and a
    clean() hook that rejects number == 11."""
    name = models.CharField(max_length=100)
    created = models.DateTimeField(default=datetime.now)
    number = models.IntegerField(db_column="number_val")
    # Self-referential FK restricted to parents whose number is 10.
    parent = models.ForeignKey(
        "self",
        models.SET_NULL,
        blank=True,
        null=True,
        limit_choices_to={"number": 10},
    )
    email = models.EmailField(blank=True)
    # FK targeting a non-pk unique column via to_field.
    ufm = models.ForeignKey(
        "UniqueFieldsModel",
        models.SET_NULL,
        to_field="unique_charfield",
        blank=True,
        null=True,
    )
    url = models.URLField(blank=True)
    f_with_custom_validator = models.IntegerField(
        blank=True, null=True, validators=[validate_answer_to_universe]
    )
    # Same validator supplied as a tuple, proving any iterable is accepted.
    f_with_iterable_of_validators = models.IntegerField(
        blank=True, null=True, validators=(validate_answer_to_universe,)
    )
    slug = models.SlugField(blank=True)
    def clean(self):
        super().clean()
        if self.number == 11:
            raise ValidationError("Invalid number supplied!")
class UniqueFieldsModel(models.Model):
    """Two individually-unique fields plus one non-unique field, for
    _get_unique_checks() collection tests."""
    unique_charfield = models.CharField(max_length=100, unique=True)
    unique_integerfield = models.IntegerField(unique=True)
    non_unique_field = models.IntegerField()
class CustomPKModel(models.Model):
    """Model with a non-auto primary key (a CharField)."""
    my_pk_field = models.CharField(max_length=100, primary_key=True)
class UniqueTogetherModel(models.Model):
    """Model whose Meta.unique_together mixes tuple and list spellings, to
    exercise normalization in _get_unique_checks()."""
    cfield = models.CharField(max_length=100)
    ifield = models.IntegerField()
    efield = models.EmailField()
    class Meta:
        unique_together = (
            (
                "ifield",
                "cfield",
            ),
            ["ifield", "efield"],
        )
class UniqueForDateModel(models.Model):
    """Exercises unique_for_date / unique_for_year / unique_for_month."""
    start_date = models.DateField()
    end_date = models.DateTimeField()
    # count is unique per start_date day and per end_date year.
    count = models.IntegerField(
        unique_for_date="start_date", unique_for_year="end_date"
    )
    order = models.IntegerField(unique_for_month="end_date")
    name = models.CharField(max_length=100)
class CustomMessagesModel(models.Model):
    """Model overriding per-error-code messages ("null", "not42",
    "not_equal") on its required number field."""
    other = models.IntegerField(blank=True, null=True)
    number = models.IntegerField(
        db_column="number_val",
        error_messages={"null": "NULL", "not42": "AAARGH", "not_equal": "%s != me"},
        validators=[validate_answer_to_universe],
    )
class AuthorManager(models.Manager):
    """Default manager for Author that hides archived rows."""
    def get_queryset(self):
        qs = super().get_queryset()
        return qs.filter(archived=False)
class Author(models.Model):
    """Author whose default manager filters out archived=True rows; used to
    show FK validation goes through the base manager."""
    name = models.CharField(max_length=100)
    archived = models.BooleanField(default=False)
    objects = AuthorManager()
class Article(models.Model):
    """Article whose clean() backfills a missing pub_date (the field is
    blank=True but not null)."""
    title = models.CharField(max_length=100)
    author = models.ForeignKey(Author, models.CASCADE)
    pub_date = models.DateTimeField(blank=True)
    def clean(self):
        # Default the publication date at validation time.
        if self.pub_date is None:
            self.pub_date = datetime.now()
class Post(models.Model):
    """Post with unique_for_date/year/month fields keyed on a required
    DateField."""
    title = models.CharField(max_length=50, unique_for_date="posted", blank=True)
    slug = models.CharField(max_length=50, unique_for_year="posted", blank=True)
    subtitle = models.CharField(max_length=50, unique_for_month="posted", blank=True)
    posted = models.DateField()
class FlexibleDatePost(models.Model):
    """Like Post, but with a nullable posted date — the unique_for_* checks
    must be skipped when the date is None."""
    title = models.CharField(max_length=50, unique_for_date="posted", blank=True)
    slug = models.CharField(max_length=50, unique_for_year="posted", blank=True)
    subtitle = models.CharField(max_length=50, unique_for_month="posted", blank=True)
    posted = models.DateField(blank=True, null=True)
class UniqueErrorsModel(models.Model):
    """Unique fields with customized "unique" error messages."""
    name = models.CharField(
        max_length=100,
        unique=True,
        error_messages={"unique": "Custom unique name message."},
    )
    no = models.IntegerField(
        unique=True, error_messages={"unique": "Custom unique number message."}
    )
class GenericIPAddressTestModel(models.Model):
    """One GenericIPAddressField per protocol variant: generic (unique),
    ipv4-only, ipv6-only, and one with a custom verbose name."""
    generic_ip = models.GenericIPAddressField(blank=True, null=True, unique=True)
    v4_ip = models.GenericIPAddressField(blank=True, null=True, protocol="ipv4")
    v6_ip = models.GenericIPAddressField(blank=True, null=True, protocol="ipv6")
    ip_verbose_name = models.GenericIPAddressField(
        "IP Address Verbose", blank=True, null=True
    )
class GenericIPAddrUnpackUniqueTest(models.Model):
    """Unique IP field with unpack_ipv4=True, so ::ffff:x.y.z.w collides
    with its plain IPv4 form."""
    generic_v4unpack_ip = models.GenericIPAddressField(
        null=True, blank=True, unique=True, unpack_ipv4=True
    )
class UniqueFuncConstraintModel(models.Model):
    """Functional unique constraint (on Lower("field")); only created on
    backends supporting expression indexes."""
    field = models.CharField(max_length=255)
    class Meta:
        required_db_features = {"supports_expression_indexes"}
        constraints = [
            models.UniqueConstraint(Lower("field"), name="func_lower_field_uq"),
        ]
|
e92fd2e39d70de07013f43526c41a2135c3f5aaa90202a34add6e0e62be6e39f | import pickle
from unittest import TestCase
from django.core.exceptions import ValidationError
class PickableValidationErrorTestCase(TestCase):
    """ValidationError instances must survive a pickle round trip in every
    construction form: single message, wrapped error, message list,
    error list, and message dict."""
    def _roundtrip(self, err):
        # Serialize and deserialize in one step.
        return pickle.loads(pickle.dumps(err))
    def test_validationerror_is_picklable(self):
        # Single message with a code.
        source = ValidationError("a", code="something")
        restored = self._roundtrip(source)
        self.assertIs(restored, restored.error_list[0])
        self.assertEqual(source.message, restored.message)
        self.assertEqual(source.code, restored.code)
        # A ValidationError wrapping another ValidationError.
        source = ValidationError("a", code="something")
        restored = self._roundtrip(ValidationError(source))
        self.assertIs(restored, restored.error_list[0])
        self.assertEqual(source.message, restored.message)
        self.assertEqual(source.code, restored.code)
        # A list of plain messages.
        source = ValidationError(["a", "b"])
        restored = self._roundtrip(source)
        self.assertEqual(
            source.error_list[0].message, restored.error_list[0].message
        )
        self.assertEqual(
            source.error_list[1].message, restored.error_list[1].message
        )
        # A wrapped list of plain messages.
        source = ValidationError(["a", "b"])
        restored = self._roundtrip(ValidationError(source))
        self.assertEqual(
            source.error_list[0].message, restored.error_list[0].message
        )
        self.assertEqual(
            source.error_list[1].message, restored.error_list[1].message
        )
        # A list of ValidationError instances.
        source = ValidationError([ValidationError("a"), ValidationError("b")])
        restored = self._roundtrip(source)
        self.assertIs(restored.args[0][0], restored.error_list[0])
        self.assertEqual(
            source.error_list[0].message, restored.error_list[0].message
        )
        self.assertEqual(
            source.error_list[1].message, restored.error_list[1].message
        )
        # A field -> messages dict.
        message_dict = {"field1": ["a", "b"], "field2": ["c", "d"]}
        source = ValidationError(message_dict)
        restored = self._roundtrip(source)
        self.assertEqual(restored.message_dict, message_dict)
|
1e085e3c54925056747e9830321d2441530c63ea25a6a4277b91a995f1071928 | import datetime
import unittest
from django.apps.registry import Apps
from django.core.exceptions import ValidationError
from django.db import models
from django.test import TestCase
from .models import (
CustomPKModel,
FlexibleDatePost,
ModelToValidate,
Post,
UniqueErrorsModel,
UniqueFieldsModel,
UniqueForDateModel,
UniqueFuncConstraintModel,
UniqueTogetherModel,
)
class GetUniqueCheckTests(unittest.TestCase):
    """Unit tests for Model._get_unique_checks(): which (model, fields)
    checks and date-based checks get collected, and how unique_together is
    normalized."""
    def test_unique_fields_get_collected(self):
        m = UniqueFieldsModel()
        self.assertEqual(
            (
                [
                    (UniqueFieldsModel, ("id",)),
                    (UniqueFieldsModel, ("unique_charfield",)),
                    (UniqueFieldsModel, ("unique_integerfield",)),
                ],
                [],
            ),
            m._get_unique_checks(),
        )
    def test_unique_together_gets_picked_up_and_converted_to_tuple(self):
        m = UniqueTogetherModel()
        self.assertEqual(
            (
                [
                    (UniqueTogetherModel, ("ifield", "cfield")),
                    (UniqueTogetherModel, ("ifield", "efield")),
                    (UniqueTogetherModel, ("id",)),
                ],
                [],
            ),
            m._get_unique_checks(),
        )
    def test_unique_together_normalization(self):
        """
        Test the Meta.unique_together normalization with different sorts of
        objects.
        """
        # Mapping of case name -> (raw unique_together, expected normalized).
        data = {
            "2-tuple": (("foo", "bar"), (("foo", "bar"),)),
            "list": (["foo", "bar"], (("foo", "bar"),)),
            "already normalized": (
                (("foo", "bar"), ("bar", "baz")),
                (("foo", "bar"), ("bar", "baz")),
            ),
            "set": (
                {("foo", "bar"), ("bar", "baz")}, # Ref #21469
                (("foo", "bar"), ("bar", "baz")),
            ),
        }
        for unique_together, normalized in data.values():
            class M(models.Model):
                foo = models.IntegerField()
                bar = models.IntegerField()
                baz = models.IntegerField()
                Meta = type(
                    "Meta", (), {"unique_together": unique_together, "apps": Apps()}
                )
            checks, _ = M()._get_unique_checks()
            for t in normalized:
                check = (M, t)
                self.assertIn(check, checks)
    def test_primary_key_is_considered_unique(self):
        m = CustomPKModel()
        self.assertEqual(
            ([(CustomPKModel, ("my_pk_field",))], []), m._get_unique_checks()
        )
    def test_unique_for_date_gets_picked_up(self):
        m = UniqueForDateModel()
        self.assertEqual(
            (
                [(UniqueForDateModel, ("id",))],
                [
                    (UniqueForDateModel, "date", "count", "start_date"),
                    (UniqueForDateModel, "year", "count", "end_date"),
                    (UniqueForDateModel, "month", "order", "end_date"),
                ],
            ),
            m._get_unique_checks(),
        )
    def test_unique_for_date_exclusion(self):
        # Excluding the date field drops its unique_for_date check.
        m = UniqueForDateModel()
        self.assertEqual(
            (
                [(UniqueForDateModel, ("id",))],
                [
                    (UniqueForDateModel, "year", "count", "end_date"),
                    (UniqueForDateModel, "month", "order", "end_date"),
                ],
            ),
            m._get_unique_checks(exclude="start_date"),
        )
    def test_func_unique_constraint_ignored(self):
        # Expression-based UniqueConstraints aren't validated in Python.
        m = UniqueFuncConstraintModel()
        self.assertEqual(
            m._get_unique_checks(),
            ([(UniqueFuncConstraintModel, ("id",))], []),
        )
class PerformUniqueChecksTest(TestCase):
    """End-to-end behavior of the uniqueness checks run by full_clean():
    pk checks, unique_for_date/year/month, and custom unique messages."""
    def test_primary_key_unique_check_not_performed_when_adding_and_pk_not_specified(
        self,
    ):
        # Regression test for #12560
        with self.assertNumQueries(0):
            mtv = ModelToValidate(number=10, name="Some Name")
            setattr(mtv, "_adding", True)
            mtv.full_clean()
    def test_primary_key_unique_check_performed_when_adding_and_pk_specified(self):
        # Regression test for #12560
        with self.assertNumQueries(1):
            mtv = ModelToValidate(number=10, name="Some Name", id=123)
            setattr(mtv, "_adding", True)
            mtv.full_clean()
    def test_primary_key_unique_check_not_performed_when_not_adding(self):
        # Regression test for #12132
        with self.assertNumQueries(0):
            mtv = ModelToValidate(number=10, name="Some Name")
            mtv.full_clean()
    def test_func_unique_check_not_performed(self):
        # Expression-based constraints generate no validation queries.
        with self.assertNumQueries(0):
            UniqueFuncConstraintModel(field="some name").full_clean()
    def test_unique_for_date(self):
        Post.objects.create(
            title="Django 1.0 is released",
            slug="Django 1.0",
            subtitle="Finally",
            posted=datetime.date(2008, 9, 3),
        )
        # Same title on the same day collides.
        p = Post(title="Django 1.0 is released", posted=datetime.date(2008, 9, 3))
        with self.assertRaises(ValidationError) as cm:
            p.full_clean()
        self.assertEqual(
            cm.exception.message_dict,
            {"title": ["Title must be unique for Posted date."]},
        )
        # Should work without errors
        p = Post(title="Work on Django 1.1 begins", posted=datetime.date(2008, 9, 3))
        p.full_clean()
        # Should work without errors
        p = Post(title="Django 1.0 is released", posted=datetime.datetime(2008, 9, 4))
        p.full_clean()
        # Same slug in the same year collides.
        p = Post(slug="Django 1.0", posted=datetime.datetime(2008, 1, 1))
        with self.assertRaises(ValidationError) as cm:
            p.full_clean()
        self.assertEqual(
            cm.exception.message_dict,
            {"slug": ["Slug must be unique for Posted year."]},
        )
        # Same subtitle in the same month collides.
        p = Post(subtitle="Finally", posted=datetime.datetime(2008, 9, 30))
        with self.assertRaises(ValidationError) as cm:
            p.full_clean()
        self.assertEqual(
            cm.exception.message_dict,
            {"subtitle": ["Subtitle must be unique for Posted month."]},
        )
        # posted is required on Post.
        p = Post(title="Django 1.0 is released")
        with self.assertRaises(ValidationError) as cm:
            p.full_clean()
        self.assertEqual(
            cm.exception.message_dict, {"posted": ["This field cannot be null."]}
        )
    def test_unique_for_date_with_nullable_date(self):
        """
        unique_for_date/year/month checks shouldn't trigger when the
        associated DateField is None.
        """
        FlexibleDatePost.objects.create(
            title="Django 1.0 is released",
            slug="Django 1.0",
            subtitle="Finally",
            posted=datetime.date(2008, 9, 3),
        )
        p = FlexibleDatePost(title="Django 1.0 is released")
        p.full_clean()
        p = FlexibleDatePost(slug="Django 1.0")
        p.full_clean()
        p = FlexibleDatePost(subtitle="Finally")
        p.full_clean()
    def test_unique_errors(self):
        # Customized "unique" error messages are used verbatim.
        UniqueErrorsModel.objects.create(name="Some Name", no=10)
        m = UniqueErrorsModel(name="Some Name", no=11)
        with self.assertRaises(ValidationError) as cm:
            m.full_clean()
        self.assertEqual(
            cm.exception.message_dict, {"name": ["Custom unique name message."]}
        )
        m = UniqueErrorsModel(name="Some Other Name", no=10)
        with self.assertRaises(ValidationError) as cm:
            m.full_clean()
        self.assertEqual(
            cm.exception.message_dict, {"no": ["Custom unique number message."]}
        )
|
e912ba4fd11d0217382a7fcf4af9b23daa541ef53681b25728b51a86f3099c8b | from django.test import SimpleTestCase
from . import ValidationAssertions
from .models import CustomMessagesModel
class CustomMessagesTests(ValidationAssertions, SimpleTestCase):
    """Custom error_messages declared on CustomMessagesModel.number are used
    in place of the defaults."""
    def test_custom_simple_validator_message(self):
        # 12 trips the answer-to-universe validator, whose "not42" message is
        # overridden to "AAARGH" on the model.
        instance = CustomMessagesModel(number=12)
        self.assertFieldFailsValidationWithMessage(
            instance.full_clean, "number", ["AAARGH"]
        )
    def test_custom_null_message(self):
        # number is required; its "null" message is overridden to "NULL".
        instance = CustomMessagesModel()
        self.assertFieldFailsValidationWithMessage(
            instance.full_clean, "number", ["NULL"]
        )
|
8bcc688113546f4ad7a60cb99def5c45c7d75c359d9894004e91739b2ba4f95c | from django.test import SimpleTestCase
from . import ValidationAssertions
from .models import ModelToValidate
class TestModelsWithValidators(ValidationAssertions, SimpleTestCase):
    """Field-level validators run during full_clean(), whether supplied as a
    list or any other iterable (here, a tuple)."""
    def test_custom_validator_passes_for_correct_value(self):
        mtv = ModelToValidate(
            number=10,
            name="Some Name",
            f_with_custom_validator=42,
            f_with_iterable_of_validators=42,
        )
        self.assertIsNone(mtv.full_clean())
    def test_custom_validator_raises_error_for_incorrect_value(self):
        mtv = ModelToValidate(
            number=10,
            name="Some Name",
            f_with_custom_validator=12,
            f_with_iterable_of_validators=42,
        )
        self.assertFailsValidation(mtv.full_clean, ["f_with_custom_validator"])
        self.assertFieldFailsValidationWithMessage(
            mtv.full_clean,
            "f_with_custom_validator",
            ["This is not the answer to life, universe and everything!"],
        )
    def test_field_validators_can_be_any_iterable(self):
        mtv = ModelToValidate(
            number=10,
            name="Some Name",
            f_with_custom_validator=42,
            f_with_iterable_of_validators=12,
        )
        self.assertFailsValidation(mtv.full_clean, ["f_with_iterable_of_validators"])
        self.assertFieldFailsValidationWithMessage(
            mtv.full_clean,
            "f_with_iterable_of_validators",
            ["This is not the answer to life, universe and everything!"],
        )
|
07b9342e45c31f5327088a33c47da6318a4e7259720d9f424ee9c0a47ddbd899 | import os
import sys
import unittest
from types import ModuleType, SimpleNamespace
from unittest import mock
from django.conf import (
ENVIRONMENT_VARIABLE,
USE_DEPRECATED_PYTZ_DEPRECATED_MSG,
LazySettings,
Settings,
settings,
)
from django.core.exceptions import ImproperlyConfigured
from django.http import HttpRequest
from django.test import (
SimpleTestCase,
TestCase,
TransactionTestCase,
modify_settings,
override_settings,
signals,
)
from django.test.utils import requires_tz_support
from django.urls import clear_script_prefix, set_script_prefix
from django.utils.deprecation import RemovedInDjango50Warning
@modify_settings(ITEMS={"prepend": ["b"], "append": ["d"], "remove": ["a", "e"]})
@override_settings(
    ITEMS=["a", "c", "e"], ITEMS_OUTER=[1, 2, 3], TEST="override", TEST_OUTER="outer"
)
class FullyDecoratedTranTestCase(TransactionTestCase):
    """Class-level override_settings + modify_settings stacking on a
    TransactionTestCase; modify_settings applies on top of the override, and
    method-level decorators stack on top of both."""
    available_apps = []
    def test_override(self):
        self.assertEqual(settings.ITEMS, ["b", "c", "d"])
        self.assertEqual(settings.ITEMS_OUTER, [1, 2, 3])
        self.assertEqual(settings.TEST, "override")
        self.assertEqual(settings.TEST_OUTER, "outer")
    @modify_settings(
        ITEMS={
            "append": ["e", "f"],
            "prepend": ["a"],
            "remove": ["d", "c"],
        }
    )
    def test_method_list_override(self):
        self.assertEqual(settings.ITEMS, ["a", "b", "e", "f"])
        self.assertEqual(settings.ITEMS_OUTER, [1, 2, 3])
    @modify_settings(
        ITEMS={
            "append": ["b"],
            "prepend": ["d"],
            "remove": ["a", "c", "e"],
        }
    )
    def test_method_list_override_no_ops(self):
        # append/prepend of already-present items are no-ops.
        self.assertEqual(settings.ITEMS, ["b", "d"])
    @modify_settings(
        ITEMS={
            "append": "e",
            "prepend": "a",
            "remove": "c",
        }
    )
    def test_method_list_override_strings(self):
        # Bare strings are accepted as single-item actions.
        self.assertEqual(settings.ITEMS, ["a", "b", "d", "e"])
    @modify_settings(ITEMS={"remove": ["b", "d"]})
    @modify_settings(ITEMS={"append": ["b"], "prepend": ["d"]})
    def test_method_list_override_nested_order(self):
        # Inner decorator (closest to the def) applies first.
        self.assertEqual(settings.ITEMS, ["d", "c", "b"])
    @override_settings(TEST="override2")
    def test_method_override(self):
        self.assertEqual(settings.TEST, "override2")
        self.assertEqual(settings.TEST_OUTER, "outer")
    def test_decorated_testcase_name(self):
        self.assertEqual(
            FullyDecoratedTranTestCase.__name__, "FullyDecoratedTranTestCase"
        )
    def test_decorated_testcase_module(self):
        self.assertEqual(FullyDecoratedTranTestCase.__module__, __name__)
@modify_settings(ITEMS={"prepend": ["b"], "append": ["d"], "remove": ["a", "e"]})
@override_settings(ITEMS=["a", "c", "e"], TEST="override")
class FullyDecoratedTestCase(TestCase):
    """Same decorator stacking as FullyDecoratedTranTestCase, but on a plain
    TestCase."""
    def test_override(self):
        self.assertEqual(settings.ITEMS, ["b", "c", "d"])
        self.assertEqual(settings.TEST, "override")
    @modify_settings(
        ITEMS={
            "append": "e",
            "prepend": "a",
            "remove": "c",
        }
    )
    @override_settings(TEST="override2")
    def test_method_override(self):
        self.assertEqual(settings.ITEMS, ["a", "b", "d", "e"])
        self.assertEqual(settings.TEST, "override2")
class ClassDecoratedTestCaseSuper(TestCase):
    """
    Dummy class for testing max recursion error in child class call to
    super(). Refs #17011.
    """
    def test_max_recursion_error(self):
        pass
@override_settings(TEST="override")
class ClassDecoratedTestCase(ClassDecoratedTestCaseSuper):
    """Overridden settings are active in setUpClass and don't trigger
    recursion when overriding an inherited test method."""
    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        # Captured while the class-level override is active.
        cls.foo = getattr(settings, "TEST", "BUG")
    def test_override(self):
        self.assertEqual(settings.TEST, "override")
    def test_setupclass_override(self):
        """Settings are overridden within setUpClass (#21281)."""
        self.assertEqual(self.foo, "override")
    @override_settings(TEST="override2")
    def test_method_override(self):
        self.assertEqual(settings.TEST, "override2")
    def test_max_recursion_error(self):
        """
        Overriding a method on a super class and then calling that method on
        the super class should not trigger infinite recursion. See #17011.
        """
        super().test_max_recursion_error()
@modify_settings(ITEMS={"append": "mother"})
@override_settings(ITEMS=["father"], TEST="override-parent")
class ParentDecoratedTestCase(TestCase):
    # Intentionally empty: exists only so ChildDecoratedTestCase can
    # verify that settings decorators are inherited and composed.
    pass
@modify_settings(ITEMS={"append": ["child"]})
@override_settings(TEST="override-child")
class ChildDecoratedTestCase(ParentDecoratedTestCase):
    def test_override_settings_inheritance(self):
        # Parent decorators apply first, then the child's: ITEMS grows
        # in declaration order and the child's TEST override wins.
        self.assertEqual(settings.ITEMS, ["father", "mother", "child"])
        self.assertEqual(settings.TEST, "override-child")
class SettingsTests(SimpleTestCase):
    """
    Core behavior of the settings object: attribute set/delete, the
    override_settings context manager/decorator, the setting_changed
    signal, and configuration error paths.
    """

    def setUp(self):
        # Track the most recent value the setting_changed signal reported
        # for the TEST setting (None until the signal fires).
        self.testvalue = None
        signals.setting_changed.connect(self.signal_callback)

    def tearDown(self):
        signals.setting_changed.disconnect(self.signal_callback)

    def signal_callback(self, sender, setting, value, **kwargs):
        if setting == "TEST":
            self.testvalue = value

    def test_override(self):
        settings.TEST = "test"
        self.assertEqual("test", settings.TEST)
        with self.settings(TEST="override"):
            self.assertEqual("override", settings.TEST)
        # The override is rolled back on exit.
        self.assertEqual("test", settings.TEST)
        del settings.TEST

    def test_override_change(self):
        settings.TEST = "test"
        self.assertEqual("test", settings.TEST)
        with self.settings(TEST="override"):
            self.assertEqual("override", settings.TEST)
            # A value assigned *inside* the override is discarded on exit.
            settings.TEST = "test2"
        self.assertEqual("test", settings.TEST)
        del settings.TEST

    def test_override_doesnt_leak(self):
        with self.assertRaises(AttributeError):
            getattr(settings, "TEST")
        with self.settings(TEST="override"):
            self.assertEqual("override", settings.TEST)
            settings.TEST = "test"
        # Neither the override nor the in-context assignment survives.
        with self.assertRaises(AttributeError):
            getattr(settings, "TEST")

    @override_settings(TEST="override")
    def test_decorator(self):
        self.assertEqual("override", settings.TEST)

    def test_context_manager(self):
        # override_settings used manually via enable()/disable() — the
        # override is only active between those two calls.
        with self.assertRaises(AttributeError):
            getattr(settings, "TEST")
        override = override_settings(TEST="override")
        with self.assertRaises(AttributeError):
            getattr(settings, "TEST")
        override.enable()
        self.assertEqual("override", settings.TEST)
        override.disable()
        with self.assertRaises(AttributeError):
            getattr(settings, "TEST")

    def test_class_decorator(self):
        # SimpleTestCase can be decorated by override_settings, but not ut.TestCase
        class SimpleTestCaseSubclass(SimpleTestCase):
            pass

        class UnittestTestCaseSubclass(unittest.TestCase):
            pass

        decorated = override_settings(TEST="override")(SimpleTestCaseSubclass)
        self.assertIsInstance(decorated, type)
        self.assertTrue(issubclass(decorated, SimpleTestCase))

        with self.assertRaisesMessage(
            Exception, "Only subclasses of Django SimpleTestCase"
        ):
            decorated = override_settings(TEST="override")(UnittestTestCaseSubclass)

    def test_signal_callback_context_manager(self):
        with self.assertRaises(AttributeError):
            getattr(settings, "TEST")
        with self.settings(TEST="override"):
            self.assertEqual(self.testvalue, "override")
        # The exit signal reports the restored value (no TEST -> None).
        self.assertIsNone(self.testvalue)

    @override_settings(TEST="override")
    def test_signal_callback_decorator(self):
        self.assertEqual(self.testvalue, "override")

    #
    # Regression tests for #10130: deleting settings.
    #
    def test_settings_delete(self):
        settings.TEST = "test"
        self.assertEqual("test", settings.TEST)
        del settings.TEST
        msg = "'Settings' object has no attribute 'TEST'"
        with self.assertRaisesMessage(AttributeError, msg):
            getattr(settings, "TEST")

    def test_settings_delete_wrapped(self):
        # _wrapped is the lazy holder itself and must not be deletable.
        with self.assertRaisesMessage(TypeError, "can't delete _wrapped."):
            delattr(settings, "_wrapped")

    def test_override_settings_delete(self):
        """
        Allow deletion of a setting in an overridden settings set (#18824)
        """
        previous_i18n = settings.USE_I18N
        previous_tz = settings.USE_TZ
        with self.settings(USE_I18N=False):
            del settings.USE_I18N
            with self.assertRaises(AttributeError):
                getattr(settings, "USE_I18N")
            # Should also work for a non-overridden setting
            del settings.USE_TZ
            with self.assertRaises(AttributeError):
                getattr(settings, "USE_TZ")
            self.assertNotIn("USE_I18N", dir(settings))
            self.assertNotIn("USE_TZ", dir(settings))
        # Deletions inside the override are undone on exit.
        self.assertEqual(settings.USE_I18N, previous_i18n)
        self.assertEqual(settings.USE_TZ, previous_tz)

    def test_override_settings_nested(self):
        """
        override_settings uses the actual _wrapped attribute at
        runtime, not when it was instantiated.
        """
        with self.assertRaises(AttributeError):
            getattr(settings, "TEST")
        with self.assertRaises(AttributeError):
            getattr(settings, "TEST2")

        # `inner` is created *before* the outer override is entered...
        inner = override_settings(TEST2="override")
        with override_settings(TEST="override"):
            self.assertEqual("override", settings.TEST)
            with inner:
                self.assertEqual("override", settings.TEST)
                self.assertEqual("override", settings.TEST2)
            # inner's __exit__ should have restored the settings of the outer
            # context manager, not those when the class was instantiated
            self.assertEqual("override", settings.TEST)
            with self.assertRaises(AttributeError):
                getattr(settings, "TEST2")

        with self.assertRaises(AttributeError):
            getattr(settings, "TEST")
        with self.assertRaises(AttributeError):
            getattr(settings, "TEST2")

    @override_settings(SECRET_KEY="")
    def test_no_secret_key(self):
        msg = "The SECRET_KEY setting must not be empty."
        with self.assertRaisesMessage(ImproperlyConfigured, msg):
            settings.SECRET_KEY

    def test_no_settings_module(self):
        # %s is filled with "s" (plural) or " TEST" (named setting) below.
        msg = (
            "Requested setting%s, but settings are not configured. You "
            "must either define the environment variable DJANGO_SETTINGS_MODULE "
            "or call settings.configure() before accessing settings."
        )
        orig_settings = os.environ[ENVIRONMENT_VARIABLE]
        os.environ[ENVIRONMENT_VARIABLE] = ""
        try:
            with self.assertRaisesMessage(ImproperlyConfigured, msg % "s"):
                settings._setup()
            with self.assertRaisesMessage(ImproperlyConfigured, msg % " TEST"):
                settings._setup("TEST")
        finally:
            # Always restore the env var so later tests see real settings.
            os.environ[ENVIRONMENT_VARIABLE] = orig_settings

    def test_already_configured(self):
        with self.assertRaisesMessage(RuntimeError, "Settings already configured."):
            settings.configure()

    def test_nonupper_settings_prohibited_in_configure(self):
        s = LazySettings()
        with self.assertRaisesMessage(TypeError, "Setting 'foo' must be uppercase."):
            s.configure(foo="bar")

    def test_nonupper_settings_ignored_in_default_settings(self):
        s = LazySettings()
        # Lowercase attributes on the default-settings object are simply
        # not exposed (as opposed to raising at configure() time).
        s.configure(SimpleNamespace(foo="bar"))
        with self.assertRaises(AttributeError):
            getattr(s, "foo")

    @requires_tz_support
    @mock.patch("django.conf.global_settings.TIME_ZONE", "test")
    def test_incorrect_timezone(self):
        with self.assertRaisesMessage(ValueError, "Incorrect timezone setting: test"):
            settings._setup()

    def test_use_tz_false_deprecation(self):
        # Build a throwaway settings module that omits USE_TZ entirely.
        settings_module = ModuleType("fake_settings_module")
        settings_module.SECRET_KEY = "foo"
        sys.modules["fake_settings_module"] = settings_module
        msg = (
            "The default value of USE_TZ will change from False to True in "
            "Django 5.0. Set USE_TZ to False in your project settings if you "
            "want to keep the current default behavior."
        )
        try:
            with self.assertRaisesMessage(RemovedInDjango50Warning, msg):
                Settings("fake_settings_module")
        finally:
            del sys.modules["fake_settings_module"]

    def test_use_deprecated_pytz_deprecation(self):
        # The deprecation fires both from a settings module...
        settings_module = ModuleType("fake_settings_module")
        settings_module.USE_DEPRECATED_PYTZ = True
        settings_module.USE_TZ = True
        sys.modules["fake_settings_module"] = settings_module
        try:
            with self.assertRaisesMessage(
                RemovedInDjango50Warning, USE_DEPRECATED_PYTZ_DEPRECATED_MSG
            ):
                Settings("fake_settings_module")
        finally:
            del sys.modules["fake_settings_module"]

        # ...and from configure().
        holder = LazySettings()
        with self.assertRaisesMessage(
            RemovedInDjango50Warning, USE_DEPRECATED_PYTZ_DEPRECATED_MSG
        ):
            holder.configure(USE_DEPRECATED_PYTZ=True)
class TestComplexSettingOverride(SimpleTestCase):
    """Overriding a setting registered as "complex" emits a UserWarning."""

    def setUp(self):
        # Copy the registry so tearDown can restore it exactly.
        self.old_warn_override_settings = signals.COMPLEX_OVERRIDE_SETTINGS.copy()
        signals.COMPLEX_OVERRIDE_SETTINGS.add("TEST_WARN")

    def tearDown(self):
        signals.COMPLEX_OVERRIDE_SETTINGS = self.old_warn_override_settings
        self.assertNotIn("TEST_WARN", signals.COMPLEX_OVERRIDE_SETTINGS)

    def test_complex_override_warning(self):
        """Regression test for #19031"""
        msg = "Overriding setting TEST_WARN can lead to unexpected behavior."
        with self.assertWarnsMessage(UserWarning, msg) as cm:
            with override_settings(TEST_WARN="override"):
                self.assertEqual(settings.TEST_WARN, "override")
        # The warning should be attributed to this test file, not to
        # Django internals.
        self.assertEqual(cm.filename, __file__)
class SecureProxySslHeaderTest(SimpleTestCase):
    """HttpRequest.is_secure() honoring the SECURE_PROXY_SSL_HEADER setting."""

    @override_settings(SECURE_PROXY_SSL_HEADER=None)
    def test_none(self):
        # With the setting disabled, a bare request is insecure.
        req = HttpRequest()
        self.assertIs(req.is_secure(), False)

    @override_settings(SECURE_PROXY_SSL_HEADER=("HTTP_X_FORWARDED_PROTO", "https"))
    def test_set_without_xheader(self):
        # Setting enabled but the header is absent -> insecure.
        req = HttpRequest()
        self.assertIs(req.is_secure(), False)

    @override_settings(SECURE_PROXY_SSL_HEADER=("HTTP_X_FORWARDED_PROTO", "https"))
    def test_set_with_xheader_wrong(self):
        # Header present but with a non-matching value -> insecure.
        req = HttpRequest()
        req.META["HTTP_X_FORWARDED_PROTO"] = "wrongvalue"
        self.assertIs(req.is_secure(), False)

    @override_settings(SECURE_PROXY_SSL_HEADER=("HTTP_X_FORWARDED_PROTO", "https"))
    def test_set_with_xheader_right(self):
        req = HttpRequest()
        req.META["HTTP_X_FORWARDED_PROTO"] = "https"
        self.assertIs(req.is_secure(), True)

    @override_settings(SECURE_PROXY_SSL_HEADER=("HTTP_X_FORWARDED_PROTO", "https"))
    def test_xheader_preferred_to_underlying_request(self):
        # The configured header takes precedence over the transport scheme.
        class ProxyRequest(HttpRequest):
            def _get_scheme(self):
                """Proxy always connecting via HTTPS"""
                return "https"

        # Client connects via HTTP.
        req = ProxyRequest()
        req.META["HTTP_X_FORWARDED_PROTO"] = "http"
        self.assertIs(req.is_secure(), False)
class IsOverriddenTest(SimpleTestCase):
    """Settings.is_overridden() and the repr() of the settings holders."""

    def test_configure(self):
        s = LazySettings()
        s.configure(SECRET_KEY="foo")
        self.assertTrue(s.is_overridden("SECRET_KEY"))

    def test_module(self):
        settings_module = ModuleType("fake_settings_module")
        settings_module.SECRET_KEY = "foo"
        settings_module.USE_TZ = False
        sys.modules["fake_settings_module"] = settings_module
        try:
            s = Settings("fake_settings_module")
            # Explicitly defined in the module -> overridden; a setting
            # inherited from global_settings is not.
            self.assertTrue(s.is_overridden("SECRET_KEY"))
            self.assertFalse(s.is_overridden("ALLOWED_HOSTS"))
        finally:
            del sys.modules["fake_settings_module"]

    def test_override(self):
        self.assertFalse(settings.is_overridden("ALLOWED_HOSTS"))
        with override_settings(ALLOWED_HOSTS=[]):
            self.assertTrue(settings.is_overridden("ALLOWED_HOSTS"))

    def test_unevaluated_lazysettings_repr(self):
        lazy_settings = LazySettings()
        expected = "<LazySettings [Unevaluated]>"
        self.assertEqual(repr(lazy_settings), expected)

    def test_evaluated_lazysettings_repr(self):
        lazy_settings = LazySettings()
        module = os.environ.get(ENVIRONMENT_VARIABLE)
        expected = '<LazySettings "%s">' % module
        # Force evaluation of the lazy object.
        lazy_settings.APPEND_SLASH
        self.assertEqual(repr(lazy_settings), expected)

    def test_usersettingsholder_repr(self):
        lazy_settings = LazySettings()
        lazy_settings.configure(APPEND_SLASH=False)
        expected = "<UserSettingsHolder>"
        self.assertEqual(repr(lazy_settings._wrapped), expected)

    def test_settings_repr(self):
        module = os.environ.get(ENVIRONMENT_VARIABLE)
        lazy_settings = Settings(module)
        expected = '<Settings "%s">' % module
        self.assertEqual(repr(lazy_settings), expected)
class TestListSettings(SimpleTestCase):
    """
    Make sure settings that should be lists or tuples throw
    ImproperlyConfigured if they are set to a string instead of a list or tuple.
    """

    list_or_tuple_settings = (
        "ALLOWED_HOSTS",
        "INSTALLED_APPS",
        "TEMPLATE_DIRS",
        "LOCALE_PATHS",
        "SECRET_KEY_FALLBACKS",
    )

    def test_tuple_settings(self):
        settings_module = ModuleType("fake_settings_module")
        settings_module.SECRET_KEY = "foo"
        msg = "The %s setting must be a list or a tuple."
        for setting in self.list_or_tuple_settings:
            # Parentheses without a trailing comma: deliberately a plain
            # str, which is exactly the misconfiguration being tested.
            setattr(settings_module, setting, ("non_list_or_tuple_value"))
            sys.modules["fake_settings_module"] = settings_module
            try:
                with self.assertRaisesMessage(ImproperlyConfigured, msg % setting):
                    Settings("fake_settings_module")
            finally:
                del sys.modules["fake_settings_module"]
                # Remove the bad value before testing the next setting.
                delattr(settings_module, setting)
class SettingChangeEnterException(Exception):
    """Sentinel raised by the test receiver when *entering* an override fails."""

    pass
class SettingChangeExitException(Exception):
    """Sentinel raised by the test receiver when *exiting* an override fails."""

    pass
class OverrideSettingsIsolationOnExceptionTests(SimpleTestCase):
    """
    The override_settings context manager restore settings if one of the
    receivers of "setting_changed" signal fails. Check the three cases of
    receiver failure detailed in receiver(). In each case, ALL receivers are
    called when exiting the context manager.
    """

    def setUp(self):
        signals.setting_changed.connect(self.receiver)
        self.addCleanup(signals.setting_changed.disconnect, self.receiver)
        # Create a spy that's connected to the `setting_changed` signal and
        # executed AFTER `self.receiver`.
        self.spy_receiver = mock.Mock()
        signals.setting_changed.connect(self.spy_receiver)
        self.addCleanup(signals.setting_changed.disconnect, self.spy_receiver)

    def receiver(self, **kwargs):
        """
        A receiver that fails while certain settings are being changed.
        - SETTING_BOTH raises an error while receiving the signal
          on both entering and exiting the context manager.
        - SETTING_ENTER raises an error only on enter.
        - SETTING_EXIT raises an error only on exit.
        """
        setting = kwargs["setting"]
        enter = kwargs["enter"]
        if setting in ("SETTING_BOTH", "SETTING_ENTER") and enter:
            raise SettingChangeEnterException
        if setting in ("SETTING_BOTH", "SETTING_EXIT") and not enter:
            raise SettingChangeExitException

    def check_settings(self):
        """Assert that settings for these tests aren't present."""
        self.assertFalse(hasattr(settings, "SETTING_BOTH"))
        self.assertFalse(hasattr(settings, "SETTING_ENTER"))
        self.assertFalse(hasattr(settings, "SETTING_EXIT"))
        self.assertFalse(hasattr(settings, "SETTING_PASS"))

    def check_spy_receiver_exit_calls(self, call_count):
        """
        Assert that `self.spy_receiver` was called exactly `call_count` times
        with the ``enter=False`` keyword argument.
        """
        kwargs_with_exit = [
            kwargs
            for args, kwargs in self.spy_receiver.call_args_list
            if ("enter", False) in kwargs.items()
        ]
        self.assertEqual(len(kwargs_with_exit), call_count)

    def test_override_settings_both(self):
        """Receiver fails on both enter and exit."""
        with self.assertRaises(SettingChangeEnterException):
            with override_settings(SETTING_PASS="BOTH", SETTING_BOTH="BOTH"):
                pass

        self.check_settings()
        # Two settings were touched, so expect two calls of `spy_receiver`.
        self.check_spy_receiver_exit_calls(call_count=2)

    def test_override_settings_enter(self):
        """Receiver fails on enter only."""
        with self.assertRaises(SettingChangeEnterException):
            with override_settings(SETTING_PASS="ENTER", SETTING_ENTER="ENTER"):
                pass

        self.check_settings()
        # Two settings were touched, so expect two calls of `spy_receiver`.
        self.check_spy_receiver_exit_calls(call_count=2)

    def test_override_settings_exit(self):
        """Receiver fails on exit only."""
        with self.assertRaises(SettingChangeExitException):
            with override_settings(SETTING_PASS="EXIT", SETTING_EXIT="EXIT"):
                pass

        self.check_settings()
        # Two settings were touched, so expect two calls of `spy_receiver`.
        self.check_spy_receiver_exit_calls(call_count=2)

    def test_override_settings_reusable_on_enter(self):
        """
        Error is raised correctly when reusing the same override_settings
        instance.
        """

        @override_settings(SETTING_ENTER="ENTER")
        def decorated_function():
            pass

        with self.assertRaises(SettingChangeEnterException):
            decorated_function()
        signals.setting_changed.disconnect(self.receiver)
        # This call shouldn't raise any errors.
        decorated_function()
class MediaURLStaticURLPrefixTest(SimpleTestCase):
    """MEDIA_URL/STATIC_URL gain the SCRIPT_NAME prefix only for relative paths."""

    def set_script_name(self, val):
        # Reset first so a previous test's prefix never leaks in.
        clear_script_prefix()
        if val is not None:
            set_script_prefix(val)

    def test_not_prefixed(self):
        # Don't add SCRIPT_NAME prefix to absolute paths, URLs, or None.
        tests = (
            "/path/",
            "http://myhost.com/path/",
            "http://myhost/path/",
            "https://myhost/path/",
            None,
        )
        for setting in ("MEDIA_URL", "STATIC_URL"):
            for path in tests:
                new_settings = {setting: path}
                with self.settings(**new_settings):
                    for script_name in ["/somesubpath", "/somesubpath/", "/", "", None]:
                        with self.subTest(script_name=script_name, **new_settings):
                            try:
                                self.set_script_name(script_name)
                                self.assertEqual(getattr(settings, setting), path)
                            finally:
                                clear_script_prefix()

    def test_add_script_name_prefix(self):
        # Each tuple: (script_name, setting value, expected prefixed value).
        tests = (
            # Relative paths.
            ("/somesubpath", "path/", "/somesubpath/path/"),
            ("/somesubpath/", "path/", "/somesubpath/path/"),
            ("/", "path/", "/path/"),
            # Invalid URLs.
            (
                "/somesubpath/",
                "htp://myhost.com/path/",
                "/somesubpath/htp://myhost.com/path/",
            ),
            # Blank settings.
            ("/somesubpath/", "", "/somesubpath/"),
        )
        for setting in ("MEDIA_URL", "STATIC_URL"):
            for script_name, path, expected_path in tests:
                new_settings = {setting: path}
                with self.settings(**new_settings):
                    with self.subTest(script_name=script_name, **new_settings):
                        try:
                            self.set_script_name(script_name)
                            self.assertEqual(getattr(settings, setting), expected_path)
                        finally:
                            clear_script_prefix()
|
8f8f499fa189e064470a2bcd3042df9aeae0ac67405c4e6b00efb737e9e71fcd | from django.test import TestCase
from .models import Person
class PropertyTests(TestCase):
    """Properties on model instances (see the Person model in models.py)."""

    @classmethod
    def setUpTestData(cls):
        cls.a = Person.objects.create(first_name="John", last_name="Lennon")

    def test_getter(self):
        self.assertEqual(self.a.full_name, "John Lennon")

    def test_setter(self):
        # The "full_name" property hasn't provided a "set" method.
        with self.assertRaises(AttributeError):
            setattr(self.a, "full_name", "Paul McCartney")

        # And cannot be used to initialize the class.
        with self.assertRaises(AttributeError):
            Person(full_name="Paul McCartney")

        # But "full_name_2" has, and it can be used to initialize the class.
        a2 = Person(full_name_2="Paul McCartney")
        a2.save()
        self.assertEqual(a2.first_name, "Paul")
|
54b926ef75fa8cfacad3c6ca59bfb2b059dfd4f26e68b89d582e85475cf589b9 | """
Using properties on models
Use properties on models just like on any other Python object.
"""
from django.db import models
class Person(models.Model):
    first_name = models.CharField(max_length=30)
    last_name = models.CharField(max_length=30)

    def _get_full_name(self):
        # Combine the two name fields for display.
        return "%s %s" % (self.first_name, self.last_name)

    def _set_full_name(self, combined_name):
        # Split on the first space only, so multi-word last names survive.
        self.first_name, self.last_name = combined_name.split(" ", 1)

    # Read-only property: assigning to full_name raises AttributeError.
    full_name = property(_get_full_name)
    # Read/write property: can also be passed as a keyword to Person(...).
    full_name_2 = property(_get_full_name, _set_full_name)
|
0a10df8e61988b679b4917d309cf3ef0d720303ac594f1cbc9f0d0993ba5cca0 | from unittest import mock
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from django.test import TestCase, override_settings
from django.urls import path, reverse
class Router:
    """Database router that sends every read and write to ``target_db``.

    ``target_db`` is a class attribute so the test suite can flip the
    active database globally before issuing ORM calls; ``None`` means
    "no opinion" to Django's router machinery.
    """

    target_db = None

    def db_for_read(self, model, **hints):
        """Return the currently selected database alias (or None)."""
        selected = self.target_db
        return selected

    # Writes are routed by exactly the same rule as reads.
    db_for_write = db_for_read
# Stand-alone admin site so these tests don't depend on the default
# autodiscovered django.contrib.admin site.
site = admin.AdminSite(name="test_adminsite")
site.register(User, admin_class=UserAdmin)

# Minimal URLconf; this module doubles as ROOT_URLCONF for the tests.
urlpatterns = [
    path("admin/", site.urls),
]
@override_settings(ROOT_URLCONF=__name__, DATABASE_ROUTERS=["%s.Router" % __name__])
class MultiDatabaseTests(TestCase):
    """UserAdmin honors the routed database (not just "default")."""

    databases = {"default", "other"}

    @classmethod
    def setUpTestData(cls):
        # Create one superuser per database, routing each write via Router.
        cls.superusers = {}
        for db in cls.databases:
            Router.target_db = db
            cls.superusers[db] = User.objects.create_superuser(
                username="admin",
                password="something",
                email="[email protected]",
            )

    @mock.patch("django.contrib.auth.admin.transaction")
    def test_add_view(self, mock):
        for db in self.databases:
            with self.subTest(db_connection=db):
                Router.target_db = db
                self.client.force_login(self.superusers[db])
                self.client.post(
                    reverse("test_adminsite:auth_user_add"),
                    {
                        "username": "some_user",
                        "password1": "helloworld",
                        "password2": "helloworld",
                    },
                )
                # The add view must open its transaction on the routed db.
                mock.atomic.assert_called_with(using=db)
|
509879c937a42fe2264239c05009a1966cb51063a2530c20f44c6f6511ff5adc | from django.apps import apps
from django.contrib.auth import authenticate, signals
from django.contrib.auth.models import User
from django.core.exceptions import FieldDoesNotExist
from django.test import TestCase, override_settings
from django.test.client import RequestFactory
from .models import MinimalUser, UserWithDisabledLastLoginField
@override_settings(ROOT_URLCONF="auth_tests.urls")
class SignalTestCase(TestCase):
    """The user_logged_in / user_logged_out / user_login_failed signals."""

    @classmethod
    def setUpTestData(cls):
        cls.u1 = User.objects.create_user(username="testclient", password="password")
        cls.u3 = User.objects.create_user(username="staff", password="password")

    def listener_login(self, user, **kwargs):
        self.logged_in.append(user)

    def listener_logout(self, user, **kwargs):
        self.logged_out.append(user)

    def listener_login_failed(self, sender, **kwargs):
        self.login_failed.append(kwargs)

    def setUp(self):
        """Set up the listeners and reset the logged in/logged out counters"""
        self.logged_in = []
        self.logged_out = []
        self.login_failed = []
        signals.user_logged_in.connect(self.listener_login)
        signals.user_logged_out.connect(self.listener_logout)
        signals.user_login_failed.connect(self.listener_login_failed)

    def tearDown(self):
        """Disconnect the listeners"""
        signals.user_logged_in.disconnect(self.listener_login)
        signals.user_logged_out.disconnect(self.listener_logout)
        signals.user_login_failed.disconnect(self.listener_login_failed)

    def test_login(self):
        # Only a successful login will trigger the success signal.
        self.client.login(username="testclient", password="bad")
        self.assertEqual(len(self.logged_in), 0)
        self.assertEqual(len(self.login_failed), 1)
        self.assertEqual(self.login_failed[0]["credentials"]["username"], "testclient")
        # verify the password is cleansed
        self.assertIn("***", self.login_failed[0]["credentials"]["password"])
        self.assertIn("request", self.login_failed[0])

        # Like this:
        self.client.login(username="testclient", password="password")
        self.assertEqual(len(self.logged_in), 1)
        self.assertEqual(self.logged_in[0].username, "testclient")

        # Ensure there were no more failures.
        self.assertEqual(len(self.login_failed), 1)

    def test_logout_anonymous(self):
        # The log_out function will still trigger the signal for anonymous
        # users.
        self.client.get("/logout/next_page/")
        self.assertEqual(len(self.logged_out), 1)
        self.assertIsNone(self.logged_out[0])

    def test_logout(self):
        self.client.login(username="testclient", password="password")
        self.client.get("/logout/next_page/")
        self.assertEqual(len(self.logged_out), 1)
        self.assertEqual(self.logged_out[0].username, "testclient")

    def test_update_last_login(self):
        """Only `last_login` is updated in `update_last_login`"""
        user = self.u3
        old_last_login = user.last_login

        user.username = "This username shouldn't get saved"
        request = RequestFactory().get("/login")
        signals.user_logged_in.send(sender=user.__class__, request=request, user=user)
        # Reload from the db: the username change must not have been saved.
        user = User.objects.get(pk=user.pk)
        self.assertEqual(user.username, "staff")
        self.assertNotEqual(user.last_login, old_last_login)

    def test_failed_login_without_request(self):
        authenticate(username="testclient", password="bad")
        self.assertIsNone(self.login_failed[0]["request"])

    def test_login_with_custom_user_without_last_login_field(self):
        """
        The user_logged_in signal is only registered if the user model has a
        last_login field.
        """
        last_login_receivers = signals.user_logged_in.receivers
        try:
            signals.user_logged_in.receivers = []
            # Sanity check: MinimalUser really has no last_login field.
            with self.assertRaises(FieldDoesNotExist):
                MinimalUser._meta.get_field("last_login")
            with self.settings(AUTH_USER_MODEL="auth_tests.MinimalUser"):
                apps.get_app_config("auth").ready()
            self.assertEqual(signals.user_logged_in.receivers, [])

            # last_login is a property whose value is None.
            self.assertIsNone(UserWithDisabledLastLoginField().last_login)
            with self.settings(
                AUTH_USER_MODEL="auth_tests.UserWithDisabledLastLoginField"
            ):
                apps.get_app_config("auth").ready()
            self.assertEqual(signals.user_logged_in.receivers, [])

            with self.settings(AUTH_USER_MODEL="auth.User"):
                apps.get_app_config("auth").ready()
            self.assertEqual(len(signals.user_logged_in.receivers), 1)
        finally:
            # Restore the receivers registered before this test ran.
            signals.user_logged_in.receivers = last_login_receivers
|
6287aae8a30a7bab3de658543ee523328e63d5f58d4bf02c4770b0798bbeb780 | import datetime
import itertools
import re
from importlib import import_module
from unittest import mock
from urllib.parse import quote, urljoin
from django.apps import apps
from django.conf import settings
from django.contrib.admin.models import LogEntry
from django.contrib.auth import BACKEND_SESSION_KEY, REDIRECT_FIELD_NAME, SESSION_KEY
from django.contrib.auth.forms import (
AuthenticationForm,
PasswordChangeForm,
SetPasswordForm,
)
from django.contrib.auth.models import Permission, User
from django.contrib.auth.views import (
INTERNAL_RESET_SESSION_TOKEN,
LoginView,
logout_then_login,
redirect_to_login,
)
from django.contrib.contenttypes.models import ContentType
from django.contrib.sessions.middleware import SessionMiddleware
from django.contrib.sites.requests import RequestSite
from django.core import mail
from django.core.exceptions import ImproperlyConfigured
from django.db import connection
from django.http import HttpRequest, HttpResponse
from django.middleware.csrf import CsrfViewMiddleware, get_token
from django.test import Client, TestCase, override_settings
from django.test.client import RedirectCycleError
from django.urls import NoReverseMatch, reverse, reverse_lazy
from django.utils.http import urlsafe_base64_encode
from .client import PasswordResetConfirmClient
from .models import CustomUser, UUIDUser
from .settings import AUTH_TEMPLATES
@override_settings(
    LANGUAGES=[("en", "English")],
    LANGUAGE_CODE="en",
    TEMPLATES=AUTH_TEMPLATES,
    ROOT_URLCONF="auth_tests.urls",
)
class AuthViewsTestCase(TestCase):
    """
    Helper base class for the test classes that follow.
    """

    @classmethod
    def setUpTestData(cls):
        cls.u1 = User.objects.create_user(
            username="testclient", password="password", email="[email protected]"
        )
        cls.u3 = User.objects.create_user(
            username="staff", password="password", email="[email protected]"
        )

    def login(self, username="testclient", password="password", url="/login/"):
        """POST credentials to `url` and assert a session was created."""
        response = self.client.post(
            url,
            {
                "username": username,
                "password": password,
            },
        )
        self.assertIn(SESSION_KEY, self.client.session)
        return response

    def logout(self):
        response = self.client.get("/admin/logout/")
        self.assertEqual(response.status_code, 200)
        self.assertNotIn(SESSION_KEY, self.client.session)

    def assertFormError(self, response, error):
        """Assert that error is found in response.context['form'] errors"""
        form_errors = list(itertools.chain(*response.context["form"].errors.values()))
        self.assertIn(str(error), form_errors)
@override_settings(ROOT_URLCONF="django.contrib.auth.urls")
class AuthViewNamedURLTests(AuthViewsTestCase):
    def test_named_urls(self):
        "Named URLs should be reversible"
        # (name, args, kwargs) triples for every URL django.contrib.auth.urls
        # is expected to expose.
        expected_named_urls = [
            ("login", [], {}),
            ("logout", [], {}),
            ("password_change", [], {}),
            ("password_change_done", [], {}),
            ("password_reset", [], {}),
            ("password_reset_done", [], {}),
            (
                "password_reset_confirm",
                [],
                {
                    "uidb64": "aaaaaaa",
                    "token": "1111-aaaaa",
                },
            ),
            ("password_reset_complete", [], {}),
        ]
        for name, args, kwargs in expected_named_urls:
            with self.subTest(name=name):
                try:
                    reverse(name, args=args, kwargs=kwargs)
                except NoReverseMatch:
                    self.fail(
                        "Reversal of url named '%s' failed with NoReverseMatch" % name
                    )
class PasswordResetTest(AuthViewsTestCase):
def setUp(self):
self.client = PasswordResetConfirmClient()
def test_email_not_found(self):
"""If the provided email is not registered, don't raise any error but
also don't send any email."""
response = self.client.get("/password_reset/")
self.assertEqual(response.status_code, 200)
response = self.client.post(
"/password_reset/", {"email": "[email protected]"}
)
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 0)
def test_email_found(self):
"Email is sent if a valid email address is provided for password reset"
response = self.client.post(
"/password_reset/", {"email": "[email protected]"}
)
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
self.assertIn("http://", mail.outbox[0].body)
self.assertEqual(settings.DEFAULT_FROM_EMAIL, mail.outbox[0].from_email)
# optional multipart text/html email has been added. Make sure original,
# default functionality is 100% the same
self.assertFalse(mail.outbox[0].message().is_multipart())
def test_extra_email_context(self):
"""
extra_email_context should be available in the email template context.
"""
response = self.client.post(
"/password_reset_extra_email_context/",
{"email": "[email protected]"},
)
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
self.assertIn('Email email context: "Hello!"', mail.outbox[0].body)
self.assertIn("http://custom.example.com/reset/", mail.outbox[0].body)
def test_html_mail_template(self):
"""
A multipart email with text/plain and text/html is sent
if the html_email_template parameter is passed to the view
"""
response = self.client.post(
"/password_reset/html_email_template/", {"email": "[email protected]"}
)
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
message = mail.outbox[0].message()
self.assertEqual(len(message.get_payload()), 2)
self.assertTrue(message.is_multipart())
self.assertEqual(message.get_payload(0).get_content_type(), "text/plain")
self.assertEqual(message.get_payload(1).get_content_type(), "text/html")
self.assertNotIn("<html>", message.get_payload(0).get_payload())
self.assertIn("<html>", message.get_payload(1).get_payload())
def test_email_found_custom_from(self):
"""
Email is sent if a valid email address is provided for password reset
when a custom from_email is provided.
"""
response = self.client.post(
"/password_reset_from_email/", {"email": "[email protected]"}
)
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual("[email protected]", mail.outbox[0].from_email)
# Skip any 500 handler action (like sending more mail...)
@override_settings(DEBUG_PROPAGATE_EXCEPTIONS=True)
def test_poisoned_http_host(self):
"Poisoned HTTP_HOST headers can't be used for reset emails"
# This attack is based on the way browsers handle URLs. The colon
# should be used to separate the port, but if the URL contains an @,
# the colon is interpreted as part of a username for login purposes,
# making 'evil.com' the request domain. Since HTTP_HOST is used to
# produce a meaningful reset URL, we need to be certain that the
# HTTP_HOST header isn't poisoned. This is done as a check when get_host()
# is invoked, but we check here as a practical consequence.
with self.assertLogs("django.security.DisallowedHost", "ERROR"):
response = self.client.post(
"/password_reset/",
{"email": "[email protected]"},
HTTP_HOST="www.example:[email protected]",
)
self.assertEqual(response.status_code, 400)
self.assertEqual(len(mail.outbox), 0)
# Skip any 500 handler action (like sending more mail...)
@override_settings(DEBUG_PROPAGATE_EXCEPTIONS=True)
def test_poisoned_http_host_admin_site(self):
"Poisoned HTTP_HOST headers can't be used for reset emails on admin views"
with self.assertLogs("django.security.DisallowedHost", "ERROR"):
response = self.client.post(
"/admin_password_reset/",
{"email": "[email protected]"},
HTTP_HOST="www.example:[email protected]",
)
self.assertEqual(response.status_code, 400)
self.assertEqual(len(mail.outbox), 0)
def _test_confirm_start(self):
# Start by creating the email
self.client.post("/password_reset/", {"email": "[email protected]"})
self.assertEqual(len(mail.outbox), 1)
return self._read_signup_email(mail.outbox[0])
def _read_signup_email(self, email):
urlmatch = re.search(r"https?://[^/]*(/.*reset/\S*)", email.body)
self.assertIsNotNone(urlmatch, "No URL found in sent email")
return urlmatch[0], urlmatch[1]
    def test_confirm_valid(self):
        """A valid reset link shows the new-password form."""
        url, path = self._test_confirm_start()
        response = self.client.get(path)
        # redirect to a 'complete' page:
        self.assertContains(response, "Please enter your new password")
    def test_confirm_invalid(self):
        """A munged token renders the invalid-link message."""
        url, path = self._test_confirm_start()
        # Let's munge the token in the path, but keep the same length,
        # in case the URLconf will reject a different length.
        path = path[:-5] + ("0" * 4) + path[-1]
        response = self.client.get(path)
        self.assertContains(response, "The password reset link was invalid")
    def test_confirm_invalid_user(self):
        # A nonexistent user returns a 200 response, not a 404.
        response = self.client.get("/reset/123456/1-1/")
        self.assertContains(response, "The password reset link was invalid")
    def test_confirm_overflow_user(self):
        # A base36 user id that overflows int returns a 200 response.
        response = self.client.get("/reset/zzzzzzzzzzzzz/1-1/")
        self.assertContains(response, "The password reset link was invalid")
def test_confirm_invalid_post(self):
# Same as test_confirm_invalid, but trying to do a POST instead.
url, path = self._test_confirm_start()
path = path[:-5] + ("0" * 4) + path[-1]
self.client.post(
path,
{
"new_password1": "anewpassword",
"new_password2": " anewpassword",
},
)
# Check the password has not been changed
u = User.objects.get(email="[email protected]")
self.assertTrue(not u.check_password("anewpassword"))
def test_confirm_invalid_hash(self):
"""A POST with an invalid token is rejected."""
u = User.objects.get(email="[email protected]")
original_password = u.password
url, path = self._test_confirm_start()
path_parts = path.split("-")
path_parts[-1] = ("0") * 20 + "/"
path = "-".join(path_parts)
response = self.client.post(
path,
{
"new_password1": "anewpassword",
"new_password2": "anewpassword",
},
)
self.assertIs(response.context["validlink"], False)
u.refresh_from_db()
self.assertEqual(original_password, u.password) # password hasn't changed
    def test_confirm_complete(self):
        """A valid POST changes the password and invalidates the link."""
        url, path = self._test_confirm_start()
        response = self.client.post(
            path, {"new_password1": "anewpassword", "new_password2": "anewpassword"}
        )
        # Check the password has been changed
        u = User.objects.get(email="[email protected]")
        self.assertTrue(u.check_password("anewpassword"))
        # The reset token is deleted from the session.
        self.assertNotIn(INTERNAL_RESET_SESSION_TOKEN, self.client.session)
        # Check we can't use the link again
        response = self.client.get(path)
        self.assertContains(response, "The password reset link was invalid")
    def test_confirm_different_passwords(self):
        """Mismatched passwords surface SetPasswordForm's mismatch error."""
        url, path = self._test_confirm_start()
        response = self.client.post(
            path, {"new_password1": "anewpassword", "new_password2": "x"}
        )
        self.assertFormError(
            response, SetPasswordForm.error_messages["password_mismatch"]
        )
    def test_reset_redirect_default(self):
        """The default reset view redirects to /password_reset/done/."""
        response = self.client.post(
            "/password_reset/", {"email": "[email protected]"}
        )
        self.assertRedirects(
            response, "/password_reset/done/", fetch_redirect_response=False
        )
    def test_reset_custom_redirect(self):
        """A view configured with a custom success URL redirects there."""
        response = self.client.post(
            "/password_reset/custom_redirect/", {"email": "[email protected]"}
        )
        self.assertRedirects(response, "/custom/", fetch_redirect_response=False)
    def test_reset_custom_redirect_named(self):
        """A named-URL success target is resolved before redirecting."""
        response = self.client.post(
            "/password_reset/custom_redirect/named/",
            {"email": "[email protected]"},
        )
        self.assertRedirects(
            response, "/password_reset/", fetch_redirect_response=False
        )
    def test_confirm_redirect_default(self):
        """A successful confirm POST redirects to /reset/done/ by default."""
        url, path = self._test_confirm_start()
        response = self.client.post(
            path, {"new_password1": "anewpassword", "new_password2": "anewpassword"}
        )
        self.assertRedirects(response, "/reset/done/", fetch_redirect_response=False)
    def test_confirm_redirect_custom(self):
        """A confirm view with a custom success URL redirects there."""
        url, path = self._test_confirm_start()
        path = path.replace("/reset/", "/reset/custom/")
        response = self.client.post(
            path, {"new_password1": "anewpassword", "new_password2": "anewpassword"}
        )
        self.assertRedirects(response, "/custom/", fetch_redirect_response=False)
    def test_confirm_redirect_custom_named(self):
        """A named-URL success target is resolved before redirecting."""
        url, path = self._test_confirm_start()
        path = path.replace("/reset/", "/reset/custom/named/")
        response = self.client.post(
            path, {"new_password1": "anewpassword", "new_password2": "anewpassword"}
        )
        self.assertRedirects(
            response, "/password_reset/", fetch_redirect_response=False
        )
    def test_confirm_custom_reset_url_token(self):
        """Confirm works with a customized reset_url_token placeholder."""
        url, path = self._test_confirm_start()
        path = path.replace("/reset/", "/reset/custom/token/")
        # Tell the test client which placeholder token this view uses.
        self.client.reset_url_token = "set-passwordcustom"
        response = self.client.post(
            path,
            {"new_password1": "anewpassword", "new_password2": "anewpassword"},
        )
        self.assertRedirects(response, "/reset/done/", fetch_redirect_response=False)
    def test_confirm_login_post_reset(self):
        """With post_reset_login enabled, the user is logged in after reset."""
        url, path = self._test_confirm_start()
        path = path.replace("/reset/", "/reset/post_reset_login/")
        response = self.client.post(
            path, {"new_password1": "anewpassword", "new_password2": "anewpassword"}
        )
        self.assertRedirects(response, "/reset/done/", fetch_redirect_response=False)
        self.assertIn(SESSION_KEY, self.client.session)
    @override_settings(
        AUTHENTICATION_BACKENDS=[
            "django.contrib.auth.backends.ModelBackend",
            "django.contrib.auth.backends.AllowAllUsersModelBackend",
        ]
    )
    def test_confirm_login_post_reset_custom_backend(self):
        # This backend is specified in the URL pattern.
        backend = "django.contrib.auth.backends.AllowAllUsersModelBackend"
        url, path = self._test_confirm_start()
        path = path.replace("/reset/", "/reset/post_reset_login_custom_backend/")
        response = self.client.post(
            path, {"new_password1": "anewpassword", "new_password2": "anewpassword"}
        )
        self.assertRedirects(response, "/reset/done/", fetch_redirect_response=False)
        self.assertIn(SESSION_KEY, self.client.session)
        # The session records which backend performed the login.
        self.assertEqual(self.client.session[BACKEND_SESSION_KEY], backend)
    def test_confirm_login_post_reset_already_logged_in(self):
        """post_reset_login also works when a session already exists."""
        url, path = self._test_confirm_start()
        path = path.replace("/reset/", "/reset/post_reset_login/")
        self.login()
        response = self.client.post(
            path, {"new_password1": "anewpassword", "new_password2": "anewpassword"}
        )
        self.assertRedirects(response, "/reset/done/", fetch_redirect_response=False)
        self.assertIn(SESSION_KEY, self.client.session)
    def test_confirm_display_user_from_form(self):
        url, path = self._test_confirm_start()
        response = self.client.get(path)
        # The password_reset_confirm() view passes the user object to the
        # ``SetPasswordForm``, even on GET requests (#16919). For this test,
        # ``{{ form.user }}`` is rendered in the template
        # registration/password_reset_confirm.html.
        username = User.objects.get(email="[email protected]").username
        self.assertContains(response, "Hello, %s." % username)
        # However, the view should NOT pass any user object on a form if the
        # password reset link was invalid.
        response = self.client.get("/reset/zzzzzzzzzzzzz/1-1/")
        self.assertContains(response, "Hello, .")
    def test_confirm_link_redirects_to_set_password_page(self):
        url, path = self._test_confirm_start()
        # Don't use PasswordResetConfirmClient (self.client) here which
        # automatically fetches the redirect page.
        client = Client()
        response = client.get(path)
        token = response.resolver_match.kwargs["token"]
        uuidb64 = response.resolver_match.kwargs["uidb64"]
        # The token is moved out of the URL and into the session before the
        # redirect to the set-password URL.
        self.assertRedirects(response, "/reset/%s/set-password/" % uuidb64)
        self.assertEqual(client.session["_password_reset_token"], token)
    def test_confirm_custom_reset_url_token_link_redirects_to_set_password_page(self):
        """Same redirect behavior with a customized reset_url_token."""
        url, path = self._test_confirm_start()
        path = path.replace("/reset/", "/reset/custom/token/")
        client = Client()
        response = client.get(path)
        token = response.resolver_match.kwargs["token"]
        uuidb64 = response.resolver_match.kwargs["uidb64"]
        self.assertRedirects(
            response, "/reset/custom/token/%s/set-passwordcustom/" % uuidb64
        )
        self.assertEqual(client.session["_password_reset_token"], token)
    def test_invalid_link_if_going_directly_to_the_final_reset_password_url(self):
        url, path = self._test_confirm_start()
        _, uuidb64, _ = path.strip("/").split("/")
        # Hitting the set-password URL without first visiting the token URL
        # (so no token in the session) must render the invalid-link page.
        response = Client().get("/reset/%s/set-password/" % uuidb64)
        self.assertContains(response, "The password reset link was invalid")
    def test_missing_kwargs(self):
        """A confirm URLconf without uidb64/token raises ImproperlyConfigured."""
        msg = "The URL path must contain 'uidb64' and 'token' parameters."
        with self.assertRaisesMessage(ImproperlyConfigured, msg):
            self.client.get("/reset/missing_parameters/")
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUser")
class CustomUserPasswordResetTest(AuthViewsTestCase):
    """Password reset flow against the auth_tests.CustomUser model."""
    # Address used to create the user and to request the reset.
    user_email = "[email protected]"
    @classmethod
    def setUpTestData(cls):
        cls.u1 = CustomUser.custom_objects.create(
            email="[email protected]",
            date_of_birth=datetime.date(1976, 11, 8),
        )
        cls.u1.set_password("password")
        cls.u1.save()
    def setUp(self):
        self.client = PasswordResetConfirmClient()
    def _test_confirm_start(self):
        # Start by creating the email
        response = self.client.post("/password_reset/", {"email": self.user_email})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 1)
        return self._read_signup_email(mail.outbox[0])
    def _read_signup_email(self, email):
        # Extract the absolute reset URL and its path from the email body.
        urlmatch = re.search(r"https?://[^/]*(/.*reset/\S*)", email.body)
        self.assertIsNotNone(urlmatch, "No URL found in sent email")
        return urlmatch[0], urlmatch[1]
    def test_confirm_valid_custom_user(self):
        url, path = self._test_confirm_start()
        response = self.client.get(path)
        # redirect to a 'complete' page:
        self.assertContains(response, "Please enter your new password")
        # then submit a new password
        response = self.client.post(
            path,
            {
                "new_password1": "anewpassword",
                "new_password2": "anewpassword",
            },
        )
        self.assertRedirects(response, "/reset/done/")
@override_settings(AUTH_USER_MODEL="auth_tests.UUIDUser")
class UUIDUserPasswordResetTest(CustomUserPasswordResetTest):
    """Reruns the custom-user reset tests with a UUID-primary-key user."""
    def _test_confirm_start(self):
        # instead of fixture
        UUIDUser.objects.create_user(
            email=self.user_email,
            username="foo",
            password="foo",
        )
        return super()._test_confirm_start()
    def test_confirm_invalid_uuid(self):
        """A uidb64 that decodes to a non-UUID doesn't crash."""
        _, path = self._test_confirm_start()
        invalid_uidb64 = urlsafe_base64_encode(b"INVALID_UUID")
        # Swap the real uidb64 segment of the path for the invalid one.
        first, _uuidb64_, second = path.strip("/").split("/")
        response = self.client.get(
            "/" + "/".join((first, invalid_uidb64, second)) + "/"
        )
        self.assertContains(response, "The password reset link was invalid")
class ChangePasswordTest(AuthViewsTestCase):
    """Tests for the password_change view and its redirects."""
    def fail_login(self):
        # Assert that logging in with the original credentials now fails
        # (used after a successful password change).
        response = self.client.post(
            "/login/",
            {
                "username": "testclient",
                "password": "password",
            },
        )
        self.assertFormError(
            response,
            AuthenticationForm.error_messages["invalid_login"]
            % {"username": User._meta.get_field("username").verbose_name},
        )
    def logout(self):
        self.client.get("/logout/")
    def test_password_change_fails_with_invalid_old_password(self):
        self.login()
        response = self.client.post(
            "/password_change/",
            {
                "old_password": "donuts",
                "new_password1": "password1",
                "new_password2": "password1",
            },
        )
        self.assertFormError(
            response, PasswordChangeForm.error_messages["password_incorrect"]
        )
    def test_password_change_fails_with_mismatched_passwords(self):
        self.login()
        response = self.client.post(
            "/password_change/",
            {
                "old_password": "password",
                "new_password1": "password1",
                "new_password2": "donuts",
            },
        )
        self.assertFormError(
            response, SetPasswordForm.error_messages["password_mismatch"]
        )
    def test_password_change_succeeds(self):
        self.login()
        self.client.post(
            "/password_change/",
            {
                "old_password": "password",
                "new_password1": "password1",
                "new_password2": "password1",
            },
        )
        # Old credentials no longer work; new ones do.
        self.fail_login()
        self.login(password="password1")
    def test_password_change_done_succeeds(self):
        self.login()
        response = self.client.post(
            "/password_change/",
            {
                "old_password": "password",
                "new_password1": "password1",
                "new_password2": "password1",
            },
        )
        self.assertRedirects(
            response, "/password_change/done/", fetch_redirect_response=False
        )
    @override_settings(LOGIN_URL="/login/")
    def test_password_change_done_fails(self):
        # Anonymous access to the done page redirects to login.
        response = self.client.get("/password_change/done/")
        self.assertRedirects(
            response,
            "/login/?next=/password_change/done/",
            fetch_redirect_response=False,
        )
    def test_password_change_redirect_default(self):
        self.login()
        response = self.client.post(
            "/password_change/",
            {
                "old_password": "password",
                "new_password1": "password1",
                "new_password2": "password1",
            },
        )
        self.assertRedirects(
            response, "/password_change/done/", fetch_redirect_response=False
        )
    def test_password_change_redirect_custom(self):
        self.login()
        response = self.client.post(
            "/password_change/custom/",
            {
                "old_password": "password",
                "new_password1": "password1",
                "new_password2": "password1",
            },
        )
        self.assertRedirects(response, "/custom/", fetch_redirect_response=False)
    def test_password_change_redirect_custom_named(self):
        self.login()
        response = self.client.post(
            "/password_change/custom/named/",
            {
                "old_password": "password",
                "new_password1": "password1",
                "new_password2": "password1",
            },
        )
        self.assertRedirects(
            response, "/password_reset/", fetch_redirect_response=False
        )
class SessionAuthenticationTests(AuthViewsTestCase):
    def test_user_password_change_updates_session(self):
        """
        #21649 - Ensure contrib.auth.views.password_change updates the user's
        session auth hash after a password change so the session isn't logged out.
        """
        self.login()
        original_session_key = self.client.session.session_key
        response = self.client.post(
            "/password_change/",
            {
                "old_password": "password",
                "new_password1": "password1",
                "new_password2": "password1",
            },
        )
        # if the hash isn't updated, retrieving the redirection page will fail.
        self.assertRedirects(response, "/password_change/done/")
        # The session key is rotated.
        self.assertNotEqual(original_session_key, self.client.session.session_key)
class LoginTest(AuthViewsTestCase):
    """Tests for the login view: context, redirect safety, CSRF and sessions."""
    def test_current_site_in_context_after_login(self):
        response = self.client.get(reverse("login"))
        self.assertEqual(response.status_code, 200)
        if apps.is_installed("django.contrib.sites"):
            Site = apps.get_model("sites.Site")
            site = Site.objects.get_current()
            self.assertEqual(response.context["site"], site)
            self.assertEqual(response.context["site_name"], site.name)
        else:
            # Without the sites app, a RequestSite stands in for the site.
            self.assertIsInstance(response.context["site"], RequestSite)
        self.assertIsInstance(response.context["form"], AuthenticationForm)
    def test_security_check(self):
        login_url = reverse("login")
        # These URLs should not pass the security check.
        bad_urls = (
            "http://example.com",
            "http:///example.com",
            "https://example.com",
            "ftp://example.com",
            "///example.com",
            "//example.com",
            'javascript:alert("XSS")',
        )
        for bad_url in bad_urls:
            with self.subTest(bad_url=bad_url):
                nasty_url = "%(url)s?%(next)s=%(bad_url)s" % {
                    "url": login_url,
                    "next": REDIRECT_FIELD_NAME,
                    "bad_url": quote(bad_url),
                }
                response = self.client.post(
                    nasty_url,
                    {
                        "username": "testclient",
                        "password": "password",
                    },
                )
                self.assertEqual(response.status_code, 302)
                self.assertNotIn(
                    bad_url, response.url, "%s should be blocked" % bad_url
                )
        # These URLs should pass the security check.
        good_urls = (
            "/view/?param=http://example.com",
            "/view/?param=https://example.com",
            "/view?param=ftp://example.com",
            "view/?param=//example.com",
            "https://testserver/",
            "HTTPS://testserver/",
            "//testserver/",
            "/url%20with%20spaces/",
        )
        for good_url in good_urls:
            with self.subTest(good_url=good_url):
                safe_url = "%(url)s?%(next)s=%(good_url)s" % {
                    "url": login_url,
                    "next": REDIRECT_FIELD_NAME,
                    "good_url": quote(good_url),
                }
                response = self.client.post(
                    safe_url,
                    {
                        "username": "testclient",
                        "password": "password",
                    },
                )
                self.assertEqual(response.status_code, 302)
                self.assertIn(good_url, response.url, "%s should be allowed" % good_url)
    def test_security_check_https(self):
        # An http:// next URL is rejected when the request itself is https.
        login_url = reverse("login")
        non_https_next_url = "http://testserver/path"
        not_secured_url = "%(url)s?%(next)s=%(next_url)s" % {
            "url": login_url,
            "next": REDIRECT_FIELD_NAME,
            "next_url": quote(non_https_next_url),
        }
        post_data = {
            "username": "testclient",
            "password": "password",
        }
        response = self.client.post(not_secured_url, post_data, secure=True)
        self.assertEqual(response.status_code, 302)
        self.assertNotEqual(response.url, non_https_next_url)
        self.assertEqual(response.url, settings.LOGIN_REDIRECT_URL)
    def test_login_form_contains_request(self):
        # The custom authentication form for this login requires a request to
        # initialize it.
        response = self.client.post(
            "/custom_request_auth_login/",
            {
                "username": "testclient",
                "password": "password",
            },
        )
        # The login was successful.
        self.assertRedirects(
            response, settings.LOGIN_REDIRECT_URL, fetch_redirect_response=False
        )
    def test_login_csrf_rotate(self):
        """
        Makes sure that a login rotates the currently-used CSRF token.
        """
        def get_response(request):
            return HttpResponse()
        # Do a GET to establish a CSRF token
        # The test client isn't used here as it's a test for middleware.
        req = HttpRequest()
        CsrfViewMiddleware(get_response).process_view(req, LoginView.as_view(), (), {})
        # get_token() triggers CSRF token inclusion in the response
        get_token(req)
        resp = CsrfViewMiddleware(LoginView.as_view())(req)
        csrf_cookie = resp.cookies.get(settings.CSRF_COOKIE_NAME, None)
        token1 = csrf_cookie.coded_value
        # Prepare the POST request
        req = HttpRequest()
        req.COOKIES[settings.CSRF_COOKIE_NAME] = token1
        req.method = "POST"
        req.POST = {
            "username": "testclient",
            "password": "password",
            "csrfmiddlewaretoken": token1,
        }
        # Use POST request to log in
        SessionMiddleware(get_response).process_request(req)
        CsrfViewMiddleware(get_response).process_view(req, LoginView.as_view(), (), {})
        req.META[
            "SERVER_NAME"
        ] = "testserver"  # Required to have redirect work in login view
        req.META["SERVER_PORT"] = 80
        resp = CsrfViewMiddleware(LoginView.as_view())(req)
        csrf_cookie = resp.cookies.get(settings.CSRF_COOKIE_NAME, None)
        token2 = csrf_cookie.coded_value
        # Check the CSRF token switched
        self.assertNotEqual(token1, token2)
    def test_session_key_flushed_on_login(self):
        """
        To avoid reusing another user's session, ensure a new, empty session is
        created if the existing session corresponds to a different authenticated
        user.
        """
        self.login()
        original_session_key = self.client.session.session_key
        self.login(username="staff")
        self.assertNotEqual(original_session_key, self.client.session.session_key)
    def test_session_key_flushed_on_login_after_password_change(self):
        """
        As above, but same user logging in after a password change.
        """
        self.login()
        original_session_key = self.client.session.session_key
        # If no password change, session key should not be flushed.
        self.login()
        self.assertEqual(original_session_key, self.client.session.session_key)
        user = User.objects.get(username="testclient")
        user.set_password("foobar")
        user.save()
        self.login(password="foobar")
        self.assertNotEqual(original_session_key, self.client.session.session_key)
    def test_login_session_without_hash_session_key(self):
        """
        Session without django.contrib.auth.HASH_SESSION_KEY should login
        without an exception.
        """
        user = User.objects.get(username="testclient")
        engine = import_module(settings.SESSION_ENGINE)
        session = engine.SessionStore()
        session[SESSION_KEY] = user.id
        session.save()
        original_session_key = session.session_key
        self.client.cookies[settings.SESSION_COOKIE_NAME] = original_session_key
        self.login()
        self.assertNotEqual(original_session_key, self.client.session.session_key)
    def test_login_get_default_redirect_url(self):
        response = self.login(url="/login/get_default_redirect_url/")
        self.assertRedirects(response, "/custom/", fetch_redirect_response=False)
    def test_login_next_page(self):
        response = self.login(url="/login/next_page/")
        self.assertRedirects(response, "/somewhere/", fetch_redirect_response=False)
    def test_login_named_next_page_named(self):
        response = self.login(url="/login/next_page/named/")
        self.assertRedirects(
            response, "/password_reset/", fetch_redirect_response=False
        )
    @override_settings(LOGIN_REDIRECT_URL="/custom/")
    def test_login_next_page_overrides_login_redirect_url_setting(self):
        response = self.login(url="/login/next_page/")
        self.assertRedirects(response, "/somewhere/", fetch_redirect_response=False)
    def test_login_redirect_url_overrides_next_page(self):
        response = self.login(url="/login/next_page/?next=/test/")
        self.assertRedirects(response, "/test/", fetch_redirect_response=False)
    def test_login_redirect_url_overrides_get_default_redirect_url(self):
        response = self.login(url="/login/get_default_redirect_url/?next=/test/")
        self.assertRedirects(response, "/test/", fetch_redirect_response=False)
class LoginURLSettings(AuthViewsTestCase):
    """Tests for settings.LOGIN_URL."""
    def assertLoginURLEquals(self, url):
        # Hitting a login-required view while anonymous must redirect to the
        # given login URL (with ?next= pointing back).
        response = self.client.get("/login_required/")
        self.assertRedirects(response, url, fetch_redirect_response=False)
    @override_settings(LOGIN_URL="/login/")
    def test_standard_login_url(self):
        self.assertLoginURLEquals("/login/?next=/login_required/")
    @override_settings(LOGIN_URL="login")
    def test_named_login_url(self):
        self.assertLoginURLEquals("/login/?next=/login_required/")
    @override_settings(LOGIN_URL="http://remote.example.com/login")
    def test_remote_login_url(self):
        # A remote login URL gets an absolute ?next= value.
        quoted_next = quote("http://testserver/login_required/")
        expected = "http://remote.example.com/login?next=%s" % quoted_next
        self.assertLoginURLEquals(expected)
    @override_settings(LOGIN_URL="https:///login/")
    def test_https_login_url(self):
        quoted_next = quote("http://testserver/login_required/")
        expected = "https:///login/?next=%s" % quoted_next
        self.assertLoginURLEquals(expected)
    @override_settings(LOGIN_URL="/login/?pretty=1")
    def test_login_url_with_querystring(self):
        self.assertLoginURLEquals("/login/?pretty=1&next=/login_required/")
    @override_settings(LOGIN_URL="http://remote.example.com/login/?next=/default/")
    def test_remote_login_url_with_next_querystring(self):
        # An existing ?next= in LOGIN_URL is replaced.
        quoted_next = quote("http://testserver/login_required/")
        expected = "http://remote.example.com/login/?next=%s" % quoted_next
        self.assertLoginURLEquals(expected)
    @override_settings(LOGIN_URL=reverse_lazy("login"))
    def test_lazy_login_url(self):
        self.assertLoginURLEquals("/login/?next=/login_required/")
class LoginRedirectUrlTest(AuthViewsTestCase):
    """Tests for settings.LOGIN_REDIRECT_URL."""
    def assertLoginRedirectURLEqual(self, url):
        # A successful login must redirect to the given URL.
        response = self.login()
        self.assertRedirects(response, url, fetch_redirect_response=False)
    def test_default(self):
        self.assertLoginRedirectURLEqual("/accounts/profile/")
    @override_settings(LOGIN_REDIRECT_URL="/custom/")
    def test_custom(self):
        self.assertLoginRedirectURLEqual("/custom/")
    @override_settings(LOGIN_REDIRECT_URL="password_reset")
    def test_named(self):
        # A named URL pattern is resolved before redirecting.
        self.assertLoginRedirectURLEqual("/password_reset/")
    @override_settings(LOGIN_REDIRECT_URL="http://remote.example.com/welcome/")
    def test_remote(self):
        self.assertLoginRedirectURLEqual("http://remote.example.com/welcome/")
class RedirectToLoginTests(AuthViewsTestCase):
    """Tests for the redirect_to_login view"""
    @override_settings(LOGIN_URL=reverse_lazy("login"))
    def test_redirect_to_login_with_lazy(self):
        login_redirect_response = redirect_to_login(next="/else/where/")
        expected = "/login/?next=/else/where/"
        self.assertEqual(expected, login_redirect_response.url)
    @override_settings(LOGIN_URL=reverse_lazy("login"))
    def test_redirect_to_login_with_lazy_and_unicode(self):
        # Non-ASCII next values are percent-encoded in the redirect URL.
        login_redirect_response = redirect_to_login(next="/else/where/झ/")
        expected = "/login/?next=/else/where/%E0%A4%9D/"
        self.assertEqual(expected, login_redirect_response.url)
class LogoutThenLoginTests(AuthViewsTestCase):
    """Tests for the logout_then_login view"""
    def confirm_logged_out(self):
        # No auth user id left in the session after logout.
        self.assertNotIn(SESSION_KEY, self.client.session)
    @override_settings(LOGIN_URL="/login/")
    def test_default_logout_then_login(self):
        self.login()
        req = HttpRequest()
        req.method = "GET"
        req.session = self.client.session
        response = logout_then_login(req)
        self.confirm_logged_out()
        self.assertRedirects(response, "/login/", fetch_redirect_response=False)
    def test_logout_then_login_with_custom_login(self):
        self.login()
        req = HttpRequest()
        req.method = "GET"
        req.session = self.client.session
        # An explicit login_url overrides settings.LOGIN_URL.
        response = logout_then_login(req, login_url="/custom/")
        self.confirm_logged_out()
        self.assertRedirects(response, "/custom/", fetch_redirect_response=False)
class LoginRedirectAuthenticatedUser(AuthViewsTestCase):
    """Tests for LoginView's redirect_authenticated_user behavior."""
    # Login view without redirect_authenticated_user.
    dont_redirect_url = "/login/redirect_authenticated_user_default/"
    # Login view with redirect_authenticated_user enabled.
    do_redirect_url = "/login/redirect_authenticated_user/"
    def test_default(self):
        """Stay on the login page by default."""
        self.login()
        response = self.client.get(self.dont_redirect_url)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context["next"], "")
    def test_guest(self):
        """If not logged in, stay on the same page."""
        response = self.client.get(self.do_redirect_url)
        self.assertEqual(response.status_code, 200)
    def test_redirect(self):
        """If logged in, go to default redirected URL."""
        self.login()
        response = self.client.get(self.do_redirect_url)
        self.assertRedirects(
            response, "/accounts/profile/", fetch_redirect_response=False
        )
    @override_settings(LOGIN_REDIRECT_URL="/custom/")
    def test_redirect_url(self):
        """If logged in, go to custom redirected URL."""
        self.login()
        response = self.client.get(self.do_redirect_url)
        self.assertRedirects(response, "/custom/", fetch_redirect_response=False)
    def test_redirect_param(self):
        """If next is specified as a GET parameter, go there."""
        self.login()
        url = self.do_redirect_url + "?next=/custom_next/"
        response = self.client.get(url)
        self.assertRedirects(response, "/custom_next/", fetch_redirect_response=False)
    def test_redirect_loop(self):
        """
        Detect a redirect loop if LOGIN_REDIRECT_URL is not correctly set,
        with and without custom parameters.
        """
        self.login()
        msg = (
            "Redirection loop for authenticated user detected. Check that "
            "your LOGIN_REDIRECT_URL doesn't point to a login page."
        )
        with self.settings(LOGIN_REDIRECT_URL=self.do_redirect_url):
            with self.assertRaisesMessage(ValueError, msg):
                self.client.get(self.do_redirect_url)
            url = self.do_redirect_url + "?bla=2"
            with self.assertRaisesMessage(ValueError, msg):
                self.client.get(url)
    def test_permission_required_not_logged_in(self):
        # Not logged in ...
        with self.settings(LOGIN_URL=self.do_redirect_url):
            # redirected to login.
            response = self.client.get("/permission_required_redirect/", follow=True)
            self.assertEqual(response.status_code, 200)
            # exception raised.
            response = self.client.get("/permission_required_exception/", follow=True)
            self.assertEqual(response.status_code, 403)
            # redirected to login.
            response = self.client.get(
                "/login_and_permission_required_exception/", follow=True
            )
            self.assertEqual(response.status_code, 200)
    def test_permission_required_logged_in(self):
        self.login()
        # Already logged in...
        with self.settings(LOGIN_URL=self.do_redirect_url):
            # redirect loop encountered.
            with self.assertRaisesMessage(
                RedirectCycleError, "Redirect loop detected."
            ):
                self.client.get("/permission_required_redirect/", follow=True)
            # exception raised.
            response = self.client.get("/permission_required_exception/", follow=True)
            self.assertEqual(response.status_code, 403)
            # exception raised.
            response = self.client.get(
                "/login_and_permission_required_exception/", follow=True
            )
            self.assertEqual(response.status_code, 403)
class LoginSuccessURLAllowedHostsTest(AuthViewsTestCase):
    """Tests for LoginView's success_url_allowed_hosts handling of ?next=."""
    def test_success_url_allowed_hosts_same_host(self):
        response = self.client.post(
            "/login/allowed_hosts/",
            {
                "username": "testclient",
                "password": "password",
                "next": "https://testserver/home",
            },
        )
        self.assertIn(SESSION_KEY, self.client.session)
        self.assertRedirects(
            response, "https://testserver/home", fetch_redirect_response=False
        )
    def test_success_url_allowed_hosts_safe_host(self):
        # A host on the allowed list is honored.
        response = self.client.post(
            "/login/allowed_hosts/",
            {
                "username": "testclient",
                "password": "password",
                "next": "https://otherserver/home",
            },
        )
        self.assertIn(SESSION_KEY, self.client.session)
        self.assertRedirects(
            response, "https://otherserver/home", fetch_redirect_response=False
        )
    def test_success_url_allowed_hosts_unsafe_host(self):
        # An unlisted host falls back to the default redirect URL.
        response = self.client.post(
            "/login/allowed_hosts/",
            {
                "username": "testclient",
                "password": "password",
                "next": "https://evil/home",
            },
        )
        self.assertIn(SESSION_KEY, self.client.session)
        self.assertRedirects(
            response, "/accounts/profile/", fetch_redirect_response=False
        )
class LogoutTest(AuthViewsTestCase):
    """Tests for the logout view: templates, redirects, and next-URL safety."""
    def confirm_logged_out(self):
        # No auth user id left in the session after logout.
        self.assertNotIn(SESSION_KEY, self.client.session)
    def test_logout_default(self):
        "Logout without next_page option renders the default template"
        self.login()
        response = self.client.get("/logout/")
        self.assertContains(response, "Logged out")
        self.confirm_logged_out()
    def test_logout_with_post(self):
        self.login()
        response = self.client.post("/logout/")
        self.assertContains(response, "Logged out")
        self.confirm_logged_out()
    def test_14377(self):
        # Bug 14377: the logout template context includes "site".
        self.login()
        response = self.client.get("/logout/")
        self.assertIn("site", response.context)
    def test_logout_doesnt_cache(self):
        """
        The logout() view should send "no-cache" headers for reasons described
        in #25490.
        """
        response = self.client.get("/logout/")
        self.assertIn("no-store", response.headers["Cache-Control"])
    def test_logout_with_overridden_redirect_url(self):
        # Bug 11223: ?next= overrides the view's next_page.
        self.login()
        response = self.client.get("/logout/next_page/")
        self.assertRedirects(response, "/somewhere/", fetch_redirect_response=False)
        response = self.client.get("/logout/next_page/?next=/login/")
        self.assertRedirects(response, "/login/", fetch_redirect_response=False)
        self.confirm_logged_out()
    def test_logout_with_next_page_specified(self):
        "Logout with next_page option given redirects to specified resource"
        self.login()
        response = self.client.get("/logout/next_page/")
        self.assertRedirects(response, "/somewhere/", fetch_redirect_response=False)
        self.confirm_logged_out()
    def test_logout_with_redirect_argument(self):
        "Logout with query string redirects to specified resource"
        self.login()
        response = self.client.get("/logout/?next=/login/")
        self.assertRedirects(response, "/login/", fetch_redirect_response=False)
        self.confirm_logged_out()
    def test_logout_with_custom_redirect_argument(self):
        "Logout with custom query string redirects to specified resource"
        self.login()
        response = self.client.get("/logout/custom_query/?follow=/somewhere/")
        self.assertRedirects(response, "/somewhere/", fetch_redirect_response=False)
        self.confirm_logged_out()
    def test_logout_with_named_redirect(self):
        "Logout resolves names or URLs passed as next_page."
        self.login()
        response = self.client.get("/logout/next_page/named/")
        self.assertRedirects(
            response, "/password_reset/", fetch_redirect_response=False
        )
        self.confirm_logged_out()
    def test_success_url_allowed_hosts_same_host(self):
        self.login()
        response = self.client.get("/logout/allowed_hosts/?next=https://testserver/")
        self.assertRedirects(
            response, "https://testserver/", fetch_redirect_response=False
        )
        self.confirm_logged_out()
    def test_success_url_allowed_hosts_safe_host(self):
        self.login()
        response = self.client.get("/logout/allowed_hosts/?next=https://otherserver/")
        self.assertRedirects(
            response, "https://otherserver/", fetch_redirect_response=False
        )
        self.confirm_logged_out()
    def test_success_url_allowed_hosts_unsafe_host(self):
        # An unlisted host falls back to the logout URL itself.
        self.login()
        response = self.client.get("/logout/allowed_hosts/?next=https://evil/")
        self.assertRedirects(
            response, "/logout/allowed_hosts/", fetch_redirect_response=False
        )
        self.confirm_logged_out()
    def test_security_check(self):
        logout_url = reverse("logout")
        # These URLs should not pass the security check.
        bad_urls = (
            "http://example.com",
            "http:///example.com",
            "https://example.com",
            "ftp://example.com",
            "///example.com",
            "//example.com",
            'javascript:alert("XSS")',
        )
        for bad_url in bad_urls:
            with self.subTest(bad_url=bad_url):
                nasty_url = "%(url)s?%(next)s=%(bad_url)s" % {
                    "url": logout_url,
                    "next": REDIRECT_FIELD_NAME,
                    "bad_url": quote(bad_url),
                }
                self.login()
                response = self.client.get(nasty_url)
                self.assertEqual(response.status_code, 302)
                self.assertNotIn(
                    bad_url, response.url, "%s should be blocked" % bad_url
                )
                self.confirm_logged_out()
        # These URLs should pass the security check.
        good_urls = (
            "/view/?param=http://example.com",
            "/view/?param=https://example.com",
            "/view?param=ftp://example.com",
            "view/?param=//example.com",
            "https://testserver/",
            "HTTPS://testserver/",
            "//testserver/",
            "/url%20with%20spaces/",
        )
        for good_url in good_urls:
            with self.subTest(good_url=good_url):
                safe_url = "%(url)s?%(next)s=%(good_url)s" % {
                    "url": logout_url,
                    "next": REDIRECT_FIELD_NAME,
                    "good_url": quote(good_url),
                }
                self.login()
                response = self.client.get(safe_url)
                self.assertEqual(response.status_code, 302)
                self.assertIn(good_url, response.url, "%s should be allowed" % good_url)
                self.confirm_logged_out()
    def test_security_check_https(self):
        # An http:// next URL is rejected on an https request.
        logout_url = reverse("logout")
        non_https_next_url = "http://testserver/"
        url = "%(url)s?%(next)s=%(next_url)s" % {
            "url": logout_url,
            "next": REDIRECT_FIELD_NAME,
            "next_url": quote(non_https_next_url),
        }
        self.login()
        response = self.client.get(url, secure=True)
        self.assertRedirects(response, logout_url, fetch_redirect_response=False)
        self.confirm_logged_out()
    def test_logout_preserve_language(self):
        """Language is preserved after logout."""
        self.login()
        self.client.post("/setlang/", {"language": "pl"})
        self.assertEqual(self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, "pl")
        self.client.get("/logout/")
        self.assertEqual(self.client.cookies[settings.LANGUAGE_COOKIE_NAME].value, "pl")
    @override_settings(LOGOUT_REDIRECT_URL="/custom/")
    def test_logout_redirect_url_setting(self):
        self.login()
        response = self.client.get("/logout/")
        self.assertRedirects(response, "/custom/", fetch_redirect_response=False)
    @override_settings(LOGOUT_REDIRECT_URL="logout")
    def test_logout_redirect_url_named_setting(self):
        # A named URL pattern is resolved before redirecting.
        self.login()
        response = self.client.get("/logout/")
        self.assertRedirects(response, "/logout/", fetch_redirect_response=False)
def get_perm(Model, perm):
    """Return the Permission for *Model* with the given codename *perm*."""
    content_type = ContentType.objects.get_for_model(Model)
    return Permission.objects.get(content_type=content_type, codename=perm)
# Redirect in test_user_change_password will fail if session auth hash
# isn't updated after password change (#21649)
@override_settings(ROOT_URLCONF="auth_tests.urls_admin")
class ChangelistTests(AuthViewsTestCase):
    """Admin change list / change form behavior for the User model."""

    @classmethod
    def setUpTestData(cls):
        super().setUpTestData()
        # Make me a superuser before logging in.
        User.objects.filter(username="testclient").update(
            is_staff=True, is_superuser=True
        )

    def setUp(self):
        self.login()
        # Get the latest last_login value.
        self.admin = User.objects.get(pk=self.u1.pk)

    def get_user_data(self, user):
        """Return POST data mirroring the admin change form for *user*."""
        return {
            "username": user.username,
            "password": user.password,
            "email": user.email,
            "is_active": user.is_active,
            "is_staff": user.is_staff,
            "is_superuser": user.is_superuser,
            # SplitDateTime fields are submitted as separate date/time parts;
            # the "initial-" values let the form detect unchanged fields.
            "last_login_0": user.last_login.strftime("%Y-%m-%d"),
            "last_login_1": user.last_login.strftime("%H:%M:%S"),
            "initial-last_login_0": user.last_login.strftime("%Y-%m-%d"),
            "initial-last_login_1": user.last_login.strftime("%H:%M:%S"),
            "date_joined_0": user.date_joined.strftime("%Y-%m-%d"),
            "date_joined_1": user.date_joined.strftime("%H:%M:%S"),
            "initial-date_joined_0": user.date_joined.strftime("%Y-%m-%d"),
            "initial-date_joined_1": user.date_joined.strftime("%H:%M:%S"),
            "first_name": user.first_name,
            "last_name": user.last_name,
        }

    # #20078 - users shouldn't be allowed to guess password hashes via
    # repeated password__startswith queries.
    def test_changelist_disallows_password_lookups(self):
        # A lookup that tries to filter on password isn't OK
        with self.assertLogs("django.security.DisallowedModelAdminLookup", "ERROR"):
            response = self.client.get(
                reverse("auth_test_admin:auth_user_changelist")
                + "?password__startswith=sha1$"
            )
        self.assertEqual(response.status_code, 400)

    def test_user_change_email(self):
        """Changing only the email logs a 'Changed Email address.' entry."""
        data = self.get_user_data(self.admin)
        data["email"] = "new_" + data["email"]
        response = self.client.post(
            reverse("auth_test_admin:auth_user_change", args=(self.admin.pk,)), data
        )
        self.assertRedirects(response, reverse("auth_test_admin:auth_user_changelist"))
        row = LogEntry.objects.latest("id")
        self.assertEqual(row.get_change_message(), "Changed Email address.")

    def test_user_not_change(self):
        """Submitting the form unmodified logs 'No fields changed.'."""
        response = self.client.post(
            reverse("auth_test_admin:auth_user_change", args=(self.admin.pk,)),
            self.get_user_data(self.admin),
        )
        self.assertRedirects(response, reverse("auth_test_admin:auth_user_changelist"))
        row = LogEntry.objects.latest("id")
        self.assertEqual(row.get_change_message(), "No fields changed.")

    def test_user_change_password(self):
        """
        The admin password-change form is linked from the change form,
        changes the password, and keeps the current session valid.
        """
        user_change_url = reverse(
            "auth_test_admin:auth_user_change", args=(self.admin.pk,)
        )
        password_change_url = reverse(
            "auth_test_admin:auth_user_password_change", args=(self.admin.pk,)
        )
        response = self.client.get(user_change_url)
        # Test the link inside password field help_text.
        rel_link = re.search(
            r'you can change the password using <a href="([^"]*)">this form</a>',
            response.content.decode(),
        )[1]
        self.assertEqual(urljoin(user_change_url, rel_link), password_change_url)
        response = self.client.post(
            password_change_url,
            {
                "password1": "password1",
                "password2": "password1",
            },
        )
        self.assertRedirects(response, user_change_url)
        row = LogEntry.objects.latest("id")
        self.assertEqual(row.get_change_message(), "Changed password.")
        # Redirect succeeds only if the session auth hash was updated after
        # the password change (#21649) - verified by logging in again.
        self.logout()
        self.login(password="password1")

    def test_user_change_different_user_password(self):
        """Changing another user's password logs against the acting admin."""
        u = User.objects.get(email="[email protected]")
        response = self.client.post(
            reverse("auth_test_admin:auth_user_password_change", args=(u.pk,)),
            {
                "password1": "password1",
                "password2": "password1",
            },
        )
        self.assertRedirects(
            response, reverse("auth_test_admin:auth_user_change", args=(u.pk,))
        )
        row = LogEntry.objects.latest("id")
        self.assertEqual(row.user_id, self.admin.pk)
        self.assertEqual(row.object_id, str(u.pk))
        self.assertEqual(row.get_change_message(), "Changed password.")

    def test_password_change_bad_url(self):
        """A non-integer user id in the password-change URL yields a 404."""
        response = self.client.get(
            reverse("auth_test_admin:auth_user_password_change", args=("foobar",))
        )
        self.assertEqual(response.status_code, 404)

    @mock.patch("django.contrib.auth.admin.UserAdmin.has_change_permission")
    def test_user_change_password_passes_user_to_has_change_permission(
        self, has_change_permission
    ):
        """The target user object is passed to has_change_permission()."""
        url = reverse(
            "auth_test_admin:auth_user_password_change", args=(self.admin.pk,)
        )
        self.client.post(url, {"password1": "password1", "password2": "password1"})
        (_request, user), _kwargs = has_change_permission.call_args
        self.assertEqual(user.pk, self.admin.pk)

    def test_view_user_password_is_readonly(self):
        """
        With view-only permission the password is rendered masked via
        ReadOnlyPasswordHashWidget and cannot be changed by POSTing.
        """
        u = User.objects.get(username="testclient")
        u.is_superuser = False
        u.save()
        original_password = u.password
        u.user_permissions.add(get_perm(User, "view_user"))
        response = self.client.get(
            reverse("auth_test_admin:auth_user_change", args=(u.pk,)),
        )
        # NOTE(review): assumes a three-part hash format (algorithm$salt$hash)
        # from the test-settings password hasher - confirm against settings.
        algo, salt, hash_string = u.password.split("$")
        self.assertContains(response, '<div class="readonly">testclient</div>')
        # ReadOnlyPasswordHashWidget is used to render the field.
        self.assertContains(
            response,
            "<strong>algorithm</strong>: %s\n\n"
            "<strong>salt</strong>: %s********************\n\n"
            "<strong>hash</strong>: %s**************************\n\n"
            % (
                algo,
                salt[:2],
                hash_string[:6],
            ),
            html=True,
        )
        # Value in POST data is ignored.
        data = self.get_user_data(u)
        data["password"] = "shouldnotchange"
        change_url = reverse("auth_test_admin:auth_user_change", args=(u.pk,))
        response = self.client.post(change_url, data)
        self.assertEqual(response.status_code, 403)
        u.refresh_from_db()
        self.assertEqual(u.password, original_password)
@override_settings(
    AUTH_USER_MODEL="auth_tests.UUIDUser",
    ROOT_URLCONF="auth_tests.urls_custom_user_admin",
)
class UUIDUserTests(TestCase):
    """Admin password change for a custom user model with a UUID primary key."""

    def test_admin_password_change(self):
        u = UUIDUser.objects.create_superuser(
            username="uuid", email="[email protected]", password="test"
        )
        self.assertTrue(self.client.login(username="uuid", password="test"))
        user_change_url = reverse(
            "custom_user_admin:auth_tests_uuiduser_change", args=(u.pk,)
        )
        response = self.client.get(user_change_url)
        self.assertEqual(response.status_code, 200)
        password_change_url = reverse(
            "custom_user_admin:auth_user_password_change", args=(u.pk,)
        )
        response = self.client.get(password_change_url)
        # The action attribute is omitted.
        self.assertContains(response, '<form method="post" id="uuiduser_form">')
        # A LogEntry is created with pk=1 which breaks a FK constraint on MySQL
        with connection.constraint_checks_disabled():
            response = self.client.post(
                password_change_url,
                {
                    "password1": "password1",
                    "password2": "password1",
                },
            )
            self.assertRedirects(response, user_change_url)
            row = LogEntry.objects.latest("id")
            self.assertEqual(row.user_id, 1)  # hardcoded in CustomUserAdmin.log_change()
            self.assertEqual(row.object_id, str(u.pk))
            self.assertEqual(row.get_change_message(), "Changed password.")
        # The LogEntry.user column isn't altered to a UUID type so it's set to
        # an integer manually in CustomUserAdmin to avoid an error. To avoid a
        # constraint error, delete the entry before constraints are checked
        # after the test.
        row.delete()
|
4ded6bfd11994b67a1af00216502ae7fdb17389986c024b564386c36914770e6 | from datetime import datetime
from django.conf import settings
from django.contrib.auth import authenticate
from django.contrib.auth.backends import RemoteUserBackend
from django.contrib.auth.middleware import RemoteUserMiddleware
from django.contrib.auth.models import User
from django.middleware.csrf import _get_new_csrf_string, _mask_cipher_secret
from django.test import Client, TestCase, modify_settings, override_settings
from django.utils import timezone
@override_settings(ROOT_URLCONF="auth_tests.urls")
class RemoteUserTest(TestCase):
    """
    RemoteUserMiddleware + RemoteUserBackend authentication driven by the
    REMOTE_USER request header. Subclasses swap in other middleware/backend
    pairs by overriding the class attributes below.
    """

    # Dotted paths appended to MIDDLEWARE / AUTHENTICATION_BACKENDS in setUp().
    middleware = "django.contrib.auth.middleware.RemoteUserMiddleware"
    backend = "django.contrib.auth.backends.RemoteUserBackend"
    # META keys carrying the remote username / email in test requests.
    header = "REMOTE_USER"
    email_header = "REMOTE_EMAIL"
    # Usernames to be passed in REMOTE_USER for the test_known_user test case.
    known_user = "knownuser"
    known_user2 = "knownuser2"

    def setUp(self):
        # Append (not replace) so the default model backend stays available.
        self.patched_settings = modify_settings(
            AUTHENTICATION_BACKENDS={"append": self.backend},
            MIDDLEWARE={"append": self.middleware},
        )
        self.patched_settings.enable()

    def tearDown(self):
        self.patched_settings.disable()

    def test_no_remote_user(self):
        """Users are not created when remote user is not specified."""
        num_users = User.objects.count()
        # Missing, None, and empty header values all leave the visitor
        # anonymous and create no User rows.
        response = self.client.get("/remote_user/")
        self.assertTrue(response.context["user"].is_anonymous)
        self.assertEqual(User.objects.count(), num_users)
        response = self.client.get("/remote_user/", **{self.header: None})
        self.assertTrue(response.context["user"].is_anonymous)
        self.assertEqual(User.objects.count(), num_users)
        response = self.client.get("/remote_user/", **{self.header: ""})
        self.assertTrue(response.context["user"].is_anonymous)
        self.assertEqual(User.objects.count(), num_users)

    def test_csrf_validation_passes_after_process_request_login(self):
        """
        CSRF check must access the CSRF token from the session or cookie,
        rather than the request, as rotate_token() may have been called by an
        authentication middleware during the process_request() phase.
        """
        csrf_client = Client(enforce_csrf_checks=True)
        csrf_secret = _get_new_csrf_string()
        csrf_token = _mask_cipher_secret(csrf_secret)
        csrf_token_form = _mask_cipher_secret(csrf_secret)
        headers = {self.header: "fakeuser"}
        data = {"csrfmiddlewaretoken": csrf_token_form}
        # Verify that CSRF is configured for the view
        csrf_client.cookies.load({settings.CSRF_COOKIE_NAME: csrf_token})
        response = csrf_client.post("/remote_user/", **headers)
        self.assertEqual(response.status_code, 403)
        self.assertIn(b"CSRF verification failed.", response.content)
        # This request will call django.contrib.auth.login() which will call
        # django.middleware.csrf.rotate_token() thus changing the value of
        # request.META['CSRF_COOKIE'] from the user submitted value set by
        # CsrfViewMiddleware.process_request() to the new csrftoken value set
        # by rotate_token(). Csrf validation should still pass when the view is
        # later processed by CsrfViewMiddleware.process_view()
        csrf_client.cookies.load({settings.CSRF_COOKIE_NAME: csrf_token})
        response = csrf_client.post("/remote_user/", data, **headers)
        self.assertEqual(response.status_code, 200)

    def test_unknown_user(self):
        """
        Tests the case where the username passed in the header does not exist
        as a User.
        """
        num_users = User.objects.count()
        response = self.client.get("/remote_user/", **{self.header: "newuser"})
        self.assertEqual(response.context["user"].username, "newuser")
        self.assertEqual(User.objects.count(), num_users + 1)
        User.objects.get(username="newuser")
        # Another request with same user should not create any new users.
        response = self.client.get("/remote_user/", **{self.header: "newuser"})
        self.assertEqual(User.objects.count(), num_users + 1)

    def test_known_user(self):
        """
        Tests the case where the username passed in the header is a valid User.
        """
        User.objects.create(username="knownuser")
        User.objects.create(username="knownuser2")
        num_users = User.objects.count()
        response = self.client.get("/remote_user/", **{self.header: self.known_user})
        self.assertEqual(response.context["user"].username, "knownuser")
        self.assertEqual(User.objects.count(), num_users)
        # A different user passed in the headers causes the new user
        # to be logged in.
        response = self.client.get("/remote_user/", **{self.header: self.known_user2})
        self.assertEqual(response.context["user"].username, "knownuser2")
        self.assertEqual(User.objects.count(), num_users)

    def test_last_login(self):
        """
        A user's last_login is set the first time they make a
        request but not updated in subsequent requests with the same session.
        """
        user = User.objects.create(username="knownuser")
        # Set last_login to something so we can determine if it changes.
        default_login = datetime(2000, 1, 1)
        if settings.USE_TZ:
            default_login = default_login.replace(tzinfo=timezone.utc)
        user.last_login = default_login
        user.save()
        response = self.client.get("/remote_user/", **{self.header: self.known_user})
        self.assertNotEqual(default_login, response.context["user"].last_login)
        # Second request reuses the session; last_login stays untouched.
        user = User.objects.get(username="knownuser")
        user.last_login = default_login
        user.save()
        response = self.client.get("/remote_user/", **{self.header: self.known_user})
        self.assertEqual(default_login, response.context["user"].last_login)

    def test_header_disappears(self):
        """
        A logged in user is logged out automatically when
        the REMOTE_USER header disappears during the same browser session.
        """
        User.objects.create(username="knownuser")
        # Known user authenticates
        response = self.client.get("/remote_user/", **{self.header: self.known_user})
        self.assertEqual(response.context["user"].username, "knownuser")
        # During the session, the REMOTE_USER header disappears. Should trigger logout.
        response = self.client.get("/remote_user/")
        self.assertTrue(response.context["user"].is_anonymous)
        # verify the remoteuser middleware will not remove a user
        # authenticated via another backend
        User.objects.create_user(username="modeluser", password="foo")
        self.client.login(username="modeluser", password="foo")
        authenticate(username="modeluser", password="foo")
        response = self.client.get("/remote_user/")
        self.assertEqual(response.context["user"].username, "modeluser")

    def test_user_switch_forces_new_login(self):
        """
        If the username in the header changes between requests
        that the original user is logged out
        """
        User.objects.create(username="knownuser")
        # Known user authenticates
        response = self.client.get("/remote_user/", **{self.header: self.known_user})
        self.assertEqual(response.context["user"].username, "knownuser")
        # During the session, the REMOTE_USER changes to a different user.
        response = self.client.get("/remote_user/", **{self.header: "newnewuser"})
        # The current user is not the prior remote_user.
        # In backends that create a new user, username is "newnewuser"
        # In backends that do not create new users, it is '' (anonymous user)
        self.assertNotEqual(response.context["user"].username, "knownuser")

    def test_inactive_user(self):
        """An inactive known user is not authenticated by the default backend."""
        User.objects.create(username="knownuser", is_active=False)
        response = self.client.get("/remote_user/", **{self.header: "knownuser"})
        self.assertTrue(response.context["user"].is_anonymous)
class RemoteUserNoCreateBackend(RemoteUserBackend):
    """Backend that doesn't create unknown users."""

    # Overrides RemoteUserBackend's default so unrecognized REMOTE_USER
    # values do not result in new User rows.
    create_unknown_user = False
class RemoteUserNoCreateTest(RemoteUserTest):
    """
    Re-runs the RemoteUserTest suite against a backend that never creates
    unknown users, overriding the one test whose outcome differs.
    """

    backend = "auth_tests.test_remote_user.RemoteUserNoCreateBackend"

    def test_unknown_user(self):
        """An unrecognized REMOTE_USER stays anonymous and creates no row."""
        user_count_before = User.objects.count()
        response = self.client.get("/remote_user/", **{self.header: "newuser"})
        self.assertTrue(response.context["user"].is_anonymous)
        self.assertEqual(User.objects.count(), user_count_before)
class AllowAllUsersRemoteUserBackendTest(RemoteUserTest):
    """Re-runs RemoteUserTest with a backend that accepts inactive users."""

    backend = "django.contrib.auth.backends.AllowAllUsersRemoteUserBackend"

    def test_inactive_user(self):
        """An inactive known user is still authenticated by this backend."""
        inactive = User.objects.create(username="knownuser", is_active=False)
        response = self.client.get(
            "/remote_user/", **{self.header: self.known_user}
        )
        self.assertEqual(response.context["user"].username, inactive.username)
class CustomRemoteUserBackend(RemoteUserBackend):
    """RemoteUserBackend subclass overriding the username and user hooks."""

    def clean_username(self, username):
        """Return the part of *username* before the first "@" character."""
        return username.partition("@")[0]

    def configure_user(self, request, user):
        """Set the new user's email from the request's email header."""
        address = request.META.get(RemoteUserTest.email_header, "")
        user.email = address
        user.save()
        return user
class RemoteUserCustomTest(RemoteUserTest):
    """
    Runs the RemoteUserTest suite against CustomRemoteUserBackend, which
    overrides clean_username() and configure_user().
    """

    backend = "auth_tests.test_remote_user.CustomRemoteUserBackend"
    # Usernames carrying a domain portion for clean_username() to strip.
    known_user = "[email protected]"
    known_user2 = "[email protected]"

    def test_known_user(self):
        """Known usernames are cleaned; existing users are not reconfigured."""
        super().test_known_user()
        for existing_name in ("knownuser", "knownuser2"):
            self.assertEqual(User.objects.get(username=existing_name).email, "")

    def test_unknown_user(self):
        """A newly created user is configured with the header's email."""
        user_count_before = User.objects.count()
        request_headers = {
            self.header: "newuser",
            self.email_header: "[email protected]",
        }
        response = self.client.get("/remote_user/", **request_headers)
        self.assertEqual(response.context["user"].username, "newuser")
        self.assertEqual(response.context["user"].email, "[email protected]")
        self.assertEqual(User.objects.count(), user_count_before + 1)
        created = User.objects.get(username="newuser")
        self.assertEqual(created.email, "[email protected]")
class CustomHeaderMiddleware(RemoteUserMiddleware):
    """
    Middleware that overrides custom HTTP auth user header.
    """

    # Read the remote username from the AUTHUSER HTTP header instead of the
    # default REMOTE_USER META key.
    header = "HTTP_AUTHUSER"
class CustomHeaderRemoteUserTest(RemoteUserTest):
    """
    Tests a custom RemoteUserMiddleware subclass with custom HTTP auth user
    header.
    """

    middleware = "auth_tests.test_remote_user.CustomHeaderMiddleware"
    # Matches CustomHeaderMiddleware.header so requests in the inherited
    # tests set the header the middleware actually reads.
    header = "HTTP_AUTHUSER"
class PersistentRemoteUserTest(RemoteUserTest):
    """
    PersistentRemoteUserMiddleware keeps the user logged in even if the
    subsequent calls do not contain the header value.
    """

    middleware = "django.contrib.auth.middleware.PersistentRemoteUserMiddleware"
    require_header = False

    def test_header_disappears(self):
        """The session survives requests that omit the REMOTE_USER header."""
        User.objects.create(username="knownuser")
        # Authenticate once with the header present.
        with_header = self.client.get(
            "/remote_user/", **{self.header: self.known_user}
        )
        self.assertEqual(with_header.context["user"].username, "knownuser")
        # A follow-up request without the header keeps the same user.
        without_header = self.client.get("/remote_user/")
        self.assertFalse(without_header.context["user"].is_anonymous)
        self.assertEqual(without_header.context["user"].username, "knownuser")
|
cb25ec92034d181e83f7bd19860b8c707127970880746459b02f8410843c427f | from unittest import mock
from django.conf.global_settings import PASSWORD_HASHERS
from django.contrib.auth import get_user_model
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.base_user import AbstractBaseUser
from django.contrib.auth.hashers import get_hasher
from django.contrib.auth.models import (
AnonymousUser,
Group,
Permission,
User,
UserManager,
)
from django.contrib.contenttypes.models import ContentType
from django.core import mail
from django.db import connection, migrations
from django.db.migrations.state import ModelState, ProjectState
from django.db.models.signals import post_save
from django.test import SimpleTestCase, TestCase, TransactionTestCase, override_settings
from .models import CustomEmailField, IntegerUsernameUser
class NaturalKeysTestCase(TestCase):
    """Natural-key lookups for the built-in User and Group models."""

    def test_user_natural_key(self):
        staff_user = User.objects.create_user(username="staff")
        fetched = User.objects.get_by_natural_key("staff")
        self.assertEqual(fetched, staff_user)
        self.assertEqual(staff_user.natural_key(), ("staff",))

    def test_group_natural_key(self):
        users_group = Group.objects.create(name="users")
        fetched = Group.objects.get_by_natural_key("users")
        self.assertEqual(fetched, users_group)
class LoadDataWithoutNaturalKeysTestCase(TestCase):
    """Fixture loading using plain primary-key references."""

    fixtures = ["regular.json"]

    def test_user_is_created_and_added_to_group(self):
        fixture_user = User.objects.get(username="my_username")
        fixture_group = Group.objects.get(name="my_group")
        self.assertEqual(fixture_group, fixture_user.groups.get())
class LoadDataWithNaturalKeysTestCase(TestCase):
    """Fixture loading using natural-key references."""

    fixtures = ["natural.json"]

    def test_user_is_created_and_added_to_group(self):
        fixture_user = User.objects.get(username="my_username")
        fixture_group = Group.objects.get(name="my_group")
        self.assertEqual(fixture_group, fixture_user.groups.get())
class LoadDataWithNaturalKeysAndMultipleDatabasesTestCase(TestCase):
    """
    Natural-key resolution for permissions must use each database's own
    ContentType rows, not the default database's.
    """

    databases = {"default", "other"}

    def test_load_data_with_user_permissions(self):
        # Create test contenttypes for both databases.
        # The creation order is deliberately different per database so the
        # rows get different primary keys - a lookup that wrongly used the
        # default database would resolve to the wrong content type.
        default_objects = [
            ContentType.objects.db_manager("default").create(
                model="examplemodela",
                app_label="app_a",
            ),
            ContentType.objects.db_manager("default").create(
                model="examplemodelb",
                app_label="app_b",
            ),
        ]
        other_objects = [
            ContentType.objects.db_manager("other").create(
                model="examplemodelb",
                app_label="app_b",
            ),
            ContentType.objects.db_manager("other").create(
                model="examplemodela",
                app_label="app_a",
            ),
        ]
        # Now we create the test UserPermission
        Permission.objects.db_manager("default").create(
            name="Can delete example model b",
            codename="delete_examplemodelb",
            content_type=default_objects[1],
        )
        Permission.objects.db_manager("other").create(
            name="Can delete example model b",
            codename="delete_examplemodelb",
            content_type=other_objects[0],
        )
        perm_default = Permission.objects.get_by_natural_key(
            "delete_examplemodelb",
            "app_b",
            "examplemodelb",
        )
        perm_other = Permission.objects.db_manager("other").get_by_natural_key(
            "delete_examplemodelb",
            "app_b",
            "examplemodelb",
        )
        # Each lookup resolved against its own database's content type row.
        self.assertEqual(perm_default.content_type_id, default_objects[1].id)
        self.assertEqual(perm_other.content_type_id, other_objects[0].id)
class UserManagerTestCase(TransactionTestCase):
    """UserManager creation helpers, email normalization, and validation."""

    available_apps = [
        "auth_tests",
        "django.contrib.auth",
        "django.contrib.contenttypes",
    ]

    def test_create_user(self):
        """create_user() without a password leaves it unusable."""
        email_lowercase = "[email protected]"
        user = User.objects.create_user("user", email_lowercase)
        self.assertEqual(user.email, email_lowercase)
        self.assertEqual(user.username, "user")
        self.assertFalse(user.has_usable_password())

    def test_create_user_email_domain_normalize_rfc3696(self):
        # According to https://tools.ietf.org/html/rfc3696#section-3
        # the "@" symbol can be part of the local part of an email address
        returned = UserManager.normalize_email(r"Abc\@[email protected]")
        self.assertEqual(returned, r"Abc\@[email protected]")

    def test_create_user_email_domain_normalize(self):
        """Only the domain part of the address is lowercased."""
        returned = UserManager.normalize_email("[email protected]")
        self.assertEqual(returned, "[email protected]")

    def test_create_user_email_domain_normalize_with_whitespace(self):
        """An escaped space in the local part is preserved."""
        returned = UserManager.normalize_email(r"email\ [email protected]")
        self.assertEqual(returned, r"email\ [email protected]")

    def test_empty_username(self):
        with self.assertRaisesMessage(ValueError, "The given username must be set"):
            User.objects.create_user(username="")

    def test_create_user_is_staff(self):
        """Extra field kwargs such as is_staff are passed through."""
        email = "[email protected]"
        user = User.objects.create_user("user", email, is_staff=True)
        self.assertEqual(user.email, email)
        self.assertEqual(user.username, "user")
        self.assertTrue(user.is_staff)

    def test_create_super_user_raises_error_on_false_is_superuser(self):
        with self.assertRaisesMessage(
            ValueError, "Superuser must have is_superuser=True."
        ):
            User.objects.create_superuser(
                username="test",
                email="[email protected]",
                password="test",
                is_superuser=False,
            )

    def test_create_superuser_raises_error_on_false_is_staff(self):
        with self.assertRaisesMessage(ValueError, "Superuser must have is_staff=True."):
            User.objects.create_superuser(
                username="test",
                email="[email protected]",
                password="test",
                is_staff=False,
            )

    def test_make_random_password(self):
        """Generated passwords honor the length and allowed characters."""
        allowed_chars = "abcdefg"
        password = UserManager().make_random_password(5, allowed_chars)
        self.assertEqual(len(password), 5)
        for char in password:
            self.assertIn(char, allowed_chars)

    def test_runpython_manager_methods(self):
        """Manager methods work on historical models inside RunPython."""

        def forwards(apps, schema_editor):
            UserModel = apps.get_model("auth", "User")
            user = UserModel.objects.create_user("user1", password="secure")
            self.assertIsInstance(user, UserModel)

        operation = migrations.RunPython(forwards, migrations.RunPython.noop)
        project_state = ProjectState()
        project_state.add_model(ModelState.from_model(User))
        project_state.add_model(ModelState.from_model(Group))
        project_state.add_model(ModelState.from_model(Permission))
        project_state.add_model(ModelState.from_model(ContentType))
        new_state = project_state.clone()
        with connection.schema_editor() as editor:
            operation.state_forwards("test_manager_methods", new_state)
            operation.database_forwards(
                "test_manager_methods",
                editor,
                project_state,
                new_state,
            )
        user = User.objects.get(username="user1")
        self.assertTrue(user.check_password("secure"))
class AbstractBaseUserTests(SimpleTestCase):
    """AbstractBaseUser password, username-normalization, and email hooks."""

    def test_has_usable_password(self):
        """
        Passwords are usable even if they don't correspond to a hasher in
        settings.PASSWORD_HASHERS.
        """
        self.assertIs(User(password="some-gibbberish").has_usable_password(), True)

    def test_normalize_username(self):
        # Non-string usernames are passed through unchanged.
        self.assertEqual(IntegerUsernameUser().normalize_username(123), 123)

    def test_clean_normalize_username(self):
        # The normalization happens in AbstractBaseUser.clean()
        ohm_username = "iamtheΩ"  # U+2126 OHM SIGN
        for model in ("auth.User", "auth_tests.CustomUser"):
            with self.subTest(model=model), self.settings(AUTH_USER_MODEL=model):
                User = get_user_model()
                user = User(**{User.USERNAME_FIELD: ohm_username, "password": "foo"})
                user.clean()
                username = user.get_username()
                # NFKC normalization maps OHM SIGN to the ordinary omega.
                self.assertNotEqual(username, ohm_username)
                self.assertEqual(
                    username, "iamtheΩ"
                )  # U+03A9 GREEK CAPITAL LETTER OMEGA

    def test_default_email(self):
        self.assertEqual(AbstractBaseUser.get_email_field_name(), "email")

    def test_custom_email(self):
        # CustomEmailField declares EMAIL_FIELD = "email_address".
        user = CustomEmailField()
        self.assertEqual(user.get_email_field_name(), "email_address")
class AbstractUserTestCase(TestCase):
    """AbstractUser email helpers and password-change signalling."""

    def test_email_user(self):
        # valid send_mail parameters
        kwargs = {
            "fail_silently": False,
            "auth_user": None,
            "auth_password": None,
            "connection": None,
            "html_message": None,
        }
        user = User(email="[email protected]")
        user.email_user(
            subject="Subject here",
            message="This is a message",
            from_email="[email protected]",
            **kwargs,
        )
        self.assertEqual(len(mail.outbox), 1)
        message = mail.outbox[0]
        self.assertEqual(message.subject, "Subject here")
        self.assertEqual(message.body, "This is a message")
        self.assertEqual(message.from_email, "[email protected]")
        self.assertEqual(message.to, [user.email])

    def test_last_login_default(self):
        """New users start with last_login unset."""
        user1 = User.objects.create(username="user1")
        self.assertIsNone(user1.last_login)
        user2 = User.objects.create_user(username="user2")
        self.assertIsNone(user2.last_login)

    def test_user_clean_normalize_email(self):
        """clean() lowercases the email domain."""
        user = User(username="user", password="foo", email="[email protected]")
        user.clean()
        self.assertEqual(user.email, "[email protected]")

    def test_user_double_save(self):
        """
        Calling user.save() twice should trigger password_changed() once.
        """
        user = User.objects.create_user(username="user", password="foo")
        user.set_password("bar")
        with mock.patch(
            "django.contrib.auth.password_validation.password_changed"
        ) as pw_changed:
            user.save()
            self.assertEqual(pw_changed.call_count, 1)
            # Second save: password unchanged, so no second notification.
            user.save()
            self.assertEqual(pw_changed.call_count, 1)

    @override_settings(PASSWORD_HASHERS=PASSWORD_HASHERS)
    def test_check_password_upgrade(self):
        """
        password_changed() shouldn't be called if User.check_password()
        triggers a hash iteration upgrade.
        """
        user = User.objects.create_user(username="user", password="foo")
        initial_password = user.password
        self.assertTrue(user.check_password("foo"))
        hasher = get_hasher("default")
        self.assertEqual("pbkdf2_sha256", hasher.algorithm)
        old_iterations = hasher.iterations
        try:
            # Upgrade the password iterations
            hasher.iterations = old_iterations + 1
            with mock.patch(
                "django.contrib.auth.password_validation.password_changed"
            ) as pw_changed:
                user.check_password("foo")
                self.assertEqual(pw_changed.call_count, 0)
            # The stored hash was rewritten with the new iteration count.
            self.assertNotEqual(initial_password, user.password)
        finally:
            # Restore the shared hasher so other tests are unaffected.
            hasher.iterations = old_iterations
class CustomModelBackend(ModelBackend):
    """Backend whose with_perm() filters users by name instead of permission."""

    def with_perm(
        self, perm, is_active=True, include_superusers=True, backend=None, obj=None
    ):
        # When probing the "charliebrown" object, answer with exactly that
        # user; otherwise answer with everyone named "charlie...".
        if obj is None or obj.username != "charliebrown":
            return User.objects.filter(username__startswith="charlie")
        return User.objects.filter(pk=obj.pk)
class UserWithPermTestCase(TestCase):
@classmethod
def setUpTestData(cls):
content_type = ContentType.objects.get_for_model(Group)
cls.permission = Permission.objects.create(
name="test",
content_type=content_type,
codename="test",
)
# User with permission.
cls.user1 = User.objects.create_user("user 1", "[email protected]")
cls.user1.user_permissions.add(cls.permission)
# User with group permission.
group1 = Group.objects.create(name="group 1")
group1.permissions.add(cls.permission)
group2 = Group.objects.create(name="group 2")
group2.permissions.add(cls.permission)
cls.user2 = User.objects.create_user("user 2", "[email protected]")
cls.user2.groups.add(group1, group2)
# Users without permissions.
cls.user_charlie = User.objects.create_user("charlie", "[email protected]")
cls.user_charlie_b = User.objects.create_user(
"charliebrown", "[email protected]"
)
# Superuser.
cls.superuser = User.objects.create_superuser(
"superuser",
"[email protected]",
"superpassword",
)
# Inactive user with permission.
cls.inactive_user = User.objects.create_user(
"inactive_user",
"[email protected]",
is_active=False,
)
cls.inactive_user.user_permissions.add(cls.permission)
def test_invalid_permission_name(self):
msg = "Permission name should be in the form app_label.permission_codename."
for perm in ("nodots", "too.many.dots", "...", ""):
with self.subTest(perm), self.assertRaisesMessage(ValueError, msg):
User.objects.with_perm(perm)
def test_invalid_permission_type(self):
msg = "The `perm` argument must be a string or a permission instance."
for perm in (b"auth.test", object(), None):
with self.subTest(perm), self.assertRaisesMessage(TypeError, msg):
User.objects.with_perm(perm)
def test_invalid_backend_type(self):
msg = "backend must be a dotted import path string (got %r)."
for backend in (b"auth_tests.CustomModelBackend", object()):
with self.subTest(backend):
with self.assertRaisesMessage(TypeError, msg % backend):
User.objects.with_perm("auth.test", backend=backend)
def test_basic(self):
active_users = [self.user1, self.user2]
tests = [
({}, [*active_users, self.superuser]),
({"obj": self.user1}, []),
# Only inactive users.
({"is_active": False}, [self.inactive_user]),
# All users.
({"is_active": None}, [*active_users, self.superuser, self.inactive_user]),
# Exclude superusers.
({"include_superusers": False}, active_users),
(
{"include_superusers": False, "is_active": False},
[self.inactive_user],
),
(
{"include_superusers": False, "is_active": None},
[*active_users, self.inactive_user],
),
]
for kwargs, expected_users in tests:
for perm in ("auth.test", self.permission):
with self.subTest(perm=perm, **kwargs):
self.assertCountEqual(
User.objects.with_perm(perm, **kwargs),
expected_users,
)
@override_settings(
AUTHENTICATION_BACKENDS=["django.contrib.auth.backends.BaseBackend"]
)
def test_backend_without_with_perm(self):
self.assertSequenceEqual(User.objects.with_perm("auth.test"), [])
def test_nonexistent_permission(self):
self.assertSequenceEqual(User.objects.with_perm("auth.perm"), [self.superuser])
def test_nonexistent_backend(self):
with self.assertRaises(ImportError):
User.objects.with_perm(
"auth.test",
backend="invalid.backend.CustomModelBackend",
)
    @override_settings(
        AUTHENTICATION_BACKENDS=["auth_tests.test_models.CustomModelBackend"]
    )
    def test_custom_backend(self):
        """with_perm() delegates to the single configured custom backend."""
        for perm in ("auth.test", self.permission):
            with self.subTest(perm):
                self.assertCountEqual(
                    User.objects.with_perm(perm),
                    [self.user_charlie, self.user_charlie_b],
                )
    @override_settings(
        AUTHENTICATION_BACKENDS=["auth_tests.test_models.CustomModelBackend"]
    )
    def test_custom_backend_pass_obj(self):
        """The `obj` argument is forwarded to the custom backend."""
        for perm in ("auth.test", self.permission):
            with self.subTest(perm):
                self.assertSequenceEqual(
                    User.objects.with_perm(perm, obj=self.user_charlie_b),
                    [self.user_charlie_b],
                )
    @override_settings(
        AUTHENTICATION_BACKENDS=[
            "auth_tests.test_models.CustomModelBackend",
            "django.contrib.auth.backends.ModelBackend",
        ]
    )
    def test_multiple_backends(self):
        """With several backends configured, `backend` must be explicit."""
        msg = (
            "You have multiple authentication backends configured and "
            "therefore must provide the `backend` argument."
        )
        with self.assertRaisesMessage(ValueError, msg):
            User.objects.with_perm("auth.test")
        # Naming a backend resolves the ambiguity.
        backend = "auth_tests.test_models.CustomModelBackend"
        self.assertCountEqual(
            User.objects.with_perm("auth.test", backend=backend),
            [self.user_charlie, self.user_charlie_b],
        )
class IsActiveTestCase(TestCase):
    """
    Tests the behavior of the guaranteed is_active attribute
    """

    def test_builtin_user_isactive(self):
        """On the default User model, is_active is a real, persisted field."""
        user = User.objects.create(username="foo", email="[email protected]")
        # is_active is true by default
        self.assertIs(user.is_active, True)
        user.is_active = False
        user.save()
        user_fetched = User.objects.get(pk=user.pk)
        # the is_active flag is saved
        self.assertFalse(user_fetched.is_active)

    @override_settings(AUTH_USER_MODEL="auth_tests.IsActiveTestUser1")
    def test_is_active_field_default(self):
        """
        tests that the default value for is_active is provided
        """
        UserModel = get_user_model()
        user = UserModel(username="foo")
        self.assertIs(user.is_active, True)
        # you can set the attribute - but it will not save
        user.is_active = False
        # there should be no problem saving - but the attribute is not saved
        user.save()
        user_fetched = UserModel._default_manager.get(pk=user.pk)
        # the attribute is always true for newly retrieved instance
        self.assertIs(user_fetched.is_active, True)
class TestCreateSuperUserSignals(TestCase):
    """
    Simple test case for ticket #20541
    """

    def post_save_listener(self, *args, **kwargs):
        # Count every post_save emitted for User during a test.
        self.signals_count += 1

    def setUp(self):
        self.signals_count = 0
        post_save.connect(self.post_save_listener, sender=User)

    def tearDown(self):
        # Disconnect so the listener doesn't leak into other tests.
        post_save.disconnect(self.post_save_listener, sender=User)

    def test_create_user(self):
        """create_user() triggers exactly one post_save (a single save)."""
        User.objects.create_user("JohnDoe")
        self.assertEqual(self.signals_count, 1)

    def test_create_superuser(self):
        """create_superuser() triggers exactly one post_save (a single save)."""
        User.objects.create_superuser("JohnDoe", "[email protected]", "1")
        self.assertEqual(self.signals_count, 1)
class AnonymousUserTests(SimpleTestCase):
    """AnonymousUser mimics the User API but has no database representation;
    all persistence methods must raise NotImplementedError."""

    no_repr_msg = "Django doesn't provide a DB representation for AnonymousUser."

    def setUp(self):
        self.user = AnonymousUser()

    def test_properties(self):
        """AnonymousUser exposes the full read-only User attribute surface."""
        self.assertIsNone(self.user.pk)
        self.assertEqual(self.user.username, "")
        self.assertEqual(self.user.get_username(), "")
        self.assertIs(self.user.is_anonymous, True)
        self.assertIs(self.user.is_authenticated, False)
        self.assertIs(self.user.is_staff, False)
        self.assertIs(self.user.is_active, False)
        self.assertIs(self.user.is_superuser, False)
        self.assertEqual(self.user.groups.count(), 0)
        self.assertEqual(self.user.user_permissions.count(), 0)
        self.assertEqual(self.user.get_user_permissions(), set())
        self.assertEqual(self.user.get_group_permissions(), set())

    def test_str(self):
        self.assertEqual(str(self.user), "AnonymousUser")

    def test_eq(self):
        # Any two AnonymousUser instances compare equal; never equal to a User.
        self.assertEqual(self.user, AnonymousUser())
        self.assertNotEqual(self.user, User("super", "[email protected]", "super"))

    def test_hash(self):
        self.assertEqual(hash(self.user), 1)

    def test_int(self):
        msg = (
            "Cannot cast AnonymousUser to int. Are you trying to use it in "
            "place of User?"
        )
        with self.assertRaisesMessage(TypeError, msg):
            int(self.user)

    def test_delete(self):
        with self.assertRaisesMessage(NotImplementedError, self.no_repr_msg):
            self.user.delete()

    def test_save(self):
        with self.assertRaisesMessage(NotImplementedError, self.no_repr_msg):
            self.user.save()

    def test_set_password(self):
        with self.assertRaisesMessage(NotImplementedError, self.no_repr_msg):
            self.user.set_password("password")

    def test_check_password(self):
        with self.assertRaisesMessage(NotImplementedError, self.no_repr_msg):
            self.user.check_password("password")
class GroupTests(SimpleTestCase):
    """Unit tests for the Group model's string representation."""

    def test_str(self):
        """str(group) is simply the group's name."""
        group = Group(name="Users")
        self.assertEqual(str(group), "Users")
class PermissionTests(TestCase):
    """Tests for the Permission model's string representation."""

    def test_str(self):
        # str(permission) renders as
        # "<app_label> | <model verbose name> | <permission name>".
        p = Permission.objects.get(codename="view_customemailfield")
        self.assertEqual(
            str(p), "auth_tests | custom email field | Can view custom email field"
        )
|
7d054bbad37cb84833c8387112dcd237be115ddc5311a7d36f12d2c7f51be9b7 | from django.contrib.auth import authenticate
from django.contrib.auth.context_processors import PermLookupDict, PermWrapper
from django.contrib.auth.models import Permission, User
from django.contrib.contenttypes.models import ContentType
from django.db.models import Q
from django.test import SimpleTestCase, TestCase, override_settings
from .settings import AUTH_MIDDLEWARE, AUTH_TEMPLATES
class MockUser:
    """Stand-in user that grants exactly one app label ("mockapp") and
    exactly one permission ("mockapp.someperm")."""

    _APP_LABEL = "mockapp"
    _PERMISSION = "mockapp.someperm"

    def __repr__(self):
        return "MockUser()"

    def has_module_perms(self, perm):
        # True only for the single mocked app label.
        return perm == self._APP_LABEL

    def has_perm(self, perm, obj=None):
        # True only for the single mocked permission; `obj` is ignored.
        return perm == self._PERMISSION
class PermWrapperTests(SimpleTestCase):
    """
    Test some details of the PermWrapper implementation.
    """

    class EQLimiterObject:
        """
        This object makes sure __eq__ will not be called endlessly.
        """

        def __init__(self):
            # Number of equality comparisons performed so far.
            self.eq_calls = 0

        def __eq__(self, other):
            # False on the first comparison, True on any later one — a second
            # call would indicate the endless loop guarded against below.
            if self.eq_calls > 0:
                return True
            self.eq_calls += 1
            return False

    def test_repr(self):
        perms = PermWrapper(MockUser())
        self.assertEqual(repr(perms), "PermWrapper(MockUser())")

    def test_permwrapper_in(self):
        """
        'something' in PermWrapper works as expected.
        """
        perms = PermWrapper(MockUser())
        # Works for modules and full permissions.
        self.assertIn("mockapp", perms)
        self.assertNotIn("nonexistent", perms)
        self.assertIn("mockapp.someperm", perms)
        self.assertNotIn("mockapp.nonexistent", perms)

    def test_permlookupdict_in(self):
        """
        No endless loops if accessed with 'in' - refs #18979.
        """
        pldict = PermLookupDict(MockUser(), "mockapp")
        with self.assertRaises(TypeError):
            self.EQLimiterObject() in pldict

    def test_iter(self):
        with self.assertRaisesMessage(TypeError, "PermWrapper is not iterable."):
            iter(PermWrapper(MockUser()))
@override_settings(ROOT_URLCONF="auth_tests.urls", TEMPLATES=AUTH_TEMPLATES)
class AuthContextProcessorTests(TestCase):
    """
    Tests for the ``django.contrib.auth.context_processors.auth`` processor
    """

    @classmethod
    def setUpTestData(cls):
        cls.superuser = User.objects.create_superuser(
            username="super", password="secret", email="[email protected]"
        )

    @override_settings(MIDDLEWARE=AUTH_MIDDLEWARE)
    def test_session_not_accessed(self):
        """
        The session is not accessed simply by including
        the auth context processor
        """
        response = self.client.get("/auth_processor_no_attr_access/")
        self.assertContains(response, "Session not accessed")

    @override_settings(MIDDLEWARE=AUTH_MIDDLEWARE)
    def test_session_is_accessed(self):
        """
        The session is accessed if the auth context processor
        is used and relevant attributes accessed.
        """
        response = self.client.get("/auth_processor_attr_access/")
        self.assertContains(response, "Session accessed")

    def test_perms_attrs(self):
        """Template `perms` lookups reflect the user's granted permissions."""
        u = User.objects.create_user(username="normal", password="secret")
        u.user_permissions.add(
            Permission.objects.get(
                content_type=ContentType.objects.get_for_model(Permission),
                codename="add_permission",
            )
        )
        self.client.force_login(u)
        response = self.client.get("/auth_processor_perms/")
        self.assertContains(response, "Has auth permissions")
        self.assertContains(response, "Has auth.add_permission permissions")
        self.assertNotContains(response, "nonexistent")

    def test_perm_in_perms_attrs(self):
        """`'app.perm' in perms` works in templates for granted permissions."""
        u = User.objects.create_user(username="normal", password="secret")
        u.user_permissions.add(
            Permission.objects.get(
                content_type=ContentType.objects.get_for_model(Permission),
                codename="add_permission",
            )
        )
        self.client.login(username="normal", password="secret")
        response = self.client.get("/auth_processor_perm_in_perms/")
        self.assertContains(response, "Has auth permissions")
        self.assertContains(response, "Has auth.add_permission permissions")
        self.assertNotContains(response, "nonexistent")

    def test_message_attrs(self):
        """Messages are available alongside the auth context."""
        self.client.force_login(self.superuser)
        response = self.client.get("/auth_processor_messages/")
        self.assertContains(response, "Message 1")

    def test_user_attrs(self):
        """
        The lazy objects returned behave just like the wrapped objects.
        """
        # These are 'functional' level tests for common use cases.  Direct
        # testing of the implementation (SimpleLazyObject) is in the 'utils'
        # tests.
        self.client.login(username="super", password="secret")
        user = authenticate(username="super", password="secret")
        response = self.client.get("/auth_processor_user/")
        self.assertContains(response, "unicode: super")
        self.assertContains(response, "id: %d" % self.superuser.pk)
        self.assertContains(response, "username: super")
        # bug #12037 is tested by the {% url %} in the template:
        self.assertContains(response, "url: /userpage/super/")
        # A Q() comparing a user and with another Q() (in an AND or OR fashion).
        Q(user=response.context["user"]) & Q(someflag=True)
        # Tests for user equality.  This is hard because User defines
        # equality in a non-duck-typing way
        # See bug #12060
        self.assertEqual(response.context["user"], user)
        self.assertEqual(user, response.context["user"])
|
070d64cfe181e643a3869f9b3c650ceca99cb0c7846cf1db7ad147ec6cf3ec5c | from unittest import mock
from django.contrib.auth import models
from django.contrib.auth.mixins import (
LoginRequiredMixin,
PermissionRequiredMixin,
UserPassesTestMixin,
)
from django.contrib.auth.models import AnonymousUser
from django.core.exceptions import PermissionDenied
from django.http import HttpResponse
from django.test import RequestFactory, SimpleTestCase, TestCase
from django.views.generic import View
class AlwaysTrueMixin(UserPassesTestMixin):
    """Access mixin whose test always grants access."""

    def test_func(self):
        return True
class AlwaysFalseMixin(UserPassesTestMixin):
    """Access mixin whose test always denies access."""

    def test_func(self):
        return False
class EmptyResponseView(View):
    """Minimal view returning an empty 200 response for GET."""

    def get(self, request, *args, **kwargs):
        return HttpResponse()
class AlwaysTrueView(AlwaysTrueMixin, EmptyResponseView):
    """Gated view whose access test always passes."""

    pass
class AlwaysFalseView(AlwaysFalseMixin, EmptyResponseView):
    """Gated view whose access test always fails."""

    pass
class StackedMixinsView1(
    LoginRequiredMixin, PermissionRequiredMixin, EmptyResponseView
):
    """Login check applied before the permission check (MRO order)."""

    permission_required = ["auth_tests.add_customuser", "auth_tests.change_customuser"]
    raise_exception = True
class StackedMixinsView2(
    PermissionRequiredMixin, LoginRequiredMixin, EmptyResponseView
):
    """Permission check applied before the login check (reversed MRO order)."""

    permission_required = ["auth_tests.add_customuser", "auth_tests.change_customuser"]
    raise_exception = True
class AccessMixinTests(TestCase):
    """Tests for AccessMixin behavior via stacked LoginRequired/
    PermissionRequired mixins, in both stacking orders."""

    factory = RequestFactory()

    def test_stacked_mixins_success(self):
        """A user holding both required permissions passes either stacking."""
        user = models.User.objects.create(username="joe", password="qwerty")
        perms = models.Permission.objects.filter(
            codename__in=("add_customuser", "change_customuser")
        )
        user.user_permissions.add(*perms)
        request = self.factory.get("/rand")
        request.user = user
        view = StackedMixinsView1.as_view()
        response = view(request)
        self.assertEqual(response.status_code, 200)
        view = StackedMixinsView2.as_view()
        response = view(request)
        self.assertEqual(response.status_code, 200)

    def test_stacked_mixins_missing_permission(self):
        """Holding only one of two required permissions is denied."""
        user = models.User.objects.create(username="joe", password="qwerty")
        perms = models.Permission.objects.filter(codename__in=("add_customuser",))
        user.user_permissions.add(*perms)
        request = self.factory.get("/rand")
        request.user = user
        view = StackedMixinsView1.as_view()
        with self.assertRaises(PermissionDenied):
            view(request)
        view = StackedMixinsView2.as_view()
        with self.assertRaises(PermissionDenied):
            view(request)

    def test_access_mixin_permission_denied_response(self):
        """Denial raises for authenticated users, redirects anonymous ones."""
        user = models.User.objects.create(username="joe", password="qwerty")
        # Authenticated users receive PermissionDenied.
        request = self.factory.get("/rand")
        request.user = user
        view = AlwaysFalseView.as_view()
        with self.assertRaises(PermissionDenied):
            view(request)
        # Anonymous users are redirected to the login page.
        request.user = AnonymousUser()
        response = view(request)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response.url, "/accounts/login/?next=/rand")

    def test_access_mixin_permission_denied_remote_login_url(self):
        """An absolute login_url gets an absolute, URL-encoded `next`."""
        class AView(AlwaysFalseView):
            login_url = "https://www.remote.example.com/login"

        view = AView.as_view()
        request = self.factory.get("/rand")
        request.user = AnonymousUser()
        response = view(request)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(
            response.url,
            "https://www.remote.example.com/login?next=http%3A//testserver/rand",
        )

    @mock.patch.object(models.User, "is_authenticated", False)
    def test_stacked_mixins_not_logged_in(self):
        """Permissions don't help an unauthenticated user; access is denied."""
        user = models.User.objects.create(username="joe", password="qwerty")
        perms = models.Permission.objects.filter(
            codename__in=("add_customuser", "change_customuser")
        )
        user.user_permissions.add(*perms)
        request = self.factory.get("/rand")
        request.user = user
        view = StackedMixinsView1.as_view()
        with self.assertRaises(PermissionDenied):
            view(request)
        view = StackedMixinsView2.as_view()
        with self.assertRaises(PermissionDenied):
            view(request)
class UserPassesTestTests(SimpleTestCase):
    """Tests for UserPassesTestMixin redirect and exception behavior."""

    factory = RequestFactory()

    def _test_redirect(self, view=None, url="/accounts/login/?next=/rand"):
        # Shared helper: a denied anonymous request redirects to `url`.
        if not view:
            view = AlwaysFalseView.as_view()
        request = self.factory.get("/rand")
        request.user = AnonymousUser()
        response = view(request)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response.url, url)

    def test_default(self):
        """Default redirect goes to settings.LOGIN_URL with ?next=."""
        self._test_redirect()

    def test_custom_redirect_url(self):
        """`login_url` overrides the redirect target."""
        class AView(AlwaysFalseView):
            login_url = "/login/"

        self._test_redirect(AView.as_view(), "/login/?next=/rand")

    def test_custom_redirect_parameter(self):
        """`redirect_field_name` renames the ?next= query parameter."""
        class AView(AlwaysFalseView):
            redirect_field_name = "goto"

        self._test_redirect(AView.as_view(), "/accounts/login/?goto=/rand")

    def test_no_redirect_parameter(self):
        """`redirect_field_name = None` drops the query parameter entirely."""
        class AView(AlwaysFalseView):
            redirect_field_name = None

        self._test_redirect(AView.as_view(), "/accounts/login/")

    def test_raise_exception(self):
        """`raise_exception = True` raises instead of redirecting."""
        class AView(AlwaysFalseView):
            raise_exception = True

        request = self.factory.get("/rand")
        request.user = AnonymousUser()
        with self.assertRaises(PermissionDenied):
            AView.as_view()(request)

    def test_raise_exception_custom_message(self):
        """`permission_denied_message` becomes the exception message."""
        msg = "You don't have access here"

        class AView(AlwaysFalseView):
            raise_exception = True
            permission_denied_message = msg

        request = self.factory.get("/rand")
        request.user = AnonymousUser()
        view = AView.as_view()
        with self.assertRaisesMessage(PermissionDenied, msg):
            view(request)

    def test_raise_exception_custom_message_function(self):
        """get_permission_denied_message() may compute the message."""
        msg = "You don't have access here"

        class AView(AlwaysFalseView):
            raise_exception = True

            def get_permission_denied_message(self):
                return msg

        request = self.factory.get("/rand")
        request.user = AnonymousUser()
        view = AView.as_view()
        with self.assertRaisesMessage(PermissionDenied, msg):
            view(request)

    def test_user_passes(self):
        """A passing test_func lets the request through untouched."""
        view = AlwaysTrueView.as_view()
        request = self.factory.get("/rand")
        request.user = AnonymousUser()
        response = view(request)
        self.assertEqual(response.status_code, 200)
class LoginRequiredMixinTests(TestCase):
    """Tests for the LoginRequiredMixin class-based-view mixin."""

    factory = RequestFactory()

    @classmethod
    def setUpTestData(cls):
        cls.user = models.User.objects.create(username="joe", password="qwerty")

    def test_login_required(self):
        """
        login_required works on a simple view wrapped in a login_required
        decorator.
        """
        class AView(LoginRequiredMixin, EmptyResponseView):
            pass

        view = AView.as_view()
        request = self.factory.get("/rand")
        request.user = AnonymousUser()
        response = view(request)
        # Anonymous: redirected to login with the original path as ?next=.
        self.assertEqual(response.status_code, 302)
        self.assertEqual("/accounts/login/?next=/rand", response.url)
        # Authenticated: request succeeds.
        request = self.factory.get("/rand")
        request.user = self.user
        response = view(request)
        self.assertEqual(response.status_code, 200)
class PermissionsRequiredMixinTests(TestCase):
    """Tests for the PermissionRequiredMixin class-based-view mixin."""

    factory = RequestFactory()

    @classmethod
    def setUpTestData(cls):
        # Fixture user holds add_customuser and change_customuser.
        cls.user = models.User.objects.create(username="joe", password="qwerty")
        perms = models.Permission.objects.filter(
            codename__in=("add_customuser", "change_customuser")
        )
        cls.user.user_permissions.add(*perms)

    def test_many_permissions_pass(self):
        """A list of permissions, all held by the user, grants access."""
        class AView(PermissionRequiredMixin, EmptyResponseView):
            permission_required = [
                "auth_tests.add_customuser",
                "auth_tests.change_customuser",
            ]

        request = self.factory.get("/rand")
        request.user = self.user
        resp = AView.as_view()(request)
        self.assertEqual(resp.status_code, 200)

    def test_single_permission_pass(self):
        """permission_required may be a single string."""
        class AView(PermissionRequiredMixin, EmptyResponseView):
            permission_required = "auth_tests.add_customuser"

        request = self.factory.get("/rand")
        request.user = self.user
        resp = AView.as_view()(request)
        self.assertEqual(resp.status_code, 200)

    def test_permissioned_denied_redirect(self):
        """Denial raises for authenticated users, redirects anonymous ones."""
        class AView(PermissionRequiredMixin, EmptyResponseView):
            permission_required = [
                "auth_tests.add_customuser",
                "auth_tests.change_customuser",
                "nonexistent-permission",
            ]

        # Authenticated users receive PermissionDenied.
        request = self.factory.get("/rand")
        request.user = self.user
        with self.assertRaises(PermissionDenied):
            AView.as_view()(request)
        # Anonymous users are redirected to the login page.
        request.user = AnonymousUser()
        resp = AView.as_view()(request)
        self.assertEqual(resp.status_code, 302)

    def test_permissioned_denied_exception_raised(self):
        """`raise_exception = True` raises even for authenticated users."""
        class AView(PermissionRequiredMixin, EmptyResponseView):
            permission_required = [
                "auth_tests.add_customuser",
                "auth_tests.change_customuser",
                "nonexistent-permission",
            ]
            raise_exception = True

        request = self.factory.get("/rand")
        request.user = self.user
        with self.assertRaises(PermissionDenied):
            AView.as_view()(request)
|
60c88d29eb41beed9699ac563d36b51cd15071cd98251fcd1c978216e6cef1aa | from django.contrib.auth import get_user, get_user_model
from django.contrib.auth.models import AnonymousUser, User
from django.core.exceptions import ImproperlyConfigured
from django.db import IntegrityError
from django.http import HttpRequest
from django.test import TestCase, override_settings
from django.utils import translation
from .models import CustomUser
class BasicTestCase(TestCase):
    """Basic User model behavior: creation, passwords, flags, and the
    swappable AUTH_USER_MODEL machinery."""

    def test_user(self):
        "Users can be created and can set their password"
        u = User.objects.create_user("testuser", "[email protected]", "testpw")
        self.assertTrue(u.has_usable_password())
        self.assertFalse(u.check_password("bad"))
        self.assertTrue(u.check_password("testpw"))
        # Check we can manually set an unusable password
        u.set_unusable_password()
        u.save()
        self.assertFalse(u.check_password("testpw"))
        self.assertFalse(u.has_usable_password())
        u.set_password("testpw")
        self.assertTrue(u.check_password("testpw"))
        # set_password(None) marks the password unusable.
        u.set_password(None)
        self.assertFalse(u.has_usable_password())
        # Check username getter
        self.assertEqual(u.get_username(), "testuser")
        # Check authentication/permissions
        self.assertFalse(u.is_anonymous)
        self.assertTrue(u.is_authenticated)
        self.assertFalse(u.is_staff)
        self.assertTrue(u.is_active)
        self.assertFalse(u.is_superuser)
        # Check API-based user creation with no password
        u2 = User.objects.create_user("testuser2", "[email protected]")
        self.assertFalse(u2.has_usable_password())

    def test_unicode_username(self):
        """Usernames are NFKC-normalized, so visually-equivalent Unicode
        usernames collide."""
        User.objects.create_user("jörg")
        User.objects.create_user("Григорий")
        # Two equivalent Unicode normalized usernames are duplicates.
        omega_username = "iamtheΩ"  # U+03A9 GREEK CAPITAL LETTER OMEGA
        ohm_username = "iamtheΩ"  # U+2126 OHM SIGN
        User.objects.create_user(ohm_username)
        with self.assertRaises(IntegrityError):
            User.objects.create_user(omega_username)

    def test_user_no_email(self):
        "Users can be created without an email"
        cases = [
            {},
            {"email": ""},
            {"email": None},
        ]
        for i, kwargs in enumerate(cases):
            with self.subTest(**kwargs):
                u = User.objects.create_user("testuser{}".format(i), **kwargs)
                # Missing/empty/None email all normalize to "".
                self.assertEqual(u.email, "")

    def test_superuser(self):
        "Check the creation and properties of a superuser"
        super = User.objects.create_superuser("super", "[email protected]", "super")
        self.assertTrue(super.is_superuser)
        self.assertTrue(super.is_active)
        self.assertTrue(super.is_staff)

    def test_superuser_no_email_or_password(self):
        """Superusers can be created without email or password."""
        cases = [
            {},
            {"email": ""},
            {"email": None},
            {"password": None},
        ]
        for i, kwargs in enumerate(cases):
            with self.subTest(**kwargs):
                superuser = User.objects.create_superuser("super{}".format(i), **kwargs)
                self.assertEqual(superuser.email, "")
                self.assertFalse(superuser.has_usable_password())

    def test_get_user_model(self):
        "The current user model can be retrieved"
        self.assertEqual(get_user_model(), User)

    @override_settings(AUTH_USER_MODEL="auth_tests.CustomUser")
    def test_swappable_user(self):
        "The current user model can be swapped out for another"
        self.assertEqual(get_user_model(), CustomUser)
        # The swapped-out default User manager becomes unusable.
        with self.assertRaises(AttributeError):
            User.objects.all()

    @override_settings(AUTH_USER_MODEL="badsetting")
    def test_swappable_user_bad_setting(self):
        "The alternate user setting must point to something in the format app.model"
        msg = "AUTH_USER_MODEL must be of the form 'app_label.model_name'"
        with self.assertRaisesMessage(ImproperlyConfigured, msg):
            get_user_model()

    @override_settings(AUTH_USER_MODEL="thismodel.doesntexist")
    def test_swappable_user_nonexistent_model(self):
        "The current user model must point to an installed model"
        msg = (
            "AUTH_USER_MODEL refers to model 'thismodel.doesntexist' "
            "that has not been installed"
        )
        with self.assertRaisesMessage(ImproperlyConfigured, msg):
            get_user_model()

    def test_user_verbose_names_translatable(self):
        "Default User model verbose names are translatable (#19945)"
        with translation.override("en"):
            self.assertEqual(User._meta.verbose_name, "user")
            self.assertEqual(User._meta.verbose_name_plural, "users")
        with translation.override("es"):
            self.assertEqual(User._meta.verbose_name, "usuario")
            self.assertEqual(User._meta.verbose_name_plural, "usuarios")
class TestGetUser(TestCase):
    """Tests for django.contrib.auth.get_user() session-based retrieval."""

    def test_get_user_anonymous(self):
        """An empty session yields an AnonymousUser."""
        request = HttpRequest()
        request.session = self.client.session
        user = get_user(request)
        self.assertIsInstance(user, AnonymousUser)

    def test_get_user(self):
        """A logged-in session yields the matching User instance."""
        created_user = User.objects.create_user(
            "testuser", "[email protected]", "testpw"
        )
        self.client.login(username="testuser", password="testpw")
        request = HttpRequest()
        request.session = self.client.session
        user = get_user(request)
        self.assertIsInstance(user, User)
        self.assertEqual(user.username, created_user.username)
|
5a5732e60c3a1a0887d2bd1da01dfe66d6f3de8870013fa32ecc19ccba7a23e6 | from django.contrib.auth.handlers.modwsgi import check_password, groups_for_user
from django.contrib.auth.models import Group, User
from django.test import TransactionTestCase, override_settings
from .models import CustomUser
# This must be a TransactionTestCase because the WSGI auth handler performs
# its own transaction management.
class ModWsgiHandlerTestCase(TransactionTestCase):
    """
    Tests for the mod_wsgi authentication handler
    """

    available_apps = [
        "django.contrib.auth",
        "django.contrib.contenttypes",
        "auth_tests",
    ]

    def test_check_password(self):
        """
        check_password() returns the correct values as per
        https://modwsgi.readthedocs.io/en/develop/user-guides/access-control-mechanisms.html#apache-authentication-provider
        """
        User.objects.create_user("test", "[email protected]", "test")
        # User not in database
        self.assertIsNone(check_password({}, "unknown", ""))
        # Valid user with correct password
        self.assertTrue(check_password({}, "test", "test"))
        # correct password, but user is inactive
        User.objects.filter(username="test").update(is_active=False)
        self.assertFalse(check_password({}, "test", "test"))
        # Valid user with incorrect password
        self.assertFalse(check_password({}, "test", "incorrect"))

    @override_settings(AUTH_USER_MODEL="auth_tests.CustomUser")
    def test_check_password_custom_user(self):
        """
        check_password() returns the correct values as per
        https://modwsgi.readthedocs.io/en/develop/user-guides/access-control-mechanisms.html#apache-authentication-provider

        with a custom user installed.
        """
        CustomUser._default_manager.create_user(
            "[email protected]", "1990-01-01", "test"
        )
        # User not in database
        self.assertIsNone(check_password({}, "unknown", ""))
        # Valid user with correct password'
        self.assertTrue(check_password({}, "[email protected]", "test"))
        # Valid user with incorrect password
        self.assertFalse(check_password({}, "[email protected]", "incorrect"))

    def test_groups_for_user(self):
        """
        groups_for_user() returns correct values as per
        https://modwsgi.readthedocs.io/en/develop/user-guides/access-control-mechanisms.html#apache-group-authorisation
        """
        user1 = User.objects.create_user("test", "[email protected]", "test")
        User.objects.create_user("test1", "[email protected]", "test1")
        group = Group.objects.create(name="test_group")
        user1.groups.add(group)
        # User not in database
        self.assertEqual(groups_for_user({}, "unknown"), [])
        # Group names are returned as bytes, matching mod_wsgi's expectation.
        self.assertEqual(groups_for_user({}, "test"), [b"test_group"])
        self.assertEqual(groups_for_user({}, "test1"), [])
|
c9564ab02400654874b1fec86f756547ab14d0d90ddfce245532d3de2f589f72 | from importlib import import_module
from django.apps import apps
from django.contrib.auth.models import Permission, User
from django.contrib.contenttypes.models import ContentType
from django.db import connection, connections
from django.test import TransactionTestCase
from django.test.utils import captured_stdout
from .models import Proxy, UserProxy
update_proxy_permissions = import_module(
"django.contrib.auth.migrations.0011_update_proxy_permissions"
)
class ProxyModelWithDifferentAppLabelTests(TransactionTestCase):
    """Migration 0011 behavior when the proxy model lives in a different
    app than its concrete model (permissions move between app labels)."""

    available_apps = [
        "auth_tests",
        "django.contrib.auth",
        "django.contrib.contenttypes",
    ]

    def setUp(self):
        """
        Create proxy permissions with content_type to the concrete model
        rather than the proxy model (as they were before Django 2.2 and
        migration 11).
        """
        Permission.objects.all().delete()
        self.concrete_content_type = ContentType.objects.get_for_model(UserProxy)
        self.default_permission = Permission.objects.create(
            content_type=self.concrete_content_type,
            codename="add_userproxy",
            name="Can add userproxy",
        )
        self.custom_permission = Permission.objects.create(
            content_type=self.concrete_content_type,
            codename="use_different_app_label",
            name="May use a different app label",
        )

    def test_proxy_model_permissions_contenttype(self):
        """The migration repoints permissions at the proxy content type."""
        proxy_model_content_type = ContentType.objects.get_for_model(
            UserProxy, for_concrete_model=False
        )
        self.assertEqual(
            self.default_permission.content_type, self.concrete_content_type
        )
        self.assertEqual(
            self.custom_permission.content_type, self.concrete_content_type
        )
        with connection.schema_editor() as editor:
            update_proxy_permissions.update_proxy_model_permissions(apps, editor)
        self.default_permission.refresh_from_db()
        self.assertEqual(self.default_permission.content_type, proxy_model_content_type)
        self.custom_permission.refresh_from_db()
        self.assertEqual(self.custom_permission.content_type, proxy_model_content_type)

    def test_user_has_now_proxy_model_permissions(self):
        """After migrating, permissions answer under the proxy app label."""
        user = User.objects.create()
        user.user_permissions.add(self.default_permission)
        user.user_permissions.add(self.custom_permission)
        for permission in [self.default_permission, self.custom_permission]:
            self.assertTrue(user.has_perm("auth." + permission.codename))
            self.assertFalse(user.has_perm("auth_tests." + permission.codename))
        with connection.schema_editor() as editor:
            update_proxy_permissions.update_proxy_model_permissions(apps, editor)
        # Reload user to purge the _perm_cache.
        user = User._default_manager.get(pk=user.pk)
        for permission in [self.default_permission, self.custom_permission]:
            self.assertFalse(user.has_perm("auth." + permission.codename))
            self.assertTrue(user.has_perm("auth_tests." + permission.codename))

    def test_migrate_backwards(self):
        """The reverse migration restores the concrete content type."""
        with connection.schema_editor() as editor:
            update_proxy_permissions.update_proxy_model_permissions(apps, editor)
            update_proxy_permissions.revert_proxy_model_permissions(apps, editor)
        self.default_permission.refresh_from_db()
        self.assertEqual(
            self.default_permission.content_type, self.concrete_content_type
        )
        self.custom_permission.refresh_from_db()
        self.assertEqual(
            self.custom_permission.content_type, self.concrete_content_type
        )

    def test_user_keeps_same_permissions_after_migrating_backward(self):
        """A forward+reverse round trip leaves user permissions unchanged."""
        user = User.objects.create()
        user.user_permissions.add(self.default_permission)
        user.user_permissions.add(self.custom_permission)
        for permission in [self.default_permission, self.custom_permission]:
            self.assertTrue(user.has_perm("auth." + permission.codename))
            self.assertFalse(user.has_perm("auth_tests." + permission.codename))
        with connection.schema_editor() as editor:
            update_proxy_permissions.update_proxy_model_permissions(apps, editor)
            update_proxy_permissions.revert_proxy_model_permissions(apps, editor)
        # Reload user to purge the _perm_cache.
        user = User._default_manager.get(pk=user.pk)
        for permission in [self.default_permission, self.custom_permission]:
            self.assertTrue(user.has_perm("auth." + permission.codename))
            self.assertFalse(user.has_perm("auth_tests." + permission.codename))
class ProxyModelWithSameAppLabelTests(TransactionTestCase):
available_apps = [
"auth_tests",
"django.contrib.auth",
"django.contrib.contenttypes",
]
def setUp(self):
"""
Create proxy permissions with content_type to the concrete model
rather than the proxy model (as they were before Django 2.2 and
migration 11).
"""
Permission.objects.all().delete()
self.concrete_content_type = ContentType.objects.get_for_model(Proxy)
self.default_permission = Permission.objects.create(
content_type=self.concrete_content_type,
codename="add_proxy",
name="Can add proxy",
)
self.custom_permission = Permission.objects.create(
content_type=self.concrete_content_type,
codename="display_proxys",
name="May display proxys information",
)
def test_proxy_model_permissions_contenttype(self):
proxy_model_content_type = ContentType.objects.get_for_model(
Proxy, for_concrete_model=False
)
self.assertEqual(
self.default_permission.content_type, self.concrete_content_type
)
self.assertEqual(
self.custom_permission.content_type, self.concrete_content_type
)
with connection.schema_editor() as editor:
update_proxy_permissions.update_proxy_model_permissions(apps, editor)
self.default_permission.refresh_from_db()
self.custom_permission.refresh_from_db()
self.assertEqual(self.default_permission.content_type, proxy_model_content_type)
self.assertEqual(self.custom_permission.content_type, proxy_model_content_type)
def test_user_still_has_proxy_model_permissions(self):
user = User.objects.create()
user.user_permissions.add(self.default_permission)
user.user_permissions.add(self.custom_permission)
for permission in [self.default_permission, self.custom_permission]:
self.assertTrue(user.has_perm("auth_tests." + permission.codename))
with connection.schema_editor() as editor:
update_proxy_permissions.update_proxy_model_permissions(apps, editor)
# Reload user to purge the _perm_cache.
user = User._default_manager.get(pk=user.pk)
for permission in [self.default_permission, self.custom_permission]:
self.assertTrue(user.has_perm("auth_tests." + permission.codename))
def test_migrate_backwards(self):
with connection.schema_editor() as editor:
update_proxy_permissions.update_proxy_model_permissions(apps, editor)
update_proxy_permissions.revert_proxy_model_permissions(apps, editor)
self.default_permission.refresh_from_db()
self.assertEqual(
self.default_permission.content_type, self.concrete_content_type
)
self.custom_permission.refresh_from_db()
self.assertEqual(
self.custom_permission.content_type, self.concrete_content_type
)
def test_user_keeps_same_permissions_after_migrating_backward(self):
user = User.objects.create()
user.user_permissions.add(self.default_permission)
user.user_permissions.add(self.custom_permission)
for permission in [self.default_permission, self.custom_permission]:
self.assertTrue(user.has_perm("auth_tests." + permission.codename))
with connection.schema_editor() as editor:
update_proxy_permissions.update_proxy_model_permissions(apps, editor)
update_proxy_permissions.revert_proxy_model_permissions(apps, editor)
# Reload user to purge the _perm_cache.
user = User._default_manager.get(pk=user.pk)
for permission in [self.default_permission, self.custom_permission]:
self.assertTrue(user.has_perm("auth_tests." + permission.codename))
def test_migrate_with_existing_target_permission(self):
    """
    Permissions may already exist:
    - Old workaround was to manually create permissions for proxy models.
    - Model may have been concrete and then converted to proxy.

    Output a reminder to audit relevant permissions.
    """
    proxy_model_content_type = ContentType.objects.get_for_model(
        Proxy, for_concrete_model=False
    )
    # Pre-create permissions already attached to the proxy content type so
    # the migration's retargeting collides with them.
    Permission.objects.create(
        content_type=proxy_model_content_type,
        codename="add_proxy",
        name="Can add proxy",
    )
    Permission.objects.create(
        content_type=proxy_model_content_type,
        codename="display_proxys",
        name="May display proxys information",
    )
    # The migration reports the conflict on stdout instead of crashing.
    with captured_stdout() as stdout:
        with connection.schema_editor() as editor:
            update_proxy_permissions.update_proxy_model_permissions(apps, editor)
    self.assertIn(
        "A problem arose migrating proxy model permissions", stdout.getvalue()
    )
class MultiDBProxyModelAppLabelTests(TransactionTestCase):
    """
    update_proxy_model_permissions() operates on the database the schema
    editor's connection is bound to (here the "other" alias), not only on
    "default".
    """

    databases = {"default", "other"}
    available_apps = [
        "auth_tests",
        "django.contrib.auth",
        "django.contrib.contenttypes",
    ]

    def setUp(self):
        # Clear any cached/stale rows so get_for_model() below creates fresh
        # content types and the permission lives only on "other".
        ContentType.objects.all().delete()
        Permission.objects.using("other").delete()
        concrete_content_type = ContentType.objects.db_manager("other").get_for_model(
            Proxy
        )
        self.permission = Permission.objects.using("other").create(
            content_type=concrete_content_type,
            codename="add_proxy",
            name="Can add proxy",
        )

    def test_migrate_other_database(self):
        # Running the migration through the "other" connection retargets the
        # permission stored on that database to the proxy content type.
        proxy_model_content_type = ContentType.objects.db_manager(
            "other"
        ).get_for_model(Proxy, for_concrete_model=False)
        with connections["other"].schema_editor() as editor:
            update_proxy_permissions.update_proxy_model_permissions(apps, editor)
        self.permission.refresh_from_db()
        self.assertEqual(self.permission.content_type, proxy_model_content_type)
|
eb41c5427d0fe256fd31bd343674fb71a2ef7f9eb0137b15b9a65552ced926d1 | import builtins
import getpass
import os
import sys
from datetime import date
from io import StringIO
from unittest import mock
from django.apps import apps
from django.contrib.auth import get_permission_codename, management
from django.contrib.auth.management import create_permissions, get_default_username
from django.contrib.auth.management.commands import changepassword, createsuperuser
from django.contrib.auth.models import Group, Permission, User
from django.contrib.contenttypes.models import ContentType
from django.core.management import call_command
from django.core.management.base import CommandError
from django.db import migrations
from django.test import TestCase, override_settings
from django.utils.translation import gettext_lazy as _
from .models import (
CustomUser,
CustomUserNonUniqueUsername,
CustomUserWithFK,
CustomUserWithM2M,
Email,
Organization,
UserProxy,
)
# Maps the shortcut keys accepted by @mock_inputs to the exact prompt strings
# createsuperuser emits. List entries may be zero-argument callables, which
# are evaluated lazily at match time.
MOCK_INPUT_KEY_TO_PROMPTS = {
    # @mock_inputs dict key: [expected prompt messages],
    "bypass": ["Bypass password validation and create user anyway? [y/N]: "],
    "email": ["Email address: "],
    "date_of_birth": ["Date of birth: "],
    "first_name": ["First name: "],
    "username": [
        "Username: ",
        # Lazy: the default username embedded in the prompt can differ
        # between tests.
        lambda: "Username (leave blank to use '%s'): " % get_default_username(),
    ],
}
def mock_inputs(inputs):
    """
    Decorator to temporarily replace input/getpass to allow interactive
    createsuperuser.

    ``inputs`` maps either a shortcut key from MOCK_INPUT_KEY_TO_PROMPTS or a
    literal prompt string to a canned response (a value or a zero-argument
    callable). The special value "KeyboardInterrupt" raises that exception
    when any prompt is issued.
    """

    def inner(test_func):
        def wrapped(*args):
            class mock_getpass:
                @staticmethod
                def getpass(prompt=b"Password: ", stream=None):
                    # Mirrors getpass.getpass()'s signature; the prompt text
                    # is ignored — only the canned "password" entry is used.
                    if callable(inputs["password"]):
                        return inputs["password"]()
                    return inputs["password"]

            def mock_input(prompt):
                # Guard against a lazy translation proxy leaking into the
                # prompt — the command must force it to str before display.
                assert "__proxy__" not in prompt
                response = None
                for key, val in inputs.items():
                    if val == "KeyboardInterrupt":
                        raise KeyboardInterrupt
                    # get() fallback because sometimes 'key' is the actual
                    # prompt rather than a shortcut name.
                    prompt_msgs = MOCK_INPUT_KEY_TO_PROMPTS.get(key, key)
                    if isinstance(prompt_msgs, list):
                        prompt_msgs = [
                            msg() if callable(msg) else msg for msg in prompt_msgs
                        ]
                    if prompt in prompt_msgs:
                        if callable(val):
                            response = val()
                        else:
                            response = val
                        break
                if response is None:
                    # Fail loudly rather than hanging on an unanswered prompt.
                    raise ValueError("Mock input for %r not found." % prompt)
                return response

            # Patch the module-level getpass used by the command and the
            # input() builtin; restore both even when the test fails.
            old_getpass = createsuperuser.getpass
            old_input = builtins.input
            createsuperuser.getpass = mock_getpass
            builtins.input = mock_input
            try:
                test_func(*args)
            finally:
                createsuperuser.getpass = old_getpass
                builtins.input = old_input

        return wrapped

    return inner
class MockTTY:
    """
    Stand-in for sys.stdin that claims to be attached to a terminal.

    createsuperuser skips its interactive flow when stdin is not a TTY;
    pairing this object with mock_inputs lets tests exercise that flow.
    """

    def isatty(self):
        # Always report an interactive terminal.
        return True
class MockInputTests(TestCase):
    """Sanity checks for the mock_inputs() helper itself."""

    @mock_inputs({"username": "alice"})
    def test_input_not_found(self):
        # A prompt with no configured response raises instead of hanging
        # (the email prompt has no entry in the inputs dict above).
        with self.assertRaisesMessage(
            ValueError, "Mock input for 'Email address: ' not found."
        ):
            call_command("createsuperuser", stdin=MockTTY())
class GetDefaultUsernameTestCase(TestCase):
    """Tests for django.contrib.auth.management.get_default_username()."""

    databases = {"default", "other"}

    def setUp(self):
        # get_system_username() is monkeypatched per test; stash the original
        # so tearDown can restore it.
        self.old_get_system_username = management.get_system_username

    def tearDown(self):
        management.get_system_username = self.old_get_system_username

    def test_actual_implementation(self):
        # The unpatched implementation returns a plain str.
        self.assertIsInstance(management.get_system_username(), str)

    def test_simple(self):
        management.get_system_username = lambda: "joe"
        self.assertEqual(management.get_default_username(), "joe")

    def test_existing(self):
        # A username that already exists in the DB yields no default, unless
        # the database check is explicitly disabled.
        User.objects.create(username="joe")
        management.get_system_username = lambda: "joe"
        self.assertEqual(management.get_default_username(), "")
        self.assertEqual(management.get_default_username(check_db=False), "joe")

    def test_i18n(self):
        # 'Julia' with accented 'u':
        management.get_system_username = lambda: "J\xfalia"
        self.assertEqual(management.get_default_username(), "julia")

    def test_with_database(self):
        # The clash check runs only against the requested database alias.
        User.objects.create(username="joe")
        management.get_system_username = lambda: "joe"
        self.assertEqual(management.get_default_username(), "")
        self.assertEqual(management.get_default_username(database="other"), "joe")
        User.objects.using("other").create(username="joe")
        self.assertEqual(management.get_default_username(database="other"), "")
@override_settings(
    AUTH_PASSWORD_VALIDATORS=[
        {"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"},
    ]
)
class ChangepasswordManagementCommandTestCase(TestCase):
    """Tests for the changepassword management command."""

    @classmethod
    def setUpTestData(cls):
        # Shared fixture: most tests change this user's password.
        cls.user = User.objects.create_user(username="joe", password="qwerty")

    def setUp(self):
        self.stdout = StringIO()
        self.stderr = StringIO()

    def tearDown(self):
        self.stdout.close()
        self.stderr.close()

    @mock.patch.object(getpass, "getpass", return_value="password")
    def test_get_pass(self, mock_get_pass):
        call_command("changepassword", username="joe", stdout=self.stdout)
        self.assertIs(User.objects.get(username="joe").check_password("password"), True)

    @mock.patch.object(getpass, "getpass", return_value="")
    def test_get_pass_no_input(self, mock_get_pass):
        # Entering an empty password aborts the command.
        with self.assertRaisesMessage(CommandError, "aborted"):
            call_command("changepassword", username="joe", stdout=self.stdout)

    @mock.patch.object(changepassword.Command, "_get_pass", return_value="new_password")
    def test_system_username(self, mock_get_pass):
        """The system username is used if --username isn't provided."""
        username = getpass.getuser()
        User.objects.create_user(username=username, password="qwerty")
        call_command("changepassword", stdout=self.stdout)
        self.assertIs(
            User.objects.get(username=username).check_password("new_password"), True
        )

    def test_nonexistent_username(self):
        with self.assertRaisesMessage(CommandError, "user 'test' does not exist"):
            call_command("changepassword", username="test", stdout=self.stdout)

    @mock.patch.object(changepassword.Command, "_get_pass", return_value="not qwerty")
    def test_that_changepassword_command_changes_joes_password(self, mock_get_pass):
        "Executing the changepassword management command should change joe's password"
        self.assertTrue(self.user.check_password("qwerty"))
        call_command("changepassword", username="joe", stdout=self.stdout)
        command_output = self.stdout.getvalue().strip()
        self.assertEqual(
            command_output,
            "Changing password for user 'joe'\n"
            "Password changed successfully for user 'joe'",
        )
        self.assertTrue(User.objects.get(username="joe").check_password("not qwerty"))

    @mock.patch.object(
        # Echoing str(args) makes the confirmation entry never match the
        # first entry, so every attempt fails.
        changepassword.Command, "_get_pass", side_effect=lambda *args: str(args)
    )
    def test_that_max_tries_exits_1(self, mock_get_pass):
        """
        A CommandError should be thrown by handle() if the user enters in
        mismatched passwords three times.
        """
        msg = "Aborting password change for user 'joe' after 3 attempts"
        with self.assertRaisesMessage(CommandError, msg):
            call_command(
                "changepassword", username="joe", stdout=self.stdout, stderr=self.stderr
            )

    @mock.patch.object(changepassword.Command, "_get_pass", return_value="1234567890")
    def test_password_validation(self, mock_get_pass):
        """
        A CommandError should be raised if the user enters in passwords which
        fail validation three times.
        """
        # NumericPasswordValidator (from the class decorator) rejects the
        # all-digit password on every attempt.
        abort_msg = "Aborting password change for user 'joe' after 3 attempts"
        with self.assertRaisesMessage(CommandError, abort_msg):
            call_command(
                "changepassword", username="joe", stdout=self.stdout, stderr=self.stderr
            )
        self.assertIn("This password is entirely numeric.", self.stderr.getvalue())

    @mock.patch.object(changepassword.Command, "_get_pass", return_value="not qwerty")
    def test_that_changepassword_command_works_with_nonascii_output(
        self, mock_get_pass
    ):
        """
        #21627 -- Executing the changepassword management command should allow
        non-ASCII characters from the User object representation.
        """
        # 'Julia' with accented 'u':
        User.objects.create_user(username="J\xfalia", password="qwerty")
        call_command("changepassword", username="J\xfalia", stdout=self.stdout)
class MultiDBChangepasswordManagementCommandTestCase(TestCase):
    """changepassword with --database operates on the selected alias."""

    databases = {"default", "other"}

    @mock.patch.object(changepassword.Command, "_get_pass", return_value="not qwerty")
    def test_that_changepassword_command_with_database_option_uses_given_db(
        self, mock_get_pass
    ):
        """
        changepassword --database should operate on the specified DB.
        """
        user = User.objects.db_manager("other").create_user(
            username="joe", password="qwerty"
        )
        self.assertTrue(user.check_password("qwerty"))
        out = StringIO()
        call_command("changepassword", username="joe", database="other", stdout=out)
        command_output = out.getvalue().strip()
        self.assertEqual(
            command_output,
            "Changing password for user 'joe'\n"
            "Password changed successfully for user 'joe'",
        )
        # The change landed on "other" — the user was only created there.
        self.assertTrue(
            User.objects.using("other").get(username="joe").check_password("not qwerty")
        )
@override_settings(
SILENCED_SYSTEM_CHECKS=["fields.W342"], # ForeignKey(unique=True)
AUTH_PASSWORD_VALIDATORS=[
{"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"}
],
)
class CreatesuperuserManagementCommandTestCase(TestCase):
def test_no_email_argument(self):
    """--noinput without --email is rejected with a CommandError."""
    out = StringIO()
    expected = "You must use --email with --noinput."
    with self.assertRaisesMessage(CommandError, expected):
        call_command(
            "createsuperuser", interactive=False, username="joe", stdout=out
        )
def test_basic_usage(self):
    "Check the operation of the createsuperuser management command"
    # We can use the management command to create a superuser
    new_io = StringIO()
    call_command(
        "createsuperuser",
        interactive=False,
        username="joe",
        email="[email protected]",
        stdout=new_io,
    )
    command_output = new_io.getvalue().strip()
    self.assertEqual(command_output, "Superuser created successfully.")
    u = User.objects.get(username="joe")
    self.assertEqual(u.email, "[email protected]")
    # created password should be unusable
    # (no password was supplied in non-interactive mode).
    self.assertFalse(u.has_usable_password())
def test_non_ascii_verbose_name(self):
@mock_inputs(
{
"password": "nopasswd",
"Uživatel (leave blank to use '%s'): "
% get_default_username(): "foo", # username (cz)
"email": "[email protected]",
}
)
def test(self):
username_field = User._meta.get_field("username")
old_verbose_name = username_field.verbose_name
username_field.verbose_name = _("u\u017eivatel")
new_io = StringIO()
try:
call_command(
"createsuperuser",
interactive=True,
stdout=new_io,
stdin=MockTTY(),
)
finally:
username_field.verbose_name = old_verbose_name
command_output = new_io.getvalue().strip()
self.assertEqual(command_output, "Superuser created successfully.")
test(self)
def test_verbosity_zero(self):
# We can suppress output on the management command
new_io = StringIO()
call_command(
"createsuperuser",
interactive=False,
username="joe2",
email="[email protected]",
verbosity=0,
stdout=new_io,
)
command_output = new_io.getvalue().strip()
self.assertEqual(command_output, "")
u = User.objects.get(username="joe2")
self.assertEqual(u.email, "[email protected]")
self.assertFalse(u.has_usable_password())
def test_email_in_username(self):
call_command(
"createsuperuser",
interactive=False,
username="[email protected]",
email="[email protected]",
verbosity=0,
)
u = User._default_manager.get(username="[email protected]")
self.assertEqual(u.email, "[email protected]")
self.assertFalse(u.has_usable_password())
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUser")
def test_swappable_user(self):
"A superuser can be created when a custom user model is in use"
# We can use the management command to create a superuser
# We skip validation because the temporary substitution of the
# swappable User model messes with validation.
new_io = StringIO()
call_command(
"createsuperuser",
interactive=False,
email="[email protected]",
date_of_birth="1976-04-01",
first_name="Joe",
stdout=new_io,
)
command_output = new_io.getvalue().strip()
self.assertEqual(command_output, "Superuser created successfully.")
u = CustomUser._default_manager.get(email="[email protected]")
self.assertEqual(u.date_of_birth, date(1976, 4, 1))
# created password should be unusable
self.assertFalse(u.has_usable_password())
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUser")
def test_swappable_user_missing_required_field(self):
"A Custom superuser won't be created when a required field isn't provided"
# We can use the management command to create a superuser
# We skip validation because the temporary substitution of the
# swappable User model messes with validation.
new_io = StringIO()
with self.assertRaisesMessage(
CommandError, "You must use --email with --noinput."
):
call_command(
"createsuperuser",
interactive=False,
stdout=new_io,
stderr=new_io,
)
self.assertEqual(CustomUser._default_manager.count(), 0)
@override_settings(
AUTH_USER_MODEL="auth_tests.CustomUserNonUniqueUsername",
AUTHENTICATION_BACKENDS=["my.custom.backend"],
)
def test_swappable_user_username_non_unique(self):
@mock_inputs(
{
"username": "joe",
"password": "nopasswd",
}
)
def createsuperuser():
new_io = StringIO()
call_command(
"createsuperuser",
interactive=True,
email="[email protected]",
stdout=new_io,
stdin=MockTTY(),
)
command_output = new_io.getvalue().strip()
self.assertEqual(command_output, "Superuser created successfully.")
for i in range(2):
createsuperuser()
users = CustomUserNonUniqueUsername.objects.filter(username="joe")
self.assertEqual(users.count(), 2)
def test_skip_if_not_in_TTY(self):
    """
    If the command is not called from a TTY, it should be skipped and a
    message should be displayed (#7423).
    """

    class FakeStdin:
        """A fake stdin object that has isatty() return False."""

        def isatty(self):
            return False

    out = StringIO()
    call_command(
        "createsuperuser",
        stdin=FakeStdin(),
        stdout=out,
        interactive=True,
    )
    # No user was created and the skip notice was printed.
    self.assertEqual(User._default_manager.count(), 0)
    self.assertIn("Superuser creation skipped", out.getvalue())
def test_passing_stdin(self):
    """
    You can pass a stdin object as an option and it should be
    available on self.stdin.
    If no such option is passed, it defaults to sys.stdin.
    """
    sentinel = object()
    command = createsuperuser.Command()
    call_command(
        command,
        stdin=sentinel,
        interactive=False,
        verbosity=0,
        username="janet",
        email="[email protected]",
    )
    # The exact object passed in is stored — not a copy or wrapper.
    self.assertIs(command.stdin, sentinel)
    command = createsuperuser.Command()
    call_command(
        command,
        interactive=False,
        verbosity=0,
        username="joe",
        email="[email protected]",
    )
    self.assertIs(command.stdin, sys.stdin)
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUserWithFK")
def test_fields_with_fk(self):
new_io = StringIO()
group = Group.objects.create(name="mygroup")
email = Email.objects.create(email="[email protected]")
call_command(
"createsuperuser",
interactive=False,
username=email.pk,
email=email.email,
group=group.pk,
stdout=new_io,
)
command_output = new_io.getvalue().strip()
self.assertEqual(command_output, "Superuser created successfully.")
u = CustomUserWithFK._default_manager.get(email=email)
self.assertEqual(u.username, email)
self.assertEqual(u.group, group)
non_existent_email = "[email protected]"
msg = "email instance with email %r does not exist." % non_existent_email
with self.assertRaisesMessage(CommandError, msg):
call_command(
"createsuperuser",
interactive=False,
username=email.pk,
email=non_existent_email,
stdout=new_io,
)
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUserWithFK")
def test_fields_with_fk_interactive(self):
new_io = StringIO()
group = Group.objects.create(name="mygroup")
email = Email.objects.create(email="[email protected]")
@mock_inputs(
{
"password": "nopasswd",
"Username (Email.id): ": email.pk,
"Email (Email.email): ": email.email,
"Group (Group.id): ": group.pk,
}
)
def test(self):
call_command(
"createsuperuser",
interactive=True,
stdout=new_io,
stdin=MockTTY(),
)
command_output = new_io.getvalue().strip()
self.assertEqual(command_output, "Superuser created successfully.")
u = CustomUserWithFK._default_manager.get(email=email)
self.assertEqual(u.username, email)
self.assertEqual(u.group, group)
test(self)
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUserWithFK")
def test_fields_with_fk_via_option_interactive(self):
new_io = StringIO()
group = Group.objects.create(name="mygroup")
email = Email.objects.create(email="[email protected]")
@mock_inputs({"password": "nopasswd"})
def test(self):
call_command(
"createsuperuser",
interactive=True,
username=email.pk,
email=email.email,
group=group.pk,
stdout=new_io,
stdin=MockTTY(),
)
command_output = new_io.getvalue().strip()
self.assertEqual(command_output, "Superuser created successfully.")
u = CustomUserWithFK._default_manager.get(email=email)
self.assertEqual(u.username, email)
self.assertEqual(u.group, group)
test(self)
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUserWithFK")
def test_validate_fk(self):
email = Email.objects.create(email="[email protected]")
Group.objects.all().delete()
nonexistent_group_id = 1
msg = f"group instance with id {nonexistent_group_id} does not exist."
with self.assertRaisesMessage(CommandError, msg):
call_command(
"createsuperuser",
interactive=False,
username=email.pk,
email=email.email,
group=nonexistent_group_id,
verbosity=0,
)
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUserWithFK")
def test_validate_fk_environment_variable(self):
email = Email.objects.create(email="[email protected]")
Group.objects.all().delete()
nonexistent_group_id = 1
msg = f"group instance with id {nonexistent_group_id} does not exist."
with mock.patch.dict(
os.environ,
{"DJANGO_SUPERUSER_GROUP": str(nonexistent_group_id)},
):
with self.assertRaisesMessage(CommandError, msg):
call_command(
"createsuperuser",
interactive=False,
username=email.pk,
email=email.email,
verbosity=0,
)
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUserWithFK")
def test_validate_fk_via_option_interactive(self):
email = Email.objects.create(email="[email protected]")
Group.objects.all().delete()
nonexistent_group_id = 1
msg = f"group instance with id {nonexistent_group_id} does not exist."
@mock_inputs(
{
"password": "nopasswd",
"Username (Email.id): ": email.pk,
"Email (Email.email): ": email.email,
}
)
def test(self):
with self.assertRaisesMessage(CommandError, msg):
call_command(
"createsuperuser",
group=nonexistent_group_id,
stdin=MockTTY(),
verbosity=0,
)
test(self)
# Casing normalized: the settings string previously read "CustomUserWithM2m"
# (lowercase m), inconsistent with the imported model name CustomUserWithM2M
# and the sibling tests. Model-name lookup is case-insensitive, so behavior
# is unchanged.
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUserWithM2M")
def test_fields_with_m2m(self):
    """
    Non-interactive createsuperuser accepts a list of pks for a required
    many-to-many field and links all of them to the new user.
    """
    new_io = StringIO()
    org_id_1 = Organization.objects.create(name="Organization 1").pk
    org_id_2 = Organization.objects.create(name="Organization 2").pk
    call_command(
        "createsuperuser",
        interactive=False,
        username="joe",
        orgs=[org_id_1, org_id_2],
        stdout=new_io,
    )
    command_output = new_io.getvalue().strip()
    self.assertEqual(command_output, "Superuser created successfully.")
    user = CustomUserWithM2M._default_manager.get(username="joe")
    self.assertEqual(user.orgs.count(), 2)
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUserWithM2M")
def test_fields_with_m2m_interactive(self):
new_io = StringIO()
org_id_1 = Organization.objects.create(name="Organization 1").pk
org_id_2 = Organization.objects.create(name="Organization 2").pk
@mock_inputs(
{
"password": "nopasswd",
"Username: ": "joe",
"Orgs (Organization.id): ": "%s, %s" % (org_id_1, org_id_2),
}
)
def test(self):
call_command(
"createsuperuser",
interactive=True,
stdout=new_io,
stdin=MockTTY(),
)
command_output = new_io.getvalue().strip()
self.assertEqual(command_output, "Superuser created successfully.")
user = CustomUserWithM2M._default_manager.get(username="joe")
self.assertEqual(user.orgs.count(), 2)
test(self)
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUserWithM2M")
def test_fields_with_m2m_interactive_blank(self):
new_io = StringIO()
org_id = Organization.objects.create(name="Organization").pk
entered_orgs = [str(org_id), " "]
def return_orgs():
return entered_orgs.pop()
@mock_inputs(
{
"password": "nopasswd",
"Username: ": "joe",
"Orgs (Organization.id): ": return_orgs,
}
)
def test(self):
call_command(
"createsuperuser",
interactive=True,
stdout=new_io,
stderr=new_io,
stdin=MockTTY(),
)
self.assertEqual(
new_io.getvalue().strip(),
"Error: This field cannot be blank.\n"
"Superuser created successfully.",
)
test(self)
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUserWithM2MThrough")
def test_fields_with_m2m_and_through(self):
    """A required M2M that uses a through model is rejected up front."""
    expected_error = (
        "Required field 'orgs' specifies a many-to-many relation through "
        "model, which is not supported."
    )
    with self.assertRaisesMessage(CommandError, expected_error):
        call_command("createsuperuser")
def test_default_username(self):
"""createsuperuser uses a default username when one isn't provided."""
# Get the default username before creating a user.
default_username = get_default_username()
new_io = StringIO()
entered_passwords = ["password", "password"]
def return_passwords():
return entered_passwords.pop(0)
@mock_inputs({"password": return_passwords, "username": "", "email": ""})
def test(self):
call_command(
"createsuperuser",
interactive=True,
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
self.assertEqual(
new_io.getvalue().strip(), "Superuser created successfully."
)
self.assertTrue(User.objects.filter(username=default_username).exists())
test(self)
def test_password_validation(self):
"""
Creation should fail if the password fails validation.
"""
new_io = StringIO()
entered_passwords = ["1234567890", "1234567890", "password", "password"]
def bad_then_good_password():
return entered_passwords.pop(0)
@mock_inputs(
{
"password": bad_then_good_password,
"username": "joe1234567890",
"email": "",
"bypass": "n",
}
)
def test(self):
call_command(
"createsuperuser",
interactive=True,
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
self.assertEqual(
new_io.getvalue().strip(),
"This password is entirely numeric.\n"
"Superuser created successfully.",
)
test(self)
@override_settings(
AUTH_PASSWORD_VALIDATORS=[
{
"NAME": (
"django.contrib.auth.password_validation."
"UserAttributeSimilarityValidator"
)
},
]
)
def test_validate_password_against_username(self):
new_io = StringIO()
username = "supremelycomplex"
entered_passwords = [
username,
username,
"superduperunguessablepassword",
"superduperunguessablepassword",
]
def bad_then_good_password():
return entered_passwords.pop(0)
@mock_inputs(
{
"password": bad_then_good_password,
"username": username,
"email": "",
"bypass": "n",
}
)
def test(self):
call_command(
"createsuperuser",
interactive=True,
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
self.assertEqual(
new_io.getvalue().strip(),
"The password is too similar to the username.\n"
"Superuser created successfully.",
)
test(self)
@override_settings(
AUTH_USER_MODEL="auth_tests.CustomUser",
AUTH_PASSWORD_VALIDATORS=[
{
"NAME": (
"django.contrib.auth.password_validation."
"UserAttributeSimilarityValidator"
)
},
],
)
def test_validate_password_against_required_fields(self):
new_io = StringIO()
first_name = "josephine"
entered_passwords = [
first_name,
first_name,
"superduperunguessablepassword",
"superduperunguessablepassword",
]
def bad_then_good_password():
return entered_passwords.pop(0)
@mock_inputs(
{
"password": bad_then_good_password,
"username": "whatever",
"first_name": first_name,
"date_of_birth": "1970-01-01",
"email": "[email protected]",
"bypass": "n",
}
)
def test(self):
call_command(
"createsuperuser",
interactive=True,
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
self.assertEqual(
new_io.getvalue().strip(),
"The password is too similar to the first name.\n"
"Superuser created successfully.",
)
test(self)
@override_settings(
AUTH_USER_MODEL="auth_tests.CustomUser",
AUTH_PASSWORD_VALIDATORS=[
{
"NAME": (
"django.contrib.auth.password_validation."
"UserAttributeSimilarityValidator"
)
},
],
)
def test_validate_password_against_required_fields_via_option(self):
new_io = StringIO()
first_name = "josephine"
entered_passwords = [
first_name,
first_name,
"superduperunguessablepassword",
"superduperunguessablepassword",
]
def bad_then_good_password():
return entered_passwords.pop(0)
@mock_inputs(
{
"password": bad_then_good_password,
"bypass": "n",
}
)
def test(self):
call_command(
"createsuperuser",
interactive=True,
first_name=first_name,
date_of_birth="1970-01-01",
email="[email protected]",
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
self.assertEqual(
new_io.getvalue().strip(),
"The password is too similar to the first name.\n"
"Superuser created successfully.",
)
test(self)
def test_blank_username(self):
    """Creation fails if --username is blank."""
    stream = StringIO()
    with self.assertRaisesMessage(CommandError, "Username cannot be blank."):
        call_command(
            "createsuperuser",
            username="",
            stdin=MockTTY(),
            stdout=stream,
            stderr=stream,
        )
def test_blank_username_non_interactive(self):
    """A blank --username is rejected in non-interactive mode too."""
    new_io = StringIO()
    with self.assertRaisesMessage(CommandError, "Username cannot be blank."):
        call_command(
            "createsuperuser",
            username="",
            interactive=False,
            stdin=MockTTY(),
            stdout=new_io,
            stderr=new_io,
        )
def test_password_validation_bypass(self):
"""
Password validation can be bypassed by entering 'y' at the prompt.
"""
new_io = StringIO()
@mock_inputs(
{
"password": "1234567890",
"username": "joe1234567890",
"email": "",
"bypass": "y",
}
)
def test(self):
call_command(
"createsuperuser",
interactive=True,
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
self.assertEqual(
new_io.getvalue().strip(),
"This password is entirely numeric.\n"
"Superuser created successfully.",
)
test(self)
def test_invalid_username(self):
"""Creation fails if the username fails validation."""
user_field = User._meta.get_field(User.USERNAME_FIELD)
new_io = StringIO()
entered_passwords = ["password", "password"]
# Enter an invalid (too long) username first and then a valid one.
invalid_username = ("x" * user_field.max_length) + "y"
entered_usernames = [invalid_username, "janet"]
def return_passwords():
return entered_passwords.pop(0)
def return_usernames():
return entered_usernames.pop(0)
@mock_inputs(
{"password": return_passwords, "username": return_usernames, "email": ""}
)
def test(self):
call_command(
"createsuperuser",
interactive=True,
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
self.assertEqual(
new_io.getvalue().strip(),
"Error: Ensure this value has at most %s characters (it has %s).\n"
"Superuser created successfully."
% (user_field.max_length, len(invalid_username)),
)
test(self)
@mock_inputs({"username": "KeyboardInterrupt"})
def test_keyboard_interrupt(self):
    """Ctrl-C at a prompt exits with an "Operation cancelled." message."""
    new_io = StringIO()
    # The command turns the KeyboardInterrupt raised by the mocked prompt
    # into a SystemExit after printing the cancellation notice.
    with self.assertRaises(SystemExit):
        call_command(
            "createsuperuser",
            interactive=True,
            stdin=MockTTY(),
            stdout=new_io,
            stderr=new_io,
        )
    self.assertEqual(new_io.getvalue(), "\nOperation cancelled.\n")
def test_existing_username(self):
"""Creation fails if the username already exists."""
user = User.objects.create(username="janet")
new_io = StringIO()
entered_passwords = ["password", "password"]
# Enter the existing username first and then a new one.
entered_usernames = [user.username, "joe"]
def return_passwords():
return entered_passwords.pop(0)
def return_usernames():
return entered_usernames.pop(0)
@mock_inputs(
{"password": return_passwords, "username": return_usernames, "email": ""}
)
def test(self):
call_command(
"createsuperuser",
interactive=True,
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
self.assertEqual(
new_io.getvalue().strip(),
"Error: That username is already taken.\n"
"Superuser created successfully.",
)
test(self)
def test_existing_username_non_interactive(self):
    """Creation fails if the username already exists."""
    User.objects.create(username="janet")
    stream = StringIO()
    message = "Error: That username is already taken."
    with self.assertRaisesMessage(CommandError, message):
        call_command(
            "createsuperuser",
            username="janet",
            email="",
            interactive=False,
            stdout=stream,
        )
def test_existing_username_provided_via_option_and_interactive(self):
"""call_command() gets username='janet' and interactive=True."""
new_io = StringIO()
entered_passwords = ["password", "password"]
User.objects.create(username="janet")
def return_passwords():
return entered_passwords.pop(0)
@mock_inputs(
{
"password": return_passwords,
"username": "janet1",
"email": "[email protected]",
}
)
def test(self):
call_command(
"createsuperuser",
username="janet",
interactive=True,
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
msg = (
"Error: That username is already taken.\n"
"Superuser created successfully."
)
self.assertEqual(new_io.getvalue().strip(), msg)
test(self)
def test_validation_mismatched_passwords(self):
"""
Creation should fail if the user enters mismatched passwords.
"""
new_io = StringIO()
# The first two passwords do not match, but the second two do match and
# are valid.
entered_passwords = ["password", "not password", "password2", "password2"]
def mismatched_passwords_then_matched():
return entered_passwords.pop(0)
@mock_inputs(
{
"password": mismatched_passwords_then_matched,
"username": "joe1234567890",
"email": "",
}
)
def test(self):
call_command(
"createsuperuser",
interactive=True,
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
self.assertEqual(
new_io.getvalue().strip(),
"Error: Your passwords didn't match.\n"
"Superuser created successfully.",
)
test(self)
def test_validation_blank_password_entered(self):
"""
Creation should fail if the user enters blank passwords.
"""
new_io = StringIO()
# The first two passwords are empty strings, but the second two are
# valid.
entered_passwords = ["", "", "password2", "password2"]
def blank_passwords_then_valid():
return entered_passwords.pop(0)
@mock_inputs(
{
"password": blank_passwords_then_valid,
"username": "joe1234567890",
"email": "",
}
)
def test(self):
call_command(
"createsuperuser",
interactive=True,
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
self.assertEqual(
new_io.getvalue().strip(),
"Error: Blank passwords aren't allowed.\n"
"Superuser created successfully.",
)
test(self)
@override_settings(AUTH_USER_MODEL="auth_tests.NoPasswordUser")
def test_usermodel_without_password(self):
    """A user model with no password field is created without prompting."""
    new_io = StringIO()
    call_command(
        "createsuperuser",
        interactive=False,
        stdin=MockTTY(),
        stdout=new_io,
        stderr=new_io,
        username="username",
    )
    self.assertEqual(new_io.getvalue().strip(), "Superuser created successfully.")
@override_settings(AUTH_USER_MODEL="auth_tests.NoPasswordUser")
def test_usermodel_without_password_interactive(self):
new_io = StringIO()
@mock_inputs({"username": "username"})
def test(self):
call_command(
"createsuperuser",
interactive=True,
stdin=MockTTY(),
stdout=new_io,
stderr=new_io,
)
self.assertEqual(
new_io.getvalue().strip(), "Superuser created successfully."
)
test(self)
    @mock.patch.dict(
        os.environ,
        {
            "DJANGO_SUPERUSER_PASSWORD": "test_password",
            "DJANGO_SUPERUSER_USERNAME": "test_superuser",
            "DJANGO_SUPERUSER_EMAIL": "[email protected]",
            "DJANGO_SUPERUSER_FIRST_NAME": "ignored_first_name",
        },
    )
    def test_environment_variable_non_interactive(self):
        # DJANGO_SUPERUSER_* environment variables supply the required
        # fields when running non-interactively.
        call_command("createsuperuser", interactive=False, verbosity=0)
        user = User.objects.get(username="test_superuser")
        self.assertEqual(user.email, "[email protected]")
        self.assertTrue(user.check_password("test_password"))
        # Environment variables are ignored for non-required fields.
        self.assertEqual(user.first_name, "")
    @override_settings(AUTH_USER_MODEL="auth_tests.CustomUserWithM2m")
    def test_environment_variable_m2m_non_interactive(self):
        # A comma-separated environment variable populates a required
        # many-to-many field (orgs) on the custom user model.
        new_io = StringIO()
        org_id_1 = Organization.objects.create(name="Organization 1").pk
        org_id_2 = Organization.objects.create(name="Organization 2").pk
        with mock.patch.dict(
            os.environ,
            {
                "DJANGO_SUPERUSER_ORGS": f"{org_id_1},{org_id_2}",
            },
        ):
            call_command(
                "createsuperuser",
                interactive=False,
                username="joe",
                stdout=new_io,
            )
        command_output = new_io.getvalue().strip()
        self.assertEqual(command_output, "Superuser created successfully.")
        user = CustomUserWithM2M._default_manager.get(username="joe")
        # Both organizations referenced by the env variable were linked.
        self.assertEqual(user.orgs.count(), 2)
    @mock.patch.dict(
        os.environ,
        {
            "DJANGO_SUPERUSER_USERNAME": "test_superuser",
            "DJANGO_SUPERUSER_EMAIL": "[email protected]",
        },
    )
    def test_ignore_environment_variable_non_interactive(self):
        # Environment variables are ignored in non-interactive mode, if
        # provided by a command line arguments.
        call_command(
            "createsuperuser",
            interactive=False,
            username="cmd_superuser",
            email="[email protected]",
            verbosity=0,
        )
        user = User.objects.get(username="cmd_superuser")
        self.assertEqual(user.email, "[email protected]")
        # No password was supplied anywhere, so none is set.
        self.assertFalse(user.has_usable_password())
    @mock.patch.dict(
        os.environ,
        {
            "DJANGO_SUPERUSER_PASSWORD": "test_password",
            "DJANGO_SUPERUSER_USERNAME": "test_superuser",
            "DJANGO_SUPERUSER_EMAIL": "[email protected]",
        },
    )
    def test_ignore_environment_variable_interactive(self):
        # Environment variables are ignored in interactive mode.
        @mock_inputs({"password": "cmd_password"})
        def test(self):
            call_command(
                "createsuperuser",
                interactive=True,
                username="cmd_superuser",
                email="[email protected]",
                stdin=MockTTY(),
                verbosity=0,
            )
            user = User.objects.get(username="cmd_superuser")
            self.assertEqual(user.email, "[email protected]")
            # The prompted password wins over DJANGO_SUPERUSER_PASSWORD.
            self.assertTrue(user.check_password("cmd_password"))
        test(self)
class MultiDBCreatesuperuserTestCase(TestCase):
    """Tests for createsuperuser operating on a non-default database."""
    databases = {"default", "other"}
    def test_createsuperuser_command_with_database_option(self):
        """
        createsuperuser --database should operate on the specified DB.
        """
        new_io = StringIO()
        call_command(
            "createsuperuser",
            interactive=False,
            username="joe",
            email="[email protected]",
            database="other",
            stdout=new_io,
        )
        command_output = new_io.getvalue().strip()
        self.assertEqual(command_output, "Superuser created successfully.")
        user = User.objects.using("other").get(username="joe")
        self.assertEqual(user.email, "[email protected]")
    def test_createsuperuser_command_suggested_username_with_database_option(self):
        """The suggested default username is computed against --database."""
        default_username = get_default_username(database="other")
        qs = User.objects.using("other")
        # An empty username input accepts the suggested default.
        @mock_inputs({"password": "nopasswd", "username": "", "email": ""})
        def test_other_create_with_suggested_username(self):
            call_command(
                "createsuperuser",
                interactive=True,
                stdin=MockTTY(),
                verbosity=0,
                database="other",
            )
            self.assertIs(qs.filter(username=default_username).exists(), True)
        test_other_create_with_suggested_username(self)
        # An explicit username input overrides the suggestion.
        @mock_inputs({"password": "nopasswd", "Username: ": "other", "email": ""})
        def test_other_no_suggestion(self):
            call_command(
                "createsuperuser",
                interactive=True,
                stdin=MockTTY(),
                verbosity=0,
                database="other",
            )
            self.assertIs(qs.filter(username="other").exists(), True)
        test_other_no_suggestion(self)
class CreatePermissionsTests(TestCase):
    """Tests for the create_permissions() post-migrate handler."""
    def setUp(self):
        # Save Meta options mutated by the tests so tearDown can restore them.
        self._original_permissions = Permission._meta.permissions[:]
        self._original_default_permissions = Permission._meta.default_permissions
        self.app_config = apps.get_app_config("auth")
    def tearDown(self):
        Permission._meta.permissions = self._original_permissions
        Permission._meta.default_permissions = self._original_default_permissions
        ContentType.objects.clear_cache()
    def test_default_permissions(self):
        permission_content_type = ContentType.objects.get_by_natural_key(
            "auth", "permission"
        )
        Permission._meta.permissions = [
            ("my_custom_permission", "Some permission"),
        ]
        create_permissions(self.app_config, verbosity=0)
        # view/add/change/delete permission by default + custom permission
        self.assertEqual(
            Permission.objects.filter(
                content_type=permission_content_type,
            ).count(),
            5,
        )
        Permission.objects.filter(content_type=permission_content_type).delete()
        Permission._meta.default_permissions = []
        create_permissions(self.app_config, verbosity=0)
        # custom permission only since default permissions is empty
        self.assertEqual(
            Permission.objects.filter(
                content_type=permission_content_type,
            ).count(),
            1,
        )
    def test_unavailable_models(self):
        """
        #24075 - Permissions shouldn't be created or deleted if the ContentType
        or Permission models aren't available.
        """
        state = migrations.state.ProjectState()
        # Unavailable contenttypes.ContentType
        with self.assertNumQueries(0):
            create_permissions(self.app_config, verbosity=0, apps=state.apps)
        # Unavailable auth.Permission
        state = migrations.state.ProjectState(real_apps={"contenttypes"})
        with self.assertNumQueries(0):
            create_permissions(self.app_config, verbosity=0, apps=state.apps)
    def test_create_permissions_checks_contenttypes_created(self):
        """
        `post_migrate` handler ordering isn't guaranteed. Simulate a case
        where create_permissions() is called before create_contenttypes().
        """
        # Warm the manager cache.
        ContentType.objects.get_for_model(Group)
        # Apply a deletion as if e.g. a database 'flush' had been executed.
        ContentType.objects.filter(app_label="auth", model="group").delete()
        # This fails with a foreign key constraint without the fix.
        create_permissions(apps.get_app_config("auth"), interactive=False, verbosity=0)
    def test_permission_with_proxy_content_type_created(self):
        """
        A proxy model's permissions use its own content type rather than the
        content type of the concrete model.
        """
        opts = UserProxy._meta
        codename = get_permission_codename("add", opts)
        self.assertTrue(
            Permission.objects.filter(
                content_type__model=opts.model_name,
                content_type__app_label=opts.app_label,
                codename=codename,
            ).exists()
        )
|
325d535d369d51ca952ae4d6d8c160f2dba1d6aaebad84b8d053ea8e2e4816d4 | import sys
from datetime import date
from unittest import mock
from django.contrib.auth import (
BACKEND_SESSION_KEY,
SESSION_KEY,
_clean_credentials,
authenticate,
get_user,
signals,
)
from django.contrib.auth.backends import BaseBackend, ModelBackend
from django.contrib.auth.hashers import MD5PasswordHasher
from django.contrib.auth.models import AnonymousUser, Group, Permission, User
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ImproperlyConfigured, PermissionDenied
from django.http import HttpRequest
from django.test import (
RequestFactory,
SimpleTestCase,
TestCase,
modify_settings,
override_settings,
)
from django.views.debug import technical_500_response
from django.views.decorators.debug import sensitive_variables
from .models import (
CustomPermissionsUser,
CustomUser,
CustomUserWithoutIsActiveField,
ExtensionUser,
UUIDUser,
)
class SimpleBackend(BaseBackend):
    """Stub backend returning fixed permission lists, for delegation tests."""
    def get_user_permissions(self, user_obj, obj=None):
        return ["user_perm"]
    def get_group_permissions(self, user_obj, obj=None):
        return ["group_perm"]
@override_settings(
    AUTHENTICATION_BACKENDS=["auth_tests.test_auth_backends.SimpleBackend"]
)
class BaseBackendTest(TestCase):
    """User permission methods delegate to the configured BaseBackend."""
    @classmethod
    def setUpTestData(cls):
        cls.user = User.objects.create_user("test", "[email protected]", "test")
    def test_get_user_permissions(self):
        self.assertEqual(self.user.get_user_permissions(), {"user_perm"})
    def test_get_group_permissions(self):
        self.assertEqual(self.user.get_group_permissions(), {"group_perm"})
    def test_get_all_permissions(self):
        self.assertEqual(self.user.get_all_permissions(), {"user_perm", "group_perm"})
    def test_has_perm(self):
        self.assertIs(self.user.has_perm("user_perm"), True)
        self.assertIs(self.user.has_perm("group_perm"), True)
        self.assertIs(self.user.has_perm("other_perm", TestObj()), False)
    def test_has_perms_perm_list_invalid(self):
        # A bare string or arbitrary object isn't accepted as a perm list.
        msg = "perm_list must be an iterable of permissions."
        with self.assertRaisesMessage(ValueError, msg):
            self.user.has_perms("user_perm")
        with self.assertRaisesMessage(ValueError, msg):
            self.user.has_perms(object())
class CountingMD5PasswordHasher(MD5PasswordHasher):
    """Hasher that counts how many times it computes a hash."""
    # Class-level counter shared by all instances; reset it in each test.
    calls = 0
    def encode(self, *args, **kwargs):
        type(self).calls += 1
        return super().encode(*args, **kwargs)
class BaseModelBackendTest:
    """
    A base class for tests that need to validate the ModelBackend
    with different User models. Subclasses should define a class
    level UserModel attribute, and a create_users() method to
    construct two users for test purposes.
    """
    backend = "django.contrib.auth.backends.ModelBackend"
    def setUp(self):
        # Append the backend under test for the duration of each test.
        self.patched_settings = modify_settings(
            AUTHENTICATION_BACKENDS={"append": self.backend},
        )
        self.patched_settings.enable()
        self.create_users()
    def tearDown(self):
        self.patched_settings.disable()
        # The custom_perms test messes with ContentTypes, which will
        # be cached; flush the cache to ensure there are no side effects
        # Refs #14975, #14925
        ContentType.objects.clear_cache()
    def test_has_perm(self):
        user = self.UserModel._default_manager.get(pk=self.user.pk)
        self.assertIs(user.has_perm("auth.test"), False)
        user.is_staff = True
        user.save()
        self.assertIs(user.has_perm("auth.test"), False)
        user.is_superuser = True
        user.save()
        self.assertIs(user.has_perm("auth.test"), True)
        user.is_staff = True
        user.is_superuser = True
        user.is_active = False
        user.save()
        # Even a superuser has no permissions once deactivated.
        self.assertIs(user.has_perm("auth.test"), False)
    def test_custom_perms(self):
        user = self.UserModel._default_manager.get(pk=self.user.pk)
        content_type = ContentType.objects.get_for_model(Group)
        perm = Permission.objects.create(
            name="test", content_type=content_type, codename="test"
        )
        user.user_permissions.add(perm)
        # reloading user to purge the _perm_cache
        user = self.UserModel._default_manager.get(pk=self.user.pk)
        self.assertEqual(user.get_all_permissions(), {"auth.test"})
        self.assertEqual(user.get_user_permissions(), {"auth.test"})
        self.assertEqual(user.get_group_permissions(), set())
        # App label matching is case-sensitive.
        self.assertIs(user.has_module_perms("Group"), False)
        self.assertIs(user.has_module_perms("auth"), True)
        perm = Permission.objects.create(
            name="test2", content_type=content_type, codename="test2"
        )
        user.user_permissions.add(perm)
        perm = Permission.objects.create(
            name="test3", content_type=content_type, codename="test3"
        )
        user.user_permissions.add(perm)
        user = self.UserModel._default_manager.get(pk=self.user.pk)
        expected_user_perms = {"auth.test2", "auth.test", "auth.test3"}
        self.assertEqual(user.get_all_permissions(), expected_user_perms)
        self.assertIs(user.has_perm("test"), False)
        self.assertIs(user.has_perm("auth.test"), True)
        self.assertIs(user.has_perms(["auth.test2", "auth.test3"]), True)
        perm = Permission.objects.create(
            name="test_group", content_type=content_type, codename="test_group"
        )
        group = Group.objects.create(name="test_group")
        group.permissions.add(perm)
        user.groups.add(group)
        user = self.UserModel._default_manager.get(pk=self.user.pk)
        # Group permissions are reported separately from user permissions
        # but included in the combined set.
        self.assertEqual(
            user.get_all_permissions(), {*expected_user_perms, "auth.test_group"}
        )
        self.assertEqual(user.get_user_permissions(), expected_user_perms)
        self.assertEqual(user.get_group_permissions(), {"auth.test_group"})
        self.assertIs(user.has_perms(["auth.test3", "auth.test_group"]), True)
        user = AnonymousUser()
        self.assertIs(user.has_perm("test"), False)
        self.assertIs(user.has_perms(["auth.test2", "auth.test3"]), False)
    def test_has_no_object_perm(self):
        """Regressiontest for #12462"""
        user = self.UserModel._default_manager.get(pk=self.user.pk)
        content_type = ContentType.objects.get_for_model(Group)
        perm = Permission.objects.create(
            name="test", content_type=content_type, codename="test"
        )
        user.user_permissions.add(perm)
        # ModelBackend grants no object-level permissions.
        self.assertIs(user.has_perm("auth.test", "object"), False)
        self.assertEqual(user.get_all_permissions("object"), set())
        self.assertIs(user.has_perm("auth.test"), True)
        self.assertEqual(user.get_all_permissions(), {"auth.test"})
    def test_anonymous_has_no_permissions(self):
        """
        #17903 -- Anonymous users shouldn't have permissions in
        ModelBackend.get_(all|user|group)_permissions().
        """
        backend = ModelBackend()
        user = self.UserModel._default_manager.get(pk=self.user.pk)
        content_type = ContentType.objects.get_for_model(Group)
        user_perm = Permission.objects.create(
            name="test", content_type=content_type, codename="test_user"
        )
        group_perm = Permission.objects.create(
            name="test2", content_type=content_type, codename="test_group"
        )
        user.user_permissions.add(user_perm)
        group = Group.objects.create(name="test_group")
        user.groups.add(group)
        group.permissions.add(group_perm)
        self.assertEqual(
            backend.get_all_permissions(user), {"auth.test_user", "auth.test_group"}
        )
        self.assertEqual(backend.get_user_permissions(user), {"auth.test_user"})
        self.assertEqual(backend.get_group_permissions(user), {"auth.test_group"})
        # Force the user to look anonymous; all permission sets become empty.
        with mock.patch.object(self.UserModel, "is_anonymous", True):
            self.assertEqual(backend.get_all_permissions(user), set())
            self.assertEqual(backend.get_user_permissions(user), set())
            self.assertEqual(backend.get_group_permissions(user), set())
    def test_inactive_has_no_permissions(self):
        """
        #17903 -- Inactive users shouldn't have permissions in
        ModelBackend.get_(all|user|group)_permissions().
        """
        backend = ModelBackend()
        user = self.UserModel._default_manager.get(pk=self.user.pk)
        content_type = ContentType.objects.get_for_model(Group)
        user_perm = Permission.objects.create(
            name="test", content_type=content_type, codename="test_user"
        )
        group_perm = Permission.objects.create(
            name="test2", content_type=content_type, codename="test_group"
        )
        user.user_permissions.add(user_perm)
        group = Group.objects.create(name="test_group")
        user.groups.add(group)
        group.permissions.add(group_perm)
        self.assertEqual(
            backend.get_all_permissions(user), {"auth.test_user", "auth.test_group"}
        )
        self.assertEqual(backend.get_user_permissions(user), {"auth.test_user"})
        self.assertEqual(backend.get_group_permissions(user), {"auth.test_group"})
        user.is_active = False
        user.save()
        self.assertEqual(backend.get_all_permissions(user), set())
        self.assertEqual(backend.get_user_permissions(user), set())
        self.assertEqual(backend.get_group_permissions(user), set())
    def test_get_all_superuser_permissions(self):
        """A superuser has all permissions. Refs #14795."""
        user = self.UserModel._default_manager.get(pk=self.superuser.pk)
        self.assertEqual(len(user.get_all_permissions()), len(Permission.objects.all()))
    @override_settings(
        PASSWORD_HASHERS=["auth_tests.test_auth_backends.CountingMD5PasswordHasher"]
    )
    def test_authentication_timing(self):
        """Hasher is run once regardless of whether the user exists. Refs #20760."""
        # Re-set the password, because this tests overrides PASSWORD_HASHERS
        self.user.set_password("test")
        self.user.save()
        CountingMD5PasswordHasher.calls = 0
        username = getattr(self.user, self.UserModel.USERNAME_FIELD)
        authenticate(username=username, password="test")
        self.assertEqual(CountingMD5PasswordHasher.calls, 1)
        CountingMD5PasswordHasher.calls = 0
        # A missing user still costs exactly one hash (timing-attack defense).
        authenticate(username="no_such_user", password="test")
        self.assertEqual(CountingMD5PasswordHasher.calls, 1)
    @override_settings(
        PASSWORD_HASHERS=["auth_tests.test_auth_backends.CountingMD5PasswordHasher"]
    )
    def test_authentication_without_credentials(self):
        # Missing username or password short-circuits: no queries, no hashing.
        CountingMD5PasswordHasher.calls = 0
        for credentials in (
            {},
            {"username": getattr(self.user, self.UserModel.USERNAME_FIELD)},
            {"password": "test"},
        ):
            with self.subTest(credentials=credentials):
                with self.assertNumQueries(0):
                    authenticate(**credentials)
                self.assertEqual(CountingMD5PasswordHasher.calls, 0)
class ModelBackendTest(BaseModelBackendTest, TestCase):
    """
    Tests for the ModelBackend using the default User model.
    """
    UserModel = User
    user_credentials = {"username": "test", "password": "test"}
    def create_users(self):
        # Required by BaseModelBackendTest: one regular user, one superuser.
        self.user = User.objects.create_user(
            email="[email protected]", **self.user_credentials
        )
        self.superuser = User.objects.create_superuser(
            username="test2",
            email="[email protected]",
            password="test",
        )
    def test_authenticate_inactive(self):
        """
        An inactive user can't authenticate.
        """
        self.assertEqual(authenticate(**self.user_credentials), self.user)
        self.user.is_active = False
        self.user.save()
        self.assertIsNone(authenticate(**self.user_credentials))
    @override_settings(AUTH_USER_MODEL="auth_tests.CustomUserWithoutIsActiveField")
    def test_authenticate_user_without_is_active_field(self):
        """
        A custom user without an `is_active` field is allowed to authenticate.
        """
        user = CustomUserWithoutIsActiveField.objects._create_user(
            username="test",
            email="[email protected]",
            password="test",
        )
        self.assertEqual(authenticate(username="test", password="test"), user)
@override_settings(AUTH_USER_MODEL="auth_tests.ExtensionUser")
class ExtensionUserModelBackendTest(BaseModelBackendTest, TestCase):
    """
    Tests for the ModelBackend using the custom ExtensionUser model.
    This isn't a perfect test, because both the User and ExtensionUser are
    synchronized to the database, which wouldn't ordinary happen in
    production. As a result, it doesn't catch errors caused by the non-
    existence of the User table.
    The specific problem is queries on .filter(groups__user) et al, which
    makes an implicit assumption that the user model is called 'User'. In
    production, the auth.User table won't exist, so the requested join
    won't exist either; in testing, the auth.User *does* exist, and
    so does the join. However, the join table won't contain any useful
    data; for testing, we check that the data we expect actually does exist.
    """
    UserModel = ExtensionUser
    def create_users(self):
        # ExtensionUser adds a required date_of_birth field.
        self.user = ExtensionUser._default_manager.create_user(
            username="test",
            email="[email protected]",
            password="test",
            date_of_birth=date(2006, 4, 25),
        )
        self.superuser = ExtensionUser._default_manager.create_superuser(
            username="test2",
            email="[email protected]",
            password="test",
            date_of_birth=date(1976, 11, 8),
        )
@override_settings(AUTH_USER_MODEL="auth_tests.CustomPermissionsUser")
class CustomPermissionsUserModelBackendTest(BaseModelBackendTest, TestCase):
    """
    Tests for the ModelBackend using the CustomPermissionsUser model.
    As with the ExtensionUser test, this isn't a perfect test, because both
    the User and CustomPermissionsUser are synchronized to the database,
    which wouldn't ordinary happen in production.
    """
    UserModel = CustomPermissionsUser
    def create_users(self):
        # CustomPermissionsUser uses email as its username field.
        self.user = CustomPermissionsUser._default_manager.create_user(
            email="[email protected]", password="test", date_of_birth=date(2006, 4, 25)
        )
        self.superuser = CustomPermissionsUser._default_manager.create_superuser(
            email="[email protected]", password="test", date_of_birth=date(1976, 11, 8)
        )
@override_settings(AUTH_USER_MODEL="auth_tests.CustomUser")
class CustomUserModelBackendAuthenticateTest(TestCase):
    """
    The model backend can accept a credentials kwarg labeled with
    custom user model's USERNAME_FIELD.
    """
    def test_authenticate(self):
        test_user = CustomUser._default_manager.create_user(
            email="[email protected]", password="test", date_of_birth=date(2006, 4, 25)
        )
        # CustomUser's USERNAME_FIELD is email, so 'email' is the credential.
        authenticated_user = authenticate(email="[email protected]", password="test")
        self.assertEqual(test_user, authenticated_user)
@override_settings(AUTH_USER_MODEL="auth_tests.UUIDUser")
class UUIDUserTests(TestCase):
    """Login flow with a user model whose primary key is a UUID."""
    def test_login(self):
        """
        A custom user with a UUID primary key should be able to login.
        """
        user = UUIDUser.objects.create_user(username="uuid", password="test")
        self.assertTrue(self.client.login(username="uuid", password="test"))
        # The session stores the UUID pk and resolves back to the same user.
        self.assertEqual(
            UUIDUser.objects.get(pk=self.client.session[SESSION_KEY]), user
        )
class TestObj:
    """Marker object used by the row-level (object) permission tests."""
    pass
class SimpleRowlevelBackend:
    """Toy backend that grants permissions only at the object level.

    Global checks (no ``obj``) deliberately return ``None``/``[]`` so other
    configured backends get a chance to answer.
    """
    def has_perm(self, user, perm, obj=None):
        # No object: this backend has no opinion on global permissions.
        if not obj:
            return
        if isinstance(obj, TestObj):
            if user.username == "test2":
                return True
            if user.is_anonymous and perm == "anon":
                return True
            if not user.is_active and perm == "inactive":
                return True
        return False
    def has_module_perms(self, user, app_label):
        # Only "app1" is recognized, for anonymous or active users.
        return (user.is_anonymous or user.is_active) and app_label == "app1"
    def get_all_permissions(self, user, obj=None):
        # No object: no row-level permissions to report.
        if not obj:
            return []
        if not isinstance(obj, TestObj):
            return ["none"]
        if user.is_anonymous:
            return ["anon"]
        return ["simple", "advanced"] if user.username == "test2" else ["simple"]
    def get_group_permissions(self, user, obj=None):
        # No object: this backend has no opinion on global permissions.
        if not obj:
            return
        if not isinstance(obj, TestObj):
            return ["none"]
        group_names = {group.name for group in user.groups.all()}
        return ["group_perm"] if "test_group" in group_names else ["none"]
@modify_settings(
    AUTHENTICATION_BACKENDS={
        "append": "auth_tests.test_auth_backends.SimpleRowlevelBackend",
    }
)
class RowlevelBackendTest(TestCase):
    """
    Tests for auth backend that supports object level permissions
    """
    @classmethod
    def setUpTestData(cls):
        cls.user1 = User.objects.create_user("test", "[email protected]", "test")
        cls.user2 = User.objects.create_user("test2", "[email protected]", "test")
        cls.user3 = User.objects.create_user("test3", "[email protected]", "test")
    def tearDown(self):
        # The get_group_permissions test messes with ContentTypes, which will
        # be cached; flush the cache to ensure there are no side effects
        # Refs #14975, #14925
        ContentType.objects.clear_cache()
    def test_has_perm(self):
        self.assertIs(self.user1.has_perm("perm", TestObj()), False)
        self.assertIs(self.user2.has_perm("perm", TestObj()), True)
        # Without an object, the row-level backend grants nothing.
        self.assertIs(self.user2.has_perm("perm"), False)
        self.assertIs(self.user2.has_perms(["simple", "advanced"], TestObj()), True)
        self.assertIs(self.user3.has_perm("perm", TestObj()), False)
        self.assertIs(self.user3.has_perm("anon", TestObj()), False)
        self.assertIs(self.user3.has_perms(["simple", "advanced"], TestObj()), False)
    def test_get_all_permissions(self):
        self.assertEqual(self.user1.get_all_permissions(TestObj()), {"simple"})
        self.assertEqual(
            self.user2.get_all_permissions(TestObj()), {"simple", "advanced"}
        )
        self.assertEqual(self.user2.get_all_permissions(), set())
    def test_get_group_permissions(self):
        group = Group.objects.create(name="test_group")
        self.user3.groups.add(group)
        self.assertEqual(self.user3.get_group_permissions(TestObj()), {"group_perm"})
@override_settings(
    AUTHENTICATION_BACKENDS=["auth_tests.test_auth_backends.SimpleRowlevelBackend"],
)
class AnonymousUserBackendTest(SimpleTestCase):
    """
    Tests for AnonymousUser delegating to backend.
    """
    def setUp(self):
        self.user1 = AnonymousUser()
    def test_has_perm(self):
        self.assertIs(self.user1.has_perm("perm", TestObj()), False)
        # SimpleRowlevelBackend grants "anon" to anonymous users.
        self.assertIs(self.user1.has_perm("anon", TestObj()), True)
    def test_has_perms(self):
        self.assertIs(self.user1.has_perms(["anon"], TestObj()), True)
        self.assertIs(self.user1.has_perms(["anon", "perm"], TestObj()), False)
    def test_has_perms_perm_list_invalid(self):
        msg = "perm_list must be an iterable of permissions."
        with self.assertRaisesMessage(ValueError, msg):
            self.user1.has_perms("perm")
        with self.assertRaisesMessage(ValueError, msg):
            self.user1.has_perms(object())
    def test_has_module_perms(self):
        self.assertIs(self.user1.has_module_perms("app1"), True)
        self.assertIs(self.user1.has_module_perms("app2"), False)
    def test_get_all_permissions(self):
        self.assertEqual(self.user1.get_all_permissions(TestObj()), {"anon"})
@override_settings(AUTHENTICATION_BACKENDS=[])
class NoBackendsTest(TestCase):
    """
    An appropriate error is raised if no auth backends are provided.
    """
    @classmethod
    def setUpTestData(cls):
        cls.user = User.objects.create_user("test", "[email protected]", "test")
    def test_raises_exception(self):
        msg = (
            "No authentication backends have been defined. "
            "Does AUTHENTICATION_BACKENDS contain anything?"
        )
        with self.assertRaisesMessage(ImproperlyConfigured, msg):
            self.user.has_perm(("perm", TestObj()))
@override_settings(
    AUTHENTICATION_BACKENDS=["auth_tests.test_auth_backends.SimpleRowlevelBackend"]
)
class InActiveUserBackendTest(TestCase):
    """
    Tests for an inactive user
    """
    @classmethod
    def setUpTestData(cls):
        cls.user1 = User.objects.create_user("test", "[email protected]", "test")
        cls.user1.is_active = False
        cls.user1.save()
    def test_has_perm(self):
        self.assertIs(self.user1.has_perm("perm", TestObj()), False)
        # SimpleRowlevelBackend grants "inactive" to inactive users.
        self.assertIs(self.user1.has_perm("inactive", TestObj()), True)
    def test_has_module_perms(self):
        self.assertIs(self.user1.has_module_perms("app1"), False)
        self.assertIs(self.user1.has_module_perms("app2"), False)
class PermissionDeniedBackend:
    """
    Always raises PermissionDenied in `authenticate`, `has_perm` and `has_module_perms`.
    """
    def authenticate(self, request, username=None, password=None):
        raise PermissionDenied
    def has_perm(self, user_obj, perm, obj=None):
        raise PermissionDenied
    def has_module_perms(self, user_obj, app_label):
        raise PermissionDenied
class PermissionDeniedBackendTest(TestCase):
    """
    Other backends are not checked once a backend raises PermissionDenied
    """
    backend = "auth_tests.test_auth_backends.PermissionDeniedBackend"
    @classmethod
    def setUpTestData(cls):
        cls.user1 = User.objects.create_user("test", "[email protected]", "test")
    def setUp(self):
        # Record credentials from user_login_failed for assertions.
        self.user_login_failed = []
        signals.user_login_failed.connect(self.user_login_failed_listener)
    def tearDown(self):
        signals.user_login_failed.disconnect(self.user_login_failed_listener)
    def user_login_failed_listener(self, sender, credentials, **kwargs):
        self.user_login_failed.append(credentials)
    @modify_settings(AUTHENTICATION_BACKENDS={"prepend": backend})
    def test_permission_denied(self):
        "user is not authenticated after a backend raises permission denied #2550"
        self.assertIsNone(authenticate(username="test", password="test"))
        # user_login_failed signal is sent.
        self.assertEqual(
            self.user_login_failed,
            [{"password": "********************", "username": "test"}],
        )
    @modify_settings(AUTHENTICATION_BACKENDS={"append": backend})
    def test_authenticates(self):
        # ModelBackend runs first and succeeds, so the denying backend is
        # never reached.
        self.assertEqual(authenticate(username="test", password="test"), self.user1)
    @modify_settings(AUTHENTICATION_BACKENDS={"prepend": backend})
    def test_has_perm_denied(self):
        content_type = ContentType.objects.get_for_model(Group)
        perm = Permission.objects.create(
            name="test", content_type=content_type, codename="test"
        )
        self.user1.user_permissions.add(perm)
        self.assertIs(self.user1.has_perm("auth.test"), False)
        self.assertIs(self.user1.has_module_perms("auth"), False)
    @modify_settings(AUTHENTICATION_BACKENDS={"append": backend})
    def test_has_perm(self):
        content_type = ContentType.objects.get_for_model(Group)
        perm = Permission.objects.create(
            name="test", content_type=content_type, codename="test"
        )
        self.user1.user_permissions.add(perm)
        self.assertIs(self.user1.has_perm("auth.test"), True)
        self.assertIs(self.user1.has_module_perms("auth"), True)
class NewModelBackend(ModelBackend):
    """Distinctly named ModelBackend for backend-removal session tests."""
    pass
class ChangedBackendSettingsTest(TestCase):
    """
    Tests for changes in the settings.AUTHENTICATION_BACKENDS
    """
    backend = "auth_tests.test_auth_backends.NewModelBackend"
    TEST_USERNAME = "test_user"
    TEST_PASSWORD = "test_password"
    TEST_EMAIL = "[email protected]"
    @classmethod
    def setUpTestData(cls):
        User.objects.create_user(cls.TEST_USERNAME, cls.TEST_EMAIL, cls.TEST_PASSWORD)
    @override_settings(AUTHENTICATION_BACKENDS=[backend])
    def test_changed_backend_settings(self):
        """
        Removing a backend configured in AUTHENTICATION_BACKENDS makes already
        logged-in users disconnect.
        """
        # Get a session for the test user
        self.assertTrue(
            self.client.login(
                username=self.TEST_USERNAME,
                password=self.TEST_PASSWORD,
            )
        )
        # Prepare a request object
        request = HttpRequest()
        request.session = self.client.session
        # Remove NewModelBackend
        with self.settings(
            AUTHENTICATION_BACKENDS=["django.contrib.auth.backends.ModelBackend"]
        ):
            # Get the user from the request
            user = get_user(request)
            # Assert that the user retrieval is successful and the user is
            # anonymous as the backend is not longer available.
            self.assertIsNotNone(user)
            self.assertTrue(user.is_anonymous)
class TypeErrorBackend:
    """
    Always raises TypeError.
    """
    # @sensitive_variables hides the password from debug tracebacks.
    @sensitive_variables("password")
    def authenticate(self, request, username=None, password=None):
        raise TypeError
class SkippedBackend:
    """Backend whose authenticate() takes no credentials at all."""
    def authenticate(self):
        # Doesn't accept any credentials so is skipped by authenticate().
        pass
class SkippedBackendWithDecoratedMethod:
    """Like SkippedBackend, but with a decorated authenticate() method."""
    @sensitive_variables()
    def authenticate(self):
        # Doesn't accept any credentials so is skipped by authenticate().
        pass
class AuthenticateTests(TestCase):
    """Tests for django.contrib.auth.authenticate() backend iteration."""
    @classmethod
    def setUpTestData(cls):
        cls.user1 = User.objects.create_user("test", "[email protected]", "test")
    def setUp(self):
        self.sensitive_password = "mypassword"
    @override_settings(
        AUTHENTICATION_BACKENDS=["auth_tests.test_auth_backends.TypeErrorBackend"]
    )
    def test_type_error_raised(self):
        """A TypeError within a backend is propagated properly (#18171)."""
        with self.assertRaises(TypeError):
            authenticate(username="test", password="test")
    @override_settings(
        AUTHENTICATION_BACKENDS=["auth_tests.test_auth_backends.TypeErrorBackend"]
    )
    def test_authenticate_sensitive_variables(self):
        # The debug 500 page must not leak the password from the traceback.
        try:
            authenticate(username="testusername", password=self.sensitive_password)
        except TypeError:
            exc_info = sys.exc_info()
        rf = RequestFactory()
        response = technical_500_response(rf.get("/"), *exc_info)
        self.assertNotContains(response, self.sensitive_password, status_code=500)
        self.assertContains(response, "TypeErrorBackend", status_code=500)
        self.assertContains(
            response,
            '<tr><td>credentials</td><td class="code">'
            "<pre>'********************'</pre></td></tr>",
            html=True,
            status_code=500,
        )
    def test_clean_credentials_sensitive_variables(self):
        try:
            # Passing in a list to cause an exception
            _clean_credentials([1, self.sensitive_password])
        except TypeError:
            exc_info = sys.exc_info()
        rf = RequestFactory()
        response = technical_500_response(rf.get("/"), *exc_info)
        self.assertNotContains(response, self.sensitive_password, status_code=500)
        self.assertContains(
            response,
            '<tr><td>credentials</td><td class="code">'
            "<pre>'********************'</pre></td></tr>",
            html=True,
            status_code=500,
        )
    @override_settings(
        AUTHENTICATION_BACKENDS=(
            "auth_tests.test_auth_backends.SkippedBackend",
            "django.contrib.auth.backends.ModelBackend",
        )
    )
    def test_skips_backends_without_arguments(self):
        """
        A backend (SkippedBackend) is ignored if it doesn't accept the
        credentials as arguments.
        """
        self.assertEqual(authenticate(username="test", password="test"), self.user1)
    @override_settings(
        AUTHENTICATION_BACKENDS=(
            "auth_tests.test_auth_backends.SkippedBackendWithDecoratedMethod",
            "django.contrib.auth.backends.ModelBackend",
        )
    )
    def test_skips_backends_with_decorated_method(self):
        # Signature inspection must see through the decorator's wrapper.
        self.assertEqual(authenticate(username="test", password="test"), self.user1)
class ImproperlyConfiguredUserModelTest(TestCase):
    """
    An exception from within get_user_model() is propagated and doesn't
    raise an UnboundLocalError (#21439).
    """
    @classmethod
    def setUpTestData(cls):
        cls.user1 = User.objects.create_user("test", "[email protected]", "test")
    def setUp(self):
        self.client.login(username="test", password="test")
    @override_settings(AUTH_USER_MODEL="thismodel.doesntexist")
    def test_does_not_shadow_exception(self):
        # Prepare a request object
        request = HttpRequest()
        request.session = self.client.session
        msg = (
            "AUTH_USER_MODEL refers to model 'thismodel.doesntexist' "
            "that has not been installed"
        )
        with self.assertRaisesMessage(ImproperlyConfigured, msg):
            get_user(request)
class ImportedModelBackend(ModelBackend):
    """ModelBackend re-exported under an alias path (see ImportedBackendTests)."""
    pass
class CustomModelBackend(ModelBackend):
    """Distinctly named ModelBackend for backend-selection tests."""
    pass
class OtherModelBackend(ModelBackend):
    """Second distinctly named ModelBackend for backend-selection tests."""
    pass
class ImportedBackendTests(TestCase):
    """
    #23925 - The backend path added to the session should be the same
    as the one defined in AUTHENTICATION_BACKENDS setting.
    """
    # The alias module path, not the defining module's path.
    backend = "auth_tests.backend_alias.ImportedModelBackend"
    @override_settings(AUTHENTICATION_BACKENDS=[backend])
    def test_backend_path(self):
        username = "username"
        password = "password"
        User.objects.create_user(username, "email", password)
        self.assertTrue(self.client.login(username=username, password=password))
        request = HttpRequest()
        request.session = self.client.session
        self.assertEqual(request.session[BACKEND_SESSION_KEY], self.backend)
class SelectingBackendTests(TestCase):
    """How the test client's _login() selects and records the auth backend."""
    backend = "auth_tests.test_auth_backends.CustomModelBackend"
    other_backend = "auth_tests.test_auth_backends.OtherModelBackend"
    username = "username"
    password = "password"
    def assertBackendInSession(self, backend):
        # Assert the dotted backend path recorded in the client's session.
        request = HttpRequest()
        request.session = self.client.session
        self.assertEqual(request.session[BACKEND_SESSION_KEY], backend)
    @override_settings(AUTHENTICATION_BACKENDS=[backend])
    def test_backend_path_login_without_authenticate_single_backend(self):
        user = User.objects.create_user(self.username, "email", self.password)
        self.client._login(user)
        self.assertBackendInSession(self.backend)
    @override_settings(AUTHENTICATION_BACKENDS=[backend, other_backend])
    def test_backend_path_login_without_authenticate_multiple_backends(self):
        user = User.objects.create_user(self.username, "email", self.password)
        expected_message = (
            "You have multiple authentication backends configured and "
            "therefore must provide the `backend` argument or set the "
            "`backend` attribute on the user."
        )
        with self.assertRaisesMessage(ValueError, expected_message):
            self.client._login(user)
    def test_non_string_backend(self):
        # Passing a class instead of a dotted path raises TypeError.
        user = User.objects.create_user(self.username, "email", self.password)
        expected_message = (
            "backend must be a dotted import path string (got "
            "<class 'django.contrib.auth.backends.ModelBackend'>)."
        )
        with self.assertRaisesMessage(TypeError, expected_message):
            self.client._login(user, backend=ModelBackend)
    @override_settings(AUTHENTICATION_BACKENDS=[backend, other_backend])
    def test_backend_path_login_with_explicit_backends(self):
        user = User.objects.create_user(self.username, "email", self.password)
        self.client._login(user, self.other_backend)
        self.assertBackendInSession(self.other_backend)
@override_settings(
    AUTHENTICATION_BACKENDS=["django.contrib.auth.backends.AllowAllUsersModelBackend"]
)
class AllowAllUsersModelBackendTest(TestCase):
    """
    Inactive users may authenticate with the AllowAllUsersModelBackend.
    """
    user_credentials = {"username": "test", "password": "test"}
    @classmethod
    def setUpTestData(cls):
        # The shared test user is deliberately inactive.
        cls.user = User.objects.create_user(
            email="[email protected]", is_active=False, **cls.user_credentials
        )
    def test_authenticate(self):
        self.assertFalse(self.user.is_active)
        self.assertEqual(authenticate(**self.user_credentials), self.user)
    def test_get_user(self):
        self.client.force_login(self.user)
        request = HttpRequest()
        request.session = self.client.session
        user = get_user(request)
        self.assertEqual(user, self.user)
|
7e3a856ae30988dfc1d279253603b0be6e037f443d9bff49b9a46b93a07c417c | """
Test URLs for auth admins.
"""
from django.contrib import admin
from django.contrib.auth.admin import GroupAdmin, UserAdmin
from django.contrib.auth.models import Group, User
from django.contrib.auth.urls import urlpatterns
from django.urls import path
# Create a silo'd admin site for just the user/group admins.
site = admin.AdminSite(name="auth_test_admin")
site.register(User, UserAdmin)
site.register(Group, GroupAdmin)
# Serve the silo'd admin site alongside the stock auth URL patterns.
urlpatterns += [
    path("admin/", site.urls),
]
|
62835e8230e40023ef11752f072af31645f6b72fb46bda18795855bff883fad3 | import re
from django.contrib.auth.views import (
INTERNAL_RESET_SESSION_TOKEN,
PasswordResetConfirmView,
)
from django.test import Client
def extract_token_from_url(url):
    """Return the token segment of a password reset URL, or None."""
    match = re.search(r"/reset/.*/(.+?)/", url)
    return match[1] if match else None
class PasswordResetConfirmClient(Client):
    """
    Test client that mimics PasswordResetConfirmView's token handling: the
    token from a reset URL is stashed in the session and the URL is rewritten
    to use the view's placeholder token. For example, a GET of
    '/reset/bla/my-token/' stores 'my-token' in the session and requests
    '/reset/bla/set-password/' instead.
    """
    reset_url_token = PasswordResetConfirmView.reset_url_token
    def _get_password_reset_confirm_redirect_url(self, url):
        token = extract_token_from_url(url)
        if not token:
            return url
        # Stash the real token in the session, as the view would on redirect.
        session = self.session
        session[INTERNAL_RESET_SESSION_TOKEN] = token
        session.save()
        # Request the placeholder URL that the view redirects to.
        return url.replace(token, self.reset_url_token)
    def get(self, path, *args, **kwargs):
        return super().get(
            self._get_password_reset_confirm_redirect_url(path), *args, **kwargs
        )
    def post(self, path, *args, **kwargs):
        return super().post(
            self._get_password_reset_confirm_redirect_url(path), *args, **kwargs
        )
|
d16ee5e693d61797138c37a76314d788d4a1a2fb943341d2103aabcab385173e | # For testing that auth backends can be referenced using a convenience import
from .test_auth_backends import ImportedModelBackend
__all__ = ["ImportedModelBackend"]
|
578c9af91beeff1c2596d02671b9361f8388a19e3a1d4e66efdb7449836e1e05 | from datetime import datetime, timedelta
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.auth.tokens import PasswordResetTokenGenerator
from django.core.exceptions import ImproperlyConfigured
from django.test import TestCase
from django.test.utils import override_settings
from .models import CustomEmailField
class MockedPasswordResetTokenGenerator(PasswordResetTokenGenerator):
    """Token generator whose notion of "now" is frozen at construction."""
    def __init__(self, now):
        self._frozen_now = now
        super().__init__()
    def _now(self):
        # Replace the parent's clock with the injected timestamp.
        return self._frozen_now
class TokenGeneratorTest(TestCase):
    """Tests for PasswordResetTokenGenerator's make_token()/check_token()."""
    def test_make_token(self):
        """A freshly made token validates for the same user."""
        user = User.objects.create_user("tokentestuser", "[email protected]", "testpw")
        p0 = PasswordResetTokenGenerator()
        tk1 = p0.make_token(user)
        self.assertIs(p0.check_token(user, tk1), True)
    def test_10265(self):
        """
        The token generated for a user created in the same request
        will work correctly.
        """
        user = User.objects.create_user("comebackkid", "[email protected]", "testpw")
        user_reload = User.objects.get(username="comebackkid")
        p0 = MockedPasswordResetTokenGenerator(datetime.now())
        tk1 = p0.make_token(user)
        tk2 = p0.make_token(user_reload)
        self.assertEqual(tk1, tk2)
    def test_token_with_different_email(self):
        """Updating the user email address invalidates the token."""
        tests = [
            (CustomEmailField, None),
            (CustomEmailField, "[email protected]"),
            (User, "[email protected]"),
        ]
        for model, email in tests:
            with self.subTest(model=model.__qualname__, email=email):
                user = model.objects.create_user(
                    "changeemailuser",
                    email=email,
                    password="testpw",
                )
                p0 = PasswordResetTokenGenerator()
                tk1 = p0.make_token(user)
                self.assertIs(p0.check_token(user, tk1), True)
                setattr(user, user.get_email_field_name(), "[email protected]")
                user.save()
                self.assertIs(p0.check_token(user, tk1), False)
    def test_timeout(self):
        """The token is valid after n seconds, but no greater."""
        # Uses a mocked version of PasswordResetTokenGenerator so we can change
        # the value of 'now'.
        user = User.objects.create_user("tokentestuser", "[email protected]", "testpw")
        now = datetime.now()
        p0 = MockedPasswordResetTokenGenerator(now)
        tk1 = p0.make_token(user)
        p1 = MockedPasswordResetTokenGenerator(
            now + timedelta(seconds=settings.PASSWORD_RESET_TIMEOUT)
        )
        self.assertIs(p1.check_token(user, tk1), True)
        p2 = MockedPasswordResetTokenGenerator(
            now + timedelta(seconds=(settings.PASSWORD_RESET_TIMEOUT + 1))
        )
        self.assertIs(p2.check_token(user, tk1), False)
        # A changed PASSWORD_RESET_TIMEOUT is honored at check time.
        with self.settings(PASSWORD_RESET_TIMEOUT=60 * 60):
            p3 = MockedPasswordResetTokenGenerator(
                now + timedelta(seconds=settings.PASSWORD_RESET_TIMEOUT)
            )
            self.assertIs(p3.check_token(user, tk1), True)
            p4 = MockedPasswordResetTokenGenerator(
                now + timedelta(seconds=(settings.PASSWORD_RESET_TIMEOUT + 1))
            )
            self.assertIs(p4.check_token(user, tk1), False)
    def test_check_token_with_nonexistent_token_and_user(self):
        """check_token() returns False rather than raising for None args."""
        user = User.objects.create_user("tokentestuser", "[email protected]", "testpw")
        p0 = PasswordResetTokenGenerator()
        tk1 = p0.make_token(user)
        self.assertIs(p0.check_token(None, tk1), False)
        self.assertIs(p0.check_token(user, None), False)
    def test_token_with_different_secret(self):
        """
        A valid token can be created with a secret other than SECRET_KEY by
        using the PasswordResetTokenGenerator.secret attribute.
        """
        user = User.objects.create_user("tokentestuser", "[email protected]", "testpw")
        new_secret = "abcdefghijkl"
        # Create and check a token with a different secret.
        p0 = PasswordResetTokenGenerator()
        p0.secret = new_secret
        tk0 = p0.make_token(user)
        self.assertIs(p0.check_token(user, tk0), True)
        # Create and check a token with the default secret.
        p1 = PasswordResetTokenGenerator()
        self.assertEqual(p1.secret, settings.SECRET_KEY)
        self.assertNotEqual(p1.secret, new_secret)
        tk1 = p1.make_token(user)
        # Tokens created with a different secret don't validate.
        self.assertIs(p0.check_token(user, tk1), False)
        self.assertIs(p1.check_token(user, tk0), False)
    def test_token_with_different_secret_subclass(self):
        """A subclass may override the secret via a class attribute."""
        class CustomPasswordResetTokenGenerator(PasswordResetTokenGenerator):
            secret = "test-secret"
        user = User.objects.create_user("tokentestuser", "[email protected]", "testpw")
        custom_password_generator = CustomPasswordResetTokenGenerator()
        tk_custom = custom_password_generator.make_token(user)
        self.assertIs(custom_password_generator.check_token(user, tk_custom), True)
        default_password_generator = PasswordResetTokenGenerator()
        self.assertNotEqual(
            custom_password_generator.secret,
            default_password_generator.secret,
        )
        self.assertEqual(default_password_generator.secret, settings.SECRET_KEY)
        # Tokens created with a different secret don't validate.
        tk_default = default_password_generator.make_token(user)
        self.assertIs(custom_password_generator.check_token(user, tk_default), False)
        self.assertIs(default_password_generator.check_token(user, tk_custom), False)
    @override_settings(SECRET_KEY="")
    def test_secret_lazy_validation(self):
        """SECRET_KEY isn't validated at construction, only on .secret access."""
        default_token_generator = PasswordResetTokenGenerator()
        msg = "The SECRET_KEY setting must not be empty."
        with self.assertRaisesMessage(ImproperlyConfigured, msg):
            default_token_generator.secret
    def test_check_token_secret_fallbacks(self):
        """A token made with an old secret verifies via secret_fallbacks."""
        user = User.objects.create_user("tokentestuser", "[email protected]", "testpw")
        p1 = PasswordResetTokenGenerator()
        p1.secret = "secret"
        tk = p1.make_token(user)
        p2 = PasswordResetTokenGenerator()
        p2.secret = "newsecret"
        p2.secret_fallbacks = ["secret"]
        self.assertIs(p1.check_token(user, tk), True)
        self.assertIs(p2.check_token(user, tk), True)
    @override_settings(
        SECRET_KEY="secret",
        SECRET_KEY_FALLBACKS=["oldsecret"],
    )
    def test_check_token_secret_key_fallbacks(self):
        """SECRET_KEY_FALLBACKS is honored by a default generator."""
        user = User.objects.create_user("tokentestuser", "[email protected]", "testpw")
        p1 = PasswordResetTokenGenerator()
        p1.secret = "oldsecret"
        tk = p1.make_token(user)
        p2 = PasswordResetTokenGenerator()
        self.assertIs(p2.check_token(user, tk), True)
    @override_settings(
        SECRET_KEY="secret",
        SECRET_KEY_FALLBACKS=["oldsecret"],
    )
    def test_check_token_secret_key_fallbacks_override(self):
        """An explicit empty secret_fallbacks overrides the setting."""
        user = User.objects.create_user("tokentestuser", "[email protected]", "testpw")
        p1 = PasswordResetTokenGenerator()
        p1.secret = "oldsecret"
        tk = p1.make_token(user)
        p2 = PasswordResetTokenGenerator()
        p2.secret_fallbacks = []
        self.assertIs(p2.check_token(user, tk), False)
|
b16bb9819cd73692dec7874b9539816c57fed6c835488fcdc4a31a1ce865e68d | from django.contrib import admin
from django.contrib.auth import get_user_model
from django.contrib.auth.admin import UserAdmin
from django.urls import path
# Admin site dedicated to the custom-user admin tests.
site = admin.AdminSite(name="custom_user_admin")
class CustomUserAdmin(UserAdmin):
    """UserAdmin variant that writes an integer pk into the change log."""
    def log_change(self, request, obj, message):
        # LogEntry.user column doesn't get altered to expect a UUID, so set an
        # integer manually to avoid causing an error.
        original_pk = request.user.pk
        request.user.pk = 1
        super().log_change(request, obj, message)
        # Restore the real pk so the request's user object stays consistent.
        request.user.pk = original_pk
# Register the active user model and expose the admin site's URLs.
site.register(get_user_model(), CustomUserAdmin)
urlpatterns = [
    path("admin/", site.urls),
]
|
a8e901dbb75935616ad039258c9b91ccc82592beff3a6524d38f496cac1e88f0 | from django.contrib import admin
from django.contrib.auth import views
from django.contrib.auth.decorators import login_required, permission_required
from django.contrib.auth.forms import AuthenticationForm
from django.contrib.auth.urls import urlpatterns as auth_urlpatterns
from django.contrib.auth.views import LoginView
from django.contrib.messages.api import info
from django.http import HttpRequest, HttpResponse
from django.shortcuts import render
from django.template import RequestContext, Template
from django.urls import path, re_path, reverse_lazy
from django.views.decorators.cache import never_cache
from django.views.i18n import set_language
class CustomRequestAuthenticationForm(AuthenticationForm):
    """AuthenticationForm asserting it always receives an HttpRequest."""
    def __init__(self, request, *args, **kwargs):
        assert isinstance(request, HttpRequest)
        super().__init__(request, *args, **kwargs)
@never_cache
def remote_user_auth_view(request):
    """Minimal view that echoes the request's authenticated username."""
    template = Template("Username is {{ user }}.")
    return HttpResponse(template.render(RequestContext(request, {})))
def auth_processor_no_attr_access(request):
    # First render a template that does not touch user attributes.
    render(request, "context_processors/auth_attrs_no_access.html")
    # *After* rendering, we check whether the session was accessed
    return render(
        request,
        "context_processors/auth_attrs_test_access.html",
        {"session_accessed": request.session.accessed},
    )
def auth_processor_attr_access(request):
    # Render a template that accesses user attributes, then report whether
    # the session was accessed as a result.
    render(request, "context_processors/auth_attrs_access.html")
    return render(
        request,
        "context_processors/auth_attrs_test_access.html",
        {"session_accessed": request.session.accessed},
    )
def auth_processor_user(request):
    # Renders the template exercising the auth context processor's user value.
    return render(request, "context_processors/auth_attrs_user.html")
def auth_processor_perms(request):
    # Renders the template exercising the perms context variable.
    return render(request, "context_processors/auth_attrs_perms.html")
def auth_processor_perm_in_perms(request):
    # Renders the template exercising "perm in perms" template lookups.
    return render(request, "context_processors/auth_attrs_perm_in_perms.html")
def auth_processor_messages(request):
    # Add a message first so the rendered template has one to display.
    info(request, "Message 1")
    return render(request, "context_processors/auth_attrs_messages.html")
def userpage(request):
    # Stub view used as the target of the named 'userpage' URL pattern.
    pass
@permission_required("unknown.permission")
def permission_required_redirect(request):
    # Stub protected by a permission that no user has (default: redirect).
    pass
@permission_required("unknown.permission", raise_exception=True)
def permission_required_exception(request):
    # Stub protected by a missing permission with raise_exception=True.
    pass
@login_required
@permission_required("unknown.permission", raise_exception=True)
def login_and_permission_required_exception(request):
    # Stub stacking login_required over a raising permission_required.
    pass
class CustomDefaultRedirectURLLoginView(LoginView):
    # LoginView whose default post-login redirect is hardwired to /custom/.
    def get_default_redirect_url(self):
        return "/custom/"
# special urls for auth test cases
urlpatterns = auth_urlpatterns + [
    # Logout variants.
    path(
        "logout/custom_query/", views.LogoutView.as_view(redirect_field_name="follow")
    ),
    path("logout/next_page/", views.LogoutView.as_view(next_page="/somewhere/")),
    path(
        "logout/next_page/named/", views.LogoutView.as_view(next_page="password_reset")
    ),
    path(
        "logout/allowed_hosts/",
        views.LogoutView.as_view(success_url_allowed_hosts={"otherserver"}),
    ),
    path("remote_user/", remote_user_auth_view),
    # Password reset variants.
    path(
        "password_reset_from_email/",
        views.PasswordResetView.as_view(from_email="[email protected]"),
    ),
    path(
        "password_reset_extra_email_context/",
        views.PasswordResetView.as_view(
            extra_email_context={"greeting": "Hello!", "domain": "custom.example.com"},
        ),
    ),
    path(
        "password_reset/custom_redirect/",
        views.PasswordResetView.as_view(success_url="/custom/"),
    ),
    path(
        "password_reset/custom_redirect/named/",
        views.PasswordResetView.as_view(success_url=reverse_lazy("password_reset")),
    ),
    path(
        "password_reset/html_email_template/",
        views.PasswordResetView.as_view(
            html_email_template_name="registration/html_password_reset_email.html"
        ),
    ),
    # Password reset confirmation variants.
    path(
        "reset/custom/<uidb64>/<token>/",
        views.PasswordResetConfirmView.as_view(success_url="/custom/"),
    ),
    path(
        "reset/custom/named/<uidb64>/<token>/",
        views.PasswordResetConfirmView.as_view(
            success_url=reverse_lazy("password_reset")
        ),
    ),
    path(
        "reset/custom/token/<uidb64>/<token>/",
        views.PasswordResetConfirmView.as_view(reset_url_token="set-passwordcustom"),
    ),
    path(
        "reset/post_reset_login/<uidb64>/<token>/",
        views.PasswordResetConfirmView.as_view(post_reset_login=True),
    ),
    path(
        "reset/post_reset_login_custom_backend/<uidb64>/<token>/",
        views.PasswordResetConfirmView.as_view(
            post_reset_login=True,
            post_reset_login_backend=(
                "django.contrib.auth.backends.AllowAllUsersModelBackend"
            ),
        ),
    ),
    path("reset/missing_parameters/", views.PasswordResetConfirmView.as_view()),
    # Password change variants.
    path(
        "password_change/custom/",
        views.PasswordChangeView.as_view(success_url="/custom/"),
    ),
    path(
        "password_change/custom/named/",
        views.PasswordChangeView.as_view(success_url=reverse_lazy("password_reset")),
    ),
    path("login_required/", login_required(views.PasswordResetView.as_view())),
    path(
        "login_required_login_url/",
        login_required(views.PasswordResetView.as_view(), login_url="/somewhere/"),
    ),
    # Context-processor test views.
    path("auth_processor_no_attr_access/", auth_processor_no_attr_access),
    path("auth_processor_attr_access/", auth_processor_attr_access),
    path("auth_processor_user/", auth_processor_user),
    path("auth_processor_perms/", auth_processor_perms),
    path("auth_processor_perm_in_perms/", auth_processor_perm_in_perms),
    path("auth_processor_messages/", auth_processor_messages),
    path(
        "custom_request_auth_login/",
        views.LoginView.as_view(authentication_form=CustomRequestAuthenticationForm),
    ),
    re_path("^userpage/(.+)/$", userpage, name="userpage"),
    # Login variants.
    path("login/redirect_authenticated_user_default/", views.LoginView.as_view()),
    path(
        "login/redirect_authenticated_user/",
        views.LoginView.as_view(redirect_authenticated_user=True),
    ),
    path(
        "login/allowed_hosts/",
        views.LoginView.as_view(success_url_allowed_hosts={"otherserver"}),
    ),
    path(
        "login/get_default_redirect_url/", CustomDefaultRedirectURLLoginView.as_view()
    ),
    path("login/next_page/", views.LoginView.as_view(next_page="/somewhere/")),
    path("login/next_page/named/", views.LoginView.as_view(next_page="password_reset")),
    # Permission-required test views.
    path("permission_required_redirect/", permission_required_redirect),
    path("permission_required_exception/", permission_required_exception),
    path(
        "login_and_permission_required_exception/",
        login_and_permission_required_exception,
    ),
    path("setlang/", set_language, name="set_language"),
    # This line is only required to render the password reset with is_admin=True
    path("admin/", admin.site.urls),
]
|
131ca8e699f06a843b17e0e28c23c4c7ed811b339ed24aa31a9daf1929cea115 | from django.contrib.auth.checks import check_models_permissions, check_user_model
from django.contrib.auth.models import AbstractBaseUser
from django.core import checks
from django.db import models
from django.db.models import Q, UniqueConstraint
from django.test import SimpleTestCase, override_settings, override_system_checks
from django.test.utils import isolate_apps
from .models import CustomUserNonUniqueUsername
@isolate_apps("auth_tests", attr_name="apps")
@override_system_checks([check_user_model])
class UserModelChecksTests(SimpleTestCase):
    """System checks for custom user models (check_user_model)."""
    @override_settings(AUTH_USER_MODEL="auth_tests.CustomUserNonListRequiredFields")
    def test_required_fields_is_list(self):
        """REQUIRED_FIELDS should be a list."""
        class CustomUserNonListRequiredFields(AbstractBaseUser):
            username = models.CharField(max_length=30, unique=True)
            date_of_birth = models.DateField()
            USERNAME_FIELD = "username"
            REQUIRED_FIELDS = "date_of_birth"
        errors = checks.run_checks(app_configs=self.apps.get_app_configs())
        self.assertEqual(
            errors,
            [
                checks.Error(
                    "'REQUIRED_FIELDS' must be a list or tuple.",
                    obj=CustomUserNonListRequiredFields,
                    id="auth.E001",
                ),
            ],
        )
    @override_settings(AUTH_USER_MODEL="auth_tests.CustomUserBadRequiredFields")
    def test_username_not_in_required_fields(self):
        """USERNAME_FIELD should not appear in REQUIRED_FIELDS."""
        class CustomUserBadRequiredFields(AbstractBaseUser):
            username = models.CharField(max_length=30, unique=True)
            date_of_birth = models.DateField()
            USERNAME_FIELD = "username"
            REQUIRED_FIELDS = ["username", "date_of_birth"]
        errors = checks.run_checks(self.apps.get_app_configs())
        self.assertEqual(
            errors,
            [
                checks.Error(
                    "The field named as the 'USERNAME_FIELD' for a custom user model "
                    "must not be included in 'REQUIRED_FIELDS'.",
                    hint=(
                        "The 'USERNAME_FIELD' is currently set to 'username', you "
                        "should remove 'username' from the 'REQUIRED_FIELDS'."
                    ),
                    obj=CustomUserBadRequiredFields,
                    id="auth.E002",
                ),
            ],
        )
    @override_settings(AUTH_USER_MODEL="auth_tests.CustomUserNonUniqueUsername")
    def test_username_non_unique(self):
        """
        A non-unique USERNAME_FIELD raises an error only if the default
        authentication backend is used. Otherwise, a warning is raised.
        """
        errors = checks.run_checks()
        self.assertEqual(
            errors,
            [
                checks.Error(
                    "'CustomUserNonUniqueUsername.username' must be "
                    "unique because it is named as the 'USERNAME_FIELD'.",
                    obj=CustomUserNonUniqueUsername,
                    id="auth.E003",
                ),
            ],
        )
        # With a non-default backend, the error downgrades to a warning.
        with self.settings(AUTHENTICATION_BACKENDS=["my.custom.backend"]):
            errors = checks.run_checks()
            self.assertEqual(
                errors,
                [
                    checks.Warning(
                        "'CustomUserNonUniqueUsername.username' is named as "
                        "the 'USERNAME_FIELD', but it is not unique.",
                        hint=(
                            "Ensure that your authentication backend(s) can handle "
                            "non-unique usernames."
                        ),
                        obj=CustomUserNonUniqueUsername,
                        id="auth.W004",
                    ),
                ],
            )
    @override_settings(AUTH_USER_MODEL="auth_tests.CustomUserPartiallyUnique")
    def test_username_partially_unique(self):
        # A conditional UniqueConstraint does not satisfy the uniqueness check.
        class CustomUserPartiallyUnique(AbstractBaseUser):
            username = models.CharField(max_length=30)
            USERNAME_FIELD = "username"
            class Meta:
                constraints = [
                    UniqueConstraint(
                        fields=["username"],
                        name="partial_username_unique",
                        condition=Q(password__isnull=False),
                    ),
                ]
        errors = checks.run_checks(app_configs=self.apps.get_app_configs())
        self.assertEqual(
            errors,
            [
                checks.Error(
                    "'CustomUserPartiallyUnique.username' must be unique because "
                    "it is named as the 'USERNAME_FIELD'.",
                    obj=CustomUserPartiallyUnique,
                    id="auth.E003",
                ),
            ],
        )
        with self.settings(AUTHENTICATION_BACKENDS=["my.custom.backend"]):
            errors = checks.run_checks(app_configs=self.apps.get_app_configs())
            self.assertEqual(
                errors,
                [
                    checks.Warning(
                        "'CustomUserPartiallyUnique.username' is named as the "
                        "'USERNAME_FIELD', but it is not unique.",
                        hint=(
                            "Ensure that your authentication backend(s) can "
                            "handle non-unique usernames."
                        ),
                        obj=CustomUserPartiallyUnique,
                        id="auth.W004",
                    ),
                ],
            )
    @override_settings(AUTH_USER_MODEL="auth_tests.CustomUserUniqueConstraint")
    def test_username_unique_with_model_constraint(self):
        # An unconditional UniqueConstraint satisfies the uniqueness check.
        class CustomUserUniqueConstraint(AbstractBaseUser):
            username = models.CharField(max_length=30)
            USERNAME_FIELD = "username"
            class Meta:
                constraints = [
                    UniqueConstraint(fields=["username"], name="username_unique"),
                ]
        self.assertEqual(checks.run_checks(app_configs=self.apps.get_app_configs()), [])
        with self.settings(AUTHENTICATION_BACKENDS=["my.custom.backend"]):
            errors = checks.run_checks(app_configs=self.apps.get_app_configs())
            self.assertEqual(errors, [])
    @override_settings(AUTH_USER_MODEL="auth_tests.BadUser")
    def test_is_anonymous_authenticated_methods(self):
        """
        <User Model>.is_anonymous/is_authenticated must not be methods.
        """
        class BadUser(AbstractBaseUser):
            username = models.CharField(max_length=30, unique=True)
            USERNAME_FIELD = "username"
            def is_anonymous(self):
                return True
            def is_authenticated(self):
                return True
        errors = checks.run_checks(app_configs=self.apps.get_app_configs())
        self.assertEqual(
            errors,
            [
                checks.Critical(
                    "%s.is_anonymous must be an attribute or property rather than "
                    "a method. Ignoring this is a security issue as anonymous "
                    "users will be treated as authenticated!" % BadUser,
                    obj=BadUser,
                    id="auth.C009",
                ),
                checks.Critical(
                    "%s.is_authenticated must be an attribute or property rather "
                    "than a method. Ignoring this is a security issue as anonymous "
                    "users will be treated as authenticated!" % BadUser,
                    obj=BadUser,
                    id="auth.C010",
                ),
            ],
        )
@isolate_apps("auth_tests", attr_name="apps")
@override_system_checks([check_models_permissions])
class ModelsPermissionsChecksTests(SimpleTestCase):
    """System checks for model Meta.permissions (check_models_permissions)."""
    def test_clashing_default_permissions(self):
        """A custom codename must not clash with a builtin permission."""
        class Checked(models.Model):
            class Meta:
                permissions = [("change_checked", "Can edit permission (duplicate)")]
        errors = checks.run_checks(self.apps.get_app_configs())
        self.assertEqual(
            errors,
            [
                checks.Error(
                    "The permission codenamed 'change_checked' clashes with a builtin "
                    "permission for model 'auth_tests.Checked'.",
                    obj=Checked,
                    id="auth.E005",
                ),
            ],
        )
    def test_non_clashing_custom_permissions(self):
        """Distinct custom codenames raise no errors."""
        class Checked(models.Model):
            class Meta:
                permissions = [
                    ("my_custom_permission", "Some permission"),
                    ("other_one", "Some other permission"),
                ]
        errors = checks.run_checks(self.apps.get_app_configs())
        self.assertEqual(errors, [])
    def test_clashing_custom_permissions(self):
        """A duplicated custom codename is reported as auth.E006."""
        class Checked(models.Model):
            class Meta:
                permissions = [
                    ("my_custom_permission", "Some permission"),
                    ("other_one", "Some other permission"),
                    (
                        "my_custom_permission",
                        "Some permission with duplicate permission code",
                    ),
                ]
        errors = checks.run_checks(self.apps.get_app_configs())
        self.assertEqual(
            errors,
            [
                checks.Error(
                    "The permission codenamed 'my_custom_permission' is duplicated for "
                    "model 'auth_tests.Checked'.",
                    obj=Checked,
                    id="auth.E006",
                ),
            ],
        )
    def test_verbose_name_max_length(self):
        """An overlong verbose_name is reported as auth.E007."""
        class Checked(models.Model):
            class Meta:
                verbose_name = (
                    "some ridiculously long verbose name that is out of control" * 5
                )
        errors = checks.run_checks(self.apps.get_app_configs())
        self.assertEqual(
            errors,
            [
                checks.Error(
                    "The verbose_name of model 'auth_tests.Checked' must be at most "
                    "244 characters for its builtin permission names to be at most 255 "
                    "characters.",
                    obj=Checked,
                    id="auth.E007",
                ),
            ],
        )
    def test_model_name_max_length(self):
        # The model is built dynamically so its name can be exactly 94 chars.
        model_name = "X" * 94
        model = type(model_name, (models.Model,), {"__module__": self.__module__})
        errors = checks.run_checks(self.apps.get_app_configs())
        self.assertEqual(
            errors,
            [
                checks.Error(
                    "The name of model 'auth_tests.%s' must be at most 93 "
                    "characters for its builtin permission codenames to be at "
                    "most 100 characters." % model_name,
                    obj=model,
                    id="auth.E011",
                ),
            ],
        )
    def test_custom_permission_name_max_length(self):
        """A custom permission name longer than 255 chars is auth.E008."""
        custom_permission_name = (
            "some ridiculously long verbose name that is out of control" * 5
        )
        class Checked(models.Model):
            class Meta:
                permissions = [
                    ("my_custom_permission", custom_permission_name),
                ]
        errors = checks.run_checks(self.apps.get_app_configs())
        self.assertEqual(
            errors,
            [
                checks.Error(
                    "The permission named '%s' of model 'auth_tests.Checked' is longer "
                    "than 255 characters." % custom_permission_name,
                    obj=Checked,
                    id="auth.E008",
                ),
            ],
        )
    def test_custom_permission_codename_max_length(self):
        """A custom codename longer than 100 chars is auth.E012."""
        custom_permission_codename = "x" * 101
        class Checked(models.Model):
            class Meta:
                permissions = [
                    (custom_permission_codename, "Custom permission"),
                ]
        errors = checks.run_checks(self.apps.get_app_configs())
        self.assertEqual(
            errors,
            [
                checks.Error(
                    "The permission codenamed '%s' of model 'auth_tests.Checked' "
                    "is longer than 100 characters." % custom_permission_codename,
                    obj=Checked,
                    id="auth.E012",
                ),
            ],
        )
    def test_empty_default_permissions(self):
        """An empty default_permissions raises no check errors."""
        class Checked(models.Model):
            class Meta:
                default_permissions = ()
        self.assertEqual(checks.run_checks(self.apps.get_app_configs()), [])
|
07ca15661b9a07436c7ec8d5fc79f0077b517d14b454a682279e6936831d660f | import os
from django.contrib.auth import validators
from django.contrib.auth.models import User
from django.contrib.auth.password_validation import (
CommonPasswordValidator,
MinimumLengthValidator,
NumericPasswordValidator,
UserAttributeSimilarityValidator,
get_default_password_validators,
get_password_validators,
password_changed,
password_validators_help_text_html,
password_validators_help_texts,
validate_password,
)
from django.core.exceptions import ValidationError
from django.db import models
from django.test import SimpleTestCase, TestCase, override_settings
from django.test.utils import isolate_apps
from django.utils.html import conditional_escape
@override_settings(
    AUTH_PASSWORD_VALIDATORS=[
        {"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"},
        {
            "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
            "OPTIONS": {
                "min_length": 12,
            },
        },
    ]
)
class PasswordValidationTest(SimpleTestCase):
    """Tests for the module-level password validation helper functions."""
    def test_get_default_password_validators(self):
        """Validators are built from AUTH_PASSWORD_VALIDATORS with OPTIONS."""
        validators = get_default_password_validators()
        self.assertEqual(len(validators), 2)
        self.assertEqual(validators[0].__class__.__name__, "CommonPasswordValidator")
        self.assertEqual(validators[1].__class__.__name__, "MinimumLengthValidator")
        self.assertEqual(validators[1].min_length, 12)
    def test_get_password_validators_custom(self):
        validator_config = [
            {"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"}
        ]
        validators = get_password_validators(validator_config)
        self.assertEqual(len(validators), 1)
        self.assertEqual(validators[0].__class__.__name__, "CommonPasswordValidator")
        # An empty config yields no validators.
        self.assertEqual(get_password_validators([]), [])
    def test_validate_password(self):
        self.assertIsNone(validate_password("sufficiently-long"))
        msg_too_short = (
            "This password is too short. It must contain at least 12 characters."
        )
        with self.assertRaises(ValidationError) as cm:
            validate_password("django4242")
        self.assertEqual(cm.exception.messages, [msg_too_short])
        self.assertEqual(cm.exception.error_list[0].code, "password_too_short")
        # Multiple failing validators aggregate their messages.
        with self.assertRaises(ValidationError) as cm:
            validate_password("password")
        self.assertEqual(
            cm.exception.messages, ["This password is too common.", msg_too_short]
        )
        self.assertEqual(cm.exception.error_list[0].code, "password_too_common")
        # An explicit empty validator list skips validation entirely.
        self.assertIsNone(validate_password("password", password_validators=[]))
    def test_password_changed(self):
        self.assertIsNone(password_changed("password"))
    def test_password_changed_with_custom_validator(self):
        """password_changed() forwards password and user to each validator."""
        class Validator:
            def password_changed(self, password, user):
                self.password = password
                self.user = user
        user = object()
        validator = Validator()
        password_changed("password", user=user, password_validators=(validator,))
        self.assertIs(validator.user, user)
        self.assertEqual(validator.password, "password")
    def test_password_validators_help_texts(self):
        help_texts = password_validators_help_texts()
        self.assertEqual(len(help_texts), 2)
        self.assertIn("12 characters", help_texts[1])
        self.assertEqual(password_validators_help_texts(password_validators=[]), [])
    def test_password_validators_help_text_html(self):
        help_text = password_validators_help_text_html()
        self.assertEqual(help_text.count("<li>"), 2)
        self.assertIn("12 characters", help_text)
    def test_password_validators_help_text_html_escaping(self):
        """Validator help text is HTML-escaped in the combined output."""
        class AmpersandValidator:
            def get_help_text(self):
                return "Must contain &"
        help_text = password_validators_help_text_html([AmpersandValidator()])
        self.assertEqual(help_text, "<ul><li>Must contain &amp;</li></ul>")
        # help_text is marked safe and therefore unchanged by conditional_escape().
        self.assertEqual(help_text, conditional_escape(help_text))
    @override_settings(AUTH_PASSWORD_VALIDATORS=[])
    def test_empty_password_validator_help_text_html(self):
        self.assertEqual(password_validators_help_text_html(), "")
class MinimumLengthValidatorTest(SimpleTestCase):
    """Tests for MinimumLengthValidator.validate()/get_help_text()."""
    def test_validate(self):
        expected_error = (
            "This password is too short. It must contain at least %d characters."
        )
        # Default minimum is 8; an explicit min_length is honored.
        self.assertIsNone(MinimumLengthValidator().validate("12345678"))
        self.assertIsNone(MinimumLengthValidator(min_length=3).validate("123"))
        with self.assertRaises(ValidationError) as cm:
            MinimumLengthValidator().validate("1234567")
        self.assertEqual(cm.exception.messages, [expected_error % 8])
        self.assertEqual(cm.exception.error_list[0].code, "password_too_short")
        with self.assertRaises(ValidationError) as cm:
            MinimumLengthValidator(min_length=3).validate("12")
        self.assertEqual(cm.exception.messages, [expected_error % 3])
    def test_help_text(self):
        self.assertEqual(
            MinimumLengthValidator().get_help_text(),
            "Your password must contain at least 8 characters.",
        )
class UserAttributeSimilarityValidatorTest(TestCase):
    """Tests for UserAttributeSimilarityValidator."""

    def test_validate(self):
        user = User.objects.create_user(
            username="testclient",
            password="password",
            email="[email protected]",
            first_name="Test",
            last_name="Client",
        )
        expected_error = "The password is too similar to the %s."
        # Without a user there is nothing to compare against.
        self.assertIsNone(UserAttributeSimilarityValidator().validate("testclient"))
        # NOTE: the stray trailing commas previously on these .validate()
        # calls wrapped each call in a throwaway 1-tuple; removed.
        with self.assertRaises(ValidationError) as cm:
            UserAttributeSimilarityValidator().validate("testclient", user=user)
        self.assertEqual(cm.exception.messages, [expected_error % "username"])
        self.assertEqual(cm.exception.error_list[0].code, "password_too_similar")
        with self.assertRaises(ValidationError) as cm:
            UserAttributeSimilarityValidator().validate("example.com", user=user)
        self.assertEqual(cm.exception.messages, [expected_error % "email address"])
        with self.assertRaises(ValidationError) as cm:
            UserAttributeSimilarityValidator(
                user_attributes=["first_name"],
                max_similarity=0.3,
            ).validate("testclient", user=user)
        self.assertEqual(cm.exception.messages, [expected_error % "first name"])
        # max_similarity=1 doesn't allow passwords that are identical to the
        # attribute's value.
        with self.assertRaises(ValidationError) as cm:
            UserAttributeSimilarityValidator(
                user_attributes=["first_name"],
                max_similarity=1,
            ).validate(user.first_name, user=user)
        self.assertEqual(cm.exception.messages, [expected_error % "first name"])
        # Very low max_similarity is rejected.
        msg = "max_similarity must be at least 0.1"
        with self.assertRaisesMessage(ValueError, msg):
            UserAttributeSimilarityValidator(max_similarity=0.09)
        # Passes validation.
        self.assertIsNone(
            UserAttributeSimilarityValidator(user_attributes=["first_name"]).validate(
                "testclient", user=user
            )
        )

    @isolate_apps("auth_tests")
    def test_validate_property(self):
        # The validator must read attributes via getattr() so that
        # properties (not just model fields) are compared too.
        class TestUser(models.Model):
            @property
            def username(self):
                return "foobar"

        with self.assertRaises(ValidationError) as cm:
            UserAttributeSimilarityValidator().validate("foobar", user=TestUser())
        self.assertEqual(
            cm.exception.messages, ["The password is too similar to the username."]
        )

    def test_help_text(self):
        self.assertEqual(
            UserAttributeSimilarityValidator().get_help_text(),
            "Your password can’t be too similar to your other personal information.",
        )
class CommonPasswordValidatorTest(SimpleTestCase):
    """Tests for CommonPasswordValidator and its password-list handling."""

    def test_validate(self):
        common_error = "This password is too common."
        self.assertIsNone(CommonPasswordValidator().validate("a-safe-password"))
        with self.assertRaises(ValidationError) as cm:
            CommonPasswordValidator().validate("godzilla")
        self.assertEqual(cm.exception.messages, [common_error])

    def test_validate_custom_list(self):
        # A custom list file next to this test module replaces the
        # Django-supplied common-passwords list.
        custom_list = os.path.join(
            os.path.dirname(os.path.realpath(__file__)), "common-passwords-custom.txt"
        )
        common_error = "This password is too common."
        validator = CommonPasswordValidator(password_list_path=custom_list)
        self.assertIsNone(validator.validate("a-safe-password"))
        with self.assertRaises(ValidationError) as cm:
            validator.validate("from-my-custom-list")
        self.assertEqual(cm.exception.messages, [common_error])
        self.assertEqual(cm.exception.error_list[0].code, "password_too_common")

    def test_validate_django_supplied_file(self):
        # Every entry in the bundled list must already be lowercase.
        for password in CommonPasswordValidator().passwords:
            self.assertEqual(password, password.lower())

    def test_help_text(self):
        self.assertEqual(
            CommonPasswordValidator().get_help_text(),
            "Your password can’t be a commonly used password.",
        )
class NumericPasswordValidatorTest(SimpleTestCase):
    """Tests for NumericPasswordValidator."""

    def test_validate(self):
        numeric_error = "This password is entirely numeric."
        # A password containing non-digits is accepted.
        self.assertIsNone(NumericPasswordValidator().validate("a-safe-password"))
        with self.assertRaises(ValidationError) as cm:
            NumericPasswordValidator().validate("42424242")
        self.assertEqual(cm.exception.messages, [numeric_error])
        self.assertEqual(cm.exception.error_list[0].code, "password_entirely_numeric")

    def test_help_text(self):
        self.assertEqual(
            NumericPasswordValidator().get_help_text(),
            "Your password can’t be entirely numeric.",
        )
class UsernameValidatorsTests(SimpleTestCase):
    """Tests for the Unicode and ASCII username validators."""

    def _check(self, validator, valid_usernames, invalid_usernames):
        # Shared driver: accepted names validate silently, rejected names
        # raise ValidationError. subTest keys match the per-case inputs.
        for valid in valid_usernames:
            with self.subTest(valid=valid):
                validator(valid)
        for invalid in invalid_usernames:
            with self.subTest(invalid=invalid):
                with self.assertRaises(ValidationError):
                    validator(invalid)

    def test_unicode_validator(self):
        self._check(
            validators.UnicodeUsernameValidator(),
            ["joe", "René", "ᴮᴵᴳᴮᴵᴿᴰ", "أحمد"],
            [
                "o'connell",
                "عبد ال",
                "zerowidth\u200Bspace",
                "nonbreaking\u00A0space",
                "en\u2013dash",
                "trailingnewline\u000A",
            ],
        )

    def test_ascii_validator(self):
        self._check(
            validators.ASCIIUsernameValidator(),
            ["glenn", "GLEnN", "jean-marc"],
            [
                "o'connell",
                "Éric",
                "jean marc",
                "أحمد",
                "trailingnewline\n",
            ],
        )
|
a7952c6da10659194c9ebc49711f43369f69dac4242fe6544f623f52bab073e2 | import os
# Middleware stack required by contrib.auth: session support plus the
# middleware that attaches request.user.
AUTH_MIDDLEWARE = [
    "django.contrib.sessions.middleware.SessionMiddleware",
    "django.contrib.auth.middleware.AuthenticationMiddleware",
]
# Template configuration for the auth tests: looks in this package's
# templates/ directory first, then falls back to app template dirs.
AUTH_TEMPLATES = [
    {
        "BACKEND": "django.template.backends.django.DjangoTemplates",
        "DIRS": [os.path.join(os.path.dirname(__file__), "templates")],
        "APP_DIRS": True,
        "OPTIONS": {
            "context_processors": [
                "django.template.context_processors.request",
                "django.contrib.auth.context_processors.auth",
                "django.contrib.messages.context_processors.messages",
            ],
        },
    }
]
|
dcef2c7df464e90bfc4847d2fb4b8c8172dcabe5c6bb8276a8cac03ad71de138 | import datetime
import re
from unittest import mock
from django.contrib.auth.forms import (
AdminPasswordChangeForm,
AuthenticationForm,
PasswordChangeForm,
PasswordResetForm,
ReadOnlyPasswordHashField,
ReadOnlyPasswordHashWidget,
SetPasswordForm,
UserChangeForm,
UserCreationForm,
)
from django.contrib.auth.models import User
from django.contrib.auth.signals import user_login_failed
from django.contrib.sites.models import Site
from django.core import mail
from django.core.exceptions import ValidationError
from django.core.mail import EmailMultiAlternatives
from django.forms import forms
from django.forms.fields import CharField, Field, IntegerField
from django.test import SimpleTestCase, TestCase, override_settings
from django.utils import translation
from django.utils.text import capfirst
from django.utils.translation import gettext as _
from .models.custom_user import (
CustomUser,
CustomUserWithoutIsActiveField,
ExtensionUser,
)
from .models.with_custom_email_field import CustomEmailField
from .models.with_integer_username import IntegerUsernameUser
from .settings import AUTH_TEMPLATES
class TestDataMixin:
    """Mixin providing a shared set of users covering common password states."""

    @classmethod
    def setUpTestData(cls):
        # Regular active user with a usable password.
        cls.u1 = User.objects.create_user(
            username="testclient", password="password", email="[email protected]"
        )
        # Inactive user (for login-rejection tests).
        cls.u2 = User.objects.create_user(
            username="inactive", password="password", is_active=False
        )
        cls.u3 = User.objects.create_user(username="staff", password="password")
        # Users with degenerate password hashes, created via create() so the
        # raw value is stored as-is (create_user() would hash it).
        cls.u4 = User.objects.create(username="empty_password", password="")
        cls.u5 = User.objects.create(username="unmanageable_password", password="$")
        cls.u6 = User.objects.create(username="unknown_password", password="foo$bar")
class UserCreationFormTest(TestDataMixin, TestCase):
def test_user_already_exists(self):
data = {
"username": "testclient",
"password1": "test123",
"password2": "test123",
}
form = UserCreationForm(data)
self.assertFalse(form.is_valid())
self.assertEqual(
form["username"].errors,
[str(User._meta.get_field("username").error_messages["unique"])],
)
def test_invalid_data(self):
data = {
"username": "jsmith!",
"password1": "test123",
"password2": "test123",
}
form = UserCreationForm(data)
self.assertFalse(form.is_valid())
validator = next(
v
for v in User._meta.get_field("username").validators
if v.code == "invalid"
)
self.assertEqual(form["username"].errors, [str(validator.message)])
def test_password_verification(self):
# The verification password is incorrect.
data = {
"username": "jsmith",
"password1": "test123",
"password2": "test",
}
form = UserCreationForm(data)
self.assertFalse(form.is_valid())
self.assertEqual(
form["password2"].errors, [str(form.error_messages["password_mismatch"])]
)
def test_both_passwords(self):
# One (or both) passwords weren't given
data = {"username": "jsmith"}
form = UserCreationForm(data)
required_error = [str(Field.default_error_messages["required"])]
self.assertFalse(form.is_valid())
self.assertEqual(form["password1"].errors, required_error)
self.assertEqual(form["password2"].errors, required_error)
data["password2"] = "test123"
form = UserCreationForm(data)
self.assertFalse(form.is_valid())
self.assertEqual(form["password1"].errors, required_error)
self.assertEqual(form["password2"].errors, [])
@mock.patch("django.contrib.auth.password_validation.password_changed")
def test_success(self, password_changed):
# The success case.
data = {
"username": "[email protected]",
"password1": "test123",
"password2": "test123",
}
form = UserCreationForm(data)
self.assertTrue(form.is_valid())
form.save(commit=False)
self.assertEqual(password_changed.call_count, 0)
u = form.save()
self.assertEqual(password_changed.call_count, 1)
self.assertEqual(repr(u), "<User: [email protected]>")
def test_unicode_username(self):
data = {
"username": "宝",
"password1": "test123",
"password2": "test123",
}
form = UserCreationForm(data)
self.assertTrue(form.is_valid())
u = form.save()
self.assertEqual(u.username, "宝")
def test_normalize_username(self):
# The normalization happens in AbstractBaseUser.clean() and ModelForm
# validation calls Model.clean().
ohm_username = "testΩ" # U+2126 OHM SIGN
data = {
"username": ohm_username,
"password1": "pwd2",
"password2": "pwd2",
}
form = UserCreationForm(data)
self.assertTrue(form.is_valid())
user = form.save()
self.assertNotEqual(user.username, ohm_username)
self.assertEqual(user.username, "testΩ") # U+03A9 GREEK CAPITAL LETTER OMEGA
    def test_duplicate_normalized_unicode(self):
        """
        To prevent almost identical usernames, visually identical but
        differing only in their Unicode code points, NFKC normalization
        should make them appear equal to Django.
        """
        omega_username = "iamtheΩ"  # U+03A9 GREEK CAPITAL LETTER OMEGA
        ohm_username = "iamtheΩ"  # U+2126 OHM SIGN
        self.assertNotEqual(omega_username, ohm_username)
        User.objects.create_user(username=omega_username, password="pwd")
        data = {
            "username": ohm_username,
            "password1": "pwd2",
            "password2": "pwd2",
        }
        form = UserCreationForm(data)
        self.assertFalse(form.is_valid())
        self.assertEqual(
            form.errors["username"], ["A user with that username already exists."]
        )
@override_settings(
AUTH_PASSWORD_VALIDATORS=[
{
"NAME": (
"django.contrib.auth.password_validation."
"UserAttributeSimilarityValidator"
)
},
{
"NAME": (
"django.contrib.auth.password_validation.MinimumLengthValidator"
),
"OPTIONS": {
"min_length": 12,
},
},
]
)
def test_validates_password(self):
data = {
"username": "testclient",
"password1": "testclient",
"password2": "testclient",
}
form = UserCreationForm(data)
self.assertFalse(form.is_valid())
self.assertEqual(len(form["password2"].errors), 2)
self.assertIn(
"The password is too similar to the username.", form["password2"].errors
)
self.assertIn(
"This password is too short. It must contain at least 12 characters.",
form["password2"].errors,
)
def test_custom_form(self):
class CustomUserCreationForm(UserCreationForm):
class Meta(UserCreationForm.Meta):
model = ExtensionUser
fields = UserCreationForm.Meta.fields + ("date_of_birth",)
data = {
"username": "testclient",
"password1": "testclient",
"password2": "testclient",
"date_of_birth": "1988-02-24",
}
form = CustomUserCreationForm(data)
self.assertTrue(form.is_valid())
def test_custom_form_with_different_username_field(self):
class CustomUserCreationForm(UserCreationForm):
class Meta(UserCreationForm.Meta):
model = CustomUser
fields = ("email", "date_of_birth")
data = {
"email": "[email protected]",
"password1": "testclient",
"password2": "testclient",
"date_of_birth": "1988-02-24",
}
form = CustomUserCreationForm(data)
self.assertTrue(form.is_valid())
def test_custom_form_hidden_username_field(self):
class CustomUserCreationForm(UserCreationForm):
class Meta(UserCreationForm.Meta):
model = CustomUserWithoutIsActiveField
fields = ("email",) # without USERNAME_FIELD
data = {
"email": "[email protected]",
"password1": "testclient",
"password2": "testclient",
}
form = CustomUserCreationForm(data)
self.assertTrue(form.is_valid())
def test_password_whitespace_not_stripped(self):
data = {
"username": "testuser",
"password1": " testpassword ",
"password2": " testpassword ",
}
form = UserCreationForm(data)
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data["password1"], data["password1"])
self.assertEqual(form.cleaned_data["password2"], data["password2"])
@override_settings(
AUTH_PASSWORD_VALIDATORS=[
{
"NAME": (
"django.contrib.auth.password_validation."
"UserAttributeSimilarityValidator"
)
},
]
)
def test_password_help_text(self):
form = UserCreationForm()
self.assertEqual(
form.fields["password1"].help_text,
"<ul><li>"
"Your password can’t be too similar to your other personal information."
"</li></ul>",
)
@override_settings(
AUTH_PASSWORD_VALIDATORS=[
{
"NAME": (
"django.contrib.auth.password_validation."
"UserAttributeSimilarityValidator"
)
},
]
)
def test_user_create_form_validates_password_with_all_data(self):
"""UserCreationForm password validation uses all of the form's data."""
class CustomUserCreationForm(UserCreationForm):
class Meta(UserCreationForm.Meta):
model = User
fields = ("username", "email", "first_name", "last_name")
form = CustomUserCreationForm(
{
"username": "testuser",
"password1": "testpassword",
"password2": "testpassword",
"first_name": "testpassword",
"last_name": "lastname",
}
)
self.assertFalse(form.is_valid())
self.assertEqual(
form.errors["password2"],
["The password is too similar to the first name."],
)
def test_username_field_autocapitalize_none(self):
form = UserCreationForm()
self.assertEqual(
form.fields["username"].widget.attrs.get("autocapitalize"), "none"
)
def test_html_autocomplete_attributes(self):
form = UserCreationForm()
tests = (
("username", "username"),
("password1", "new-password"),
("password2", "new-password"),
)
for field_name, autocomplete in tests:
with self.subTest(field_name=field_name, autocomplete=autocomplete):
self.assertEqual(
form.fields[field_name].widget.attrs["autocomplete"], autocomplete
)
# To verify that the login form rejects inactive users, use an authentication
# backend that allows them.
@override_settings(
AUTHENTICATION_BACKENDS=["django.contrib.auth.backends.AllowAllUsersModelBackend"]
)
class AuthenticationFormTest(TestDataMixin, TestCase):
def test_invalid_username(self):
# The user submits an invalid username.
data = {
"username": "jsmith_does_not_exist",
"password": "test123",
}
form = AuthenticationForm(None, data)
self.assertFalse(form.is_valid())
self.assertEqual(
form.non_field_errors(),
[
form.error_messages["invalid_login"]
% {"username": User._meta.get_field("username").verbose_name}
],
)
def test_inactive_user(self):
# The user is inactive.
data = {
"username": "inactive",
"password": "password",
}
form = AuthenticationForm(None, data)
self.assertFalse(form.is_valid())
self.assertEqual(
form.non_field_errors(), [str(form.error_messages["inactive"])]
)
# Use an authentication backend that rejects inactive users.
@override_settings(
AUTHENTICATION_BACKENDS=["django.contrib.auth.backends.ModelBackend"]
)
def test_inactive_user_incorrect_password(self):
"""An invalid login doesn't leak the inactive status of a user."""
data = {
"username": "inactive",
"password": "incorrect",
}
form = AuthenticationForm(None, data)
self.assertFalse(form.is_valid())
self.assertEqual(
form.non_field_errors(),
[
form.error_messages["invalid_login"]
% {"username": User._meta.get_field("username").verbose_name}
],
)
def test_login_failed(self):
signal_calls = []
def signal_handler(**kwargs):
signal_calls.append(kwargs)
user_login_failed.connect(signal_handler)
fake_request = object()
try:
form = AuthenticationForm(
fake_request,
{
"username": "testclient",
"password": "incorrect",
},
)
self.assertFalse(form.is_valid())
self.assertIs(signal_calls[0]["request"], fake_request)
finally:
user_login_failed.disconnect(signal_handler)
def test_inactive_user_i18n(self):
with self.settings(USE_I18N=True), translation.override(
"pt-br", deactivate=True
):
# The user is inactive.
data = {
"username": "inactive",
"password": "password",
}
form = AuthenticationForm(None, data)
self.assertFalse(form.is_valid())
self.assertEqual(
form.non_field_errors(), [str(form.error_messages["inactive"])]
)
# Use an authentication backend that allows inactive users.
@override_settings(
AUTHENTICATION_BACKENDS=[
"django.contrib.auth.backends.AllowAllUsersModelBackend"
]
)
def test_custom_login_allowed_policy(self):
# The user is inactive, but our custom form policy allows them to log in.
data = {
"username": "inactive",
"password": "password",
}
class AuthenticationFormWithInactiveUsersOkay(AuthenticationForm):
def confirm_login_allowed(self, user):
pass
form = AuthenticationFormWithInactiveUsersOkay(None, data)
self.assertTrue(form.is_valid())
# Raise a ValidationError in the form to disallow some logins according
# to custom logic.
class PickyAuthenticationForm(AuthenticationForm):
def confirm_login_allowed(self, user):
if user.username == "inactive":
raise ValidationError("This user is disallowed.")
raise ValidationError("Sorry, nobody's allowed in.")
form = PickyAuthenticationForm(None, data)
self.assertFalse(form.is_valid())
self.assertEqual(form.non_field_errors(), ["This user is disallowed."])
data = {
"username": "testclient",
"password": "password",
}
form = PickyAuthenticationForm(None, data)
self.assertFalse(form.is_valid())
self.assertEqual(form.non_field_errors(), ["Sorry, nobody's allowed in."])
def test_success(self):
# The success case
data = {
"username": "testclient",
"password": "password",
}
form = AuthenticationForm(None, data)
self.assertTrue(form.is_valid())
self.assertEqual(form.non_field_errors(), [])
def test_unicode_username(self):
User.objects.create_user(username="Σαρα", password="pwd")
data = {
"username": "Σαρα",
"password": "pwd",
}
form = AuthenticationForm(None, data)
self.assertTrue(form.is_valid())
self.assertEqual(form.non_field_errors(), [])
@override_settings(AUTH_USER_MODEL="auth_tests.CustomEmailField")
def test_username_field_max_length_matches_user_model(self):
self.assertEqual(CustomEmailField._meta.get_field("username").max_length, 255)
data = {
"username": "u" * 255,
"password": "pwd",
"email": "[email protected]",
}
CustomEmailField.objects.create_user(**data)
form = AuthenticationForm(None, data)
self.assertEqual(form.fields["username"].max_length, 255)
self.assertEqual(form.fields["username"].widget.attrs.get("maxlength"), 255)
self.assertEqual(form.errors, {})
@override_settings(AUTH_USER_MODEL="auth_tests.IntegerUsernameUser")
def test_username_field_max_length_defaults_to_254(self):
self.assertIsNone(IntegerUsernameUser._meta.get_field("username").max_length)
data = {
"username": "0123456",
"password": "password",
}
IntegerUsernameUser.objects.create_user(**data)
form = AuthenticationForm(None, data)
self.assertEqual(form.fields["username"].max_length, 254)
self.assertEqual(form.fields["username"].widget.attrs.get("maxlength"), 254)
self.assertEqual(form.errors, {})
def test_username_field_label(self):
class CustomAuthenticationForm(AuthenticationForm):
username = CharField(label="Name", max_length=75)
form = CustomAuthenticationForm()
self.assertEqual(form["username"].label, "Name")
def test_username_field_label_not_set(self):
class CustomAuthenticationForm(AuthenticationForm):
username = CharField()
form = CustomAuthenticationForm()
username_field = User._meta.get_field(User.USERNAME_FIELD)
self.assertEqual(
form.fields["username"].label, capfirst(username_field.verbose_name)
)
def test_username_field_autocapitalize_none(self):
form = AuthenticationForm()
self.assertEqual(
form.fields["username"].widget.attrs.get("autocapitalize"), "none"
)
def test_username_field_label_empty_string(self):
class CustomAuthenticationForm(AuthenticationForm):
username = CharField(label="")
form = CustomAuthenticationForm()
self.assertEqual(form.fields["username"].label, "")
    def test_password_whitespace_not_stripped(self):
        data = {
            "username": "testuser",
            "password": " pass ",
        }
        form = AuthenticationForm(None, data)
        form.is_valid()  # Valid credentials aren't necessary for this test.
        self.assertEqual(form.cleaned_data["password"], data["password"])
@override_settings(AUTH_USER_MODEL="auth_tests.IntegerUsernameUser")
def test_integer_username(self):
class CustomAuthenticationForm(AuthenticationForm):
username = IntegerField()
user = IntegerUsernameUser.objects.create_user(username=0, password="pwd")
data = {
"username": 0,
"password": "pwd",
}
form = CustomAuthenticationForm(None, data)
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data["username"], data["username"])
self.assertEqual(form.cleaned_data["password"], data["password"])
self.assertEqual(form.errors, {})
self.assertEqual(form.user_cache, user)
def test_get_invalid_login_error(self):
error = AuthenticationForm().get_invalid_login_error()
self.assertIsInstance(error, ValidationError)
self.assertEqual(
error.message,
"Please enter a correct %(username)s and password. Note that both "
"fields may be case-sensitive.",
)
self.assertEqual(error.code, "invalid_login")
self.assertEqual(error.params, {"username": "username"})
def test_html_autocomplete_attributes(self):
form = AuthenticationForm()
tests = (
("username", "username"),
("password", "current-password"),
)
for field_name, autocomplete in tests:
with self.subTest(field_name=field_name, autocomplete=autocomplete):
self.assertEqual(
form.fields[field_name].widget.attrs["autocomplete"], autocomplete
)
class SetPasswordFormTest(TestDataMixin, TestCase):
def test_password_verification(self):
# The two new passwords do not match.
user = User.objects.get(username="testclient")
data = {
"new_password1": "abc123",
"new_password2": "abc",
}
form = SetPasswordForm(user, data)
self.assertFalse(form.is_valid())
self.assertEqual(
form["new_password2"].errors,
[str(form.error_messages["password_mismatch"])],
)
@mock.patch("django.contrib.auth.password_validation.password_changed")
def test_success(self, password_changed):
user = User.objects.get(username="testclient")
data = {
"new_password1": "abc123",
"new_password2": "abc123",
}
form = SetPasswordForm(user, data)
self.assertTrue(form.is_valid())
form.save(commit=False)
self.assertEqual(password_changed.call_count, 0)
form.save()
self.assertEqual(password_changed.call_count, 1)
@override_settings(
AUTH_PASSWORD_VALIDATORS=[
{
"NAME": (
"django.contrib.auth.password_validation."
"UserAttributeSimilarityValidator"
)
},
{
"NAME": (
"django.contrib.auth.password_validation.MinimumLengthValidator"
),
"OPTIONS": {
"min_length": 12,
},
},
]
)
def test_validates_password(self):
user = User.objects.get(username="testclient")
data = {
"new_password1": "testclient",
"new_password2": "testclient",
}
form = SetPasswordForm(user, data)
self.assertFalse(form.is_valid())
self.assertEqual(len(form["new_password2"].errors), 2)
self.assertIn(
"The password is too similar to the username.", form["new_password2"].errors
)
self.assertIn(
"This password is too short. It must contain at least 12 characters.",
form["new_password2"].errors,
)
def test_password_whitespace_not_stripped(self):
user = User.objects.get(username="testclient")
data = {
"new_password1": " password ",
"new_password2": " password ",
}
form = SetPasswordForm(user, data)
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data["new_password1"], data["new_password1"])
self.assertEqual(form.cleaned_data["new_password2"], data["new_password2"])
@override_settings(
AUTH_PASSWORD_VALIDATORS=[
{
"NAME": (
"django.contrib.auth.password_validation."
"UserAttributeSimilarityValidator"
)
},
{
"NAME": (
"django.contrib.auth.password_validation.MinimumLengthValidator"
),
"OPTIONS": {
"min_length": 12,
},
},
]
)
def test_help_text_translation(self):
french_help_texts = [
"Votre mot de passe ne peut pas trop ressembler à vos autres informations "
"personnelles.",
"Votre mot de passe doit contenir au minimum 12 caractères.",
]
form = SetPasswordForm(self.u1)
with translation.override("fr"):
html = form.as_p()
for french_text in french_help_texts:
self.assertIn(french_text, html)
def test_html_autocomplete_attributes(self):
form = SetPasswordForm(self.u1)
tests = (
("new_password1", "new-password"),
("new_password2", "new-password"),
)
for field_name, autocomplete in tests:
with self.subTest(field_name=field_name, autocomplete=autocomplete):
self.assertEqual(
form.fields[field_name].widget.attrs["autocomplete"], autocomplete
)
class PasswordChangeFormTest(TestDataMixin, TestCase):
def test_incorrect_password(self):
user = User.objects.get(username="testclient")
data = {
"old_password": "test",
"new_password1": "abc123",
"new_password2": "abc123",
}
form = PasswordChangeForm(user, data)
self.assertFalse(form.is_valid())
self.assertEqual(
form["old_password"].errors,
[str(form.error_messages["password_incorrect"])],
)
def test_password_verification(self):
# The two new passwords do not match.
user = User.objects.get(username="testclient")
data = {
"old_password": "password",
"new_password1": "abc123",
"new_password2": "abc",
}
form = PasswordChangeForm(user, data)
self.assertFalse(form.is_valid())
self.assertEqual(
form["new_password2"].errors,
[str(form.error_messages["password_mismatch"])],
)
@mock.patch("django.contrib.auth.password_validation.password_changed")
def test_success(self, password_changed):
# The success case.
user = User.objects.get(username="testclient")
data = {
"old_password": "password",
"new_password1": "abc123",
"new_password2": "abc123",
}
form = PasswordChangeForm(user, data)
self.assertTrue(form.is_valid())
form.save(commit=False)
self.assertEqual(password_changed.call_count, 0)
form.save()
self.assertEqual(password_changed.call_count, 1)
def test_field_order(self):
# Regression test - check the order of fields:
user = User.objects.get(username="testclient")
self.assertEqual(
list(PasswordChangeForm(user, {}).fields),
["old_password", "new_password1", "new_password2"],
)
def test_password_whitespace_not_stripped(self):
user = User.objects.get(username="testclient")
user.set_password(" oldpassword ")
data = {
"old_password": " oldpassword ",
"new_password1": " pass ",
"new_password2": " pass ",
}
form = PasswordChangeForm(user, data)
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data["old_password"], data["old_password"])
self.assertEqual(form.cleaned_data["new_password1"], data["new_password1"])
self.assertEqual(form.cleaned_data["new_password2"], data["new_password2"])
def test_html_autocomplete_attributes(self):
user = User.objects.get(username="testclient")
form = PasswordChangeForm(user)
self.assertEqual(
form.fields["old_password"].widget.attrs["autocomplete"], "current-password"
)
class UserChangeFormTest(TestDataMixin, TestCase):
def test_username_validity(self):
user = User.objects.get(username="testclient")
data = {"username": "not valid"}
form = UserChangeForm(data, instance=user)
self.assertFalse(form.is_valid())
validator = next(
v
for v in User._meta.get_field("username").validators
if v.code == "invalid"
)
self.assertEqual(form["username"].errors, [str(validator.message)])
    def test_bug_14242(self):
        # A regression test, introduced by adding an optimization for the
        # UserChangeForm.
        class MyUserForm(UserChangeForm):
            def __init__(self, *args, **kwargs):
                super().__init__(*args, **kwargs)
                self.fields[
                    "groups"
                ].help_text = "These groups give users different permissions"
            class Meta(UserChangeForm.Meta):
                fields = ("groups",)
        # Just check that the form can be instantiated without errors.
        MyUserForm({})
def test_unusable_password(self):
user = User.objects.get(username="empty_password")
user.set_unusable_password()
user.save()
form = UserChangeForm(instance=user)
self.assertIn(_("No password set."), form.as_table())
def test_bug_17944_empty_password(self):
user = User.objects.get(username="empty_password")
form = UserChangeForm(instance=user)
self.assertIn(_("No password set."), form.as_table())
def test_bug_17944_unmanageable_password(self):
user = User.objects.get(username="unmanageable_password")
form = UserChangeForm(instance=user)
self.assertIn(
_("Invalid password format or unknown hashing algorithm."), form.as_table()
)
def test_bug_17944_unknown_password_algorithm(self):
user = User.objects.get(username="unknown_password")
form = UserChangeForm(instance=user)
self.assertIn(
_("Invalid password format or unknown hashing algorithm."), form.as_table()
)
def test_bug_19133(self):
"The change form does not return the password value"
# Use the form to construct the POST data
user = User.objects.get(username="testclient")
form_for_data = UserChangeForm(instance=user)
post_data = form_for_data.initial
# The password field should be readonly, so anything
# posted here should be ignored; the form will be
# valid, and give back the 'initial' value for the
# password field.
post_data["password"] = "new password"
form = UserChangeForm(instance=user, data=post_data)
self.assertTrue(form.is_valid())
# original hashed password contains $
self.assertIn("$", form.cleaned_data["password"])
def test_bug_19349_bound_password_field(self):
user = User.objects.get(username="testclient")
form = UserChangeForm(data={}, instance=user)
# When rendering the bound password field,
# ReadOnlyPasswordHashWidget needs the initial
# value to render correctly
self.assertEqual(form.initial["password"], form["password"].value())
def test_custom_form(self):
class CustomUserChangeForm(UserChangeForm):
class Meta(UserChangeForm.Meta):
model = ExtensionUser
fields = (
"username",
"password",
"date_of_birth",
)
user = User.objects.get(username="testclient")
data = {
"username": "testclient",
"password": "testclient",
"date_of_birth": "1998-02-24",
}
form = CustomUserChangeForm(data, instance=user)
self.assertTrue(form.is_valid())
form.save()
self.assertEqual(form.cleaned_data["username"], "testclient")
self.assertEqual(form.cleaned_data["date_of_birth"], datetime.date(1998, 2, 24))
def test_password_excluded(self):
class UserChangeFormWithoutPassword(UserChangeForm):
password = None
class Meta:
model = User
exclude = ["password"]
form = UserChangeFormWithoutPassword()
self.assertNotIn("password", form.fields)
def test_username_field_autocapitalize_none(self):
form = UserChangeForm()
self.assertEqual(
form.fields["username"].widget.attrs.get("autocapitalize"), "none"
)
@override_settings(TEMPLATES=AUTH_TEMPLATES)
class PasswordResetFormTest(TestDataMixin, TestCase):
    """Tests for PasswordResetForm validation and reset-email delivery."""

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        # This cleanup is necessary because contrib.sites cache
        # makes tests interfere with each other, see #11505
        Site.objects.clear_cache()

    def create_dummy_user(self):
        """
        Create a user and return a tuple (user_object, username, email).
        """
        username = "jsmith"
        email = "[email protected]"
        user = User.objects.create_user(username, email, "test123")
        return (user, username, email)

    def test_invalid_email(self):
        """A malformed address fails form validation with the field error."""
        data = {"email": "not valid"}
        form = PasswordResetForm(data)
        self.assertFalse(form.is_valid())
        self.assertEqual(form["email"].errors, [_("Enter a valid email address.")])

    def test_user_email_unicode_collision(self):
        """Only the exact (unicode) address owner receives the reset mail."""
        User.objects.create_user("mike123", "[email protected]", "test123")
        User.objects.create_user("mike456", "mı[email protected]", "test123")
        data = {"email": "mı[email protected]"}
        form = PasswordResetForm(data)
        self.assertTrue(form.is_valid())
        form.save()
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(mail.outbox[0].to, ["mı[email protected]"])

    def test_user_email_domain_unicode_collision(self):
        """Unicode domains are matched exactly, not visually folded."""
        User.objects.create_user("mike123", "[email protected]", "test123")
        User.objects.create_user("mike456", "mike@ıxample.org", "test123")
        data = {"email": "mike@ıxample.org"}
        form = PasswordResetForm(data)
        self.assertTrue(form.is_valid())
        form.save()
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(mail.outbox[0].to, ["mike@ıxample.org"])

    def test_user_email_unicode_collision_nonexistent(self):
        """No mail is sent for a unicode near-miss of an existing address."""
        User.objects.create_user("mike123", "[email protected]", "test123")
        data = {"email": "mı[email protected]"}
        form = PasswordResetForm(data)
        self.assertTrue(form.is_valid())
        form.save()
        self.assertEqual(len(mail.outbox), 0)

    def test_user_email_domain_unicode_collision_nonexistent(self):
        """No mail is sent for a unicode near-miss of an existing domain."""
        User.objects.create_user("mike123", "[email protected]", "test123")
        data = {"email": "mike@ıxample.org"}
        form = PasswordResetForm(data)
        self.assertTrue(form.is_valid())
        form.save()
        self.assertEqual(len(mail.outbox), 0)

    def test_nonexistent_email(self):
        """
        Test nonexistent email address. This should not fail because it would
        expose information about registered users.
        """
        data = {"email": "[email protected]"}
        form = PasswordResetForm(data)
        self.assertTrue(form.is_valid())
        self.assertEqual(len(mail.outbox), 0)

    def test_cleaned_data(self):
        """A valid address survives in cleaned_data and one mail is sent."""
        (user, username, email) = self.create_dummy_user()
        data = {"email": email}
        form = PasswordResetForm(data)
        self.assertTrue(form.is_valid())
        form.save(domain_override="example.com")
        self.assertEqual(form.cleaned_data["email"], email)
        self.assertEqual(len(mail.outbox), 1)

    def test_custom_email_subject(self):
        """The subject template from AUTH_TEMPLATES drives the mail subject."""
        data = {"email": "[email protected]"}
        form = PasswordResetForm(data)
        self.assertTrue(form.is_valid())
        # Since we're not providing a request object, we must provide a
        # domain_override to prevent the save operation from failing in the
        # potential case where contrib.sites is not installed. Refs #16412.
        form.save(domain_override="example.com")
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(mail.outbox[0].subject, "Custom password reset on example.com")

    def test_custom_email_constructor(self):
        """A subclass may override send_mail() to build the message itself."""
        data = {"email": "[email protected]"}

        class CustomEmailPasswordResetForm(PasswordResetForm):
            def send_mail(
                self,
                subject_template_name,
                email_template_name,
                context,
                from_email,
                to_email,
                html_email_template_name=None,
            ):
                EmailMultiAlternatives(
                    "Forgot your password?",
                    "Sorry to hear you forgot your password.",
                    None,
                    [to_email],
                    ["[email protected]"],
                    headers={"Reply-To": "[email protected]"},
                    alternatives=[
                        ("Really sorry to hear you forgot your password.", "text/html")
                    ],
                ).send()

        form = CustomEmailPasswordResetForm(data)
        self.assertTrue(form.is_valid())
        # Since we're not providing a request object, we must provide a
        # domain_override to prevent the save operation from failing in the
        # potential case where contrib.sites is not installed. Refs #16412.
        form.save(domain_override="example.com")
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(mail.outbox[0].subject, "Forgot your password?")
        self.assertEqual(mail.outbox[0].bcc, ["[email protected]"])
        self.assertEqual(mail.outbox[0].content_subtype, "plain")

    def test_preserve_username_case(self):
        """
        Preserve the case of the user name (before the @ in the email address)
        when creating a user (#5605).
        """
        user = User.objects.create_user("forms_test2", "[email protected]", "test")
        self.assertEqual(user.email, "[email protected]")
        user = User.objects.create_user("forms_test3", "tesT", "test")
        self.assertEqual(user.email, "tesT")

    def test_inactive_user(self):
        """
        Inactive user cannot receive password reset email.
        """
        (user, username, email) = self.create_dummy_user()
        user.is_active = False
        user.save()
        form = PasswordResetForm({"email": email})
        self.assertTrue(form.is_valid())
        form.save()
        self.assertEqual(len(mail.outbox), 0)

    def test_unusable_password(self):
        """A user with an unusable password gets no reset mail."""
        user = User.objects.create_user("testuser", "[email protected]", "test")
        data = {"email": "[email protected]"}
        form = PasswordResetForm(data)
        self.assertTrue(form.is_valid())
        user.set_unusable_password()
        user.save()
        form = PasswordResetForm(data)
        # The form itself is valid, but no email is sent
        self.assertTrue(form.is_valid())
        form.save()
        self.assertEqual(len(mail.outbox), 0)

    def test_save_plaintext_email(self):
        """
        Test the PasswordResetForm.save() method with no html_email_template_name
        parameter passed in.
        Test to ensure original behavior is unchanged after the parameter was added.
        """
        (user, username, email) = self.create_dummy_user()
        form = PasswordResetForm({"email": email})
        self.assertTrue(form.is_valid())
        form.save()
        self.assertEqual(len(mail.outbox), 1)
        message = mail.outbox[0].message()
        self.assertFalse(message.is_multipart())
        self.assertEqual(message.get_content_type(), "text/plain")
        self.assertEqual(message.get("subject"), "Custom password reset on example.com")
        self.assertEqual(len(mail.outbox[0].alternatives), 0)
        self.assertEqual(message.get_all("to"), [email])
        self.assertTrue(
            re.match(r"^http://example.com/reset/[\w+/-]", message.get_payload())
        )

    def test_save_html_email_template_name(self):
        """
        Test the PasswordResetForm.save() method with html_email_template_name
        parameter specified.
        Test to ensure that a multipart email is sent with both text/plain
        and text/html parts.
        """
        (user, username, email) = self.create_dummy_user()
        form = PasswordResetForm({"email": email})
        self.assertTrue(form.is_valid())
        form.save(
            html_email_template_name="registration/html_password_reset_email.html"
        )
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(len(mail.outbox[0].alternatives), 1)
        message = mail.outbox[0].message()
        self.assertEqual(message.get("subject"), "Custom password reset on example.com")
        self.assertEqual(len(message.get_payload()), 2)
        self.assertTrue(message.is_multipart())
        self.assertEqual(message.get_payload(0).get_content_type(), "text/plain")
        self.assertEqual(message.get_payload(1).get_content_type(), "text/html")
        self.assertEqual(message.get_all("to"), [email])
        self.assertTrue(
            re.match(
                r"^http://example.com/reset/[\w/-]+",
                message.get_payload(0).get_payload(),
            )
        )
        self.assertTrue(
            re.match(
                r'^<html><a href="http://example.com/reset/[\w/-]+/">Link</a></html>$',
                message.get_payload(1).get_payload(),
            )
        )

    @override_settings(AUTH_USER_MODEL="auth_tests.CustomEmailField")
    def test_custom_email_field(self):
        """The form honours a custom user model's EMAIL_FIELD."""
        email = "[email protected]"
        CustomEmailField.objects.create_user("test name", "test password", email)
        form = PasswordResetForm({"email": email})
        self.assertTrue(form.is_valid())
        form.save()
        self.assertEqual(form.cleaned_data["email"], email)
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(mail.outbox[0].to, [email])

    def test_html_autocomplete_attributes(self):
        """The email widget advertises autocomplete="email" to browsers."""
        form = PasswordResetForm()
        self.assertEqual(form.fields["email"].widget.attrs["autocomplete"], "email")
class ReadOnlyPasswordHashTest(SimpleTestCase):
    """Tests for the read-only password-hash widget/field used in the admin."""

    def test_bug_19349_render_with_none_value(self):
        # Rendering the widget with value set to None
        # mustn't raise an exception.
        widget = ReadOnlyPasswordHashWidget()
        html = widget.render(name="password", value=None, attrs={})
        self.assertIn(_("No password set."), html)

    @override_settings(
        PASSWORD_HASHERS=["django.contrib.auth.hashers.PBKDF2PasswordHasher"]
    )
    def test_render(self):
        """The widget shows algorithm/iterations and masks salt and hash."""
        widget = ReadOnlyPasswordHashWidget()
        value = (
            "pbkdf2_sha256$100000$a6Pucb1qSFcD$WmCkn9Hqidj48NVe5x0FEM6A9YiOqQcl/83m2Z5u"
            "dm0="
        )
        self.assertHTMLEqual(
            widget.render("name", value, {"id": "id_password"}),
            """
            <div id="id_password">
                <strong>algorithm</strong>: pbkdf2_sha256
                <strong>iterations</strong>: 100000
                <strong>salt</strong>: a6Pucb******
                <strong>hash</strong>: WmCkn9**************************************
            </div>
            """,
        )

    def test_readonly_field_has_changed(self):
        """The field is disabled and never reports a change."""
        field = ReadOnlyPasswordHashField()
        self.assertIs(field.disabled, True)
        self.assertFalse(field.has_changed("aaa", "bbb"))

    def test_label(self):
        """
        ReadOnlyPasswordHashWidget doesn't contain a for attribute in the
        <label> because it doesn't have any labelable elements.
        """

        class TestForm(forms.Form):
            hash_field = ReadOnlyPasswordHashField()

        bound_field = TestForm()["hash_field"]
        self.assertIsNone(bound_field.field.widget.id_for_label("id"))
        self.assertEqual(bound_field.label_tag(), "<label>Hash field:</label>")
class AdminPasswordChangeFormTest(TestDataMixin, TestCase):
    """Tests for the admin's set-password form."""

    @mock.patch("django.contrib.auth.password_validation.password_changed")
    def test_success(self, password_changed):
        """password_changed is signalled only on an actual (committed) save."""
        user = User.objects.get(username="testclient")
        data = {
            "password1": "test123",
            "password2": "test123",
        }
        form = AdminPasswordChangeForm(user, data)
        self.assertTrue(form.is_valid())
        form.save(commit=False)
        self.assertEqual(password_changed.call_count, 0)
        form.save()
        self.assertEqual(password_changed.call_count, 1)

    def test_password_whitespace_not_stripped(self):
        """Leading/trailing whitespace in the new password is preserved."""
        user = User.objects.get(username="testclient")
        data = {
            "password1": " pass ",
            "password2": " pass ",
        }
        form = AdminPasswordChangeForm(user, data)
        self.assertTrue(form.is_valid())
        self.assertEqual(form.cleaned_data["password1"], data["password1"])
        self.assertEqual(form.cleaned_data["password2"], data["password2"])

    def test_non_matching_passwords(self):
        """Mismatched entries produce the password_mismatch error."""
        user = User.objects.get(username="testclient")
        data = {"password1": "password1", "password2": "password2"}
        form = AdminPasswordChangeForm(user, data)
        self.assertEqual(
            form.errors["password2"], [form.error_messages["password_mismatch"]]
        )

    def test_missing_passwords(self):
        """Both password fields are required."""
        user = User.objects.get(username="testclient")
        data = {"password1": "", "password2": ""}
        form = AdminPasswordChangeForm(user, data)
        required_error = [Field.default_error_messages["required"]]
        self.assertEqual(form.errors["password1"], required_error)
        self.assertEqual(form.errors["password2"], required_error)

    def test_one_password(self):
        """Only the empty field gets the required error."""
        user = User.objects.get(username="testclient")
        form1 = AdminPasswordChangeForm(user, {"password1": "", "password2": "test"})
        required_error = [Field.default_error_messages["required"]]
        self.assertEqual(form1.errors["password1"], required_error)
        self.assertNotIn("password2", form1.errors)
        form2 = AdminPasswordChangeForm(user, {"password1": "test", "password2": ""})
        self.assertEqual(form2.errors["password2"], required_error)
        self.assertNotIn("password1", form2.errors)

    def test_html_autocomplete_attributes(self):
        """Both password widgets advertise autocomplete="new-password"."""
        user = User.objects.get(username="testclient")
        form = AdminPasswordChangeForm(user)
        tests = (
            ("password1", "new-password"),
            ("password2", "new-password"),
        )
        for field_name, autocomplete in tests:
            with self.subTest(field_name=field_name, autocomplete=autocomplete):
                self.assertEqual(
                    form.fields[field_name].widget.attrs["autocomplete"], autocomplete
                )
|
2128a0d515340efa96f5918c5cde09ee6bcbcd3632ce0aeb8effc7c8f9568ca9 | from datetime import date
from django.contrib.auth import authenticate
from django.contrib.auth.models import User
from django.contrib.auth.tokens import PasswordResetTokenGenerator
from django.contrib.auth.views import (
PasswordChangeDoneView,
PasswordChangeView,
PasswordResetCompleteView,
PasswordResetDoneView,
PasswordResetView,
)
from django.test import RequestFactory, TestCase, override_settings
from django.urls import reverse
from django.utils.http import urlsafe_base64_encode
from .client import PasswordResetConfirmClient
from .models import CustomUser
@override_settings(ROOT_URLCONF="auth_tests.urls")
class AuthTemplateTests(TestCase):
    """Smoke tests: each auth view renders its expected title and heading."""

    request_factory = RequestFactory()

    @classmethod
    def setUpTestData(cls):
        # One authenticated user and a ready-made request shared by all tests.
        user = User.objects.create_user("jsmith", "[email protected]", "pass")
        user = authenticate(username=user.username, password="pass")
        request = cls.request_factory.get("/somepath/")
        request.user = user
        cls.user, cls.request = user, request

    def test_password_reset_view(self):
        response = PasswordResetView.as_view(success_url="dummy/")(self.request)
        self.assertContains(
            response, "<title>Password reset | Django site admin</title>"
        )
        self.assertContains(response, "<h1>Password reset</h1>")

    def test_password_reset_done_view(self):
        response = PasswordResetDoneView.as_view()(self.request)
        self.assertContains(
            response, "<title>Password reset sent | Django site admin</title>"
        )
        self.assertContains(response, "<h1>Password reset sent</h1>")

    def test_password_reset_confirm_view_invalid_token(self):
        # PasswordResetConfirmView invalid token
        client = PasswordResetConfirmClient()
        url = reverse(
            "password_reset_confirm", kwargs={"uidb64": "Bad", "token": "Bad-Token"}
        )
        response = client.get(url)
        self.assertContains(
            response, "<title>Password reset unsuccessful | Django site admin</title>"
        )
        self.assertContains(response, "<h1>Password reset unsuccessful</h1>")

    def test_password_reset_confirm_view_valid_token(self):
        # PasswordResetConfirmView valid token
        client = PasswordResetConfirmClient()
        default_token_generator = PasswordResetTokenGenerator()
        token = default_token_generator.make_token(self.user)
        uidb64 = urlsafe_base64_encode(str(self.user.pk).encode())
        url = reverse(
            "password_reset_confirm", kwargs={"uidb64": uidb64, "token": token}
        )
        response = client.get(url)
        self.assertContains(
            response, "<title>Enter new password | Django site admin</title>"
        )
        self.assertContains(response, "<h1>Enter new password</h1>")
        # The username is added to the password reset confirmation form to help
        # browser's password managers.
        self.assertContains(
            response,
            '<input class="hidden" autocomplete="username" value="jsmith">',
        )

    @override_settings(AUTH_USER_MODEL="auth_tests.CustomUser")
    def test_password_reset_confirm_view_custom_username_hint(self):
        custom_user = CustomUser.custom_objects.create_user(
            email="[email protected]",
            date_of_birth=date(1986, 11, 11),
            first_name="Joe",
        )
        client = PasswordResetConfirmClient()
        default_token_generator = PasswordResetTokenGenerator()
        token = default_token_generator.make_token(custom_user)
        uidb64 = urlsafe_base64_encode(str(custom_user.pk).encode())
        url = reverse(
            "password_reset_confirm", kwargs={"uidb64": uidb64, "token": token}
        )
        response = client.get(url)
        self.assertContains(
            response,
            "<title>Enter new password | Django site admin</title>",
        )
        self.assertContains(response, "<h1>Enter new password</h1>")
        # The username field is added to the password reset confirmation form
        # to help browser's password managers.
        self.assertContains(
            response,
            '<input class="hidden" autocomplete="username" value="[email protected]">',
        )

    def test_password_reset_complete_view(self):
        response = PasswordResetCompleteView.as_view()(self.request)
        self.assertContains(
            response, "<title>Password reset complete | Django site admin</title>"
        )
        self.assertContains(response, "<h1>Password reset complete</h1>")

    def test_password_reset_change_view(self):
        response = PasswordChangeView.as_view(success_url="dummy/")(self.request)
        self.assertContains(
            response, "<title>Password change | Django site admin</title>"
        )
        self.assertContains(response, "<h1>Password change</h1>")

    def test_password_change_done_view(self):
        response = PasswordChangeDoneView.as_view()(self.request)
        self.assertContains(
            response, "<title>Password change successful | Django site admin</title>"
        )
        self.assertContains(response, "<h1>Password change successful</h1>")
|
67beafe67432c0addd1ba350e38427d49d6f9ae90bfe8cfe7711034ad7190347 | from django.conf import settings
from django.contrib.auth import models
from django.contrib.auth.decorators import login_required, permission_required
from django.core.exceptions import PermissionDenied
from django.http import HttpResponse
from django.test import TestCase, override_settings
from django.test.client import RequestFactory
from .test_views import AuthViewsTestCase
@override_settings(ROOT_URLCONF="auth_tests.urls")
class LoginRequiredTestCase(AuthViewsTestCase):
    """
    Tests the login_required decorators
    """

    def test_callable(self):
        """
        login_required is assignable to callable objects.
        """

        class CallableView:
            def __call__(self, *args, **kwargs):
                pass

        login_required(CallableView())

    def test_view(self):
        """
        login_required is assignable to normal views.
        """

        def normal_view(request):
            pass

        login_required(normal_view)

    def test_login_required(self, view_url="/login_required/", login_url=None):
        """
        login_required works on a simple view wrapped in a login_required
        decorator.
        """
        if login_url is None:
            login_url = settings.LOGIN_URL
        # Anonymous request is redirected to the login URL ...
        response = self.client.get(view_url)
        self.assertEqual(response.status_code, 302)
        self.assertIn(login_url, response.url)
        # ... and succeeds once authenticated.
        self.login()
        response = self.client.get(view_url)
        self.assertEqual(response.status_code, 200)

    def test_login_required_next_url(self):
        """
        login_required works on a simple view wrapped in a login_required
        decorator with a login_url set.
        """
        self.test_login_required(
            view_url="/login_required_login_url/", login_url="/somewhere/"
        )
class PermissionsRequiredDecoratorTest(TestCase):
    """
    Tests for the permission_required decorator
    """

    factory = RequestFactory()

    @classmethod
    def setUpTestData(cls):
        cls.user = models.User.objects.create(username="joe", password="qwerty")
        # Add permissions auth.add_customuser and auth.change_customuser
        perms = models.Permission.objects.filter(
            codename__in=("add_customuser", "change_customuser")
        )
        cls.user.user_permissions.add(*perms)

    def test_many_permissions_pass(self):
        """A list of permissions the user holds grants access."""

        @permission_required(
            ["auth_tests.add_customuser", "auth_tests.change_customuser"]
        )
        def a_view(request):
            return HttpResponse()

        request = self.factory.get("/rand")
        request.user = self.user
        resp = a_view(request)
        self.assertEqual(resp.status_code, 200)

    def test_many_permissions_in_set_pass(self):
        """Permissions may also be given as a set."""

        @permission_required(
            {"auth_tests.add_customuser", "auth_tests.change_customuser"}
        )
        def a_view(request):
            return HttpResponse()

        request = self.factory.get("/rand")
        request.user = self.user
        resp = a_view(request)
        self.assertEqual(resp.status_code, 200)

    def test_single_permission_pass(self):
        """A single permission string grants access when held."""

        @permission_required("auth_tests.add_customuser")
        def a_view(request):
            return HttpResponse()

        request = self.factory.get("/rand")
        request.user = self.user
        resp = a_view(request)
        self.assertEqual(resp.status_code, 200)

    def test_permissioned_denied_redirect(self):
        """A missing permission redirects (302) by default."""

        @permission_required(
            [
                "auth_tests.add_customuser",
                "auth_tests.change_customuser",
                "nonexistent-permission",
            ]
        )
        def a_view(request):
            return HttpResponse()

        request = self.factory.get("/rand")
        request.user = self.user
        resp = a_view(request)
        self.assertEqual(resp.status_code, 302)

    def test_permissioned_denied_exception_raised(self):
        """With raise_exception=True a missing permission raises."""

        @permission_required(
            [
                "auth_tests.add_customuser",
                "auth_tests.change_customuser",
                "nonexistent-permission",
            ],
            raise_exception=True,
        )
        def a_view(request):
            return HttpResponse()

        request = self.factory.get("/rand")
        request.user = self.user
        with self.assertRaises(PermissionDenied):
            a_view(request)
|
6c88d351a711ea8d0266db835ec58e6e5bf6ecec5bed2d536bbf26df00abb65b | from unittest import mock, skipUnless
from django.conf.global_settings import PASSWORD_HASHERS
from django.contrib.auth.hashers import (
UNUSABLE_PASSWORD_PREFIX,
UNUSABLE_PASSWORD_SUFFIX_LENGTH,
BasePasswordHasher,
BCryptPasswordHasher,
BCryptSHA256PasswordHasher,
MD5PasswordHasher,
PBKDF2PasswordHasher,
PBKDF2SHA1PasswordHasher,
ScryptPasswordHasher,
SHA1PasswordHasher,
check_password,
get_hasher,
identify_hasher,
is_password_usable,
make_password,
)
from django.test import SimpleTestCase
from django.test.utils import override_settings
# Optional hashing backends: tests that depend on them are skipped (via
# skipUnless) when the module is missing, so import failures are tolerated.
try:
    import crypt
except ImportError:
    crypt = None
else:
    # On some platforms (e.g. OpenBSD), crypt.crypt() always returns None.
    if crypt.crypt("") is None:
        crypt = None
try:
    import bcrypt
except ImportError:
    bcrypt = None
try:
    import argon2
except ImportError:
    argon2 = None
class PBKDF2SingleIterationHasher(PBKDF2PasswordHasher):
    # Deliberately weak variant: a single iteration keeps test hashing fast.
    iterations = 1
@override_settings(PASSWORD_HASHERS=PASSWORD_HASHERS)
class TestUtilsHashPass(SimpleTestCase):
def test_simple(self):
    """make_password() defaults to PBKDF2-SHA256 and verifies round-trip."""
    encoded = make_password("lètmein")
    self.assertTrue(encoded.startswith("pbkdf2_sha256$"))
    self.assertTrue(is_password_usable(encoded))
    self.assertTrue(check_password("lètmein", encoded))
    self.assertFalse(check_password("lètmeinz", encoded))
    # Blank passwords
    blank_encoded = make_password("")
    self.assertTrue(blank_encoded.startswith("pbkdf2_sha256$"))
    self.assertTrue(is_password_usable(blank_encoded))
    self.assertTrue(check_password("", blank_encoded))
    self.assertFalse(check_password(" ", blank_encoded))
def test_bytes(self):
    """bytes passwords are accepted and verify against their own hash."""
    hashed = make_password(b"bytes_password")
    self.assertTrue(hashed.startswith("pbkdf2_sha256$"))
    self.assertIs(is_password_usable(hashed), True)
    self.assertIs(check_password(b"bytes_password", hashed), True)
def test_invalid_password(self):
    """Non-string, non-bytes passwords raise TypeError."""
    with self.assertRaisesMessage(
        TypeError, "Password must be a string or bytes, got int."
    ):
        make_password(1)
def test_pbkdf2(self):
    """PBKDF2-SHA256 with a fixed salt yields a known encoding and updates weak salts."""
    encoded = make_password("lètmein", "seasalt", "pbkdf2_sha256")
    self.assertEqual(
        encoded,
        "pbkdf2_sha256$390000$seasalt$8xBlGd3jVgvJ+92hWPxi5ww0uuAuAnKgC45eudxro7c=",
    )
    self.assertTrue(is_password_usable(encoded))
    self.assertTrue(check_password("lètmein", encoded))
    self.assertFalse(check_password("lètmeinz", encoded))
    self.assertEqual(identify_hasher(encoded).algorithm, "pbkdf2_sha256")
    # Blank passwords
    blank_encoded = make_password("", "seasalt", "pbkdf2_sha256")
    self.assertTrue(blank_encoded.startswith("pbkdf2_sha256$"))
    self.assertTrue(is_password_usable(blank_encoded))
    self.assertTrue(check_password("", blank_encoded))
    self.assertFalse(check_password(" ", blank_encoded))
    # Salt entropy check.
    hasher = get_hasher("pbkdf2_sha256")
    encoded_weak_salt = make_password("lètmein", "iodizedsalt", "pbkdf2_sha256")
    encoded_strong_salt = make_password("lètmein", hasher.salt(), "pbkdf2_sha256")
    self.assertIs(hasher.must_update(encoded_weak_salt), True)
    self.assertIs(hasher.must_update(encoded_strong_salt), False)
@override_settings(
    PASSWORD_HASHERS=["django.contrib.auth.hashers.SHA1PasswordHasher"]
)
def test_sha1(self):
    """Salted SHA1 encoding, round-trip, and weak-salt detection."""
    encoded = make_password("lètmein", "seasalt", "sha1")
    self.assertEqual(
        encoded, "sha1$seasalt$cff36ea83f5706ce9aa7454e63e431fc726b2dc8"
    )
    self.assertTrue(is_password_usable(encoded))
    self.assertTrue(check_password("lètmein", encoded))
    self.assertFalse(check_password("lètmeinz", encoded))
    self.assertEqual(identify_hasher(encoded).algorithm, "sha1")
    # Blank passwords
    blank_encoded = make_password("", "seasalt", "sha1")
    self.assertTrue(blank_encoded.startswith("sha1$"))
    self.assertTrue(is_password_usable(blank_encoded))
    self.assertTrue(check_password("", blank_encoded))
    self.assertFalse(check_password(" ", blank_encoded))
    # Salt entropy check.
    hasher = get_hasher("sha1")
    encoded_weak_salt = make_password("lètmein", "iodizedsalt", "sha1")
    encoded_strong_salt = make_password("lètmein", hasher.salt(), "sha1")
    self.assertIs(hasher.must_update(encoded_weak_salt), True)
    self.assertIs(hasher.must_update(encoded_strong_salt), False)
@override_settings(
    PASSWORD_HASHERS=["django.contrib.auth.hashers.MD5PasswordHasher"]
)
def test_md5(self):
    """Salted MD5 encoding, round-trip, and weak-salt detection."""
    encoded = make_password("lètmein", "seasalt", "md5")
    self.assertEqual(encoded, "md5$seasalt$3f86d0d3d465b7b458c231bf3555c0e3")
    self.assertTrue(is_password_usable(encoded))
    self.assertTrue(check_password("lètmein", encoded))
    self.assertFalse(check_password("lètmeinz", encoded))
    self.assertEqual(identify_hasher(encoded).algorithm, "md5")
    # Blank passwords
    blank_encoded = make_password("", "seasalt", "md5")
    self.assertTrue(blank_encoded.startswith("md5$"))
    self.assertTrue(is_password_usable(blank_encoded))
    self.assertTrue(check_password("", blank_encoded))
    self.assertFalse(check_password(" ", blank_encoded))
    # Salt entropy check.
    hasher = get_hasher("md5")
    encoded_weak_salt = make_password("lètmein", "iodizedsalt", "md5")
    encoded_strong_salt = make_password("lètmein", hasher.salt(), "md5")
    self.assertIs(hasher.must_update(encoded_weak_salt), True)
    self.assertIs(hasher.must_update(encoded_strong_salt), False)
@override_settings(
    PASSWORD_HASHERS=["django.contrib.auth.hashers.UnsaltedMD5PasswordHasher"]
)
def test_unsalted_md5(self):
    """Unsalted MD5 hashes verify in both bare and md5$$ syntaxes."""
    encoded = make_password("lètmein", "", "unsalted_md5")
    self.assertEqual(encoded, "88a434c88cca4e900f7874cd98123f43")
    self.assertTrue(is_password_usable(encoded))
    self.assertTrue(check_password("lètmein", encoded))
    self.assertFalse(check_password("lètmeinz", encoded))
    self.assertEqual(identify_hasher(encoded).algorithm, "unsalted_md5")
    # Alternate unsalted syntax
    alt_encoded = "md5$$%s" % encoded
    self.assertTrue(is_password_usable(alt_encoded))
    self.assertTrue(check_password("lètmein", alt_encoded))
    self.assertFalse(check_password("lètmeinz", alt_encoded))
    # Blank passwords
    blank_encoded = make_password("", "", "unsalted_md5")
    self.assertTrue(is_password_usable(blank_encoded))
    self.assertTrue(check_password("", blank_encoded))
    self.assertFalse(check_password(" ", blank_encoded))
@override_settings(
    PASSWORD_HASHERS=["django.contrib.auth.hashers.UnsaltedMD5PasswordHasher"]
)
def test_unsalted_md5_encode_invalid_salt(self):
    """Passing a non-empty salt to the unsalted MD5 hasher is rejected."""
    with self.assertRaisesMessage(ValueError, "salt must be empty."):
        get_hasher("unsalted_md5").encode("password", salt="salt")
@override_settings(
    PASSWORD_HASHERS=["django.contrib.auth.hashers.UnsaltedSHA1PasswordHasher"]
)
def test_unsalted_sha1(self):
    """Unsalted SHA1 requires the sha1$$ prefix; a raw digest is rejected."""
    encoded = make_password("lètmein", "", "unsalted_sha1")
    self.assertEqual(encoded, "sha1$$6d138ca3ae545631b3abd71a4f076ce759c5700b")
    self.assertTrue(is_password_usable(encoded))
    self.assertTrue(check_password("lètmein", encoded))
    self.assertFalse(check_password("lètmeinz", encoded))
    self.assertEqual(identify_hasher(encoded).algorithm, "unsalted_sha1")
    # Raw SHA1 isn't acceptable
    alt_encoded = encoded[6:]
    self.assertFalse(check_password("lètmein", alt_encoded))
    # Blank passwords
    blank_encoded = make_password("", "", "unsalted_sha1")
    self.assertTrue(blank_encoded.startswith("sha1$"))
    self.assertTrue(is_password_usable(blank_encoded))
    self.assertTrue(check_password("", blank_encoded))
    self.assertFalse(check_password(" ", blank_encoded))
@override_settings(
    PASSWORD_HASHERS=["django.contrib.auth.hashers.UnsaltedSHA1PasswordHasher"]
)
def test_unsalted_sha1_encode_invalid_salt(self):
    """Passing a non-empty salt to the unsalted SHA1 hasher is rejected."""
    with self.assertRaisesMessage(ValueError, "salt must be empty."):
        get_hasher("unsalted_sha1").encode("password", salt="salt")
@skipUnless(crypt, "no crypt module to generate password.")
@override_settings(
    PASSWORD_HASHERS=["django.contrib.auth.hashers.CryptPasswordHasher"]
)
def test_crypt(self):
    """crypt(3) hashing with a fixed two-char salt round-trips."""
    encoded = make_password("lètmei", "ab", "crypt")
    self.assertEqual(encoded, "crypt$$ab1Hv2Lg7ltQo")
    self.assertTrue(is_password_usable(encoded))
    self.assertTrue(check_password("lètmei", encoded))
    self.assertFalse(check_password("lètmeiz", encoded))
    self.assertEqual(identify_hasher(encoded).algorithm, "crypt")
    # Blank passwords
    blank_encoded = make_password("", "ab", "crypt")
    self.assertTrue(blank_encoded.startswith("crypt$"))
    self.assertTrue(is_password_usable(blank_encoded))
    self.assertTrue(check_password("", blank_encoded))
    self.assertFalse(check_password(" ", blank_encoded))
@skipUnless(crypt, "no crypt module to generate password.")
@override_settings(
    PASSWORD_HASHERS=["django.contrib.auth.hashers.CryptPasswordHasher"]
)
def test_crypt_encode_invalid_salt(self):
    """crypt requires exactly a two-character salt."""
    with self.assertRaisesMessage(ValueError, "salt must be of length 2."):
        get_hasher("crypt").encode("password", salt="a")
@skipUnless(crypt, "no crypt module to generate password.")
@override_settings(
    PASSWORD_HASHERS=["django.contrib.auth.hashers.CryptPasswordHasher"]
)
def test_crypt_encode_invalid_hash(self):
    """A None result from crypt.crypt() is surfaced as a TypeError."""
    hasher = get_hasher("crypt")
    with mock.patch("crypt.crypt", return_value=None):
        with self.assertRaisesMessage(TypeError, "hash must be provided."):
            hasher.encode("password", salt="ab")
@skipUnless(bcrypt, "bcrypt not installed")
def test_bcrypt_sha256(self):
    """bcrypt_sha256 round-trips and avoids bcrypt's 72-byte truncation."""
    encoded = make_password("lètmein", hasher="bcrypt_sha256")
    self.assertTrue(is_password_usable(encoded))
    self.assertTrue(encoded.startswith("bcrypt_sha256$"))
    self.assertTrue(check_password("lètmein", encoded))
    self.assertFalse(check_password("lètmeinz", encoded))
    self.assertEqual(identify_hasher(encoded).algorithm, "bcrypt_sha256")
    # password truncation no longer works
    password = (
        "VSK0UYV6FFQVZ0KG88DYN9WADAADZO1CTSIVDJUNZSUML6IBX7LN7ZS3R5"
        "JGB3RGZ7VI7G7DJQ9NI8BQFSRPTG6UWTTVESA5ZPUN"
    )
    encoded = make_password(password, hasher="bcrypt_sha256")
    self.assertTrue(check_password(password, encoded))
    self.assertFalse(check_password(password[:72], encoded))
    # Blank passwords
    blank_encoded = make_password("", hasher="bcrypt_sha256")
    self.assertTrue(blank_encoded.startswith("bcrypt_sha256$"))
    self.assertTrue(is_password_usable(blank_encoded))
    self.assertTrue(check_password("", blank_encoded))
    self.assertFalse(check_password(" ", blank_encoded))
@skipUnless(bcrypt, "bcrypt not installed")
@override_settings(
    PASSWORD_HASHERS=["django.contrib.auth.hashers.BCryptPasswordHasher"]
)
def test_bcrypt(self):
    """Plain bcrypt hashing round-trips and is correctly identified."""
    encoded = make_password("lètmein", hasher="bcrypt")
    self.assertTrue(is_password_usable(encoded))
    self.assertTrue(encoded.startswith("bcrypt$"))
    self.assertTrue(check_password("lètmein", encoded))
    self.assertFalse(check_password("lètmeinz", encoded))
    self.assertEqual(identify_hasher(encoded).algorithm, "bcrypt")
    # Blank passwords
    blank_encoded = make_password("", hasher="bcrypt")
    self.assertTrue(blank_encoded.startswith("bcrypt$"))
    self.assertTrue(is_password_usable(blank_encoded))
    self.assertTrue(check_password("", blank_encoded))
    self.assertFalse(check_password(" ", blank_encoded))
@skipUnless(bcrypt, "bcrypt not installed")
@override_settings(
    PASSWORD_HASHERS=["django.contrib.auth.hashers.BCryptPasswordHasher"]
)
def test_bcrypt_upgrade(self):
    """check_password() calls the setter when the rounds count increases."""
    hasher = get_hasher("bcrypt")
    self.assertEqual("bcrypt", hasher.algorithm)
    self.assertNotEqual(hasher.rounds, 4)
    old_rounds = hasher.rounds
    try:
        # Generate a password with 4 rounds.
        hasher.rounds = 4
        encoded = make_password("letmein", hasher="bcrypt")
        rounds = hasher.safe_summary(encoded)["work factor"]
        self.assertEqual(rounds, 4)
        state = {"upgraded": False}

        def setter(password):
            state["upgraded"] = True

        # No upgrade is triggered.
        self.assertTrue(check_password("letmein", encoded, setter, "bcrypt"))
        self.assertFalse(state["upgraded"])
        # Revert to the old rounds count and ...
        hasher.rounds = old_rounds
        # ... check if the password would get updated to the new count.
        self.assertTrue(check_password("letmein", encoded, setter, "bcrypt"))
        self.assertTrue(state["upgraded"])
    finally:
        hasher.rounds = old_rounds
@skipUnless(bcrypt, "bcrypt not installed")
@override_settings(
    PASSWORD_HASHERS=["django.contrib.auth.hashers.BCryptPasswordHasher"]
)
def test_bcrypt_harden_runtime(self):
    """harden_runtime() re-encodes to equalize timing for wrong passwords."""
    hasher = get_hasher("bcrypt")
    self.assertEqual("bcrypt", hasher.algorithm)
    with mock.patch.object(hasher, "rounds", 4):
        encoded = make_password("letmein", hasher="bcrypt")
    with mock.patch.object(hasher, "rounds", 6), mock.patch.object(
        hasher, "encode", side_effect=hasher.encode
    ):
        hasher.harden_runtime("wrong_password", encoded)
        # Increasing rounds from 4 to 6 means an increase of 4 in workload,
        # therefore hardening should run 3 times to make the timing the
        # same (the original encode() call already ran once).
        self.assertEqual(hasher.encode.call_count, 3)
        # Get the original salt (includes the original workload factor)
        algorithm, data = encoded.split("$", 1)
        expected_call = (("wrong_password", data[:29].encode()),)
        self.assertEqual(hasher.encode.call_args_list, [expected_call] * 3)
def test_unusable(self):
    """make_password(None) yields a random, never-matching marker string."""
    encoded = make_password(None)
    self.assertEqual(
        len(encoded),
        len(UNUSABLE_PASSWORD_PREFIX) + UNUSABLE_PASSWORD_SUFFIX_LENGTH,
    )
    self.assertFalse(is_password_usable(encoded))
    self.assertFalse(check_password(None, encoded))
    self.assertFalse(check_password(encoded, encoded))
    self.assertFalse(check_password(UNUSABLE_PASSWORD_PREFIX, encoded))
    self.assertFalse(check_password("", encoded))
    self.assertFalse(check_password("lètmein", encoded))
    self.assertFalse(check_password("lètmeinz", encoded))
    with self.assertRaisesMessage(ValueError, "Unknown password hashing algorith"):
        identify_hasher(encoded)
    # Assert that the unusable passwords actually contain a random part.
    # This might fail one day due to a hash collision.
    self.assertNotEqual(encoded, make_password(None), "Random password collision?")
def test_unspecified_password(self):
"""
Makes sure specifying no plain password with a valid encoded password
returns `False`.
"""
self.assertFalse(check_password(None, make_password("lètmein")))
def test_bad_algorithm(self):
msg = (
"Unknown password hashing algorithm '%s'. Did you specify it in "
"the PASSWORD_HASHERS setting?"
)
with self.assertRaisesMessage(ValueError, msg % "lolcat"):
make_password("lètmein", hasher="lolcat")
with self.assertRaisesMessage(ValueError, msg % "lolcat"):
identify_hasher("lolcat$salt$hash")
def test_is_password_usable(self):
passwords = ("lètmein_badencoded", "", None)
for password in passwords:
with self.subTest(password=password):
self.assertIs(is_password_usable(password), True)
    def test_low_level_pbkdf2(self):
        """PBKDF2PasswordHasher.encode() reproduces the known reference hash."""
        hasher = PBKDF2PasswordHasher()
        encoded = hasher.encode("lètmein", "seasalt2")
        # The expected value pins the current default iteration count (390000);
        # it must be regenerated whenever the default changes.
        self.assertEqual(
            encoded,
            "pbkdf2_sha256$390000$seasalt2$geC/uZ92nRXDSjSxeoiBqYyRcrLzMm8xK3r"
            "o1QS1uo8=",
        )
        self.assertTrue(hasher.verify("lètmein", encoded))
    def test_low_level_pbkdf2_sha1(self):
        """PBKDF2SHA1PasswordHasher.encode() reproduces the known reference hash."""
        hasher = PBKDF2SHA1PasswordHasher()
        encoded = hasher.encode("lètmein", "seasalt2")
        self.assertEqual(
            encoded, "pbkdf2_sha1$390000$seasalt2$aDapRanzW8aHTz97v2TcfHzWD+I="
        )
        self.assertTrue(hasher.verify("lètmein", encoded))
@skipUnless(bcrypt, "bcrypt not installed")
def test_bcrypt_salt_check(self):
hasher = BCryptPasswordHasher()
encoded = hasher.encode("lètmein", hasher.salt())
self.assertIs(hasher.must_update(encoded), False)
@skipUnless(bcrypt, "bcrypt not installed")
def test_bcryptsha256_salt_check(self):
hasher = BCryptSHA256PasswordHasher()
encoded = hasher.encode("lètmein", hasher.salt())
self.assertIs(hasher.must_update(encoded), False)
    @override_settings(
        PASSWORD_HASHERS=[
            "django.contrib.auth.hashers.PBKDF2PasswordHasher",
            "django.contrib.auth.hashers.SHA1PasswordHasher",
            "django.contrib.auth.hashers.MD5PasswordHasher",
        ],
    )
    def test_upgrade(self):
        """
        check_password() with a correct password hashed by a weaker,
        non-default algorithm invokes setter() so the stored hash can be
        upgraded to the default hasher.
        """
        self.assertEqual("pbkdf2_sha256", get_hasher("default").algorithm)
        for algo in ("sha1", "md5"):
            with self.subTest(algo=algo):
                encoded = make_password("lètmein", hasher=algo)
                state = {"upgraded": False}

                def setter(password):
                    state["upgraded"] = True

                self.assertTrue(check_password("lètmein", encoded, setter))
                self.assertTrue(state["upgraded"])
def test_no_upgrade(self):
encoded = make_password("lètmein")
state = {"upgraded": False}
def setter():
state["upgraded"] = True
self.assertFalse(check_password("WRONG", encoded, setter))
self.assertFalse(state["upgraded"])
    @override_settings(
        PASSWORD_HASHERS=[
            "django.contrib.auth.hashers.PBKDF2PasswordHasher",
            "django.contrib.auth.hashers.SHA1PasswordHasher",
            "django.contrib.auth.hashers.MD5PasswordHasher",
        ],
    )
    def test_no_upgrade_on_incorrect_pass(self):
        """
        Even when the hash was made by a weaker, non-default algorithm,
        an incorrect password must not trigger the upgrade setter.
        """
        self.assertEqual("pbkdf2_sha256", get_hasher("default").algorithm)
        for algo in ("sha1", "md5"):
            with self.subTest(algo=algo):
                encoded = make_password("lètmein", hasher=algo)
                state = {"upgraded": False}

                def setter():
                    state["upgraded"] = True

                self.assertFalse(check_password("WRONG", encoded, setter))
                self.assertFalse(state["upgraded"])
    def test_pbkdf2_upgrade(self):
        """
        A PBKDF2 hash with an outdated iteration count is upgraded (via
        setter) as soon as the correct password is checked against it.
        """
        hasher = get_hasher("default")
        self.assertEqual("pbkdf2_sha256", hasher.algorithm)
        self.assertNotEqual(hasher.iterations, 1)

        old_iterations = hasher.iterations
        try:
            # Generate a password with 1 iteration.
            hasher.iterations = 1
            encoded = make_password("letmein")
            algo, iterations, salt, hash = encoded.split("$", 3)
            self.assertEqual(iterations, "1")

            state = {"upgraded": False}

            def setter(password):
                state["upgraded"] = True

            # No upgrade is triggered
            self.assertTrue(check_password("letmein", encoded, setter))
            self.assertFalse(state["upgraded"])

            # Revert to the old iteration count and ...
            hasher.iterations = old_iterations

            # ... check if the password would get updated to the new iteration count.
            self.assertTrue(check_password("letmein", encoded, setter))
            self.assertTrue(state["upgraded"])
        finally:
            # Restore the shared hasher even if an assertion failed above.
            hasher.iterations = old_iterations
    def test_pbkdf2_harden_runtime(self):
        """
        harden_runtime() runs the remaining iterations (current minus the
        hash's recorded count) in a single extra encode() call.
        """
        hasher = get_hasher("default")
        self.assertEqual("pbkdf2_sha256", hasher.algorithm)

        with mock.patch.object(hasher, "iterations", 1):
            encoded = make_password("letmein")

        with mock.patch.object(hasher, "iterations", 6), mock.patch.object(
            hasher, "encode", side_effect=hasher.encode
        ):
            hasher.harden_runtime("wrong_password", encoded)

            # Encode should get called once ...
            self.assertEqual(hasher.encode.call_count, 1)

            # ... with the original salt and 5 iterations.
            algorithm, iterations, salt, hash = encoded.split("$", 3)
            expected_call = (("wrong_password", salt, 5),)
            self.assertEqual(hasher.encode.call_args, expected_call)
    def test_pbkdf2_upgrade_new_hasher(self):
        """
        Switching PASSWORD_HASHERS so a stronger hasher becomes the default
        upgrades an existing weaker PBKDF2 hash on successful check.
        """
        hasher = get_hasher("default")
        self.assertEqual("pbkdf2_sha256", hasher.algorithm)
        self.assertNotEqual(hasher.iterations, 1)

        state = {"upgraded": False}

        def setter(password):
            state["upgraded"] = True

        with self.settings(
            PASSWORD_HASHERS=["auth_tests.test_hashers.PBKDF2SingleIterationHasher"]
        ):
            encoded = make_password("letmein")
            algo, iterations, salt, hash = encoded.split("$", 3)
            self.assertEqual(iterations, "1")

            # No upgrade is triggered
            self.assertTrue(check_password("letmein", encoded, setter))
            self.assertFalse(state["upgraded"])

        # Revert to the old iteration count and check if the password would get
        # updated to the new iteration count.
        with self.settings(
            PASSWORD_HASHERS=[
                "django.contrib.auth.hashers.PBKDF2PasswordHasher",
                "auth_tests.test_hashers.PBKDF2SingleIterationHasher",
            ]
        ):
            self.assertTrue(check_password("letmein", encoded, setter))
            self.assertTrue(state["upgraded"])
    def test_check_password_calls_harden_runtime(self):
        """
        check_password() calls harden_runtime() only on a wrong password,
        so response time doesn't leak whether the password matched.
        """
        hasher = get_hasher("default")
        encoded = make_password("letmein")

        with mock.patch.object(hasher, "harden_runtime"), mock.patch.object(
            hasher, "must_update", return_value=True
        ):
            # Correct password supplied, no hardening needed
            check_password("letmein", encoded)
            self.assertEqual(hasher.harden_runtime.call_count, 0)

            # Wrong password supplied, hardening needed
            check_password("wrong_password", encoded)
            self.assertEqual(hasher.harden_runtime.call_count, 1)
def test_encode_invalid_salt(self):
hasher_classes = [
MD5PasswordHasher,
PBKDF2PasswordHasher,
PBKDF2SHA1PasswordHasher,
ScryptPasswordHasher,
SHA1PasswordHasher,
]
msg = "salt must be provided and cannot contain $."
for hasher_class in hasher_classes:
hasher = hasher_class()
for salt in [None, "", "sea$salt"]:
with self.subTest(hasher_class.__name__, salt=salt):
with self.assertRaisesMessage(ValueError, msg):
hasher.encode("password", salt)
def test_encode_password_required(self):
hasher_classes = [
MD5PasswordHasher,
PBKDF2PasswordHasher,
PBKDF2SHA1PasswordHasher,
ScryptPasswordHasher,
SHA1PasswordHasher,
]
msg = "password must be provided."
for hasher_class in hasher_classes:
hasher = hasher_class()
with self.subTest(hasher_class.__name__):
with self.assertRaisesMessage(TypeError, msg):
hasher.encode(None, "seasalt")
class BasePasswordHasherTests(SimpleTestCase):
    """The abstract BasePasswordHasher rejects use of its unimplemented hooks."""

    # Template for the NotImplementedError raised by abstract methods.
    not_implemented_msg = "subclasses of BasePasswordHasher must provide %s() method"

    def setUp(self):
        self.hasher = BasePasswordHasher()

    def test_load_library_no_algorithm(self):
        msg = "Hasher 'BasePasswordHasher' doesn't specify a library attribute"
        with self.assertRaisesMessage(ValueError, msg):
            self.hasher._load_library()

    def test_load_library_importerror(self):
        # Build a throwaway subclass pointing at a nonexistent module.
        PlainHasher = type(
            "PlainHasher",
            (BasePasswordHasher,),
            {"algorithm": "plain", "library": "plain"},
        )
        msg = "Couldn't load 'PlainHasher' algorithm library: No module named 'plain'"
        with self.assertRaisesMessage(ValueError, msg):
            PlainHasher()._load_library()

    def test_attributes(self):
        self.assertIsNone(self.hasher.algorithm)
        self.assertIsNone(self.hasher.library)

    def test_encode(self):
        msg = self.not_implemented_msg % "an encode"
        with self.assertRaisesMessage(NotImplementedError, msg):
            self.hasher.encode("password", "salt")

    def test_decode(self):
        msg = self.not_implemented_msg % "a decode"
        with self.assertRaisesMessage(NotImplementedError, msg):
            self.hasher.decode("encoded")

    def test_harden_runtime(self):
        # harden_runtime() has a safe no-op default, so it only warns.
        msg = (
            "subclasses of BasePasswordHasher should provide a harden_runtime() method"
        )
        with self.assertWarnsMessage(Warning, msg):
            self.hasher.harden_runtime("password", "encoded")

    def test_must_update(self):
        self.assertIs(self.hasher.must_update("encoded"), False)

    def test_safe_summary(self):
        msg = self.not_implemented_msg % "a safe_summary"
        with self.assertRaisesMessage(NotImplementedError, msg):
            self.hasher.safe_summary("encoded")

    def test_verify(self):
        msg = self.not_implemented_msg % "a verify"
        with self.assertRaisesMessage(NotImplementedError, msg):
            self.hasher.verify("password", "encoded")
@skipUnless(argon2, "argon2-cffi not installed")
@override_settings(PASSWORD_HASHERS=PASSWORD_HASHERS)
class TestUtilsHashPassArgon2(SimpleTestCase):
    """End-to-end tests for the argon2 password hasher."""

    def test_argon2(self):
        """Round-trip hashing/checking, legacy hash formats, salt entropy."""
        encoded = make_password("lètmein", hasher="argon2")
        self.assertTrue(is_password_usable(encoded))
        self.assertTrue(encoded.startswith("argon2$argon2id$"))
        self.assertTrue(check_password("lètmein", encoded))
        self.assertFalse(check_password("lètmeinz", encoded))
        self.assertEqual(identify_hasher(encoded).algorithm, "argon2")
        # Blank passwords
        blank_encoded = make_password("", hasher="argon2")
        self.assertTrue(blank_encoded.startswith("argon2$argon2id$"))
        self.assertTrue(is_password_usable(blank_encoded))
        self.assertTrue(check_password("", blank_encoded))
        self.assertFalse(check_password(" ", blank_encoded))
        # Old hashes without version attribute
        encoded = (
            "argon2$argon2i$m=8,t=1,p=1$c29tZXNhbHQ$gwQOXSNhxiOxPOA0+PY10P9QFO"
            "4NAYysnqRt1GSQLE55m+2GYDt9FEjPMHhP2Cuf0nOEXXMocVrsJAtNSsKyfg"
        )
        self.assertTrue(check_password("secret", encoded))
        self.assertFalse(check_password("wrong", encoded))
        # Old hashes with version attribute.
        encoded = "argon2$argon2i$v=19$m=8,t=1,p=1$c2FsdHNhbHQ$YC9+jJCrQhs5R6db7LlN8Q"
        self.assertIs(check_password("secret", encoded), True)
        self.assertIs(check_password("wrong", encoded), False)
        # Salt entropy check.
        hasher = get_hasher("argon2")
        encoded_weak_salt = make_password("lètmein", "iodizedsalt", "argon2")
        encoded_strong_salt = make_password("lètmein", hasher.salt(), "argon2")
        self.assertIs(hasher.must_update(encoded_weak_salt), True)
        self.assertIs(hasher.must_update(encoded_strong_salt), False)

    def test_argon2_decode(self):
        """decode() exposes the parameters embedded in the encoded hash."""
        salt = "abcdefghijk"
        encoded = make_password("lètmein", salt=salt, hasher="argon2")
        hasher = get_hasher("argon2")
        decoded = hasher.decode(encoded)
        self.assertEqual(decoded["memory_cost"], hasher.memory_cost)
        self.assertEqual(decoded["parallelism"], hasher.parallelism)
        self.assertEqual(decoded["salt"], salt)
        self.assertEqual(decoded["time_cost"], hasher.time_cost)

    def test_argon2_upgrade(self):
        """Changing any cost parameter triggers a hash upgrade."""
        self._test_argon2_upgrade("time_cost", "time cost", 1)
        self._test_argon2_upgrade("memory_cost", "memory cost", 64)
        self._test_argon2_upgrade("parallelism", "parallelism", 1)

    def test_argon2_version_upgrade(self):
        """
        A valid hash whose parameters differ from the hasher's current
        settings is upgraded on a successful check.
        """
        hasher = get_hasher("argon2")
        state = {"upgraded": False}
        encoded = (
            "argon2$argon2id$v=19$m=102400,t=2,p=8$Y041dExhNkljRUUy$TMa6A8fPJh"
            "CAUXRhJXCXdw"
        )

        def setter(password):
            state["upgraded"] = True

        old_m = hasher.memory_cost
        old_t = hasher.time_cost
        old_p = hasher.parallelism
        try:
            hasher.memory_cost = 8
            hasher.time_cost = 1
            hasher.parallelism = 1
            self.assertTrue(check_password("secret", encoded, setter, "argon2"))
            self.assertTrue(state["upgraded"])
        finally:
            # Always restore the shared hasher's parameters.
            hasher.memory_cost = old_m
            hasher.time_cost = old_t
            hasher.parallelism = old_p

    def _test_argon2_upgrade(self, attr, summary_key, new_value):
        """Helper: verify check_password() upgrades when `attr` reverts."""
        hasher = get_hasher("argon2")
        self.assertEqual("argon2", hasher.algorithm)
        self.assertNotEqual(getattr(hasher, attr), new_value)

        old_value = getattr(hasher, attr)
        try:
            # Generate hash with attr set to 1
            setattr(hasher, attr, new_value)
            encoded = make_password("letmein", hasher="argon2")
            attr_value = hasher.safe_summary(encoded)[summary_key]
            self.assertEqual(attr_value, new_value)

            state = {"upgraded": False}

            def setter(password):
                state["upgraded"] = True

            # No upgrade is triggered.
            self.assertTrue(check_password("letmein", encoded, setter, "argon2"))
            self.assertFalse(state["upgraded"])

            # Revert to the old rounds count and ...
            setattr(hasher, attr, old_value)

            # ... check if the password would get updated to the new count.
            self.assertTrue(check_password("letmein", encoded, setter, "argon2"))
            self.assertTrue(state["upgraded"])
        finally:
            setattr(hasher, attr, old_value)
@override_settings(PASSWORD_HASHERS=PASSWORD_HASHERS)
class TestUtilsHashPassScrypt(SimpleTestCase):
    """End-to-end tests for the scrypt password hasher."""

    def test_scrypt(self):
        """Round-trip hashing/checking against the known reference hash."""
        encoded = make_password("lètmein", "seasalt", "scrypt")
        self.assertEqual(
            encoded,
            "scrypt$16384$seasalt$8$1$Qj3+9PPyRjSJIebHnG81TMjsqtaIGxNQG/aEB/NY"
            "afTJ7tibgfYz71m0ldQESkXFRkdVCBhhY8mx7rQwite/Pw==",
        )
        self.assertIs(is_password_usable(encoded), True)
        self.assertIs(check_password("lètmein", encoded), True)
        self.assertIs(check_password("lètmeinz", encoded), False)
        self.assertEqual(identify_hasher(encoded).algorithm, "scrypt")
        # Blank passwords.
        blank_encoded = make_password("", "seasalt", "scrypt")
        self.assertIs(blank_encoded.startswith("scrypt$"), True)
        self.assertIs(is_password_usable(blank_encoded), True)
        self.assertIs(check_password("", blank_encoded), True)
        self.assertIs(check_password(" ", blank_encoded), False)

    def test_scrypt_decode(self):
        """decode() exposes the parameters embedded in the encoded hash."""
        encoded = make_password("lètmein", "seasalt", "scrypt")
        hasher = get_hasher("scrypt")
        decoded = hasher.decode(encoded)
        tests = [
            ("block_size", hasher.block_size),
            ("parallelism", hasher.parallelism),
            ("salt", "seasalt"),
            ("work_factor", hasher.work_factor),
        ]
        for key, expected in tests:
            with self.subTest(key=key):
                self.assertEqual(decoded[key], expected)

    def _test_scrypt_upgrade(self, attr, summary_key, new_value):
        """Helper: check_password() upgrades when `attr` reverts to its default."""
        hasher = get_hasher("scrypt")
        self.assertEqual(hasher.algorithm, "scrypt")
        self.assertNotEqual(getattr(hasher, attr), new_value)

        old_value = getattr(hasher, attr)
        try:
            # Generate hash with attr set to the new value.
            setattr(hasher, attr, new_value)
            encoded = make_password("lètmein", "seasalt", "scrypt")
            attr_value = hasher.safe_summary(encoded)[summary_key]
            self.assertEqual(attr_value, new_value)

            state = {"upgraded": False}

            def setter(password):
                state["upgraded"] = True

            # No update is triggered.
            self.assertIs(check_password("lètmein", encoded, setter, "scrypt"), True)
            self.assertIs(state["upgraded"], False)
            # Revert to the old value.
            setattr(hasher, attr, old_value)
            # Password is updated.
            self.assertIs(check_password("lètmein", encoded, setter, "scrypt"), True)
            self.assertIs(state["upgraded"], True)
        finally:
            # Always restore the shared hasher's attribute.
            setattr(hasher, attr, old_value)

    def test_scrypt_upgrade(self):
        """Changing work factor, block size, or parallelism upgrades hashes."""
        tests = [
            ("work_factor", "work factor", 2**11),
            ("block_size", "block size", 10),
            ("parallelism", "parallelism", 2),
        ]
        for attr, summary_key, new_value in tests:
            with self.subTest(attr=attr):
                self._test_scrypt_upgrade(attr, summary_key, new_value)
|
6584db774b8a0e9da6ad60750d13982706d56433fe5a0905be93b91fe257a04e | from django.contrib.auth.middleware import AuthenticationMiddleware
from django.contrib.auth.models import User
from django.core.exceptions import ImproperlyConfigured
from django.http import HttpRequest, HttpResponse
from django.test import TestCase
class TestAuthenticationMiddleware(TestCase):
    """Session validation behavior of AuthenticationMiddleware."""

    @classmethod
    def setUpTestData(cls):
        cls.user = User.objects.create_user(
            "test_user", "[email protected]", "test_password"
        )

    def setUp(self):
        # The downstream view is irrelevant here; return an empty response
        # so the middleware chain terminates cleanly.
        self.middleware = AuthenticationMiddleware(lambda req: HttpResponse())
        self.client.force_login(self.user)
        self.request = HttpRequest()
        self.request.session = self.client.session

    def test_no_password_change_doesnt_invalidate_session(self):
        # With an unchanged password the session still resolves to the
        # authenticated (non-anonymous) user.
        self.request.session = self.client.session
        self.middleware(self.request)
        self.assertIsNotNone(self.request.user)
        self.assertFalse(self.request.user.is_anonymous)

    def test_changed_password_invalidates_session(self):
        # After password change, user should be anonymous
        self.user.set_password("new_password")
        self.user.save()
        self.middleware(self.request)
        self.assertIsNotNone(self.request.user)
        self.assertTrue(self.request.user.is_anonymous)
        # session should be flushed
        self.assertIsNone(self.request.session.session_key)

    def test_no_session(self):
        # A request that never went through session middleware has no
        # `session` attribute; the middleware must fail loudly.
        msg = (
            "The Django authentication middleware requires session middleware "
            "to be installed. Edit your MIDDLEWARE setting to insert "
            "'django.contrib.sessions.middleware.SessionMiddleware' before "
            "'django.contrib.auth.middleware.AuthenticationMiddleware'."
        )
        with self.assertRaisesMessage(ImproperlyConfigured, msg):
            self.middleware(HttpRequest())
|
ea8f760438132baf8bd8dacdff4a69a72d679871f5838268be09a4cc71d4eccc | import os
from django.core.exceptions import SuspiciousFileOperation
from django.core.files.base import ContentFile
from django.core.files.storage import FileSystemStorage, Storage
from django.db.models import FileField
from django.test import SimpleTestCase
class AWSS3Storage(Storage):
    """
    Simulate an AWS S3 storage which uses Unix-like paths and allows any
    characters in file names but where there aren't actual folders but just
    keys.
    """

    # Key prefix prepended to every generated filename.
    prefix = "mys3folder/"

    def _save(self, name, content):
        """
        This method is important to test that Storage.save() doesn't replace
        '\' with '/' (rather FileSystemStorage.save() does).
        """
        # Return the key untouched; no filesystem interaction.
        return name

    def get_valid_name(self, name):
        # Accept every character — S3 keys are not sanitized.
        return name

    def get_available_name(self, name, max_length=None):
        # No collision handling; the key is always "available".
        return name

    def generate_filename(self, filename):
        """
        This is the method that's important to override when using S3 so that
        os.path() isn't called, which would break S3 keys.
        """
        return self.prefix + self.get_valid_name(filename)
class GenerateFilenameStorageTests(SimpleTestCase):
    """Path-traversal and dangerous-name rejection in Storage and FileField."""

    def test_storage_dangerous_paths(self):
        # Names that reduce to '.', '..', or '' cannot yield a file name.
        candidates = [
            ("/tmp/..", ".."),
            ("/tmp/.", "."),
            ("", ""),
        ]
        s = FileSystemStorage()
        msg = "Could not derive file name from '%s'"
        for file_name, base_name in candidates:
            with self.subTest(file_name=file_name):
                with self.assertRaisesMessage(SuspiciousFileOperation, msg % base_name):
                    s.get_available_name(file_name)
                with self.assertRaisesMessage(SuspiciousFileOperation, msg % base_name):
                    s.generate_filename(file_name)

    def test_storage_dangerous_paths_dir_name(self):
        # '..' segments in the directory part are rejected for both slash styles.
        candidates = [
            ("tmp/../path", "tmp/.."),
            ("tmp\\..\\path", "tmp/.."),
            ("/tmp/../path", "/tmp/.."),
            ("\\tmp\\..\\path", "/tmp/.."),
        ]
        s = FileSystemStorage()
        for file_name, path in candidates:
            msg = "Detected path traversal attempt in '%s'" % path
            with self.subTest(file_name=file_name):
                with self.assertRaisesMessage(SuspiciousFileOperation, msg):
                    s.get_available_name(file_name)
                with self.assertRaisesMessage(SuspiciousFileOperation, msg):
                    s.generate_filename(file_name)

    def test_filefield_dangerous_filename(self):
        # Dot/empty names and names that sanitize to nothing are rejected.
        candidates = [
            ("..", "some/folder/.."),
            (".", "some/folder/."),
            ("", "some/folder/"),
            ("???", "???"),
            ("$.$.$", "$.$.$"),
        ]
        f = FileField(upload_to="some/folder/")
        for file_name, msg_file_name in candidates:
            msg = f"Could not derive file name from '{msg_file_name}'"
            with self.subTest(file_name=file_name):
                with self.assertRaisesMessage(SuspiciousFileOperation, msg):
                    f.generate_filename(None, file_name)

    def test_filefield_dangerous_filename_dot_segments(self):
        f = FileField(upload_to="some/folder/")
        msg = "Detected path traversal attempt in 'some/folder/../path'"
        with self.assertRaisesMessage(SuspiciousFileOperation, msg):
            f.generate_filename(None, "../path")

    def test_filefield_generate_filename_absolute_path(self):
        f = FileField(upload_to="some/folder/")
        candidates = [
            "/tmp/path",
            "/tmp/../path",
        ]
        for file_name in candidates:
            msg = f"Detected path traversal attempt in '{file_name}'"
            with self.subTest(file_name=file_name):
                with self.assertRaisesMessage(SuspiciousFileOperation, msg):
                    f.generate_filename(None, file_name)

    def test_filefield_generate_filename(self):
        # Spaces are replaced with underscores and upload_to is prepended.
        f = FileField(upload_to="some/folder/")
        self.assertEqual(
            f.generate_filename(None, "test with space.txt"),
            os.path.normpath("some/folder/test_with_space.txt"),
        )

    def test_filefield_generate_filename_with_upload_to(self):
        def upload_to(instance, filename):
            return "some/folder/" + filename

        f = FileField(upload_to=upload_to)
        self.assertEqual(
            f.generate_filename(None, "test with space.txt"),
            os.path.normpath("some/folder/test_with_space.txt"),
        )

    def test_filefield_generate_filename_upload_to_overrides_dangerous_filename(self):
        # A callable upload_to that ignores the filename neutralizes any
        # dangerous input name.
        def upload_to(instance, filename):
            return "test.txt"

        f = FileField(upload_to=upload_to)
        candidates = [
            "/tmp/.",
            "/tmp/..",
            "/tmp/../path",
            "/tmp/path",
            "some/folder/",
            "some/folder/.",
            "some/folder/..",
            "some/folder/???",
            "some/folder/$.$.$",
            "some/../test.txt",
            "",
        ]
        for file_name in candidates:
            with self.subTest(file_name=file_name):
                self.assertEqual(f.generate_filename(None, file_name), "test.txt")

    def test_filefield_generate_filename_upload_to_absolute_path(self):
        # Even a callable upload_to cannot produce an absolute/traversal path.
        def upload_to(instance, filename):
            return "/tmp/" + filename

        f = FileField(upload_to=upload_to)
        candidates = [
            "path",
            "../path",
            "???",
            "$.$.$",
        ]
        for file_name in candidates:
            msg = f"Detected path traversal attempt in '/tmp/{file_name}'"
            with self.subTest(file_name=file_name):
                with self.assertRaisesMessage(SuspiciousFileOperation, msg):
                    f.generate_filename(None, file_name)

    def test_filefield_generate_filename_upload_to_dangerous_filename(self):
        def upload_to(instance, filename):
            return "/tmp/" + filename

        f = FileField(upload_to=upload_to)
        candidates = ["..", ".", ""]
        for file_name in candidates:
            msg = f"Could not derive file name from '/tmp/{file_name}'"
            with self.subTest(file_name=file_name):
                with self.assertRaisesMessage(SuspiciousFileOperation, msg):
                    f.generate_filename(None, file_name)

    def test_filefield_awss3_storage(self):
        """
        Simulate a FileField with an S3 storage which uses keys rather than
        folders and names. FileField and Storage shouldn't have any os.path()
        calls that break the key.
        """
        storage = AWSS3Storage()
        folder = "not/a/folder/"

        f = FileField(upload_to=folder, storage=storage)
        key = "my-file-key\\with odd characters"
        data = ContentFile("test")
        expected_key = AWSS3Storage.prefix + folder + key

        # Simulate call to f.save()
        result_key = f.generate_filename(None, key)
        self.assertEqual(result_key, expected_key)
        result_key = storage.save(result_key, data)
        self.assertEqual(result_key, expected_key)

        # Repeat test with a callable.
        def upload_to(instance, filename):
            # Return a non-normalized path on purpose.
            return folder + filename

        f = FileField(upload_to=upload_to, storage=storage)
        # Simulate call to f.save()
        result_key = f.generate_filename(None, key)
        self.assertEqual(result_key, expected_key)
        result_key = storage.save(result_key, data)
        self.assertEqual(result_key, expected_key)
|
4420785ee998751a391a9f3a9bc823f14c6d794f5e946ff4c22d5f98a6bece40 | import os
import shutil
import sys
import tempfile
import threading
import time
import unittest
from datetime import datetime, timedelta
from io import StringIO
from pathlib import Path
from urllib.request import urlopen
from django.core.cache import cache
from django.core.exceptions import SuspiciousFileOperation
from django.core.files.base import ContentFile, File
from django.core.files.storage import FileSystemStorage
from django.core.files.storage import Storage as BaseStorage
from django.core.files.storage import default_storage, get_storage_class
from django.core.files.uploadedfile import (
InMemoryUploadedFile,
SimpleUploadedFile,
TemporaryUploadedFile,
)
from django.db.models import FileField
from django.db.models.fields.files import FileDescriptor
from django.test import LiveServerTestCase, SimpleTestCase, TestCase, override_settings
from django.test.utils import requires_tz_support
from django.urls import NoReverseMatch, reverse_lazy
from django.utils import timezone
from django.utils._os import symlinks_supported
from .models import Storage, callable_storage, temp_storage, temp_storage_location
FILE_SUFFIX_REGEX = "[A-Za-z0-9]{7}"
class GetStorageClassTests(SimpleTestCase):
    """Import resolution behavior of get_storage_class()."""

    def test_get_filesystem_storage(self):
        """
        get_storage_class returns the class for a storage backend name/path.
        """
        self.assertEqual(
            get_storage_class("django.core.files.storage.FileSystemStorage"),
            FileSystemStorage,
        )

    def test_get_invalid_storage_module(self):
        """
        get_storage_class raises an error if the requested module doesn't
        exist.
        """
        with self.assertRaisesMessage(ImportError, "No module named 'storage'"):
            get_storage_class("storage.NonexistentStorage")

    def test_get_nonexistent_storage_class(self):
        """
        get_storage_class raises an error if the requested class doesn't
        exist.
        """
        with self.assertRaises(ImportError):
            get_storage_class("django.core.files.storage.NonexistentStorage")

    def test_get_nonexistent_storage_module(self):
        """
        get_storage_class raises an error if the requested module doesn't
        exist.
        """
        with self.assertRaisesMessage(
            ImportError, "No module named 'django.core.files.nonexistent_storage'"
        ):
            get_storage_class(
                "django.core.files.nonexistent_storage.NonexistentStorage"
            )
class FileSystemStorageTests(unittest.TestCase):
    """Construction and deconstruction details of FileSystemStorage."""

    def test_deconstruction(self):
        # deconstruct() must round-trip constructor kwargs (for migrations).
        path, args, kwargs = temp_storage.deconstruct()
        self.assertEqual(path, "django.core.files.storage.FileSystemStorage")
        self.assertEqual(args, ())
        self.assertEqual(kwargs, {"location": temp_storage_location})

        kwargs_orig = {
            "location": temp_storage_location,
            "base_url": "http://myfiles.example.com/",
        }
        storage = FileSystemStorage(**kwargs_orig)
        path, args, kwargs = storage.deconstruct()
        self.assertEqual(kwargs, kwargs_orig)

    def test_lazy_base_url_init(self):
        """
        FileSystemStorage.__init__() shouldn't evaluate base_url.
        """
        # The lazy reverse only raises when url() finally evaluates it.
        storage = FileSystemStorage(base_url=reverse_lazy("app:url"))
        with self.assertRaises(NoReverseMatch):
            storage.url(storage.base_url)
class FileStorageTests(SimpleTestCase):
storage_class = FileSystemStorage
    def setUp(self):
        # Fresh storage rooted at a throwaway temp dir for each test.
        self.temp_dir = tempfile.mkdtemp()
        self.storage = self.storage_class(
            location=self.temp_dir, base_url="/test_media_url/"
        )
        # Set up a second temporary directory which is ensured to have a mixed
        # case name.
        self.temp_dir2 = tempfile.mkdtemp(suffix="aBc")
    def tearDown(self):
        # Remove both temp trees created in setUp().
        shutil.rmtree(self.temp_dir)
        shutil.rmtree(self.temp_dir2)
    def test_empty_location(self):
        """
        An empty location keeps base_location empty while the effective
        location falls back to the current working directory.
        """
        storage = self.storage_class(location="")
        self.assertEqual(storage.base_location, "")
        self.assertEqual(storage.location, os.getcwd())
def test_file_access_options(self):
"""
Standard file access options are available, and work as expected.
"""
self.assertFalse(self.storage.exists("storage_test"))
f = self.storage.open("storage_test", "w")
f.write("storage contents")
f.close()
self.assertTrue(self.storage.exists("storage_test"))
f = self.storage.open("storage_test", "r")
self.assertEqual(f.read(), "storage contents")
f.close()
self.storage.delete("storage_test")
self.assertFalse(self.storage.exists("storage_test"))
    def _test_file_time_getter(self, getter):
        """Run `getter` through both the USE_TZ=True and USE_TZ=False checks."""
        # Check for correct behavior under both USE_TZ=True and USE_TZ=False.
        # The tests are similar since they both set up a situation where the
        # system time zone, Django's TIME_ZONE, and UTC are distinct.
        self._test_file_time_getter_tz_handling_on(getter)
        self._test_file_time_getter_tz_handling_off(getter)
    @override_settings(USE_TZ=True, TIME_ZONE="Africa/Algiers")
    def _test_file_time_getter_tz_handling_on(self, getter):
        """With USE_TZ=True, file-time getters return aware datetimes in UTC."""
        # Django's TZ (and hence the system TZ) is set to Africa/Algiers which
        # is UTC+1 and has no DST change. We can set the Django TZ to something
        # else so that UTC, Django's TIME_ZONE, and the system timezone are all
        # different.
        now_in_algiers = timezone.make_aware(datetime.now())

        with timezone.override(timezone.get_fixed_timezone(-300)):
            # At this point the system TZ is +1 and the Django TZ
            # is -5. The following will be aware in UTC.
            now = timezone.now()
            self.assertFalse(self.storage.exists("test.file.tz.on"))

            f = ContentFile("custom contents")
            f_name = self.storage.save("test.file.tz.on", f)
            self.addCleanup(self.storage.delete, f_name)
            dt = getter(f_name)
            # dt should be aware, in UTC
            self.assertTrue(timezone.is_aware(dt))
            self.assertEqual(now.tzname(), dt.tzname())

            # The three timezones are indeed distinct.
            naive_now = datetime.now()
            algiers_offset = now_in_algiers.tzinfo.utcoffset(naive_now)
            django_offset = timezone.get_current_timezone().utcoffset(naive_now)
            utc_offset = timezone.utc.utcoffset(naive_now)
            self.assertGreater(algiers_offset, utc_offset)
            self.assertLess(django_offset, utc_offset)

            # dt and now should be the same effective time.
            self.assertLess(abs(dt - now), timedelta(seconds=2))
    @override_settings(USE_TZ=False, TIME_ZONE="Africa/Algiers")
    def _test_file_time_getter_tz_handling_off(self, getter):
        """With USE_TZ=False, file-time getters return naive system-TZ datetimes."""
        # Django's TZ (and hence the system TZ) is set to Africa/Algiers which
        # is UTC+1 and has no DST change. We can set the Django TZ to something
        # else so that UTC, Django's TIME_ZONE, and the system timezone are all
        # different.
        now_in_algiers = timezone.make_aware(datetime.now())

        with timezone.override(timezone.get_fixed_timezone(-300)):
            # At this point the system TZ is +1 and the Django TZ
            # is -5.
            self.assertFalse(self.storage.exists("test.file.tz.off"))

            f = ContentFile("custom contents")
            f_name = self.storage.save("test.file.tz.off", f)
            self.addCleanup(self.storage.delete, f_name)
            dt = getter(f_name)
            # dt should be naive, in system (+1) TZ
            self.assertTrue(timezone.is_naive(dt))

            # The three timezones are indeed distinct.
            naive_now = datetime.now()
            algiers_offset = now_in_algiers.tzinfo.utcoffset(naive_now)
            django_offset = timezone.get_current_timezone().utcoffset(naive_now)
            utc_offset = timezone.utc.utcoffset(naive_now)
            self.assertGreater(algiers_offset, utc_offset)
            self.assertLess(django_offset, utc_offset)

            # dt and naive_now should be the same effective time.
            self.assertLess(abs(dt - naive_now), timedelta(seconds=2))
            # If we convert dt to an aware object using the Algiers
            # timezone then it should be the same effective time to
            # now_in_algiers.
            _dt = timezone.make_aware(dt, now_in_algiers.tzinfo)
            self.assertLess(abs(_dt - now_in_algiers), timedelta(seconds=2))
    def test_file_get_accessed_time(self):
        """
        File storage returns a Datetime object for the last accessed time of
        a file.
        """
        self.assertFalse(self.storage.exists("test.file"))

        f = ContentFile("custom contents")
        f_name = self.storage.save("test.file", f)
        self.addCleanup(self.storage.delete, f_name)
        atime = self.storage.get_accessed_time(f_name)

        # Must agree with the filesystem's atime and be recent.
        self.assertEqual(
            atime, datetime.fromtimestamp(os.path.getatime(self.storage.path(f_name)))
        )
        self.assertLess(
            timezone.now() - self.storage.get_accessed_time(f_name),
            timedelta(seconds=2),
        )
    @requires_tz_support
    def test_file_get_accessed_time_timezone(self):
        # Timezone-awareness checks for get_accessed_time().
        self._test_file_time_getter(self.storage.get_accessed_time)
    def test_file_get_created_time(self):
        """
        File storage returns a datetime for the creation time of a file.
        """
        self.assertFalse(self.storage.exists("test.file"))

        f = ContentFile("custom contents")
        f_name = self.storage.save("test.file", f)
        self.addCleanup(self.storage.delete, f_name)
        ctime = self.storage.get_created_time(f_name)

        # Must agree with the filesystem's ctime and be recent.
        self.assertEqual(
            ctime, datetime.fromtimestamp(os.path.getctime(self.storage.path(f_name)))
        )
        self.assertLess(
            timezone.now() - self.storage.get_created_time(f_name), timedelta(seconds=2)
        )
    @requires_tz_support
    def test_file_get_created_time_timezone(self):
        # Timezone-awareness checks for get_created_time().
        self._test_file_time_getter(self.storage.get_created_time)
    def test_file_get_modified_time(self):
        """
        File storage returns a datetime for the last modified time of a file.
        """
        self.assertFalse(self.storage.exists("test.file"))

        f = ContentFile("custom contents")
        f_name = self.storage.save("test.file", f)
        self.addCleanup(self.storage.delete, f_name)
        mtime = self.storage.get_modified_time(f_name)

        # Must agree with the filesystem's mtime and be recent.
        self.assertEqual(
            mtime, datetime.fromtimestamp(os.path.getmtime(self.storage.path(f_name)))
        )
        self.assertLess(
            timezone.now() - self.storage.get_modified_time(f_name),
            timedelta(seconds=2),
        )
    @requires_tz_support
    def test_file_get_modified_time_timezone(self):
        # Timezone-awareness checks for get_modified_time().
        self._test_file_time_getter(self.storage.get_modified_time)
    def test_file_save_without_name(self):
        """
        File storage extracts the filename from the content object if no
        name is given explicitly.
        """
        self.assertFalse(self.storage.exists("test.file"))

        f = ContentFile("custom contents")
        f.name = "test.file"

        # save(None, f) must fall back to f.name.
        storage_f_name = self.storage.save(None, f)

        self.assertEqual(storage_f_name, f.name)

        self.assertTrue(os.path.exists(os.path.join(self.temp_dir, f.name)))

        self.storage.delete(storage_f_name)
    def test_file_save_with_path(self):
        """
        Saving a pathname should create intermediate directories as necessary.
        """
        self.assertFalse(self.storage.exists("path/to"))
        self.storage.save("path/to/test.file", ContentFile("file saved with path"))
        # Both the storage API and the real filesystem see the new tree.
        self.assertTrue(self.storage.exists("path/to"))
        with self.storage.open("path/to/test.file") as f:
            self.assertEqual(f.read(), b"file saved with path")
        self.assertTrue(
            os.path.exists(os.path.join(self.temp_dir, "path", "to", "test.file"))
        )
        self.storage.delete("path/to/test.file")
    def test_file_save_abs_path(self):
        # An absolute path under the storage root is stored (and returned)
        # relative to that root.
        test_name = "path/to/test.file"
        f = ContentFile("file saved with path")
        f_name = self.storage.save(os.path.join(self.temp_dir, test_name), f)
        self.assertEqual(f_name, test_name)
    @unittest.skipUnless(
        symlinks_supported(), "Must be able to symlink to run this test."
    )
    def test_file_save_broken_symlink(self):
        """A new path is created on save when a broken symlink is supplied."""
        nonexistent_file_path = os.path.join(self.temp_dir, "nonexistent.txt")
        broken_symlink_path = os.path.join(self.temp_dir, "symlink.txt")
        os.symlink(nonexistent_file_path, broken_symlink_path)
        f = ContentFile("some content")
        # Saving through the dangling link must not fail; a real file appears.
        f_name = self.storage.save(broken_symlink_path, f)
        self.assertIs(os.path.exists(os.path.join(self.temp_dir, f_name)), True)
    def test_save_doesnt_close(self):
        # save() must leave the caller's file object (and its underlying
        # file) open, for both temporary and in-memory uploaded files.
        with TemporaryUploadedFile("test", "text/plain", 1, "utf8") as file:
            file.write(b"1")
            file.seek(0)
            self.assertFalse(file.closed)
            self.storage.save("path/to/test.file", file)
            self.assertFalse(file.closed)
            self.assertFalse(file.file.closed)

        file = InMemoryUploadedFile(StringIO("1"), "", "test", "text/plain", 1, "utf8")
        with file:
            self.assertFalse(file.closed)
            self.storage.save("path/to/test.file", file)
            self.assertFalse(file.closed)
            self.assertFalse(file.file.closed)
    def test_file_path(self):
        """
        File storage returns the full path of a file
        """
        self.assertFalse(self.storage.exists("test.file"))
        f = ContentFile("custom contents")
        f_name = self.storage.save("test.file", f)
        # path() is the on-disk location under the storage root.
        self.assertEqual(self.storage.path(f_name), os.path.join(self.temp_dir, f_name))
        self.storage.delete(f_name)
    def test_file_url(self):
        """
        File storage returns a url to access a given file from the web.
        """
        self.assertEqual(
            self.storage.url("test.file"), self.storage.base_url + "test.file"
        )

        # should encode special chars except ~!*()'
        # like encodeURIComponent() JavaScript function do
        self.assertEqual(
            self.storage.url(r"~!*()'@#$%^&*abc`+ =.file"),
            "/test_media_url/~!*()'%40%23%24%25%5E%26*abc%60%2B%20%3D.file",
        )
        self.assertEqual(self.storage.url("ab\0c"), "/test_media_url/ab%00c")

        # should translate os path separator(s) to the url path separator
        self.assertEqual(
            self.storage.url("""a/b\\c.file"""), "/test_media_url/a/b/c.file"
        )

        # #25905: remove leading slashes from file names to prevent unsafe url output
        self.assertEqual(self.storage.url("/evil.com"), "/test_media_url/evil.com")
        self.assertEqual(self.storage.url(r"\evil.com"), "/test_media_url/evil.com")
        self.assertEqual(self.storage.url("///evil.com"), "/test_media_url/evil.com")
        self.assertEqual(self.storage.url(r"\\\evil.com"), "/test_media_url/evil.com")
        # A missing name yields the bare base_url.
        self.assertEqual(self.storage.url(None), "/test_media_url/")
    def test_base_url(self):
        """
        File storage returns a url even when its base_url is unset or modified.
        """
        # With no base_url configured, url() must fail loudly.
        self.storage.base_url = None
        with self.assertRaises(ValueError):
            self.storage.url("test.file")

        # #22717: missing ending slash in base_url should be auto-corrected
        storage = self.storage_class(
            location=self.temp_dir, base_url="/no_ending_slash"
        )
        self.assertEqual(
            storage.url("test.file"), "%s%s" % (storage.base_url, "test.file")
        )
    def test_listdir(self):
        """
        File storage returns a tuple containing directories and files.
        """
        self.assertFalse(self.storage.exists("storage_test_1"))
        self.assertFalse(self.storage.exists("storage_test_2"))
        self.assertFalse(self.storage.exists("storage_dir_1"))

        self.storage.save("storage_test_1", ContentFile("custom content"))
        self.storage.save("storage_test_2", ContentFile("custom content"))
        os.mkdir(os.path.join(self.temp_dir, "storage_dir_1"))
        self.addCleanup(self.storage.delete, "storage_test_1")
        self.addCleanup(self.storage.delete, "storage_test_2")

        # Both str and pathlib.Path directory arguments are accepted.
        for directory in ("", Path("")):
            with self.subTest(directory=directory):
                dirs, files = self.storage.listdir(directory)
                self.assertEqual(set(dirs), {"storage_dir_1"})
                self.assertEqual(set(files), {"storage_test_1", "storage_test_2"})
    def test_file_storage_prevents_directory_traversal(self):
        """
        File storage prevents directory traversal (files can only be accessed if
        they're below the storage location).
        """
        # Both relative ("..") and absolute escapes must be rejected.
        with self.assertRaises(SuspiciousFileOperation):
            self.storage.exists("..")
        with self.assertRaises(SuspiciousFileOperation):
            self.storage.exists("/etc/passwd")
    def test_file_storage_preserves_filename_case(self):
        """The storage backend should preserve case of filenames."""
        # Create a storage backend associated with the mixed case name
        # directory.
        other_temp_storage = self.storage_class(location=self.temp_dir2)
        # Ask that storage backend to store a file with a mixed case filename.
        mixed_case = "CaSe_SeNsItIvE"
        file = other_temp_storage.open(mixed_case, "w")
        file.write("storage contents")
        file.close()
        # path() must echo the name back with its original casing.
        self.assertEqual(
            os.path.join(self.temp_dir2, mixed_case),
            other_temp_storage.path(mixed_case),
        )
        other_temp_storage.delete(mixed_case)
    def test_makedirs_race_handling(self):
        """
        File storage should be robust against directory creation race conditions.
        """
        real_makedirs = os.makedirs

        # Monkey-patch os.makedirs, to simulate a normal call, a raced call,
        # and an error.
        def fake_makedirs(path, mode=0o777, exist_ok=False):
            if path == os.path.join(self.temp_dir, "normal"):
                real_makedirs(path, mode, exist_ok)
            elif path == os.path.join(self.temp_dir, "raced"):
                # Simulate another process having created the directory first.
                real_makedirs(path, mode, exist_ok)
                if not exist_ok:
                    raise FileExistsError()
            elif path == os.path.join(self.temp_dir, "error"):
                raise PermissionError()
            else:
                self.fail("unexpected argument %r" % path)

        try:
            os.makedirs = fake_makedirs

            self.storage.save("normal/test.file", ContentFile("saved normally"))
            with self.storage.open("normal/test.file") as f:
                self.assertEqual(f.read(), b"saved normally")

            self.storage.save("raced/test.file", ContentFile("saved with race"))
            with self.storage.open("raced/test.file") as f:
                self.assertEqual(f.read(), b"saved with race")

            # Exceptions aside from FileExistsError are raised.
            with self.assertRaises(PermissionError):
                self.storage.save("error/test.file", ContentFile("not saved"))
        finally:
            os.makedirs = real_makedirs
def test_remove_race_handling(self):
"""
File storage should be robust against file removal race conditions.
"""
real_remove = os.remove
# Monkey-patch os.remove, to simulate a normal call, a raced call,
# and an error.
def fake_remove(path):
if path == os.path.join(self.temp_dir, "normal.file"):
real_remove(path)
elif path == os.path.join(self.temp_dir, "raced.file"):
real_remove(path)
raise FileNotFoundError()
elif path == os.path.join(self.temp_dir, "error.file"):
raise PermissionError()
else:
self.fail("unexpected argument %r" % path)
try:
os.remove = fake_remove
self.storage.save("normal.file", ContentFile("delete normally"))
self.storage.delete("normal.file")
self.assertFalse(self.storage.exists("normal.file"))
self.storage.save("raced.file", ContentFile("delete with race"))
self.storage.delete("raced.file")
self.assertFalse(self.storage.exists("normal.file"))
# Exceptions aside from FileNotFoundError are raised.
self.storage.save("error.file", ContentFile("delete with error"))
with self.assertRaises(PermissionError):
self.storage.delete("error.file")
finally:
os.remove = real_remove
    def test_file_chunks_error(self):
        """
        Test behavior when file.chunks() is raising an error
        """
        f1 = ContentFile("chunks fails")

        def failing_chunks():
            raise OSError

        f1.chunks = failing_chunks
        # The error must propagate rather than being swallowed by save().
        with self.assertRaises(OSError):
            self.storage.save("error.file", f1)
    def test_delete_no_name(self):
        """
        Calling delete with an empty name should not try to remove the base
        storage directory, but fail loudly (#20660).
        """
        msg = "The name must be given to delete()."
        # Both None and the empty string are rejected with the same message.
        with self.assertRaisesMessage(ValueError, msg):
            self.storage.delete(None)
        with self.assertRaisesMessage(ValueError, msg):
            self.storage.delete("")
    def test_delete_deletes_directories(self):
        # delete() removes directories as well as regular files.
        tmp_dir = tempfile.mkdtemp(dir=self.storage.location)
        self.storage.delete(tmp_dir)
        self.assertFalse(os.path.exists(tmp_dir))
    @override_settings(
        MEDIA_ROOT="media_root",
        MEDIA_URL="media_url/",
        FILE_UPLOAD_PERMISSIONS=0o777,
        FILE_UPLOAD_DIRECTORY_PERMISSIONS=0o777,
    )
    def test_setting_changed(self):
        """
        Properties using settings values as defaults should be updated on
        referenced settings change while specified values should be unchanged.
        """
        storage = self.storage_class(
            location="explicit_location",
            base_url="explicit_base_url/",
            file_permissions_mode=0o666,
            directory_permissions_mode=0o666,
        )
        defaults_storage = self.storage_class()
        settings = {
            "MEDIA_ROOT": "overridden_media_root",
            "MEDIA_URL": "/overridden_media_url/",
            "FILE_UPLOAD_PERMISSIONS": 0o333,
            "FILE_UPLOAD_DIRECTORY_PERMISSIONS": 0o333,
        }
        with self.settings(**settings):
            # The explicitly-configured storage ignores the override...
            self.assertEqual(storage.base_location, "explicit_location")
            self.assertIn("explicit_location", storage.location)
            self.assertEqual(storage.base_url, "explicit_base_url/")
            self.assertEqual(storage.file_permissions_mode, 0o666)
            self.assertEqual(storage.directory_permissions_mode, 0o666)
            # ...while the defaults-based storage tracks the new settings.
            self.assertEqual(defaults_storage.base_location, settings["MEDIA_ROOT"])
            self.assertIn(settings["MEDIA_ROOT"], defaults_storage.location)
            self.assertEqual(defaults_storage.base_url, settings["MEDIA_URL"])
            self.assertEqual(
                defaults_storage.file_permissions_mode,
                settings["FILE_UPLOAD_PERMISSIONS"],
            )
            self.assertEqual(
                defaults_storage.directory_permissions_mode,
                settings["FILE_UPLOAD_DIRECTORY_PERMISSIONS"],
            )
    def test_file_methods_pathlib_path(self):
        # All basic storage methods accept pathlib.Path names.
        p = Path("test.file")
        self.assertFalse(self.storage.exists(p))
        f = ContentFile("custom contents")
        f_name = self.storage.save(p, f)
        # Storage basic methods.
        self.assertEqual(self.storage.path(p), os.path.join(self.temp_dir, p))
        self.assertEqual(self.storage.size(p), 15)
        self.assertEqual(self.storage.url(p), self.storage.base_url + f_name)
        with self.storage.open(p) as f:
            self.assertEqual(f.read(), b"custom contents")
        self.addCleanup(self.storage.delete, p)
class CustomStorage(FileSystemStorage):
    def get_available_name(self, name, max_length=None):
        """
        Append numbers to duplicate files rather than underscores, like Trac.
        """
        root, *extension = os.path.splitext(name)
        candidate = name
        counter = 2
        # Probe "name", then "root.2ext", "root.3ext", ... until one is free.
        while self.exists(candidate):
            candidate = "".join([root, ".", str(counter)] + extension)
            counter += 1
        return candidate
class CustomStorageTests(FileStorageTests):
    storage_class = CustomStorage

    def test_custom_get_available_name(self):
        """Duplicate names get numeric suffixes (Trac-style), not underscores."""
        initial = self.storage.save("custom_storage", ContentFile("custom contents"))
        self.assertEqual(initial, "custom_storage")
        duplicate = self.storage.save("custom_storage", ContentFile("more contents"))
        self.assertEqual(duplicate, "custom_storage.2")
        self.storage.delete(initial)
        self.storage.delete(duplicate)
class OverwritingStorage(FileSystemStorage):
    """
    Overwrite existing files instead of appending a suffix to generate an
    unused name.
    """

    # Mask out O_EXCL so os.open() doesn't raise OSError if the file exists.
    OS_OPEN_FLAGS = FileSystemStorage.OS_OPEN_FLAGS & ~os.O_EXCL

    def get_available_name(self, name, max_length=None):
        """Skip the search for an unused name and reuse ``name`` verbatim."""
        return name
class OverwritingStorageTests(FileStorageTests):
    storage_class = OverwritingStorage

    def test_save_overwrite_behavior(self):
        """Saving to same file name twice overwrites the first file."""
        name = "test.file"
        self.assertFalse(self.storage.exists(name))
        content_1 = b"content one"
        content_2 = b"second content"
        f_1 = ContentFile(content_1)
        f_2 = ContentFile(content_2)
        stored_name_1 = self.storage.save(name, f_1)
        try:
            # First save keeps the requested name and the first content.
            self.assertEqual(stored_name_1, name)
            self.assertTrue(self.storage.exists(name))
            self.assertTrue(os.path.exists(os.path.join(self.temp_dir, name)))
            with self.storage.open(name) as fp:
                self.assertEqual(fp.read(), content_1)
            # Second save reuses the same name and replaces the content.
            stored_name_2 = self.storage.save(name, f_2)
            self.assertEqual(stored_name_2, name)
            self.assertTrue(self.storage.exists(name))
            self.assertTrue(os.path.exists(os.path.join(self.temp_dir, name)))
            with self.storage.open(name) as fp:
                self.assertEqual(fp.read(), content_2)
        finally:
            self.storage.delete(name)
class DiscardingFalseContentStorage(FileSystemStorage):
    def _save(self, name, content):
        """Drop falsy content instead of writing it; save the rest normally."""
        if not content:
            return ""
        return super()._save(name, content)
class DiscardingFalseContentStorageTests(FileStorageTests):
    storage_class = DiscardingFalseContentStorage

    def test_custom_storage_discarding_empty_content(self):
        """
        When Storage.save() wraps a file-like object in File, it should include
        the name argument so that bool(file) evaluates to True (#26495).
        """
        output = StringIO("content")
        self.storage.save("tests/stringio", output)
        # Non-empty content must reach disk (i.e. not be discarded as falsy).
        self.assertTrue(self.storage.exists("tests/stringio"))
        with self.storage.open("tests/stringio") as f:
            self.assertEqual(f.read(), b"content")
class FileFieldStorageTests(TestCase):
    def tearDown(self):
        shutil.rmtree(temp_storage_location)

    def _storage_max_filename_length(self, storage):
        """
        Query filesystem for maximum filename length (e.g. AUFS has 242).
        """
        dir_to_test = storage.location
        # Walk up to the nearest existing directory to query pathconf on.
        while not os.path.exists(dir_to_test):
            dir_to_test = os.path.dirname(dir_to_test)
        try:
            return os.pathconf(dir_to_test, "PC_NAME_MAX")
        except Exception:
            return 255  # Should be safe on most backends

    def test_files(self):
        self.assertIsInstance(Storage.normal, FileDescriptor)

        # An object without a file has limited functionality.
        obj1 = Storage()
        self.assertEqual(obj1.normal.name, "")
        with self.assertRaises(ValueError):
            obj1.normal.size

        # Saving a file enables full functionality.
        obj1.normal.save("django_test.txt", ContentFile("content"))
        self.assertEqual(obj1.normal.name, "tests/django_test.txt")
        self.assertEqual(obj1.normal.size, 7)
        self.assertEqual(obj1.normal.read(), b"content")
        obj1.normal.close()

        # File objects can be assigned to FileField attributes, but shouldn't
        # get committed until the model it's attached to is saved.
        obj1.normal = SimpleUploadedFile("assignment.txt", b"content")
        dirs, files = temp_storage.listdir("tests")
        self.assertEqual(dirs, [])
        self.assertNotIn("assignment.txt", files)

        obj1.save()
        dirs, files = temp_storage.listdir("tests")
        self.assertEqual(sorted(files), ["assignment.txt", "django_test.txt"])

        # Save another file with the same name.
        obj2 = Storage()
        obj2.normal.save("django_test.txt", ContentFile("more content"))
        obj2_name = obj2.normal.name
        self.assertRegex(obj2_name, "tests/django_test_%s.txt" % FILE_SUFFIX_REGEX)
        self.assertEqual(obj2.normal.size, 12)
        obj2.normal.close()

        # Deleting an object does not delete the file it uses.
        obj2.delete()
        obj2.normal.save("django_test.txt", ContentFile("more content"))
        self.assertNotEqual(obj2_name, obj2.normal.name)
        self.assertRegex(
            obj2.normal.name, "tests/django_test_%s.txt" % FILE_SUFFIX_REGEX
        )
        obj2.normal.close()

    def test_filefield_read(self):
        # Files can be read in a little at a time, if necessary.
        obj = Storage.objects.create(
            normal=SimpleUploadedFile("assignment.txt", b"content")
        )
        obj.normal.open()
        self.assertEqual(obj.normal.read(3), b"con")
        self.assertEqual(obj.normal.read(), b"tent")
        self.assertEqual(
            list(obj.normal.chunks(chunk_size=2)), [b"co", b"nt", b"en", b"t"]
        )
        obj.normal.close()

    def test_filefield_write(self):
        # Files can be written to.
        obj = Storage.objects.create(
            normal=SimpleUploadedFile("rewritten.txt", b"content")
        )
        with obj.normal as normal:
            normal.open("wb")
            normal.write(b"updated")
        obj.refresh_from_db()
        self.assertEqual(obj.normal.read(), b"updated")
        obj.normal.close()

    def test_filefield_reopen(self):
        # Reopening an already-open field file must not break seeking.
        obj = Storage.objects.create(
            normal=SimpleUploadedFile("reopen.txt", b"content")
        )
        with obj.normal as normal:
            normal.open()
        obj.normal.open()
        obj.normal.file.seek(0)
        obj.normal.close()

    def test_duplicate_filename(self):
        # Multiple files with the same name get _(7 random chars) appended to them.
        objs = [Storage() for i in range(2)]
        for o in objs:
            o.normal.save("multiple_files.txt", ContentFile("Same Content"))
        try:
            names = [o.normal.name for o in objs]
            self.assertEqual(names[0], "tests/multiple_files.txt")
            self.assertRegex(
                names[1], "tests/multiple_files_%s.txt" % FILE_SUFFIX_REGEX
            )
        finally:
            for o in objs:
                o.delete()

    def test_file_truncation(self):
        # Given the max_length is limited, when multiple files get uploaded
        # under the same name, then the filename get truncated in order to fit
        # in _(7 random chars). When most of the max_length is taken by
        # dirname + extension and there are not enough characters in the
        # filename to truncate, an exception should be raised.
        objs = [Storage() for i in range(2)]
        filename = "filename.ext"

        for o in objs:
            o.limited_length.save(filename, ContentFile("Same Content"))
        try:
            # Testing truncation.
            names = [o.limited_length.name for o in objs]
            self.assertEqual(names[0], "tests/%s" % filename)
            self.assertRegex(names[1], "tests/fi_%s.ext" % FILE_SUFFIX_REGEX)

            # Testing exception is raised when filename is too short to truncate.
            filename = "short.longext"
            objs[0].limited_length.save(filename, ContentFile("Same Content"))
            with self.assertRaisesMessage(
                SuspiciousFileOperation, "Storage can not find an available filename"
            ):
                objs[1].limited_length.save(*(filename, ContentFile("Same Content")))
        finally:
            for o in objs:
                o.delete()

    @unittest.skipIf(
        sys.platform == "win32",
        "Windows supports at most 260 characters in a path.",
    )
    def test_extended_length_storage(self):
        # Testing FileField with max_length > 255. Most systems have filename
        # length limitation of 255. Path takes extra chars.
        filename = (
            self._storage_max_filename_length(temp_storage) - 4
        ) * "a"  # 4 chars for extension.
        obj = Storage()
        obj.extended_length.save("%s.txt" % filename, ContentFile("Same Content"))
        self.assertEqual(obj.extended_length.name, "tests/%s.txt" % filename)
        self.assertEqual(obj.extended_length.read(), b"Same Content")
        obj.extended_length.close()

    def test_filefield_default(self):
        # Default values allow an object to access a single file.
        temp_storage.save("tests/default.txt", ContentFile("default content"))
        obj = Storage.objects.create()
        self.assertEqual(obj.default.name, "tests/default.txt")
        self.assertEqual(obj.default.read(), b"default content")
        obj.default.close()

        # But it shouldn't be deleted, even if there are no more objects using
        # it.
        obj.delete()
        obj = Storage()
        self.assertEqual(obj.default.read(), b"default content")
        obj.default.close()

    def test_empty_upload_to(self):
        # upload_to can be empty, meaning it does not use subdirectory.
        obj = Storage()
        obj.empty.save("django_test.txt", ContentFile("more content"))
        self.assertEqual(obj.empty.name, "django_test.txt")
        self.assertEqual(obj.empty.read(), b"more content")
        obj.empty.close()

    def test_pathlib_upload_to(self):
        # Both a callable returning a Path and a direct Path upload_to work.
        obj = Storage()
        obj.pathlib_callable.save("some_file1.txt", ContentFile("some content"))
        self.assertEqual(obj.pathlib_callable.name, "bar/some_file1.txt")
        obj.pathlib_direct.save("some_file2.txt", ContentFile("some content"))
        self.assertEqual(obj.pathlib_direct.name, "bar/some_file2.txt")
        # NOTE(review): closes the unrelated `random` field rather than the
        # pathlib fields saved above — looks like a copy-paste slip; verify.
        obj.random.close()

    def test_random_upload_to(self):
        # Verify the fix for #5655, making sure the directory is only
        # determined once.
        obj = Storage()
        obj.random.save("random_file", ContentFile("random content"))
        self.assertTrue(obj.random.name.endswith("/random_file"))
        obj.random.close()

    def test_custom_valid_name_callable_upload_to(self):
        """
        Storage.get_valid_name() should be called when upload_to is a callable.
        """
        obj = Storage()
        obj.custom_valid_name.save("random_file", ContentFile("random content"))
        # CustomValidNameStorage.get_valid_name() appends '_valid' to the name
        self.assertTrue(obj.custom_valid_name.name.endswith("/random_file_valid"))
        obj.custom_valid_name.close()

    def test_filefield_pickling(self):
        # Push an object into the cache to make sure it pickles properly
        obj = Storage()
        obj.normal.save("django_test.txt", ContentFile("more content"))
        obj.normal.close()
        cache.set("obj", obj)
        self.assertEqual(cache.get("obj").normal.name, "tests/django_test.txt")

    def test_file_object(self):
        # Create sample file
        temp_storage.save("tests/example.txt", ContentFile("some content"))

        # Load it as Python file object
        with open(temp_storage.path("tests/example.txt")) as file_obj:
            # Save it using storage and read its content
            temp_storage.save("tests/file_obj", file_obj)
        self.assertTrue(temp_storage.exists("tests/file_obj"))
        with temp_storage.open("tests/file_obj") as f:
            self.assertEqual(f.read(), b"some content")

    def test_stringio(self):
        # Test passing StringIO instance as content argument to save
        output = StringIO()
        output.write("content")
        output.seek(0)

        # Save it and read written file
        temp_storage.save("tests/stringio", output)
        self.assertTrue(temp_storage.exists("tests/stringio"))
        with temp_storage.open("tests/stringio") as f:
            self.assertEqual(f.read(), b"content")
class FieldCallableFileStorageTests(SimpleTestCase):
    def setUp(self):
        self.temp_storage_location = tempfile.mkdtemp(
            suffix="filefield_callable_storage"
        )

    def tearDown(self):
        shutil.rmtree(self.temp_storage_location)

    def test_callable_base_class_error_raises(self):
        # Callables/classes that aren't Storage subclasses are rejected.
        class NotStorage:
            pass

        msg = (
            "FileField.storage must be a subclass/instance of "
            "django.core.files.storage.Storage"
        )
        for invalid_type in (NotStorage, str, list, set, tuple):
            with self.subTest(invalid_type=invalid_type):
                with self.assertRaisesMessage(TypeError, msg):
                    FileField(storage=invalid_type)

    def test_file_field_storage_none_uses_default_storage(self):
        self.assertEqual(FileField().storage, default_storage)

    def test_callable_function_storage_file_field(self):
        storage = FileSystemStorage(location=self.temp_storage_location)

        def get_storage():
            return storage

        # A storage callable is evaluated to the storage it returns.
        obj = FileField(storage=get_storage)
        self.assertEqual(obj.storage, storage)
        self.assertEqual(obj.storage.location, storage.location)

    def test_callable_class_storage_file_field(self):
        class GetStorage(FileSystemStorage):
            pass

        # A storage class is instantiated when used as a callable.
        obj = FileField(storage=GetStorage)
        self.assertIsInstance(obj.storage, BaseStorage)

    def test_callable_storage_file_field_in_model(self):
        obj = Storage()
        self.assertEqual(obj.storage_callable.storage, temp_storage)
        self.assertEqual(obj.storage_callable.storage.location, temp_storage_location)
        self.assertIsInstance(obj.storage_callable_class.storage, BaseStorage)

    def test_deconstruction(self):
        """
        Deconstructing gives the original callable, not the evaluated value.
        """
        obj = Storage()
        *_, kwargs = obj._meta.get_field("storage_callable").deconstruct()
        storage = kwargs["storage"]
        self.assertIs(storage, callable_storage)
# Tests for a race condition on file saving (#4948).
# This is written in such a way that it'll always pass on platforms
# without threading.


class SlowFile(ContentFile):
    def chunks(self):
        # Delay long enough for a concurrent save() of the same name to
        # start, forcing the storage layer to resolve the name conflict.
        time.sleep(1)
        return super().chunks()
class FileSaveRaceConditionTest(SimpleTestCase):
    def setUp(self):
        self.storage_dir = tempfile.mkdtemp()
        self.storage = FileSystemStorage(self.storage_dir)
        self.thread = threading.Thread(target=self.save_file, args=["conflict"])

    def tearDown(self):
        shutil.rmtree(self.storage_dir)

    def save_file(self, name):
        name = self.storage.save(name, SlowFile(b"Data"))

    def test_race_condition(self):
        # Save the same name from two threads: one keeps the plain name,
        # the other gets a random suffix instead of clobbering the first.
        self.thread.start()
        self.save_file("conflict")
        self.thread.join()
        files = sorted(os.listdir(self.storage_dir))
        self.assertEqual(files[0], "conflict")
        self.assertRegex(files[1], "conflict_%s" % FILE_SUFFIX_REGEX)
@unittest.skipIf(
    sys.platform == "win32", "Windows only partially supports umasks and chmod."
)
class FileStoragePermissions(unittest.TestCase):
    def setUp(self):
        # Pin the umask so the default-permission expectations are stable.
        self.umask = 0o027
        self.old_umask = os.umask(self.umask)
        self.storage_dir = tempfile.mkdtemp()

    def tearDown(self):
        shutil.rmtree(self.storage_dir)
        os.umask(self.old_umask)

    @override_settings(FILE_UPLOAD_PERMISSIONS=0o654)
    def test_file_upload_permissions(self):
        self.storage = FileSystemStorage(self.storage_dir)
        name = self.storage.save("the_file", ContentFile("data"))
        actual_mode = os.stat(self.storage.path(name))[0] & 0o777
        self.assertEqual(actual_mode, 0o654)

    @override_settings(FILE_UPLOAD_PERMISSIONS=None)
    def test_file_upload_default_permissions(self):
        self.storage = FileSystemStorage(self.storage_dir)
        fname = self.storage.save("some_file", ContentFile("data"))
        mode = os.stat(self.storage.path(fname))[0] & 0o777
        # With no explicit setting, the mode is 0o666 masked by the umask.
        self.assertEqual(mode, 0o666 & ~self.umask)

    @override_settings(FILE_UPLOAD_DIRECTORY_PERMISSIONS=0o765)
    def test_file_upload_directory_permissions(self):
        self.storage = FileSystemStorage(self.storage_dir)
        name = self.storage.save("the_directory/subdir/the_file", ContentFile("data"))
        file_path = Path(self.storage.path(name))
        # Every created intermediate directory gets the configured mode.
        self.assertEqual(file_path.parent.stat().st_mode & 0o777, 0o765)
        self.assertEqual(file_path.parent.parent.stat().st_mode & 0o777, 0o765)

    @override_settings(FILE_UPLOAD_DIRECTORY_PERMISSIONS=None)
    def test_file_upload_directory_default_permissions(self):
        self.storage = FileSystemStorage(self.storage_dir)
        name = self.storage.save("the_directory/subdir/the_file", ContentFile("data"))
        file_path = Path(self.storage.path(name))
        expected_mode = 0o777 & ~self.umask
        self.assertEqual(file_path.parent.stat().st_mode & 0o777, expected_mode)
        self.assertEqual(file_path.parent.parent.stat().st_mode & 0o777, expected_mode)
class FileStoragePathParsing(SimpleTestCase):
    def setUp(self):
        self.storage_dir = tempfile.mkdtemp()
        self.storage = FileSystemStorage(self.storage_dir)

    def tearDown(self):
        shutil.rmtree(self.storage_dir)

    def test_directory_with_dot(self):
        """Regression test for #9610.

        If the directory name contains a dot and the file name doesn't, make
        sure we still mangle the file name instead of the directory name.
        """
        self.storage.save("dotted.path/test", ContentFile("1"))
        self.storage.save("dotted.path/test", ContentFile("2"))
        files = sorted(os.listdir(os.path.join(self.storage_dir, "dotted.path")))
        # The directory name must not have been suffixed.
        self.assertFalse(os.path.exists(os.path.join(self.storage_dir, "dotted_.path")))
        self.assertEqual(files[0], "test")
        self.assertRegex(files[1], "test_%s" % FILE_SUFFIX_REGEX)

    def test_first_character_dot(self):
        """
        File names with a dot as their first character don't have an extension,
        and the underscore should get added to the end.
        """
        self.storage.save("dotted.path/.test", ContentFile("1"))
        self.storage.save("dotted.path/.test", ContentFile("2"))
        files = sorted(os.listdir(os.path.join(self.storage_dir, "dotted.path")))
        self.assertFalse(os.path.exists(os.path.join(self.storage_dir, "dotted_.path")))
        self.assertEqual(files[0], ".test")
        # The random suffix is appended after the dotfile name, not before it.
        self.assertRegex(files[1], ".test_%s" % FILE_SUFFIX_REGEX)
class ContentFileStorageTestCase(unittest.TestCase):
    def setUp(self):
        self.storage_dir = tempfile.mkdtemp()
        self.storage = FileSystemStorage(self.storage_dir)

    def tearDown(self):
        shutil.rmtree(self.storage_dir)

    def test_content_saving(self):
        """
        ContentFile can be saved correctly with the filesystem storage,
        if it was initialized with either bytes or unicode content.
        """
        self.storage.save("bytes.txt", ContentFile(b"content"))
        # Unicode content (including non-ASCII characters) works as well.
        self.storage.save("unicode.txt", ContentFile("español"))
@override_settings(ROOT_URLCONF="file_storage.urls")
class FileLikeObjectTestCase(LiveServerTestCase):
    """
    Test file-like objects (#15644).
    """

    available_apps = []

    def setUp(self):
        self.temp_dir = tempfile.mkdtemp()
        self.storage = FileSystemStorage(location=self.temp_dir)

    def tearDown(self):
        shutil.rmtree(self.temp_dir)

    def test_urllib_request_urlopen(self):
        """
        Test the File storage API with a file-like object coming from
        urllib.request.urlopen().
        """
        file_like_object = urlopen(self.live_server_url + "/")
        f = File(file_like_object)
        stored_filename = self.storage.save("remote_file.html", f)
        # Fetch the page a second time and compare with what was stored.
        remote_file = urlopen(self.live_server_url + "/")
        with self.storage.open(stored_filename) as stored_file:
            self.assertEqual(stored_file.read(), remote_file.read())
|
13696c79bf1820d9a90803796db79f63b4847a46997e544d2cb686805db35ca7 | """
Storing files according to a custom storage system
``FileField`` and its variations can take a ``storage`` argument to specify how
and where files should be stored.
"""
import random
import tempfile
from pathlib import Path
from django.core.files.storage import FileSystemStorage
from django.db import models
class CustomValidNameStorage(FileSystemStorage):
    def get_valid_name(self, name):
        # Tag the name so tests can detect that this hook actually ran.
        marked = "%s_valid" % name
        return marked
# Shared on-disk storage rooted at a fresh temp directory; the test suite
# removes the directory in its tearDown.
temp_storage_location = tempfile.mkdtemp()
temp_storage = FileSystemStorage(location=temp_storage_location)
def callable_storage():
    # Used as FileField(storage=callable_storage); returns the shared storage.
    return temp_storage
class CallableStorage(FileSystemStorage):
    def __call__(self):
        # no-op implementation: the instance is itself callable and simply
        # returns itself unchanged.
        return self
class Storage(models.Model):
    # Methods below are used as upload_to callables by the fields that follow.
    def custom_upload_to(self, filename):
        return "foo"

    def random_upload_to(self, filename):
        # This returns a different result each time,
        # to make sure it only gets called once.
        return "%s/%s" % (random.randint(100, 999), filename)

    def pathlib_upload_to(self, filename):
        return Path("bar") / filename

    normal = models.FileField(storage=temp_storage, upload_to="tests")
    custom = models.FileField(storage=temp_storage, upload_to=custom_upload_to)
    pathlib_callable = models.FileField(
        storage=temp_storage, upload_to=pathlib_upload_to
    )
    pathlib_direct = models.FileField(storage=temp_storage, upload_to=Path("bar"))
    random = models.FileField(storage=temp_storage, upload_to=random_upload_to)
    custom_valid_name = models.FileField(
        storage=CustomValidNameStorage(location=temp_storage_location),
        upload_to=random_upload_to,
    )
    # Storage given as a callable / callable class (resolved lazily).
    storage_callable = models.FileField(
        storage=callable_storage, upload_to="storage_callable"
    )
    storage_callable_class = models.FileField(
        storage=CallableStorage, upload_to="storage_callable_class"
    )
    default = models.FileField(
        storage=temp_storage, upload_to="tests", default="tests/default.txt"
    )
    # No upload_to: files land directly in the storage root.
    empty = models.FileField(storage=temp_storage)
    limited_length = models.FileField(
        storage=temp_storage, upload_to="tests", max_length=20
    )
    extended_length = models.FileField(
        storage=temp_storage, upload_to="tests", max_length=300
    )
|
9f3f562e5c1c9610c2a40297546edbd70afc5801ba3ee5acaa89064b34183705 | from django.http import HttpResponse
from django.urls import path
# Single catch-all root URL; serves a trivial page for the live-server test.
urlpatterns = [
    path("", lambda req: HttpResponse("example view")),
]
|
bf0bbd5c1f0a539396ee9e61edd7414b6ef6da16257f7f8750dc7bcccb327025 | import base64
class Base64Converter:
    """URL path converter that maps base64 text segments to/from bytes."""

    regex = r"[a-zA-Z0-9+/]*={0,2}"

    def to_python(self, value):
        """Decode the captured base64 path segment into bytes."""
        decoded = base64.b64decode(value)
        return decoded

    def to_url(self, value):
        """Encode bytes back into a base64 path segment (ASCII str)."""
        encoded = base64.b64encode(value)
        return encoded.decode("ascii")
class DynamicConverter:
    """
    URL converter whose to_python()/to_url() behavior is injected at runtime
    via register_to_python()/register_to_url(), so tests can swap behaviors.
    """

    # Class-level hooks; tests install callables via the register_* API.
    _dynamic_to_python = None
    _dynamic_to_url = None

    @property
    def regex(self):
        return r"[0-9a-zA-Z]+"

    @regex.setter
    def regex(self, value):
        # Fix: a property setter must accept the assigned value. Without the
        # `value` parameter, any assignment raised TypeError instead of this
        # intended, explicit error.
        raise Exception("You can't modify the regular expression.")

    def to_python(self, value):
        return type(self)._dynamic_to_python(value)

    def to_url(self, value):
        return type(self)._dynamic_to_url(value)

    @classmethod
    def register_to_python(cls, value):
        cls._dynamic_to_python = value

    @classmethod
    def register_to_url(cls, value):
        cls._dynamic_to_url = value
|
255e25be5765720328e2283dcb6e1eb91f154847243dc6cc11cbf7409fba5cfb | import string
import uuid
from django.core.exceptions import ImproperlyConfigured
from django.test import SimpleTestCase
from django.test.utils import override_settings
from django.urls import NoReverseMatch, Resolver404, path, re_path, resolve, reverse
from django.views import View
from .converters import DynamicConverter
from .views import empty_view
# Kwargs produced when the nested base64 include captures both segments.
included_kwargs = {"base": b"hello", "value": b"world"}
converter_test_data = (
    # ('url', ('url_name', 'app_name', {kwargs})),
    # aGVsbG8= is 'hello' encoded in base64.
    ("/base64/aGVsbG8=/", ("base64", "", {"value": b"hello"})),
    (
        "/base64/aGVsbG8=/subpatterns/d29ybGQ=/",
        ("subpattern-base64", "", included_kwargs),
    ),
    (
        "/base64/aGVsbG8=/namespaced/d29ybGQ=/",
        ("subpattern-base64", "namespaced-base64", included_kwargs),
    ),
)
@override_settings(ROOT_URLCONF="urlpatterns.path_urls")
class SimplifiedURLTests(SimpleTestCase):
def test_path_lookup_without_parameters(self):
match = resolve("/articles/2003/")
self.assertEqual(match.url_name, "articles-2003")
self.assertEqual(match.args, ())
self.assertEqual(match.kwargs, {})
self.assertEqual(match.route, "articles/2003/")
def test_path_lookup_with_typed_parameters(self):
match = resolve("/articles/2015/")
self.assertEqual(match.url_name, "articles-year")
self.assertEqual(match.args, ())
self.assertEqual(match.kwargs, {"year": 2015})
self.assertEqual(match.route, "articles/<int:year>/")
def test_path_lookup_with_multiple_parameters(self):
match = resolve("/articles/2015/04/12/")
self.assertEqual(match.url_name, "articles-year-month-day")
self.assertEqual(match.args, ())
self.assertEqual(match.kwargs, {"year": 2015, "month": 4, "day": 12})
self.assertEqual(match.route, "articles/<int:year>/<int:month>/<int:day>/")
def test_two_variable_at_start_of_path_pattern(self):
match = resolve("/en/foo/")
self.assertEqual(match.url_name, "lang-and-path")
self.assertEqual(match.kwargs, {"lang": "en", "url": "foo"})
self.assertEqual(match.route, "<lang>/<path:url>/")
def test_re_path(self):
match = resolve("/regex/1/")
self.assertEqual(match.url_name, "regex")
self.assertEqual(match.kwargs, {"pk": "1"})
self.assertEqual(match.route, "^regex/(?P<pk>[0-9]+)/$")
def test_re_path_with_optional_parameter(self):
for url, kwargs in (
("/regex_optional/1/2/", {"arg1": "1", "arg2": "2"}),
("/regex_optional/1/", {"arg1": "1"}),
):
with self.subTest(url=url):
match = resolve(url)
self.assertEqual(match.url_name, "regex_optional")
self.assertEqual(match.kwargs, kwargs)
self.assertEqual(
match.route,
r"^regex_optional/(?P<arg1>\d+)/(?:(?P<arg2>\d+)/)?",
)
def test_re_path_with_missing_optional_parameter(self):
match = resolve("/regex_only_optional/")
self.assertEqual(match.url_name, "regex_only_optional")
self.assertEqual(match.kwargs, {})
self.assertEqual(match.args, ())
self.assertEqual(
match.route,
r"^regex_only_optional/(?:(?P<arg1>\d+)/)?",
)
def test_path_lookup_with_inclusion(self):
match = resolve("/included_urls/extra/something/")
self.assertEqual(match.url_name, "inner-extra")
self.assertEqual(match.route, "included_urls/extra/<extra>/")
def test_path_lookup_with_empty_string_inclusion(self):
match = resolve("/more/99/")
self.assertEqual(match.url_name, "inner-more")
self.assertEqual(match.route, r"^more/(?P<extra>\w+)/$")
def test_path_lookup_with_double_inclusion(self):
match = resolve("/included_urls/more/some_value/")
self.assertEqual(match.url_name, "inner-more")
self.assertEqual(match.route, r"included_urls/more/(?P<extra>\w+)/$")
def test_path_reverse_without_parameter(self):
url = reverse("articles-2003")
self.assertEqual(url, "/articles/2003/")
def test_path_reverse_with_parameter(self):
url = reverse(
"articles-year-month-day", kwargs={"year": 2015, "month": 4, "day": 12}
)
self.assertEqual(url, "/articles/2015/4/12/")
@override_settings(ROOT_URLCONF="urlpatterns.path_base64_urls")
def test_converter_resolve(self):
for url, (url_name, app_name, kwargs) in converter_test_data:
with self.subTest(url=url):
match = resolve(url)
self.assertEqual(match.url_name, url_name)
self.assertEqual(match.app_name, app_name)
self.assertEqual(match.kwargs, kwargs)
@override_settings(ROOT_URLCONF="urlpatterns.path_base64_urls")
def test_converter_reverse(self):
for expected, (url_name, app_name, kwargs) in converter_test_data:
if app_name:
url_name = "%s:%s" % (app_name, url_name)
with self.subTest(url=url_name):
url = reverse(url_name, kwargs=kwargs)
self.assertEqual(url, expected)
@override_settings(ROOT_URLCONF="urlpatterns.path_base64_urls")
def test_converter_reverse_with_second_layer_instance_namespace(self):
kwargs = included_kwargs.copy()
kwargs["last_value"] = b"world"
url = reverse("instance-ns-base64:subsubpattern-base64", kwargs=kwargs)
self.assertEqual(url, "/base64/aGVsbG8=/subpatterns/d29ybGQ=/d29ybGQ=/")
def test_path_inclusion_is_matchable(self):
match = resolve("/included_urls/extra/something/")
self.assertEqual(match.url_name, "inner-extra")
self.assertEqual(match.kwargs, {"extra": "something"})
def test_path_inclusion_is_reversible(self):
url = reverse("inner-extra", kwargs={"extra": "something"})
self.assertEqual(url, "/included_urls/extra/something/")
def test_invalid_kwargs(self):
msg = "kwargs argument must be a dict, but got str."
with self.assertRaisesMessage(TypeError, msg):
path("hello/", empty_view, "name")
with self.assertRaisesMessage(TypeError, msg):
re_path("^hello/$", empty_view, "name")
def test_invalid_converter(self):
msg = "URL route 'foo/<nonexistent:var>/' uses invalid converter 'nonexistent'."
with self.assertRaisesMessage(ImproperlyConfigured, msg):
path("foo/<nonexistent:var>/", empty_view)
def test_invalid_view(self):
msg = "view must be a callable or a list/tuple in the case of include()."
with self.assertRaisesMessage(TypeError, msg):
path("articles/", "invalid_view")
def test_invalid_view_instance(self):
class EmptyCBV(View):
pass
msg = "view must be a callable, pass EmptyCBV.as_view(), not EmptyCBV()."
with self.assertRaisesMessage(TypeError, msg):
path("foo", EmptyCBV())
def test_whitespace_in_route(self):
msg = (
"URL route 'space/<int:num>/extra/<str:%stest>' cannot contain "
"whitespace in angle brackets <…>"
)
for whitespace in string.whitespace:
with self.subTest(repr(whitespace)):
with self.assertRaisesMessage(ImproperlyConfigured, msg % whitespace):
path("space/<int:num>/extra/<str:%stest>" % whitespace, empty_view)
# Whitespaces are valid in paths.
p = path("space%s/<int:num>/" % string.whitespace, empty_view)
match = p.resolve("space%s/1/" % string.whitespace)
self.assertEqual(match.kwargs, {"num": 1})
def test_path_trailing_newlines(self):
tests = [
"/articles/2003/\n",
"/articles/2010/\n",
"/en/foo/\n",
"/included_urls/extra/\n",
"/regex/1/\n",
"/users/1/\n",
]
for url in tests:
with self.subTest(url=url), self.assertRaises(Resolver404):
resolve(url)
@override_settings(ROOT_URLCONF="urlpatterns.converter_urls")
class ConverterTests(SimpleTestCase):
def test_matching_urls(self):
def no_converter(x):
return x
test_data = (
("int", {"0", "1", "01", 1234567890}, int),
("str", {"abcxyz"}, no_converter),
("path", {"allows.ANY*characters"}, no_converter),
("slug", {"abcxyz-ABCXYZ_01234567890"}, no_converter),
("uuid", {"39da9369-838e-4750-91a5-f7805cd82839"}, uuid.UUID),
)
for url_name, url_suffixes, converter in test_data:
for url_suffix in url_suffixes:
url = "/%s/%s/" % (url_name, url_suffix)
with self.subTest(url=url):
match = resolve(url)
self.assertEqual(match.url_name, url_name)
self.assertEqual(match.kwargs, {url_name: converter(url_suffix)})
# reverse() works with string parameters.
string_kwargs = {url_name: url_suffix}
self.assertEqual(reverse(url_name, kwargs=string_kwargs), url)
# reverse() also works with native types (int, UUID, etc.).
if converter is not no_converter:
# The converted value might be different for int (a
# leading zero is lost in the conversion).
converted_value = match.kwargs[url_name]
converted_url = "/%s/%s/" % (url_name, converted_value)
self.assertEqual(
reverse(url_name, kwargs={url_name: converted_value}),
converted_url,
)
def test_nonmatching_urls(self):
test_data = (
("int", {"-1", "letters"}),
("str", {"", "/"}),
("path", {""}),
("slug", {"", "stars*notallowed"}),
(
"uuid",
{
"",
"9da9369-838e-4750-91a5-f7805cd82839",
"39da9369-838-4750-91a5-f7805cd82839",
"39da9369-838e-475-91a5-f7805cd82839",
"39da9369-838e-4750-91a-f7805cd82839",
"39da9369-838e-4750-91a5-f7805cd8283",
},
),
)
for url_name, url_suffixes in test_data:
for url_suffix in url_suffixes:
url = "/%s/%s/" % (url_name, url_suffix)
with self.subTest(url=url), self.assertRaises(Resolver404):
resolve(url)
@override_settings(ROOT_URLCONF="urlpatterns.path_same_name_urls")
class SameNameTests(SimpleTestCase):
def test_matching_urls_same_name(self):
@DynamicConverter.register_to_url
def requires_tiny_int(value):
if value > 5:
raise ValueError
return value
tests = [
(
"number_of_args",
[
([], {}, "0/"),
([1], {}, "1/1/"),
],
),
(
"kwargs_names",
[
([], {"a": 1}, "a/1/"),
([], {"b": 1}, "b/1/"),
],
),
(
"converter",
[
(["a/b"], {}, "path/a/b/"),
(["a b"], {}, "str/a%20b/"),
(["a-b"], {}, "slug/a-b/"),
(["2"], {}, "int/2/"),
(
["39da9369-838e-4750-91a5-f7805cd82839"],
{},
"uuid/39da9369-838e-4750-91a5-f7805cd82839/",
),
],
),
(
"regex",
[
(["ABC"], {}, "uppercase/ABC/"),
(["abc"], {}, "lowercase/abc/"),
],
),
(
"converter_to_url",
[
([6], {}, "int/6/"),
([1], {}, "tiny_int/1/"),
],
),
]
for url_name, cases in tests:
for args, kwargs, url_suffix in cases:
expected_url = "/%s/%s" % (url_name, url_suffix)
with self.subTest(url=expected_url):
self.assertEqual(
reverse(url_name, args=args, kwargs=kwargs),
expected_url,
)
class ParameterRestrictionTests(SimpleTestCase):
def test_integer_parameter_name_causes_exception(self):
msg = (
"URL route 'hello/<int:1>/' uses parameter name '1' which isn't "
"a valid Python identifier."
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
path(r"hello/<int:1>/", lambda r: None)
def test_non_identifier_parameter_name_causes_exception(self):
msg = (
"URL route 'b/<int:book.id>/' uses parameter name 'book.id' which "
"isn't a valid Python identifier."
)
with self.assertRaisesMessage(ImproperlyConfigured, msg):
path(r"b/<int:book.id>/", lambda r: None)
def test_allows_non_ascii_but_valid_identifiers(self):
# \u0394 is "GREEK CAPITAL LETTER DELTA", a valid identifier.
p = path("hello/<str:\u0394>/", lambda r: None)
match = p.resolve("hello/1/")
self.assertEqual(match.kwargs, {"\u0394": "1"})
@override_settings(ROOT_URLCONF="urlpatterns.path_dynamic_urls")
class ConversionExceptionTests(SimpleTestCase):
"""How are errors in Converter.to_python() and to_url() handled?"""
def test_resolve_value_error_means_no_match(self):
@DynamicConverter.register_to_python
def raises_value_error(value):
raise ValueError()
with self.assertRaises(Resolver404):
resolve("/dynamic/abc/")
def test_resolve_type_error_propagates(self):
@DynamicConverter.register_to_python
def raises_type_error(value):
raise TypeError("This type error propagates.")
with self.assertRaisesMessage(TypeError, "This type error propagates."):
resolve("/dynamic/abc/")
def test_reverse_value_error_means_no_match(self):
@DynamicConverter.register_to_url
def raises_value_error(value):
raise ValueError
with self.assertRaises(NoReverseMatch):
reverse("dynamic", kwargs={"value": object()})
def test_reverse_type_error_propagates(self):
@DynamicConverter.register_to_url
def raises_type_error(value):
raise TypeError("This type error propagates.")
with self.assertRaisesMessage(TypeError, "This type error propagates."):
reverse("dynamic", kwargs={"value": object()})
|
0abfdbc64979f1bc3957b3e2326dc999ed934b8823b6878ba22ba4461ef8e05c | from django.urls import re_path
from . import views
urlpatterns = [
re_path(r"^more/(?P<extra>\w+)/$", views.empty_view, name="inner-more"),
]
|
b0829b15c34dae4466ac91289c7c000b13ae6c36925913eaae42faf1ca3f4df8 | from django.test import SimpleTestCase
from django.test.utils import override_settings
from django.urls.resolvers import RegexPattern, RoutePattern, get_resolver
from django.utils.translation import gettext_lazy as _
class RegexPatternTests(SimpleTestCase):
def test_str(self):
self.assertEqual(str(RegexPattern(_("^translated/$"))), "^translated/$")
class RoutePatternTests(SimpleTestCase):
def test_str(self):
self.assertEqual(str(RoutePattern(_("translated/"))), "translated/")
class ResolverCacheTests(SimpleTestCase):
@override_settings(ROOT_URLCONF="urlpatterns.path_urls")
def test_resolver_cache_default__root_urlconf(self):
# resolver for a default URLconf (passing no argument) and for the
# settings.ROOT_URLCONF is the same cached object.
self.assertIs(get_resolver(), get_resolver("urlpatterns.path_urls"))
self.assertIsNot(get_resolver(), get_resolver("urlpatterns.path_dynamic_urls"))
|
93fa074391026fa9586ccc5c40e4ded648766bcbbee84c399cc83f2b7ac2f434 | from django.urls import path
from . import views
urlpatterns = [
path("{x}/<{x}:{x}>/".format(x=name), views.empty_view, name=name)
for name in ("int", "path", "slug", "str", "uuid")
]
|
4379835d4148fabb6aa53c2745684cd110d157e4d6bf3f77d58fa38cf1865f2c | from django.urls import path, re_path, register_converter
from . import converters, views
register_converter(converters.DynamicConverter, "to_url_value_error")
urlpatterns = [
# Different number of arguments.
path("number_of_args/0/", views.empty_view, name="number_of_args"),
path("number_of_args/1/<value>/", views.empty_view, name="number_of_args"),
# Different names of the keyword arguments.
path("kwargs_names/a/<a>/", views.empty_view, name="kwargs_names"),
path("kwargs_names/b/<b>/", views.empty_view, name="kwargs_names"),
# Different path converters.
path("converter/path/<path:value>/", views.empty_view, name="converter"),
path("converter/str/<str:value>/", views.empty_view, name="converter"),
path("converter/slug/<slug:value>/", views.empty_view, name="converter"),
path("converter/int/<int:value>/", views.empty_view, name="converter"),
path("converter/uuid/<uuid:value>/", views.empty_view, name="converter"),
# Different regular expressions.
re_path(r"^regex/uppercase/([A-Z]+)/", views.empty_view, name="regex"),
re_path(r"^regex/lowercase/([a-z]+)/", views.empty_view, name="regex"),
# converter.to_url() raises ValueError (no match).
path(
"converter_to_url/int/<value>/",
views.empty_view,
name="converter_to_url",
),
path(
"converter_to_url/tiny_int/<to_url_value_error:value>/",
views.empty_view,
name="converter_to_url",
),
]
|
0116213db8fa902dec1503768cc9f9bc5a8d621b5780892e70d6b1fb0b79174a | from django.urls import include, path, register_converter
from . import converters, views
register_converter(converters.Base64Converter, "base64")
subsubpatterns = [
path("<base64:last_value>/", views.empty_view, name="subsubpattern-base64"),
]
subpatterns = [
path("<base64:value>/", views.empty_view, name="subpattern-base64"),
path(
"<base64:value>/",
include(
(subsubpatterns, "second-layer-namespaced-base64"), "instance-ns-base64"
),
),
]
urlpatterns = [
path("base64/<base64:value>/", views.empty_view, name="base64"),
path("base64/<base64:base>/subpatterns/", include(subpatterns)),
path(
"base64/<base64:base>/namespaced/", include((subpatterns, "namespaced-base64"))
),
]
|
e4184dd8a05dc491e7ddf1aa23166c3df75c30fd17833a10177ae224e2f99ced | from django.urls import include, path
from . import views
urlpatterns = [
path("extra/<extra>/", views.empty_view, name="inner-extra"),
path("", include("urlpatterns.more_urls")),
]
|
786014dc2af30593e7a71d50b0b8a1c21635b9771ee0c4edd0d8b509e28ac3ef | from django.urls import include, path, re_path
from . import views
urlpatterns = [
path("articles/2003/", views.empty_view, name="articles-2003"),
path("articles/<int:year>/", views.empty_view, name="articles-year"),
path(
"articles/<int:year>/<int:month>/", views.empty_view, name="articles-year-month"
),
path(
"articles/<int:year>/<int:month>/<int:day>/",
views.empty_view,
name="articles-year-month-day",
),
path("users/", views.empty_view, name="users"),
path("users/<id>/", views.empty_view, name="user-with-id"),
path("included_urls/", include("urlpatterns.included_urls")),
re_path(r"^regex/(?P<pk>[0-9]+)/$", views.empty_view, name="regex"),
re_path(
r"^regex_optional/(?P<arg1>\d+)/(?:(?P<arg2>\d+)/)?",
views.empty_view,
name="regex_optional",
),
re_path(
r"^regex_only_optional/(?:(?P<arg1>\d+)/)?",
views.empty_view,
name="regex_only_optional",
),
path("", include("urlpatterns.more_urls")),
path("<lang>/<path:url>/", views.empty_view, name="lang-and-path"),
]
|
5d7c94cbd22d629eabc13b7dcbbfd28b94bf5585d15308db72b13502457bff92 | from django.urls import path, register_converter
from . import converters, views
register_converter(converters.DynamicConverter, "dynamic")
urlpatterns = [
path("dynamic/<dynamic:value>/", views.empty_view, name="dynamic"),
]
|
ff39a48d6fe077ab281c3c0f313c6d91f22beb65462e0e6b4070840ca9f224af | import gc
import sys
import weakref
from types import TracebackType
from django.dispatch import Signal, receiver
from django.test import SimpleTestCase
from django.test.utils import override_settings
if hasattr(sys, "pypy_version_info"):
def garbage_collect():
# Collecting weakreferences can take two collections on PyPy.
gc.collect()
gc.collect()
else:
def garbage_collect():
gc.collect()
def receiver_1_arg(val, **kwargs):
return val
class Callable:
def __call__(self, val, **kwargs):
return val
def a(self, val, **kwargs):
return val
a_signal = Signal()
b_signal = Signal()
c_signal = Signal()
d_signal = Signal(use_caching=True)
class DispatcherTests(SimpleTestCase):
def assertTestIsClean(self, signal):
"""Assert that everything has been cleaned up automatically"""
# Note that dead weakref cleanup happens as side effect of using
# the signal's receivers through the signals API. So, first do a
# call to an API method to force cleanup.
self.assertFalse(signal.has_listeners())
self.assertEqual(signal.receivers, [])
@override_settings(DEBUG=True)
def test_cannot_connect_no_kwargs(self):
def receiver_no_kwargs(sender):
pass
msg = "Signal receivers must accept keyword arguments (**kwargs)."
with self.assertRaisesMessage(ValueError, msg):
a_signal.connect(receiver_no_kwargs)
self.assertTestIsClean(a_signal)
@override_settings(DEBUG=True)
def test_cannot_connect_non_callable(self):
msg = "Signal receivers must be callable."
with self.assertRaisesMessage(TypeError, msg):
a_signal.connect(object())
self.assertTestIsClean(a_signal)
def test_send(self):
a_signal.connect(receiver_1_arg, sender=self)
result = a_signal.send(sender=self, val="test")
self.assertEqual(result, [(receiver_1_arg, "test")])
a_signal.disconnect(receiver_1_arg, sender=self)
self.assertTestIsClean(a_signal)
def test_send_no_receivers(self):
result = a_signal.send(sender=self, val="test")
self.assertEqual(result, [])
def test_send_connected_no_sender(self):
a_signal.connect(receiver_1_arg)
result = a_signal.send(sender=self, val="test")
self.assertEqual(result, [(receiver_1_arg, "test")])
a_signal.disconnect(receiver_1_arg)
self.assertTestIsClean(a_signal)
def test_send_different_no_sender(self):
a_signal.connect(receiver_1_arg, sender=object)
result = a_signal.send(sender=self, val="test")
self.assertEqual(result, [])
a_signal.disconnect(receiver_1_arg, sender=object)
self.assertTestIsClean(a_signal)
def test_garbage_collected(self):
a = Callable()
a_signal.connect(a.a, sender=self)
del a
garbage_collect()
result = a_signal.send(sender=self, val="test")
self.assertEqual(result, [])
self.assertTestIsClean(a_signal)
def test_cached_garbaged_collected(self):
"""
Make sure signal caching sender receivers don't prevent garbage
collection of senders.
"""
class sender:
pass
wref = weakref.ref(sender)
d_signal.connect(receiver_1_arg)
d_signal.send(sender, val="garbage")
del sender
garbage_collect()
try:
self.assertIsNone(wref())
finally:
# Disconnect after reference check since it flushes the tested cache.
d_signal.disconnect(receiver_1_arg)
def test_multiple_registration(self):
a = Callable()
a_signal.connect(a)
a_signal.connect(a)
a_signal.connect(a)
a_signal.connect(a)
a_signal.connect(a)
a_signal.connect(a)
result = a_signal.send(sender=self, val="test")
self.assertEqual(len(result), 1)
self.assertEqual(len(a_signal.receivers), 1)
del a
del result
garbage_collect()
self.assertTestIsClean(a_signal)
def test_uid_registration(self):
def uid_based_receiver_1(**kwargs):
pass
def uid_based_receiver_2(**kwargs):
pass
a_signal.connect(uid_based_receiver_1, dispatch_uid="uid")
a_signal.connect(uid_based_receiver_2, dispatch_uid="uid")
self.assertEqual(len(a_signal.receivers), 1)
a_signal.disconnect(dispatch_uid="uid")
self.assertTestIsClean(a_signal)
def test_send_robust_success(self):
a_signal.connect(receiver_1_arg)
result = a_signal.send_robust(sender=self, val="test")
self.assertEqual(result, [(receiver_1_arg, "test")])
a_signal.disconnect(receiver_1_arg)
self.assertTestIsClean(a_signal)
def test_send_robust_no_receivers(self):
result = a_signal.send_robust(sender=self, val="test")
self.assertEqual(result, [])
def test_send_robust_ignored_sender(self):
a_signal.connect(receiver_1_arg)
result = a_signal.send_robust(sender=self, val="test")
self.assertEqual(result, [(receiver_1_arg, "test")])
a_signal.disconnect(receiver_1_arg)
self.assertTestIsClean(a_signal)
def test_send_robust_fail(self):
def fails(val, **kwargs):
raise ValueError("this")
a_signal.connect(fails)
try:
with self.assertLogs("django.dispatch", "ERROR") as cm:
result = a_signal.send_robust(sender=self, val="test")
err = result[0][1]
self.assertIsInstance(err, ValueError)
self.assertEqual(err.args, ("this",))
self.assertIs(hasattr(err, "__traceback__"), True)
self.assertIsInstance(err.__traceback__, TracebackType)
log_record = cm.records[0]
self.assertEqual(
log_record.getMessage(),
"Error calling "
"DispatcherTests.test_send_robust_fail.<locals>.fails in "
"Signal.send_robust() (this)",
)
self.assertIsNotNone(log_record.exc_info)
_, exc_value, _ = log_record.exc_info
self.assertIsInstance(exc_value, ValueError)
self.assertEqual(str(exc_value), "this")
finally:
a_signal.disconnect(fails)
self.assertTestIsClean(a_signal)
def test_disconnection(self):
receiver_1 = Callable()
receiver_2 = Callable()
receiver_3 = Callable()
a_signal.connect(receiver_1)
a_signal.connect(receiver_2)
a_signal.connect(receiver_3)
a_signal.disconnect(receiver_1)
del receiver_2
garbage_collect()
a_signal.disconnect(receiver_3)
self.assertTestIsClean(a_signal)
def test_values_returned_by_disconnection(self):
receiver_1 = Callable()
receiver_2 = Callable()
a_signal.connect(receiver_1)
receiver_1_disconnected = a_signal.disconnect(receiver_1)
receiver_2_disconnected = a_signal.disconnect(receiver_2)
self.assertTrue(receiver_1_disconnected)
self.assertFalse(receiver_2_disconnected)
self.assertTestIsClean(a_signal)
def test_has_listeners(self):
self.assertFalse(a_signal.has_listeners())
self.assertFalse(a_signal.has_listeners(sender=object()))
receiver_1 = Callable()
a_signal.connect(receiver_1)
self.assertTrue(a_signal.has_listeners())
self.assertTrue(a_signal.has_listeners(sender=object()))
a_signal.disconnect(receiver_1)
self.assertFalse(a_signal.has_listeners())
self.assertFalse(a_signal.has_listeners(sender=object()))
class ReceiverTestCase(SimpleTestCase):
def test_receiver_single_signal(self):
@receiver(a_signal)
def f(val, **kwargs):
self.state = val
self.state = False
a_signal.send(sender=self, val=True)
self.assertTrue(self.state)
def test_receiver_signal_list(self):
@receiver([a_signal, b_signal, c_signal])
def f(val, **kwargs):
self.state.append(val)
self.state = []
a_signal.send(sender=self, val="a")
c_signal.send(sender=self, val="c")
b_signal.send(sender=self, val="b")
self.assertIn("a", self.state)
self.assertIn("b", self.state)
self.assertIn("c", self.state)
|
89f3a002038bd0841b005f56ade32500f65e658ead1f76ca5cd87f08ed31ff8f | from django.db import connection, models
from django.db.backends.utils import truncate_name
from django.test import TestCase
from .models.article import Article, Site
from .models.publication import Publication
class Advertisement(models.Model):
customer = models.CharField(max_length=100)
publications = models.ManyToManyField("model_package.Publication", blank=True)
class ModelPackageTests(TestCase):
def test_m2m_tables_in_subpackage_models(self):
"""
Regression for #12168: models split into subpackages still get M2M
tables.
"""
p = Publication.objects.create(title="FooBar")
site = Site.objects.create(name="example.com")
a = Article.objects.create(headline="a foo headline")
a.publications.add(p)
a.sites.add(site)
a = Article.objects.get(id=a.pk)
self.assertEqual(a.id, a.pk)
self.assertEqual(a.sites.count(), 1)
def test_models_in_the_test_package(self):
"""
Regression for #12245 - Models can exist in the test package, too.
"""
p = Publication.objects.create(title="FooBar")
ad = Advertisement.objects.create(customer="Lawrence Journal-World")
ad.publications.add(p)
ad = Advertisement.objects.get(id=ad.pk)
self.assertEqual(ad.publications.count(), 1)
def test_automatic_m2m_column_names(self):
"""
Regression for #12386 - field names on the autogenerated intermediate
class that are specified as dotted strings don't retain any path
component for the field or column name.
"""
self.assertEqual(Article.publications.through._meta.fields[1].name, "article")
self.assertEqual(
Article.publications.through._meta.fields[1].get_attname_column(),
("article_id", "article_id"),
)
self.assertEqual(
Article.publications.through._meta.fields[2].name, "publication"
)
self.assertEqual(
Article.publications.through._meta.fields[2].get_attname_column(),
("publication_id", "publication_id"),
)
self.assertEqual(
Article._meta.get_field("publications").m2m_db_table(),
truncate_name(
"model_package_article_publications", connection.ops.max_name_length()
),
)
self.assertEqual(
Article._meta.get_field("publications").m2m_column_name(), "article_id"
)
self.assertEqual(
Article._meta.get_field("publications").m2m_reverse_name(), "publication_id"
)
|
988686df639be487f35f15b55b9b02f9245de5ec09839b5910d96f630a809bec | import threading
import time
from unittest import mock
from multiple_database.routers import TestRouter
from django.core.exceptions import FieldError
from django.db import (
DatabaseError,
NotSupportedError,
connection,
connections,
router,
transaction,
)
from django.test import (
TransactionTestCase,
override_settings,
skipIfDBFeature,
skipUnlessDBFeature,
)
from django.test.utils import CaptureQueriesContext
from .models import (
City,
CityCountryProxy,
Country,
EUCity,
EUCountry,
Person,
PersonProfile,
)
class SelectForUpdateTests(TransactionTestCase):
available_apps = ["select_for_update"]
def setUp(self):
# This is executed in autocommit mode so that code in
# run_select_for_update can see this data.
self.country1 = Country.objects.create(name="Belgium")
self.country2 = Country.objects.create(name="France")
self.city1 = City.objects.create(name="Liberchies", country=self.country1)
self.city2 = City.objects.create(name="Samois-sur-Seine", country=self.country2)
self.person = Person.objects.create(
name="Reinhardt", born=self.city1, died=self.city2
)
self.person_profile = PersonProfile.objects.create(person=self.person)
# We need another database connection in transaction to test that one
# connection issuing a SELECT ... FOR UPDATE will block.
self.new_connection = connection.copy()
def tearDown(self):
try:
self.end_blocking_transaction()
except (DatabaseError, AttributeError):
pass
self.new_connection.close()
def start_blocking_transaction(self):
self.new_connection.set_autocommit(False)
# Start a blocking transaction. At some point,
# end_blocking_transaction() should be called.
self.cursor = self.new_connection.cursor()
sql = "SELECT * FROM %(db_table)s %(for_update)s;" % {
"db_table": Person._meta.db_table,
"for_update": self.new_connection.ops.for_update_sql(),
}
self.cursor.execute(sql, ())
self.cursor.fetchone()
def end_blocking_transaction(self):
# Roll back the blocking transaction.
self.cursor.close()
self.new_connection.rollback()
self.new_connection.set_autocommit(True)
def has_for_update_sql(self, queries, **kwargs):
# Examine the SQL that was executed to determine whether it
# contains the 'SELECT..FOR UPDATE' stanza.
for_update_sql = connection.ops.for_update_sql(**kwargs)
return any(for_update_sql in query["sql"] for query in queries)
@skipUnlessDBFeature("has_select_for_update")
def test_for_update_sql_generated(self):
"""
The backend's FOR UPDATE variant appears in
generated SQL when select_for_update is invoked.
"""
with transaction.atomic(), CaptureQueriesContext(connection) as ctx:
list(Person.objects.select_for_update())
self.assertTrue(self.has_for_update_sql(ctx.captured_queries))
@skipUnlessDBFeature("has_select_for_update_nowait")
def test_for_update_sql_generated_nowait(self):
"""
The backend's FOR UPDATE NOWAIT variant appears in
generated SQL when select_for_update is invoked.
"""
with transaction.atomic(), CaptureQueriesContext(connection) as ctx:
list(Person.objects.select_for_update(nowait=True))
self.assertTrue(self.has_for_update_sql(ctx.captured_queries, nowait=True))
@skipUnlessDBFeature("has_select_for_update_skip_locked")
def test_for_update_sql_generated_skip_locked(self):
"""
The backend's FOR UPDATE SKIP LOCKED variant appears in
generated SQL when select_for_update is invoked.
"""
with transaction.atomic(), CaptureQueriesContext(connection) as ctx:
list(Person.objects.select_for_update(skip_locked=True))
self.assertTrue(self.has_for_update_sql(ctx.captured_queries, skip_locked=True))
@skipUnlessDBFeature("has_select_for_no_key_update")
def test_update_sql_generated_no_key(self):
"""
The backend's FOR NO KEY UPDATE variant appears in generated SQL when
select_for_update() is invoked.
"""
with transaction.atomic(), CaptureQueriesContext(connection) as ctx:
list(Person.objects.select_for_update(no_key=True))
self.assertIs(self.has_for_update_sql(ctx.captured_queries, no_key=True), True)
@skipUnlessDBFeature("has_select_for_update_of")
def test_for_update_sql_generated_of(self):
"""
The backend's FOR UPDATE OF variant appears in the generated SQL when
select_for_update() is invoked.
"""
with transaction.atomic(), CaptureQueriesContext(connection) as ctx:
list(
Person.objects.select_related(
"born__country",
)
.select_for_update(
of=("born__country",),
)
.select_for_update(of=("self", "born__country"))
)
features = connections["default"].features
if features.select_for_update_of_column:
expected = [
'select_for_update_person"."id',
'select_for_update_country"."entity_ptr_id',
]
else:
expected = ["select_for_update_person", "select_for_update_country"]
expected = [connection.ops.quote_name(value) for value in expected]
self.assertTrue(self.has_for_update_sql(ctx.captured_queries, of=expected))
@skipUnlessDBFeature("has_select_for_update_of")
def test_for_update_sql_model_inheritance_generated_of(self):
with transaction.atomic(), CaptureQueriesContext(connection) as ctx:
list(EUCountry.objects.select_for_update(of=("self",)))
if connection.features.select_for_update_of_column:
expected = ['select_for_update_eucountry"."country_ptr_id']
else:
expected = ["select_for_update_eucountry"]
expected = [connection.ops.quote_name(value) for value in expected]
self.assertTrue(self.has_for_update_sql(ctx.captured_queries, of=expected))
@skipUnlessDBFeature("has_select_for_update_of")
def test_for_update_sql_model_inheritance_ptr_generated_of(self):
with transaction.atomic(), CaptureQueriesContext(connection) as ctx:
list(
EUCountry.objects.select_for_update(
of=(
"self",
"country_ptr",
)
)
)
if connection.features.select_for_update_of_column:
expected = [
'select_for_update_eucountry"."country_ptr_id',
'select_for_update_country"."entity_ptr_id',
]
else:
expected = ["select_for_update_eucountry", "select_for_update_country"]
expected = [connection.ops.quote_name(value) for value in expected]
self.assertTrue(self.has_for_update_sql(ctx.captured_queries, of=expected))
@skipUnlessDBFeature("has_select_for_update_of")
def test_for_update_sql_related_model_inheritance_generated_of(self):
with transaction.atomic(), CaptureQueriesContext(connection) as ctx:
list(
EUCity.objects.select_related("country").select_for_update(
of=("self", "country"),
)
)
if connection.features.select_for_update_of_column:
expected = [
'select_for_update_eucity"."id',
'select_for_update_eucountry"."country_ptr_id',
]
else:
expected = ["select_for_update_eucity", "select_for_update_eucountry"]
expected = [connection.ops.quote_name(value) for value in expected]
self.assertTrue(self.has_for_update_sql(ctx.captured_queries, of=expected))
@skipUnlessDBFeature("has_select_for_update_of")
def test_for_update_sql_model_inheritance_nested_ptr_generated_of(self):
with transaction.atomic(), CaptureQueriesContext(connection) as ctx:
list(
EUCity.objects.select_related("country").select_for_update(
of=(
"self",
"country__country_ptr",
),
)
)
if connection.features.select_for_update_of_column:
expected = [
'select_for_update_eucity"."id',
'select_for_update_country"."entity_ptr_id',
]
else:
expected = ["select_for_update_eucity", "select_for_update_country"]
expected = [connection.ops.quote_name(value) for value in expected]
self.assertTrue(self.has_for_update_sql(ctx.captured_queries, of=expected))
@skipUnlessDBFeature("has_select_for_update_of")
def test_for_update_sql_multilevel_model_inheritance_ptr_generated_of(self):
with transaction.atomic(), CaptureQueriesContext(connection) as ctx:
list(
EUCountry.objects.select_for_update(
of=("country_ptr", "country_ptr__entity_ptr"),
)
)
if connection.features.select_for_update_of_column:
expected = [
'select_for_update_country"."entity_ptr_id',
'select_for_update_entity"."id',
]
else:
expected = ["select_for_update_country", "select_for_update_entity"]
expected = [connection.ops.quote_name(value) for value in expected]
self.assertTrue(self.has_for_update_sql(ctx.captured_queries, of=expected))
@skipUnlessDBFeature("has_select_for_update_of")
def test_for_update_sql_model_proxy_generated_of(self):
with transaction.atomic(), CaptureQueriesContext(connection) as ctx:
list(
CityCountryProxy.objects.select_related("country",).select_for_update(
of=("country",),
)
)
if connection.features.select_for_update_of_column:
expected = ['select_for_update_country"."entity_ptr_id']
else:
expected = ["select_for_update_country"]
expected = [connection.ops.quote_name(value) for value in expected]
self.assertTrue(self.has_for_update_sql(ctx.captured_queries, of=expected))
@skipUnlessDBFeature("has_select_for_update_of")
def test_for_update_of_followed_by_values(self):
with transaction.atomic():
values = list(Person.objects.select_for_update(of=("self",)).values("pk"))
self.assertEqual(values, [{"pk": self.person.pk}])
@skipUnlessDBFeature("has_select_for_update_of")
def test_for_update_of_followed_by_values_list(self):
with transaction.atomic():
values = list(
Person.objects.select_for_update(of=("self",)).values_list("pk")
)
self.assertEqual(values, [(self.person.pk,)])
@skipUnlessDBFeature("has_select_for_update_of")
def test_for_update_of_self_when_self_is_not_selected(self):
"""
select_for_update(of=['self']) when the only columns selected are from
related tables.
"""
with transaction.atomic():
values = list(
Person.objects.select_related("born")
.select_for_update(of=("self",))
.values("born__name")
)
self.assertEqual(values, [{"born__name": self.city1.name}])
@skipUnlessDBFeature(
"has_select_for_update_of",
"supports_select_for_update_with_limit",
)
def test_for_update_of_with_exists(self):
with transaction.atomic():
qs = Person.objects.select_for_update(of=("self", "born"))
self.assertIs(qs.exists(), True)
    @skipUnlessDBFeature("has_select_for_update_nowait")
    def test_nowait_raises_error_on_block(self):
        """
        If nowait is specified, we expect an error to be raised rather
        than blocking.
        """
        # Lock the row from this thread's connection first.
        self.start_blocking_transaction()
        status = []
        thread = threading.Thread(
            target=self.run_select_for_update,
            args=(status,),
            kwargs={"nowait": True},
        )
        thread.start()
        # Give the worker a moment to hit the locked row; with NOWAIT it
        # should fail immediately instead of blocking on the lock.
        time.sleep(1)
        thread.join()
        self.end_blocking_transaction()
        # run_select_for_update() appends the DatabaseError it caught.
        self.assertIsInstance(status[-1], DatabaseError)
    @skipUnlessDBFeature("has_select_for_update_skip_locked")
    def test_skip_locked_skips_locked_rows(self):
        """
        If skip_locked is specified, the locked row is skipped resulting in
        Person.DoesNotExist.
        """
        # Lock the row from this thread's connection first.
        self.start_blocking_transaction()
        status = []
        thread = threading.Thread(
            target=self.run_select_for_update,
            args=(status,),
            kwargs={"skip_locked": True},
        )
        thread.start()
        # SKIP LOCKED filters out the locked row, so the worker's get() finds
        # nothing rather than blocking.
        time.sleep(1)
        thread.join()
        self.end_blocking_transaction()
        # run_select_for_update() appends the exception it caught.
        self.assertIsInstance(status[-1], Person.DoesNotExist)
@skipIfDBFeature("has_select_for_update_nowait")
@skipUnlessDBFeature("has_select_for_update")
def test_unsupported_nowait_raises_error(self):
"""
NotSupportedError is raised if a SELECT...FOR UPDATE NOWAIT is run on
a database backend that supports FOR UPDATE but not NOWAIT.
"""
with self.assertRaisesMessage(
NotSupportedError, "NOWAIT is not supported on this database backend."
):
with transaction.atomic():
Person.objects.select_for_update(nowait=True).get()
@skipIfDBFeature("has_select_for_update_skip_locked")
@skipUnlessDBFeature("has_select_for_update")
def test_unsupported_skip_locked_raises_error(self):
"""
NotSupportedError is raised if a SELECT...FOR UPDATE SKIP LOCKED is run
on a database backend that supports FOR UPDATE but not SKIP LOCKED.
"""
with self.assertRaisesMessage(
NotSupportedError, "SKIP LOCKED is not supported on this database backend."
):
with transaction.atomic():
Person.objects.select_for_update(skip_locked=True).get()
@skipIfDBFeature("has_select_for_update_of")
@skipUnlessDBFeature("has_select_for_update")
def test_unsupported_of_raises_error(self):
"""
NotSupportedError is raised if a SELECT...FOR UPDATE OF... is run on
a database backend that supports FOR UPDATE but not OF.
"""
msg = "FOR UPDATE OF is not supported on this database backend."
with self.assertRaisesMessage(NotSupportedError, msg):
with transaction.atomic():
Person.objects.select_for_update(of=("self",)).get()
@skipIfDBFeature("has_select_for_no_key_update")
@skipUnlessDBFeature("has_select_for_update")
def test_unsuported_no_key_raises_error(self):
"""
NotSupportedError is raised if a SELECT...FOR NO KEY UPDATE... is run
on a database backend that supports FOR UPDATE but not NO KEY.
"""
msg = "FOR NO KEY UPDATE is not supported on this database backend."
with self.assertRaisesMessage(NotSupportedError, msg):
with transaction.atomic():
Person.objects.select_for_update(no_key=True).get()
@skipUnlessDBFeature("has_select_for_update", "has_select_for_update_of")
def test_unrelated_of_argument_raises_error(self):
"""
FieldError is raised if a non-relation field is specified in of=(...).
"""
msg = (
"Invalid field name(s) given in select_for_update(of=(...)): %s. "
"Only relational fields followed in the query are allowed. "
"Choices are: self, born, born__country, "
"born__country__entity_ptr."
)
invalid_of = [
("nonexistent",),
("name",),
("born__nonexistent",),
("born__name",),
("born__nonexistent", "born__name"),
]
for of in invalid_of:
with self.subTest(of=of):
with self.assertRaisesMessage(FieldError, msg % ", ".join(of)):
with transaction.atomic():
Person.objects.select_related(
"born__country"
).select_for_update(of=of).get()
@skipUnlessDBFeature("has_select_for_update", "has_select_for_update_of")
def test_related_but_unselected_of_argument_raises_error(self):
"""
FieldError is raised if a relation field that is not followed in the
query is specified in of=(...).
"""
msg = (
"Invalid field name(s) given in select_for_update(of=(...)): %s. "
"Only relational fields followed in the query are allowed. "
"Choices are: self, born, profile."
)
for name in ["born__country", "died", "died__country"]:
with self.subTest(name=name):
with self.assertRaisesMessage(FieldError, msg % name):
with transaction.atomic():
Person.objects.select_related("born", "profile",).exclude(
profile=None
).select_for_update(of=(name,)).get()
@skipUnlessDBFeature("has_select_for_update", "has_select_for_update_of")
def test_model_inheritance_of_argument_raises_error_ptr_in_choices(self):
msg = (
"Invalid field name(s) given in select_for_update(of=(...)): "
"name. Only relational fields followed in the query are allowed. "
"Choices are: self, %s."
)
with self.assertRaisesMessage(
FieldError,
msg % "country, country__country_ptr, country__country_ptr__entity_ptr",
):
with transaction.atomic():
EUCity.objects.select_related(
"country",
).select_for_update(of=("name",)).get()
with self.assertRaisesMessage(
FieldError, msg % "country_ptr, country_ptr__entity_ptr"
):
with transaction.atomic():
EUCountry.objects.select_for_update(of=("name",)).get()
@skipUnlessDBFeature("has_select_for_update", "has_select_for_update_of")
def test_model_proxy_of_argument_raises_error_proxy_field_in_choices(self):
msg = (
"Invalid field name(s) given in select_for_update(of=(...)): "
"name. Only relational fields followed in the query are allowed. "
"Choices are: self, country, country__entity_ptr."
)
with self.assertRaisesMessage(FieldError, msg):
with transaction.atomic():
CityCountryProxy.objects.select_related(
"country",
).select_for_update(of=("name",)).get()
@skipUnlessDBFeature("has_select_for_update", "has_select_for_update_of")
def test_reverse_one_to_one_of_arguments(self):
"""
Reverse OneToOneFields may be included in of=(...) as long as NULLs
are excluded because LEFT JOIN isn't allowed in SELECT FOR UPDATE.
"""
with transaction.atomic():
person = (
Person.objects.select_related(
"profile",
)
.exclude(profile=None)
.select_for_update(of=("profile",))
.get()
)
self.assertEqual(person.profile, self.person_profile)
@skipUnlessDBFeature("has_select_for_update")
def test_for_update_after_from(self):
features_class = connections["default"].features.__class__
attribute_to_patch = "%s.%s.for_update_after_from" % (
features_class.__module__,
features_class.__name__,
)
with mock.patch(attribute_to_patch, return_value=True):
with transaction.atomic():
self.assertIn(
"FOR UPDATE WHERE",
str(Person.objects.filter(name="foo").select_for_update().query),
)
@skipUnlessDBFeature("has_select_for_update")
def test_for_update_requires_transaction(self):
"""
A TransactionManagementError is raised
when a select_for_update query is executed outside of a transaction.
"""
msg = "select_for_update cannot be used outside of a transaction."
with self.assertRaisesMessage(transaction.TransactionManagementError, msg):
list(Person.objects.select_for_update())
@skipUnlessDBFeature("has_select_for_update")
def test_for_update_requires_transaction_only_in_execution(self):
"""
No TransactionManagementError is raised
when select_for_update is invoked outside of a transaction -
only when the query is executed.
"""
people = Person.objects.select_for_update()
msg = "select_for_update cannot be used outside of a transaction."
with self.assertRaisesMessage(transaction.TransactionManagementError, msg):
list(people)
@skipUnlessDBFeature("supports_select_for_update_with_limit")
def test_select_for_update_with_limit(self):
other = Person.objects.create(name="Grappeli", born=self.city1, died=self.city2)
with transaction.atomic():
qs = list(Person.objects.order_by("pk").select_for_update()[1:2])
self.assertEqual(qs[0], other)
@skipIfDBFeature("supports_select_for_update_with_limit")
def test_unsupported_select_for_update_with_limit(self):
msg = (
"LIMIT/OFFSET is not supported with select_for_update on this database "
"backend."
)
with self.assertRaisesMessage(NotSupportedError, msg):
with transaction.atomic():
list(Person.objects.order_by("pk").select_for_update()[1:2])
    def run_select_for_update(self, status, **kwargs):
        """
        Utility method that runs a SELECT FOR UPDATE against all
        Person instances. After the select_for_update, it attempts
        to update the name of the only record, save, and commit.
        This function expects to run in a separate thread.

        ``status`` is a shared list the caller polls: "started" is appended on
        entry, and any DatabaseError / Person.DoesNotExist raised by the
        locked read is appended instead of propagating. ``**kwargs`` are
        forwarded to select_for_update() (e.g. nowait=True, skip_locked=True).
        """
        status.append("started")
        try:
            # We need to enter transaction management again, as this is done on
            # per-thread basis
            with transaction.atomic():
                person = Person.objects.select_for_update(**kwargs).get()
                person.name = "Fred"
                person.save()
        except (DatabaseError, Person.DoesNotExist) as e:
            # Expected when the row is locked: nowait raises DatabaseError,
            # skip_locked makes get() raise DoesNotExist. Report via status.
            status.append(e)
        finally:
            # This method is run in a separate thread. It uses its own
            # database connection. Close it without waiting for the GC.
            connection.close()
@skipUnlessDBFeature("has_select_for_update")
@skipUnlessDBFeature("supports_transactions")
def test_block(self):
"""
A thread running a select_for_update that accesses rows being touched
by a similar operation on another connection blocks correctly.
"""
# First, let's start the transaction in our thread.
self.start_blocking_transaction()
# Now, try it again using the ORM's select_for_update
# facility. Do this in a separate thread.
status = []
thread = threading.Thread(target=self.run_select_for_update, args=(status,))
# The thread should immediately block, but we'll sleep
# for a bit to make sure.
thread.start()
sanity_count = 0
while len(status) != 1 and sanity_count < 10:
sanity_count += 1
time.sleep(1)
if sanity_count >= 10:
raise ValueError("Thread did not run and block")
# Check the person hasn't been updated. Since this isn't
# using FOR UPDATE, it won't block.
p = Person.objects.get(pk=self.person.pk)
self.assertEqual("Reinhardt", p.name)
# When we end our blocking transaction, our thread should
# be able to continue.
self.end_blocking_transaction()
thread.join(5.0)
# Check the thread has finished. Assuming it has, we should
# find that it has updated the person's name.
self.assertFalse(thread.is_alive())
# We must commit the transaction to ensure that MySQL gets a fresh read,
# since by default it runs in REPEATABLE READ mode
transaction.commit()
p = Person.objects.get(pk=self.person.pk)
self.assertEqual("Fred", p.name)
    @skipUnlessDBFeature("has_select_for_update")
    def test_raw_lock_not_available(self):
        """
        Running a raw query which can't obtain a FOR UPDATE lock raises
        the correct exception
        """
        # Hold a lock on the row from this thread's connection.
        self.start_blocking_transaction()
        def raw(status):
            # Runs in a separate thread with its own connection; NOWAIT makes
            # the raw SELECT ... FOR UPDATE fail fast instead of blocking.
            try:
                list(
                    Person.objects.raw(
                        "SELECT * FROM %s %s"
                        % (
                            Person._meta.db_table,
                            connection.ops.for_update_sql(nowait=True),
                        )
                    )
                )
            except DatabaseError as e:
                # Report the failure back to the main thread.
                status.append(e)
            finally:
                # This method is run in a separate thread. It uses its own
                # database connection. Close it without waiting for the GC.
                # Connection cannot be closed on Oracle because cursor is still
                # open.
                if connection.vendor != "oracle":
                    connection.close()
        status = []
        thread = threading.Thread(target=raw, kwargs={"status": status})
        thread.start()
        time.sleep(1)
        thread.join()
        self.end_blocking_transaction()
        self.assertIsInstance(status[-1], DatabaseError)
@skipUnlessDBFeature("has_select_for_update")
@override_settings(DATABASE_ROUTERS=[TestRouter()])
def test_select_for_update_on_multidb(self):
query = Person.objects.select_for_update()
self.assertEqual(router.db_for_write(Person), query.db)
@skipUnlessDBFeature("has_select_for_update")
def test_select_for_update_with_get(self):
with transaction.atomic():
person = Person.objects.select_for_update().get(name="Reinhardt")
self.assertEqual(person.name, "Reinhardt")
def test_nowait_and_skip_locked(self):
with self.assertRaisesMessage(
ValueError, "The nowait option cannot be used with skip_locked."
):
Person.objects.select_for_update(nowait=True, skip_locked=True)
def test_ordered_select_for_update(self):
"""
Subqueries should respect ordering as an ORDER BY clause may be useful
to specify a row locking order to prevent deadlocks (#27193).
"""
with transaction.atomic():
qs = Person.objects.filter(
id__in=Person.objects.order_by("-id").select_for_update()
)
self.assertIn("ORDER BY", str(qs.query))
|
from django.db import models
class Entity(models.Model):
    # Root of the multi-table inheritance chain (Entity -> Country -> EUCountry).
    pass
class Country(Entity):
    # Inherits Entity via an implicit entity_ptr OneToOneField parent link.
    name = models.CharField(max_length=30)
class EUCountry(Country):
    # Second inheritance level; implicit country_ptr parent link to Country.
    join_date = models.DateField()
class City(models.Model):
    # A city belonging to a (concrete) Country.
    name = models.CharField(max_length=30)
    country = models.ForeignKey(Country, models.CASCADE)
class EUCity(models.Model):
    # A city whose FK targets an inherited model (EUCountry), so lock-of paths
    # can traverse parent links through the relation.
    name = models.CharField(max_length=30)
    country = models.ForeignKey(EUCountry, models.CASCADE)
class CountryProxy(Country):
    # Proxy over Country: same table, no new fields.
    class Meta:
        proxy = True
class CountryProxyProxy(CountryProxy):
    # Proxy of a proxy; still backed by Country's table.
    class Meta:
        proxy = True
class CityCountryProxy(models.Model):
    # FK to a doubly-proxied model; lock-of resolution must reach the concrete
    # Country model underneath.
    country = models.ForeignKey(CountryProxyProxy, models.CASCADE)
class Person(models.Model):
    name = models.CharField(max_length=30)
    # Two FKs to the same model; related_name="+" disables the reverse
    # accessors to avoid a clash.
    born = models.ForeignKey(City, models.CASCADE, related_name="+")
    died = models.ForeignKey(City, models.CASCADE, related_name="+")
class PersonProfile(models.Model):
    # Provides the reverse OneToOne ("profile") exercised by of=("profile",)
    # tests.
    person = models.OneToOneField(Person, models.CASCADE, related_name="profile")
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.